xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision de848da12f752170c2ebe114804a985314fd5a6a)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <drm/drm_atomic_helper.h>
25 #include <drm/drm_damage_helper.h>
26 #include <drm/drm_debugfs.h>
27 
28 #include "i915_drv.h"
29 #include "i915_reg.h"
30 #include "intel_alpm.h"
31 #include "intel_atomic.h"
32 #include "intel_crtc.h"
33 #include "intel_cursor_regs.h"
34 #include "intel_ddi.h"
35 #include "intel_de.h"
36 #include "intel_display_types.h"
37 #include "intel_dp.h"
38 #include "intel_dp_aux.h"
39 #include "intel_frontbuffer.h"
40 #include "intel_hdmi.h"
41 #include "intel_psr.h"
42 #include "intel_psr_regs.h"
43 #include "intel_snps_phy.h"
44 #include "skl_universal_plane.h"
45 
46 /**
47  * DOC: Panel Self Refresh (PSR/SRD)
48  *
49  * Since Haswell the Display controller supports Panel Self-Refresh on display
50  * panels which have a remote frame buffer (RFB) implemented according to the
51  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
52  * standby states when the system is idle but the display is on, as it
53  * eliminates display refresh requests to DDR memory completely as long as the
54  * frame buffer for that display is unchanged.
55  *
56  * Panel Self Refresh must be supported by both Hardware (source) and
57  * Panel (sink).
58  *
59  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
60  * to power down the link and memory controller. For DSI panels the same idea
61  * is called "manual mode".
62  *
63  * The implementation uses the hardware-based PSR support which automatically
64  * enters/exits self-refresh mode. The hardware takes care of sending the
65  * required DP aux message and could even retrain the link (that part isn't
66  * enabled yet though). The hardware also keeps track of any frontbuffer
67  * changes to know when to exit self-refresh mode again. Unfortunately that
68  * part doesn't work too well, hence why the i915 PSR support uses the
69  * software frontbuffer tracking to make sure it doesn't miss a screen
70  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
71  * get called by the frontbuffer tracking code. Note that because of locking
72  * issues the self-refresh re-enable code is done from a work queue, which
73  * must be correctly synchronized/cancelled when shutting down the pipe.
74  *
75  * DC3CO (DC3 clock off)
76  *
77  * On top of PSR2, GEN12 adds an intermediate power savings state that turns
78  * the clock off automatically during the PSR2 idle state.
79  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
80  * entry/exit allows the HW to enter a low-power state even when page flipping
81  * periodically (for instance a 30fps video playback scenario).
82  *
83  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was
84  * in it), so DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run
85  * after 6 frames. If no other flip occurs and that work function executes,
86  * DC3CO is disabled and PSR2 is configured to enter deep sleep again,
87  * resetting once more in case of another flip.
88  * Front buffer modifications do not trigger DC3CO activation on purpose as it
89  * would bring a lot of complexity and most modern systems will only
90  * use page flips.
91  */
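/*
 * A rough sketch of the DC3CO flow described above (illustrative only):
 *
 *	page flip	-> PSR2 leaves deep sleep, DC3CO is enabled and
 *			   tgl_dc3co_disable_work is (re)scheduled ~6 frames out
 *	6 idle frames	-> the work runs, DC3CO is disabled and PSR2 is
 *			   allowed to enter deep sleep again
 *	another flip	-> the cycle starts over
 */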
92 
93 /*
94  * Description of PSR mask bits:
95  *
96  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
97  *
98  *  When unmasked (nearly) all display register writes (eg. even
99  *  SWF) trigger a PSR exit. Some registers are excluded from this
100  *  and they have a more specific mask (described below). On icl+
101  *  this bit no longer exists and is effectively always set.
102  *
103  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
104  *
105  *  When unmasked (nearly) all pipe/plane register writes
106  *  trigger a PSR exit. Some plane registers are excluded from this
107  *  and they have a more specific mask (described below).
108  *
109  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
110  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
111  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
112  *
113  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
114  *  SPR_SURF/CURBASE are not included in this and instead are
115  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
116  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
117  *
118  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
119  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
120  *
121  *  When unmasked PSR is blocked as long as the sprite
122  *  plane is enabled. skl+ with their universal planes no
123  *  longer have a mask bit like this, and no plane being
124  *  enabled blocks PSR.
125  *
126  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
127  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
128  *
129  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
130  *  this doesn't exist but CURPOS is included in the
131  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
132  *
133  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
134  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
135  *
136  *  When unmasked PSR is blocked as long as vblank and/or vsync
137  *  interrupt is unmasked in IMR *and* enabled in IER.
138  *
139  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
140  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
141  *
142  *  Selects whether PSR exit generates an extra vblank before
143  *  the first frame is transmitted. Also note the opposite polarity
144  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
145  *  unmasked==do not generate the extra vblank).
146  *
147  *  With DC states enabled the extra vblank happens after link training,
148  *  with DC states disabled it happens immediately upon PSR exit trigger.
149  *  No idea as of now why there is a difference. HSW/BDW (which don't
150  *  even have DMC) always generate it after link training. Go figure.
151  *
152  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
153  *  and thus won't latch until the first vblank. So with DC states
154  *  enabled the register effectively uses the reset value during the DC5
155  *  exit+PSR exit sequence, and thus the bit does nothing until
156  *  latched by the vblank that it was trying to prevent from being
157  *  generated in the first place. So we should probably call this
158  *  one a chicken/egg bit instead on skl+.
159  *
160  *  In standby mode (as opposed to link-off) this makes no difference
161  *  as the timing generator keeps running the whole time generating
162  *  normal periodic vblanks.
163  *
164  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
165  *  and doing so makes the behaviour match the skl+ reset value.
166  *
167  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
168  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
169  *
170  *  On BDW without this bit set no vblanks whatsoever are
171  *  generated after PSR exit. On HSW this has no apparent effect.
172  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
173  *
174  * The rest of the bits are more self-explanatory and/or
175  * irrelevant for normal operation.
176  *
177  * Description of intel_crtc_state variables: has_psr, has_panel_replay and
178  * has_sel_update:
179  *
180  *  has_psr (alone):					PSR1
181  *  has_psr + has_sel_update:				PSR2
182  *  has_psr + has_panel_replay:				Panel Replay
183  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
184  *
185  * Description of some intel_psr variables: enabled, panel_replay_enabled,
186  * sel_update_enabled:
187  *
188  *  enabled (alone):						PSR1
189  *  enabled + sel_update_enabled:				PSR2
190  *  enabled + panel_replay_enabled:				Panel Replay
191  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
192  */
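/*
 * For example (illustrative only, not a helper that exists in this file),
 * a check for Panel Replay Selective Update based on the crtc_state bits
 * above would look like:
 *
 *	if (crtc_state->has_psr && crtc_state->has_panel_replay &&
 *	    crtc_state->has_sel_update)
 *		... Panel Replay SU path ...
 */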
193 
194 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
195 			   (intel_dp)->psr.source_support)
196 
197 bool intel_encoder_can_psr(struct intel_encoder *encoder)
198 {
199 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
200 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
201 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
202 	else
203 		return false;
204 }
205 
206 static bool psr_global_enabled(struct intel_dp *intel_dp)
207 {
208 	struct intel_display *display = to_intel_display(intel_dp);
209 	struct intel_connector *connector = intel_dp->attached_connector;
210 
211 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
212 	case I915_PSR_DEBUG_DEFAULT:
213 		if (display->params.enable_psr == -1)
214 			return connector->panel.vbt.psr.enable;
215 		return display->params.enable_psr;
216 	case I915_PSR_DEBUG_DISABLE:
217 		return false;
218 	default:
219 		return true;
220 	}
221 }
222 
223 static bool psr2_global_enabled(struct intel_dp *intel_dp)
224 {
225 	struct intel_display *display = to_intel_display(intel_dp);
226 
227 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
228 	case I915_PSR_DEBUG_DISABLE:
229 	case I915_PSR_DEBUG_FORCE_PSR1:
230 		return false;
231 	default:
232 		if (display->params.enable_psr == 1)
233 			return false;
234 		return true;
235 	}
236 }
237 
238 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
239 {
240 	struct intel_display *display = to_intel_display(intel_dp);
241 
242 	if (display->params.enable_psr != -1)
243 		return false;
244 
245 	return true;
246 }
247 
248 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
249 {
250 	struct intel_display *display = to_intel_display(intel_dp);
251 
252 	if ((display->params.enable_psr != -1) ||
253 	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
254 		return false;
255 	return true;
256 }
257 
258 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
259 {
260 	struct intel_display *display = to_intel_display(intel_dp);
261 
262 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
263 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
264 }
265 
266 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
267 {
268 	struct intel_display *display = to_intel_display(intel_dp);
269 
270 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
271 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
272 }
273 
274 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
275 {
276 	struct intel_display *display = to_intel_display(intel_dp);
277 
278 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
279 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
280 }
281 
282 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
283 {
284 	struct intel_display *display = to_intel_display(intel_dp);
285 
286 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
287 		EDP_PSR_MASK(intel_dp->psr.transcoder);
288 }
289 
290 static i915_reg_t psr_ctl_reg(struct intel_display *display,
291 			      enum transcoder cpu_transcoder)
292 {
293 	if (DISPLAY_VER(display) >= 8)
294 		return EDP_PSR_CTL(display, cpu_transcoder);
295 	else
296 		return HSW_SRD_CTL;
297 }
298 
299 static i915_reg_t psr_debug_reg(struct intel_display *display,
300 				enum transcoder cpu_transcoder)
301 {
302 	if (DISPLAY_VER(display) >= 8)
303 		return EDP_PSR_DEBUG(display, cpu_transcoder);
304 	else
305 		return HSW_SRD_DEBUG;
306 }
307 
308 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
309 				   enum transcoder cpu_transcoder)
310 {
311 	if (DISPLAY_VER(display) >= 8)
312 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
313 	else
314 		return HSW_SRD_PERF_CNT;
315 }
316 
317 static i915_reg_t psr_status_reg(struct intel_display *display,
318 				 enum transcoder cpu_transcoder)
319 {
320 	if (DISPLAY_VER(display) >= 8)
321 		return EDP_PSR_STATUS(display, cpu_transcoder);
322 	else
323 		return HSW_SRD_STATUS;
324 }
325 
326 static i915_reg_t psr_imr_reg(struct intel_display *display,
327 			      enum transcoder cpu_transcoder)
328 {
329 	if (DISPLAY_VER(display) >= 12)
330 		return TRANS_PSR_IMR(display, cpu_transcoder);
331 	else
332 		return EDP_PSR_IMR;
333 }
334 
335 static i915_reg_t psr_iir_reg(struct intel_display *display,
336 			      enum transcoder cpu_transcoder)
337 {
338 	if (DISPLAY_VER(display) >= 12)
339 		return TRANS_PSR_IIR(display, cpu_transcoder);
340 	else
341 		return EDP_PSR_IIR;
342 }
343 
344 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
345 				  enum transcoder cpu_transcoder)
346 {
347 	if (DISPLAY_VER(display) >= 8)
348 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
349 	else
350 		return HSW_SRD_AUX_CTL;
351 }
352 
353 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
354 				   enum transcoder cpu_transcoder, int i)
355 {
356 	if (DISPLAY_VER(display) >= 8)
357 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
358 	else
359 		return HSW_SRD_AUX_DATA(i);
360 }
361 
362 static void psr_irq_control(struct intel_dp *intel_dp)
363 {
364 	struct intel_display *display = to_intel_display(intel_dp);
365 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
366 	u32 mask;
367 
368 	if (intel_dp->psr.panel_replay_enabled)
369 		return;
370 
371 	mask = psr_irq_psr_error_bit_get(intel_dp);
372 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
373 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
374 			psr_irq_pre_entry_bit_get(intel_dp);
375 
376 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
377 		     psr_irq_mask_get(intel_dp), ~mask);
378 }
379 
380 static void psr_event_print(struct intel_display *display,
381 			    u32 val, bool sel_update_enabled)
382 {
383 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
384 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
385 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
386 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
387 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
388 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
389 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
390 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
391 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
392 	if (val & PSR_EVENT_GRAPHICS_RESET)
393 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
394 	if (val & PSR_EVENT_PCH_INTERRUPT)
395 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
396 	if (val & PSR_EVENT_MEMORY_UP)
397 		drm_dbg_kms(display->drm, "\tMemory up\n");
398 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
399 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
400 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
401 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
402 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
403 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
404 	if (val & PSR_EVENT_REGISTER_UPDATE)
405 		drm_dbg_kms(display->drm, "\tRegister updated\n");
406 	if (val & PSR_EVENT_HDCP_ENABLE)
407 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
408 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
409 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
410 	if (val & PSR_EVENT_VBI_ENABLE)
411 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
412 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
413 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
414 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
415 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
416 }
417 
418 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
419 {
420 	struct intel_display *display = to_intel_display(intel_dp);
421 	struct drm_i915_private *dev_priv = to_i915(display->drm);
422 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
423 	ktime_t time_ns =  ktime_get();
424 
425 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
426 		intel_dp->psr.last_entry_attempt = time_ns;
427 		drm_dbg_kms(display->drm,
428 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
429 			    transcoder_name(cpu_transcoder));
430 	}
431 
432 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
433 		intel_dp->psr.last_exit = time_ns;
434 		drm_dbg_kms(display->drm,
435 			    "[transcoder %s] PSR exit completed\n",
436 			    transcoder_name(cpu_transcoder));
437 
438 		if (DISPLAY_VER(display) >= 9) {
439 			u32 val;
440 
441 			val = intel_de_rmw(dev_priv,
442 					   PSR_EVENT(dev_priv, cpu_transcoder),
443 					   0, 0);
444 
445 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
446 		}
447 	}
448 
449 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
450 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
451 			 transcoder_name(cpu_transcoder));
452 
453 		intel_dp->psr.irq_aux_error = true;
454 
455 		/*
456 		 * If this interrupt is not masked it will keep
457 		 * triggering so fast that it prevents the scheduled
458 		 * work from running.
459 		 * Also, after a PSR error we don't want to arm PSR
460 		 * again, so we don't care about unmasking the interrupt
461 		 * or unsetting irq_aux_error.
462 		 */
463 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
464 			     0, psr_irq_psr_error_bit_get(intel_dp));
465 
466 		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
467 	}
468 }
469 
470 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
471 {
472 	struct intel_display *display = to_intel_display(intel_dp);
473 	u8 val = 8; /* assume the worst if we can't read the value */
474 
475 	if (drm_dp_dpcd_readb(&intel_dp->aux,
476 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
477 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
478 	else
479 		drm_dbg_kms(display->drm,
480 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
481 	return val;
482 }
483 
484 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
485 {
486 	u8 su_capability = 0;
487 
488 	if (intel_dp->psr.sink_panel_replay_su_support)
489 		drm_dp_dpcd_readb(&intel_dp->aux,
490 				  DP_PANEL_PANEL_REPLAY_CAPABILITY,
491 				  &su_capability);
492 	else
493 		su_capability = intel_dp->psr_dpcd[1];
494 
495 	return su_capability;
496 }
497 
498 static unsigned int
499 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
500 {
501 	return intel_dp->psr.sink_panel_replay_su_support ?
502 		DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
503 		DP_PSR2_SU_X_GRANULARITY;
504 }
505 
506 static unsigned int
507 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
508 {
509 	return intel_dp->psr.sink_panel_replay_su_support ?
510 		DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
511 		DP_PSR2_SU_Y_GRANULARITY;
512 }
513 
514 /*
515  * Note: Bits related to granularity are the same in panel replay and psr
516  * registers. Rely on the PSR definitions for these "common" bits.
517  */
518 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
519 {
520 	struct intel_display *display = to_intel_display(intel_dp);
521 	ssize_t r;
522 	u16 w;
523 	u8 y;
524 
525 	/*
526 	 * TODO: Do we need to take into account a panel supporting both PSR and
527 	 * Panel Replay?
528 	 */
529 
530 	/*
531 	 * If the sink doesn't have specific granularity requirements, set the
532 	 * legacy ones.
533 	 */
534 	if (!(intel_dp_get_su_capability(intel_dp) &
535 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
536 		/* As PSR2 HW sends full lines, we do not care about x granularity */
537 		w = 4;
538 		y = 4;
539 		goto exit;
540 	}
541 
542 	r = drm_dp_dpcd_read(&intel_dp->aux,
543 			     intel_dp_get_su_x_granularity_offset(intel_dp),
544 			     &w, 2);
545 	if (r != 2)
546 		drm_dbg_kms(display->drm,
547 			    "Unable to read selective update x granularity\n");
548 	/*
549 	 * Spec says that if the value read is 0 the default granularity should
550 	 * be used instead.
551 	 */
552 	if (r != 2 || w == 0)
553 		w = 4;
554 
555 	r = drm_dp_dpcd_read(&intel_dp->aux,
556 			     intel_dp_get_su_y_granularity_offset(intel_dp),
557 			     &y, 1);
558 	if (r != 1) {
559 		drm_dbg_kms(display->drm,
560 			    "Unable to read selective update y granularity\n");
561 		y = 4;
562 	}
563 	if (y == 0)
564 		y = 1;
565 
566 exit:
567 	intel_dp->psr.su_w_granularity = w;
568 	intel_dp->psr.su_y_granularity = y;
569 }
570 
571 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
572 {
573 	struct intel_display *display = to_intel_display(intel_dp);
574 
575 	if (intel_dp_is_edp(intel_dp)) {
576 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
577 			drm_dbg_kms(display->drm,
578 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
579 			return;
580 		}
581 
582 		if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
583 			drm_dbg_kms(display->drm,
584 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
585 			return;
586 		}
587 	}
588 
589 	intel_dp->psr.sink_panel_replay_support = true;
590 
591 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
592 		intel_dp->psr.sink_panel_replay_su_support = true;
593 
594 	drm_dbg_kms(display->drm,
595 		    "Panel replay %sis supported by panel\n",
596 		    intel_dp->psr.sink_panel_replay_su_support ?
597 		    "selective_update " : "");
598 }
599 
600 static void _psr_init_dpcd(struct intel_dp *intel_dp)
601 {
602 	struct intel_display *display = to_intel_display(intel_dp);
603 
604 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
605 		    intel_dp->psr_dpcd[0]);
606 
607 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
608 		drm_dbg_kms(display->drm,
609 			    "PSR support not currently available for this panel\n");
610 		return;
611 	}
612 
613 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
614 		drm_dbg_kms(display->drm,
615 			    "Panel lacks power state control, PSR cannot be enabled\n");
616 		return;
617 	}
618 
619 	intel_dp->psr.sink_support = true;
620 	intel_dp->psr.sink_sync_latency =
621 		intel_dp_get_sink_sync_latency(intel_dp);
622 
623 	if (DISPLAY_VER(display) >= 9 &&
624 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
625 		bool y_req = intel_dp->psr_dpcd[1] &
626 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
627 
628 		/*
629 		 * All panels that support PSR version 03h (PSR2 +
630 		 * Y-coordinate) can handle Y-coordinates in VSC, but we are
631 		 * only sure that it is going to be used when required by the
632 		 * panel. This way the panel is capable of doing selective
633 		 * updates without an aux frame sync.
634 		 *
635 		 * To support PSR version 02h and PSR version 03h panels
636 		 * without the Y-coordinate requirement we would need to
637 		 * enable GTC first.
638 		 */
639 		intel_dp->psr.sink_psr2_support = y_req &&
640 			intel_alpm_aux_wake_supported(intel_dp);
641 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
642 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
643 	}
644 }
645 
646 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
647 {
648 	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
649 			 sizeof(intel_dp->psr_dpcd));
650 	drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
651 			  &intel_dp->pr_dpcd);
652 
653 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
654 		_panel_replay_init_dpcd(intel_dp);
655 
656 	if (intel_dp->psr_dpcd[0])
657 		_psr_init_dpcd(intel_dp);
658 
659 	if (intel_dp->psr.sink_psr2_support ||
660 	    intel_dp->psr.sink_panel_replay_su_support)
661 		intel_dp_get_su_granularity(intel_dp);
662 }
663 
664 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
665 {
666 	struct intel_display *display = to_intel_display(intel_dp);
667 	struct drm_i915_private *dev_priv = to_i915(display->drm);
668 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
669 	u32 aux_clock_divider, aux_ctl;
670 	/* write DP_SET_POWER=D0 */
671 	static const u8 aux_msg[] = {
672 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
673 		[1] = (DP_SET_POWER >> 8) & 0xff,
674 		[2] = DP_SET_POWER & 0xff,
675 		[3] = 1 - 1,
676 		[4] = DP_SET_POWER_D0,
677 	};
678 	int i;
679 
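	/*
	 * The 5 byte message above is packed 4 bytes per AUX_DATA register,
	 * so the loop below ends up writing two data registers.
	 */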
680 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
681 	for (i = 0; i < sizeof(aux_msg); i += 4)
682 		intel_de_write(dev_priv,
683 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
684 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
685 
686 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
687 
688 	/* Start with bits set for DDI_AUX_CTL register */
689 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
690 					     aux_clock_divider);
691 
692 	/* Select only valid bits for SRD_AUX_CTL */
693 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
694 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
695 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
696 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
697 
698 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
699 		       aux_ctl);
700 }
701 
702 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
703 {
704 	struct intel_display *display = to_intel_display(intel_dp);
705 
706 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
707 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
708 		return false;
709 
710 	return panel_replay ?
711 		intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
712 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
713 		psr2_su_region_et_global_enabled(intel_dp);
714 }
715 
716 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
717 				      const struct intel_crtc_state *crtc_state)
718 {
719 	u8 val = DP_PANEL_REPLAY_ENABLE |
720 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
721 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
722 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
723 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
724 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
725 
726 	if (crtc_state->has_sel_update)
727 		val |= DP_PANEL_REPLAY_SU_ENABLE;
728 
729 	if (crtc_state->enable_psr2_su_region_et)
730 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
731 
732 	if (crtc_state->req_psr2_sdp_prior_scanline)
733 		panel_replay_config2 |=
734 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
735 
736 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
737 
738 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
739 			   panel_replay_config2);
740 }
741 
742 static void _psr_enable_sink(struct intel_dp *intel_dp,
743 			     const struct intel_crtc_state *crtc_state)
744 {
745 	struct intel_display *display = to_intel_display(intel_dp);
746 	u8 val = DP_PSR_ENABLE;
747 
748 	if (crtc_state->has_sel_update) {
749 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
750 	} else {
751 		if (intel_dp->psr.link_standby)
752 			val |= DP_PSR_MAIN_LINK_ACTIVE;
753 
754 		if (DISPLAY_VER(display) >= 8)
755 			val |= DP_PSR_CRC_VERIFICATION;
756 	}
757 
758 	if (crtc_state->req_psr2_sdp_prior_scanline)
759 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
760 
761 	if (crtc_state->enable_psr2_su_region_et)
762 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
763 
764 	if (intel_dp->psr.entry_setup_frames > 0)
765 		val |= DP_PSR_FRAME_CAPTURE;
766 
767 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
768 }
769 
770 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
771 				       const struct intel_crtc_state *crtc_state)
772 {
773 	u8 val;
774 
775 	/*
776 	 * eDP Panel Replay always uses ALPM.
777 	 * PSR2 uses ALPM but PSR1 doesn't.
778 	 */
779 	if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
780 					   !crtc_state->has_sel_update))
781 		return;
782 
783 	val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
784 
785 	if (crtc_state->has_panel_replay)
786 		val |= DP_ALPM_MODE_AUX_LESS;
787 
788 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
789 }
790 
791 void intel_psr_enable_sink(struct intel_dp *intel_dp,
792 			   const struct intel_crtc_state *crtc_state)
793 {
794 	intel_psr_enable_sink_alpm(intel_dp, crtc_state);
795 
796 	crtc_state->has_panel_replay ?
797 		_panel_replay_enable_sink(intel_dp, crtc_state) :
798 		_psr_enable_sink(intel_dp, crtc_state);
799 
800 	if (intel_dp_is_edp(intel_dp))
801 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
802 }
803 
804 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
805 {
806 	struct intel_display *display = to_intel_display(intel_dp);
807 	struct intel_connector *connector = intel_dp->attached_connector;
808 	struct drm_i915_private *dev_priv = to_i915(display->drm);
809 	u32 val = 0;
810 
811 	if (DISPLAY_VER(display) >= 11)
812 		val |= EDP_PSR_TP4_TIME_0us;
813 
814 	if (display->params.psr_safest_params) {
815 		val |= EDP_PSR_TP1_TIME_2500us;
816 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
817 		goto check_tp3_sel;
818 	}
819 
820 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
821 		val |= EDP_PSR_TP1_TIME_0us;
822 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
823 		val |= EDP_PSR_TP1_TIME_100us;
824 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
825 		val |= EDP_PSR_TP1_TIME_500us;
826 	else
827 		val |= EDP_PSR_TP1_TIME_2500us;
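	/* e.g. a VBT tp1 wakeup time of 200 us selects the 500 us bucket above */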
828 
829 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
830 		val |= EDP_PSR_TP2_TP3_TIME_0us;
831 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
832 		val |= EDP_PSR_TP2_TP3_TIME_100us;
833 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
834 		val |= EDP_PSR_TP2_TP3_TIME_500us;
835 	else
836 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
837 
838 	/*
839 	 * WA 0479: hsw,bdw
840 	 * "Do not skip both TP1 and TP2/TP3"
841 	 */
842 	if (DISPLAY_VER(dev_priv) < 9 &&
843 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
844 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
845 		val |= EDP_PSR_TP2_TP3_TIME_100us;
846 
847 check_tp3_sel:
848 	if (intel_dp_source_supports_tps3(dev_priv) &&
849 	    drm_dp_tps3_supported(intel_dp->dpcd))
850 		val |= EDP_PSR_TP_TP1_TP3;
851 	else
852 		val |= EDP_PSR_TP_TP1_TP2;
853 
854 	return val;
855 }
856 
857 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
858 {
859 	struct intel_display *display = to_intel_display(intel_dp);
860 	struct intel_connector *connector = intel_dp->attached_connector;
861 	int idle_frames;
862 
863 	/* Let's use 6 as the minimum to cover all known cases including the
864 	 * off-by-one issue that HW has in some cases.
865 	 */
866 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
867 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
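	/*
	 * Example with illustrative numbers: a VBT value of 2 and a sink
	 * sync latency of 8 gives max(6, 2) = 6, then max(6, 8 + 1) = 9
	 * idle frames.
	 */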
868 
869 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
870 		idle_frames = 0xf;
871 
872 	return idle_frames;
873 }
874 
875 static void hsw_activate_psr1(struct intel_dp *intel_dp)
876 {
877 	struct intel_display *display = to_intel_display(intel_dp);
878 	struct drm_i915_private *dev_priv = to_i915(display->drm);
879 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
880 	u32 max_sleep_time = 0x1f;
881 	u32 val = EDP_PSR_ENABLE;
882 
883 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
884 
885 	if (DISPLAY_VER(display) < 20)
886 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
887 
888 	if (IS_HASWELL(dev_priv))
889 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
890 
891 	if (intel_dp->psr.link_standby)
892 		val |= EDP_PSR_LINK_STANDBY;
893 
894 	val |= intel_psr1_get_tp_time(intel_dp);
895 
896 	if (DISPLAY_VER(display) >= 8)
897 		val |= EDP_PSR_CRC_ENABLE;
898 
899 	if (DISPLAY_VER(display) >= 20)
900 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
901 
902 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
903 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
904 }
905 
906 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
907 {
908 	struct intel_display *display = to_intel_display(intel_dp);
909 	struct intel_connector *connector = intel_dp->attached_connector;
910 	u32 val = 0;
911 
912 	if (display->params.psr_safest_params)
913 		return EDP_PSR2_TP2_TIME_2500us;
914 
915 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
916 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
917 		val |= EDP_PSR2_TP2_TIME_50us;
918 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
919 		val |= EDP_PSR2_TP2_TIME_100us;
920 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
921 		val |= EDP_PSR2_TP2_TIME_500us;
922 	else
923 		val |= EDP_PSR2_TP2_TIME_2500us;
924 
925 	return val;
926 }
927 
928 static int psr2_block_count_lines(struct intel_dp *intel_dp)
929 {
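	/*
	 * Illustrative example: io_wake_lines = 7 and fast_wake_lines = 7 both
	 * fit in 8 block lines; a wake value of 9 or more forces 12 lines.
	 */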
930 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
931 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
932 }
933 
934 static int psr2_block_count(struct intel_dp *intel_dp)
935 {
936 	return psr2_block_count_lines(intel_dp) / 4;
937 }
938 
939 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
940 {
941 	u8 frames_before_su_entry;
942 
943 	frames_before_su_entry = max_t(u8,
944 				       intel_dp->psr.sink_sync_latency + 1,
945 				       2);
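	/*
	 * Example with illustrative numbers: sink_sync_latency = 3 gives
	 * max(3 + 1, 2) = 4 frames before SU entry.
	 */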
946 
947 	/* Entry setup frames must be at least 1 less than frames before SU entry */
948 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
949 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
950 
951 	return frames_before_su_entry;
952 }
953 
954 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
955 {
956 	struct intel_display *display = to_intel_display(intel_dp);
957 	struct intel_psr *psr = &intel_dp->psr;
958 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
959 
960 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
961 		u32 val = psr->su_region_et_enabled ?
962 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
963 
964 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
965 			val |= EDP_PSR2_SU_SDP_SCANLINE;
966 
967 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
968 			       val);
969 	}
970 
971 	intel_de_rmw(display,
972 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
973 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
974 
975 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
976 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
977 }
978 
979 static void hsw_activate_psr2(struct intel_dp *intel_dp)
980 {
981 	struct intel_display *display = to_intel_display(intel_dp);
982 	struct drm_i915_private *dev_priv = to_i915(display->drm);
983 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
984 	u32 val = EDP_PSR2_ENABLE;
985 	u32 psr_val = 0;
986 
987 	val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
988 
989 	if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))
990 		val |= EDP_SU_TRACK_ENABLE;
991 
992 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
993 		val |= EDP_Y_COORDINATE_ENABLE;
994 
995 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
996 
997 	val |= intel_psr2_get_tp_time(intel_dp);
998 
999 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1000 		if (psr2_block_count(intel_dp) > 2)
1001 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1002 		else
1003 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1004 	}
1005 
1006 	/* Wa_22012278275:adl-p */
1007 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1008 		static const u8 map[] = {
1009 			2, /* 5 lines */
1010 			1, /* 6 lines */
1011 			0, /* 7 lines */
1012 			3, /* 8 lines */
1013 			6, /* 9 lines */
1014 			5, /* 10 lines */
1015 			4, /* 11 lines */
1016 			7, /* 12 lines */
1017 		};
1018 		/*
1019 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1020 		 * comments below for more information
1021 		 */
1022 		int tmp;
1023 
1024 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1025 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1026 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1027 
1028 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1029 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1030 	} else if (DISPLAY_VER(display) >= 20) {
1031 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1032 	} else if (DISPLAY_VER(display) >= 12) {
1033 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1034 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1035 	} else if (DISPLAY_VER(display) >= 9) {
1036 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1037 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1038 	}
1039 
1040 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1041 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1042 
1043 	if (DISPLAY_VER(display) >= 20)
1044 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1045 
1046 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1047 		u32 tmp;
1048 
1049 		tmp = intel_de_read(display,
1050 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1051 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1052 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1053 		intel_de_write(display,
1054 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1055 	}
1056 
1057 	if (intel_dp->psr.su_region_et_enabled)
1058 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1059 
1060 	/*
1061 	 * PSR2 HW incorrectly uses EDP_PSR_TP1_TP3_SEL and BSpec
1062 	 * recommends keeping this bit unset while PSR2 is enabled.
1063 	 */
1064 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1065 
1066 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1067 }
1068 
1069 static bool
1070 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1071 {
1072 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1073 
1074 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1075 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1076 	else if (DISPLAY_VER(display) >= 12)
1077 		return cpu_transcoder == TRANSCODER_A;
1078 	else if (DISPLAY_VER(display) >= 9)
1079 		return cpu_transcoder == TRANSCODER_EDP;
1080 	else
1081 		return false;
1082 }
1083 
1084 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1085 {
1086 	if (!crtc_state->hw.active)
1087 		return 0;
1088 
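	/* e.g. a 60 Hz mode gives DIV_ROUND_UP(1000000, 60) = 16667 us per frame */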
1089 	return DIV_ROUND_UP(1000 * 1000,
1090 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1091 }
1092 
1093 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1094 				     u32 idle_frames)
1095 {
1096 	struct intel_display *display = to_intel_display(intel_dp);
1097 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1098 
1099 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1100 		     EDP_PSR2_IDLE_FRAMES_MASK,
1101 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1102 }
1103 
1104 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1105 {
1106 	struct intel_display *display = to_intel_display(intel_dp);
1107 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1108 
1109 	psr2_program_idle_frames(intel_dp, 0);
1110 	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO);
1111 }
1112 
1113 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1114 {
1115 	struct intel_display *display = to_intel_display(intel_dp);
1116 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1117 
1118 	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
1119 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1120 }
1121 
1122 static void tgl_dc3co_disable_work(struct work_struct *work)
1123 {
1124 	struct intel_dp *intel_dp =
1125 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1126 
1127 	mutex_lock(&intel_dp->psr.lock);
1128 	/* If delayed work is pending, it is not idle */
1129 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1130 		goto unlock;
1131 
1132 	tgl_psr2_disable_dc3co(intel_dp);
1133 unlock:
1134 	mutex_unlock(&intel_dp->psr.lock);
1135 }
1136 
1137 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1138 {
1139 	if (!intel_dp->psr.dc3co_exitline)
1140 		return;
1141 
1142 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1143 	/* Before PSR2 exit disallow dc3co */
1144 	tgl_psr2_disable_dc3co(intel_dp);
1145 }
1146 
1147 static bool
1148 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1149 			      struct intel_crtc_state *crtc_state)
1150 {
1151 	struct intel_display *display = to_intel_display(intel_dp);
1152 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1153 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1154 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1155 	enum port port = dig_port->base.port;
1156 
1157 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1158 		return pipe <= PIPE_B && port <= PORT_B;
1159 	else
1160 		return pipe == PIPE_A && port == PORT_A;
1161 }
1162 
1163 static void
1164 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1165 				  struct intel_crtc_state *crtc_state)
1166 {
1167 	struct intel_display *display = to_intel_display(intel_dp);
1168 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1169 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1170 	struct i915_power_domains *power_domains = &display->power.domains;
1171 	u32 exit_scanlines;
1172 
1173 	/*
1174 	 * FIXME: The DC3CO activating/deactivating sequence has changed; keep
1175 	 * DC3CO disabled until the new sequence is implemented here.
1176 	 * B.Specs:49196
1177 	 */
1178 	return;
1179 
1180 	/*
1181 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1182 	 * TODO: when the issue is addressed, this restriction should be removed.
1183 	 */
1184 	if (crtc_state->enable_psr2_sel_fetch)
1185 		return;
1186 
1187 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1188 		return;
1189 
1190 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1191 		return;
1192 
1193 	/* Wa_16011303918:adl-p */
1194 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1195 		return;
1196 
1197 	/*
1198 	 * DC3CO Exit time 200us B.Spec 49196
1199 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1200 	 */
1201 	exit_scanlines =
1202 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
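	/*
	 * Illustrative numbers: a 1080p mode with a ~14.8 us line time needs
	 * roundup(200 / 14.8) + 1 = 15 exit scanlines, so the exitline would
	 * be 1080 - 15 = 1065.
	 */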
1203 
1204 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1205 		return;
1206 
1207 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1208 }
1209 
1210 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1211 					      struct intel_crtc_state *crtc_state)
1212 {
1213 	struct intel_display *display = to_intel_display(intel_dp);
1214 
1215 	if (!display->params.enable_psr2_sel_fetch &&
1216 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1217 		drm_dbg_kms(display->drm,
1218 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1219 		return false;
1220 	}
1221 
1222 	if (crtc_state->uapi.async_flip) {
1223 		drm_dbg_kms(display->drm,
1224 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1225 		return false;
1226 	}
1227 
1228 	return crtc_state->enable_psr2_sel_fetch = true;
1229 }
1230 
1231 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1232 				   struct intel_crtc_state *crtc_state)
1233 {
1234 	struct intel_display *display = to_intel_display(intel_dp);
1235 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1236 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1237 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1238 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1239 	u16 y_granularity = 0;
1240 
1241 	/* PSR2 HW only sends full lines so we only need to validate the width */
1242 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1243 		return false;
1244 
1245 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1246 		return false;
1247 
1248 	/* HW tracking is only aligned to 4 lines */
1249 	if (!crtc_state->enable_psr2_sel_fetch)
1250 		return intel_dp->psr.su_y_granularity == 4;
1251 
1252 	/*
1253 	 * adl_p and mtl platforms have 1 line granularity.
1254 	 * For other platforms with SW tracking we can adjust the y coordinates
1255 	 * to match the sink requirement if it is a multiple of 4.
1256 	 */
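	/*
	 * Illustrative example for the SW tracking path below: a sink
	 * reporting su_y_granularity = 2 is bumped to 4, while a value of 8
	 * is used as-is; a 1080 line display divides evenly by either.
	 */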
1257 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1258 		y_granularity = intel_dp->psr.su_y_granularity;
1259 	else if (intel_dp->psr.su_y_granularity <= 2)
1260 		y_granularity = 4;
1261 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1262 		y_granularity = intel_dp->psr.su_y_granularity;
1263 
1264 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1265 		return false;
1266 
1267 	if (crtc_state->dsc.compression_enable &&
1268 	    vdsc_cfg->slice_height % y_granularity)
1269 		return false;
1270 
1271 	crtc_state->su_y_granularity = y_granularity;
1272 	return true;
1273 }
1274 
1275 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1276 							struct intel_crtc_state *crtc_state)
1277 {
1278 	struct intel_display *display = to_intel_display(intel_dp);
1279 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1280 	u32 hblank_total, hblank_ns, req_ns;
1281 
1282 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1283 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1284 
1285 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1286 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
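	/*
	 * Worked example with illustrative numbers: 4 lanes at HBR2
	 * (port_clock = 270000) gives req_ns = (15 + 11) * 1000 / 270 = 96 ns;
	 * with hblank_total = 160 and crtc_clock = 533250, hblank_ns = 300 ns,
	 * so 300 - 96 > 100 and no prior scanline indication is needed.
	 */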
1287 
1288 	if ((hblank_ns - req_ns) > 100)
1289 		return true;
1290 
1291 	/* Not supported <13 / Wa_22012279113:adl-p */
1292 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1293 		return false;
1294 
1295 	crtc_state->req_psr2_sdp_prior_scanline = true;
1296 	return true;
1297 }
1298 
1299 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1300 					const struct drm_display_mode *adjusted_mode)
1301 {
1302 	struct intel_display *display = to_intel_display(intel_dp);
1303 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1304 	int entry_setup_frames = 0;
1305 
1306 	if (psr_setup_time < 0) {
1307 		drm_dbg_kms(display->drm,
1308 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1309 			    intel_dp->psr_dpcd[1]);
1310 		return -ETIME;
1311 	}
1312 
1313 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1314 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1315 		if (DISPLAY_VER(display) >= 20) {
1316 			/* setup entry frames can be up to 3 frames */
1317 			entry_setup_frames = 1;
1318 			drm_dbg_kms(display->drm,
1319 				    "PSR setup entry frames %d\n",
1320 				    entry_setup_frames);
1321 		} else {
1322 			drm_dbg_kms(display->drm,
1323 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1324 				    psr_setup_time);
1325 			return -ETIME;
1326 		}
1327 	}
1328 
1329 	return entry_setup_frames;
1330 }
1331 
1332 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1333 				       const struct intel_crtc_state *crtc_state,
1334 				       bool aux_less)
1335 {
1336 	struct intel_display *display = to_intel_display(intel_dp);
1337 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1338 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1339 	int wake_lines;
1340 
1341 	if (aux_less)
1342 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1343 	else
1344 		wake_lines = DISPLAY_VER(display) < 20 ?
1345 			psr2_block_count_lines(intel_dp) :
1346 			intel_dp->alpm_parameters.io_wake_lines;
1347 
1348 	if (crtc_state->req_psr2_sdp_prior_scanline)
1349 		vblank -= 1;
1350 
1351 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1352 	if (vblank < wake_lines)
1353 		return false;
1354 
1355 	return true;
1356 }
1357 
1358 static bool alpm_config_valid(struct intel_dp *intel_dp,
1359 			      const struct intel_crtc_state *crtc_state,
1360 			      bool aux_less)
1361 {
1362 	struct intel_display *display = to_intel_display(intel_dp);
1363 
1364 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1365 		drm_dbg_kms(display->drm,
1366 			    "PSR2/Panel Replay not enabled, unable to use long enough wake times\n");
1367 		return false;
1368 	}
1369 
1370 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1371 		drm_dbg_kms(display->drm,
1372 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1373 		return false;
1374 	}
1375 
1376 	return true;
1377 }
1378 
1379 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1380 				    struct intel_crtc_state *crtc_state)
1381 {
1382 	struct intel_display *display = to_intel_display(intel_dp);
1383 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1384 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1385 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1386 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1387 
1388 	if (!intel_dp->psr.sink_psr2_support)
1389 		return false;
1390 
1391 	/* JSL and EHL only support eDP 1.3 */
1392 	if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
1393 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1394 		return false;
1395 	}
1396 
1397 	/* Wa_16011181250 */
1398 	if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
1399 	    IS_DG2(dev_priv)) {
1400 		drm_dbg_kms(display->drm,
1401 			    "PSR2 is defeatured for this platform\n");
1402 		return false;
1403 	}
1404 
1405 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1406 		drm_dbg_kms(display->drm,
1407 			    "PSR2 not completely functional in this stepping\n");
1408 		return false;
1409 	}
1410 
1411 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1412 		drm_dbg_kms(display->drm,
1413 			    "PSR2 not supported in transcoder %s\n",
1414 			    transcoder_name(crtc_state->cpu_transcoder));
1415 		return false;
1416 	}
1417 
1418 	/*
1419 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1420 	 * resolution requires DSC to be enabled, priority is given to DSC
1421 	 * over PSR2.
1422 	 */
1423 	if (crtc_state->dsc.compression_enable &&
1424 	    (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) {
1425 		drm_dbg_kms(display->drm,
1426 			    "PSR2 cannot be enabled since DSC is enabled\n");
1427 		return false;
1428 	}
1429 
1430 	if (DISPLAY_VER(display) >= 12) {
1431 		psr_max_h = 5120;
1432 		psr_max_v = 3200;
1433 		max_bpp = 30;
1434 	} else if (DISPLAY_VER(display) >= 10) {
1435 		psr_max_h = 4096;
1436 		psr_max_v = 2304;
1437 		max_bpp = 24;
1438 	} else if (DISPLAY_VER(display) == 9) {
1439 		psr_max_h = 3640;
1440 		psr_max_v = 2304;
1441 		max_bpp = 24;
1442 	}
1443 
1444 	if (crtc_state->pipe_bpp > max_bpp) {
1445 		drm_dbg_kms(display->drm,
1446 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1447 			    crtc_state->pipe_bpp, max_bpp);
1448 		return false;
1449 	}
1450 
1451 	/* Wa_16011303918:adl-p */
1452 	if (crtc_state->vrr.enable &&
1453 	    IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1454 		drm_dbg_kms(display->drm,
1455 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1456 		return false;
1457 	}
1458 
1459 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1460 		return false;
1461 
1462 	if (!crtc_state->enable_psr2_sel_fetch &&
1463 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1464 		drm_dbg_kms(display->drm,
1465 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1466 			    crtc_hdisplay, crtc_vdisplay,
1467 			    psr_max_h, psr_max_v);
1468 		return false;
1469 	}
1470 
1471 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1472 
1473 	return true;
1474 }
1475 
1476 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1477 					  struct intel_crtc_state *crtc_state)
1478 {
1479 	struct intel_display *display = to_intel_display(intel_dp);
1480 
1481 	if (HAS_PSR2_SEL_FETCH(display) &&
1482 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1483 	    !HAS_PSR_HW_TRACKING(display)) {
1484 		drm_dbg_kms(display->drm,
1485 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1486 		goto unsupported;
1487 	}
1488 
1489 	if (!psr2_global_enabled(intel_dp)) {
1490 		drm_dbg_kms(display->drm,
1491 			    "Selective update disabled by flag\n");
1492 		goto unsupported;
1493 	}
1494 
1495 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1496 		goto unsupported;
1497 
1498 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1499 		drm_dbg_kms(display->drm,
1500 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1501 		goto unsupported;
1502 	}
1503 
1504 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1505 					     !intel_dp->psr.sink_panel_replay_su_support))
1506 		goto unsupported;
1507 
1508 	if (crtc_state->crc_enabled) {
1509 		drm_dbg_kms(display->drm,
1510 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1511 		goto unsupported;
1512 	}
1513 
1514 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1515 		drm_dbg_kms(display->drm,
1516 			    "Selective update not enabled, SU granularity not compatible\n");
1517 		goto unsupported;
1518 	}
1519 
1520 	crtc_state->enable_psr2_su_region_et =
1521 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1522 
1523 	return true;
1524 
1525 unsupported:
1526 	crtc_state->enable_psr2_sel_fetch = false;
1527 	return false;
1528 }
1529 
1530 static bool _psr_compute_config(struct intel_dp *intel_dp,
1531 				struct intel_crtc_state *crtc_state)
1532 {
1533 	struct intel_display *display = to_intel_display(intel_dp);
1534 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1535 	int entry_setup_frames;
1536 
1537 	/*
1538 	 * Current PSR panels don't work reliably with VRR enabled, so if
1539 	 * VRR is enabled, do not enable PSR.
1540 	 */
1541 	if (crtc_state->vrr.enable)
1542 		return false;
1543 
1544 	if (!CAN_PSR(intel_dp))
1545 		return false;
1546 
1547 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1548 
1549 	if (entry_setup_frames >= 0) {
1550 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1551 	} else {
1552 		drm_dbg_kms(display->drm,
1553 			    "PSR condition failed: PSR setup timing not met\n");
1554 		return false;
1555 	}
1556 
1557 	return true;
1558 }
1559 
1560 static bool
1561 _panel_replay_compute_config(struct intel_dp *intel_dp,
1562 			     const struct intel_crtc_state *crtc_state,
1563 			     const struct drm_connector_state *conn_state)
1564 {
1565 	struct intel_display *display = to_intel_display(intel_dp);
1566 	struct intel_connector *connector =
1567 		to_intel_connector(conn_state->connector);
1568 	struct intel_hdcp *hdcp = &connector->hdcp;
1569 
1570 	if (!CAN_PANEL_REPLAY(intel_dp))
1571 		return false;
1572 
1573 	if (!panel_replay_global_enabled(intel_dp)) {
1574 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1575 		return false;
1576 	}
1577 
1578 	if (!intel_dp_is_edp(intel_dp))
1579 		return true;
1580 
1581 	/* Remaining checks are for eDP only */
1582 
1583 	/* 128b/132b Panel Replay is not supported on eDP */
1584 	if (intel_dp_is_uhbr(crtc_state)) {
1585 		drm_dbg_kms(display->drm,
1586 			    "Panel Replay is not supported with 128b/132b\n");
1587 		return false;
1588 	}
1589 
1590 	/* HW will not allow Panel Replay on eDP when HDCP is enabled */
1591 	if (conn_state->content_protection ==
1592 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1593 	    (conn_state->content_protection ==
1594 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1595 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1596 		drm_dbg_kms(display->drm,
1597 			    "Panel Replay is not supported with HDCP\n");
1598 		return false;
1599 	}
1600 
1601 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1602 		return false;
1603 
1604 	if (crtc_state->crc_enabled) {
1605 		drm_dbg_kms(display->drm,
1606 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1607 		return false;
1608 	}
1609 
1610 	return true;
1611 }
1612 
1613 void intel_psr_compute_config(struct intel_dp *intel_dp,
1614 			      struct intel_crtc_state *crtc_state,
1615 			      struct drm_connector_state *conn_state)
1616 {
1617 	struct intel_display *display = to_intel_display(intel_dp);
1618 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1619 
1620 	if (!psr_global_enabled(intel_dp)) {
1621 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1622 		return;
1623 	}
1624 
1625 	if (intel_dp->psr.sink_not_reliable) {
1626 		drm_dbg_kms(display->drm,
1627 			    "PSR sink implementation is not reliable\n");
1628 		return;
1629 	}
1630 
1631 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1632 		drm_dbg_kms(display->drm,
1633 			    "PSR condition failed: Interlaced mode enabled\n");
1634 		return;
1635 	}
1636 
1637 	/*
1638 	 * FIXME figure out what is wrong with PSR+joiner and
1639 	 * fix it. Presumably something related to the fact that
1640 	 * PSR is a transcoder level feature.
1641 	 */
1642 	if (crtc_state->joiner_pipes) {
1643 		drm_dbg_kms(display->drm,
1644 			    "PSR disabled due to joiner\n");
1645 		return;
1646 	}
1647 
1648 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1649 								    crtc_state,
1650 								    conn_state);
1651 
1652 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1653 		_psr_compute_config(intel_dp, crtc_state);
1654 
1655 	if (!crtc_state->has_psr)
1656 		return;
1657 
1658 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1659 }
1660 
1661 void intel_psr_get_config(struct intel_encoder *encoder,
1662 			  struct intel_crtc_state *pipe_config)
1663 {
1664 	struct intel_display *display = to_intel_display(encoder);
1665 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1666 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1667 	struct intel_dp *intel_dp;
1668 	u32 val;
1669 
1670 	if (!dig_port)
1671 		return;
1672 
1673 	intel_dp = &dig_port->dp;
1674 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1675 		return;
1676 
1677 	mutex_lock(&intel_dp->psr.lock);
1678 	if (!intel_dp->psr.enabled)
1679 		goto unlock;
1680 
1681 	if (intel_dp->psr.panel_replay_enabled) {
1682 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1683 	} else {
1684 		/*
1685 		 * Not possible to read EDP_PSR/PSR2_CTL registers as it is
1686 		 * enabled/disabled because of frontbuffer tracking and others.
1687 		 */
1688 		pipe_config->has_psr = true;
1689 	}
1690 
1691 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1692 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1693 
1694 	if (!intel_dp->psr.sel_update_enabled)
1695 		goto unlock;
1696 
1697 	if (HAS_PSR2_SEL_FETCH(display)) {
1698 		val = intel_de_read(display,
1699 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1700 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1701 			pipe_config->enable_psr2_sel_fetch = true;
1702 	}
1703 
1704 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1705 
1706 	if (DISPLAY_VER(display) >= 12) {
1707 		val = intel_de_read(display,
1708 				    TRANS_EXITLINE(display, cpu_transcoder));
1709 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1710 	}
1711 unlock:
1712 	mutex_unlock(&intel_dp->psr.lock);
1713 }
1714 
1715 static void intel_psr_activate(struct intel_dp *intel_dp)
1716 {
1717 	struct intel_display *display = to_intel_display(intel_dp);
1718 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1719 
1720 	drm_WARN_ON(display->drm,
1721 		    transcoder_has_psr2(display, cpu_transcoder) &&
1722 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1723 
1724 	drm_WARN_ON(display->drm,
1725 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1726 
1727 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1728 
1729 	lockdep_assert_held(&intel_dp->psr.lock);
1730 
1731 	/* psr1, psr2 and panel-replay are mutually exclusive. */
1732 	if (intel_dp->psr.panel_replay_enabled)
1733 		dg2_activate_panel_replay(intel_dp);
1734 	else if (intel_dp->psr.sel_update_enabled)
1735 		hsw_activate_psr2(intel_dp);
1736 	else
1737 		hsw_activate_psr1(intel_dp);
1738 
1739 	intel_dp->psr.active = true;
1740 }
1741 
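/*
 * GEN8_CHICKEN_DCPR_1 bit for the PSR pipe, used by Wa_16013835468 and
 * Wa_14015648006.
 */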
1742 static u32 wa_16013835468_bit_get(struct intel_dp *intel_dp)
1743 {
1744 	switch (intel_dp->psr.pipe) {
1745 	case PIPE_A:
1746 		return LATENCY_REPORTING_REMOVED_PIPE_A;
1747 	case PIPE_B:
1748 		return LATENCY_REPORTING_REMOVED_PIPE_B;
1749 	case PIPE_C:
1750 		return LATENCY_REPORTING_REMOVED_PIPE_C;
1751 	case PIPE_D:
1752 		return LATENCY_REPORTING_REMOVED_PIPE_D;
1753 	default:
1754 		MISSING_CASE(intel_dp->psr.pipe);
1755 		return 0;
1756 	}
1757 }
1758 
1759 /*
1760  * Wa_16013835468
1761  * Wa_14015648006
1762  */
1763 static void wm_optimization_wa(struct intel_dp *intel_dp,
1764 			       const struct intel_crtc_state *crtc_state)
1765 {
1766 	struct intel_display *display = to_intel_display(intel_dp);
1767 	bool set_wa_bit = false;
1768 
1769 	/* Wa_14015648006 */
1770 	if (IS_DISPLAY_VER(display, 11, 14))
1771 		set_wa_bit |= crtc_state->wm_level_disabled;
1772 
1773 	/* Wa_16013835468 */
1774 	if (DISPLAY_VER(display) == 12)
1775 		set_wa_bit |= crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1776 			crtc_state->hw.adjusted_mode.crtc_vdisplay;
1777 
1778 	if (set_wa_bit)
1779 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1780 			     0, wa_16013835468_bit_get(intel_dp));
1781 	else
1782 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1783 			     wa_16013835468_bit_get(intel_dp), 0);
1784 }
1785 
1786 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1787 				    const struct intel_crtc_state *crtc_state)
1788 {
1789 	struct intel_display *display = to_intel_display(intel_dp);
1790 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1791 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1792 	u32 mask = 0;
1793 
1794 	/*
1795 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1796 	 * SKL+ use hardcoded values for PSR AUX transactions.
1797 	 */
1798 	if (DISPLAY_VER(display) < 9)
1799 		hsw_psr_setup_aux(intel_dp);
1800 
1801 	/*
1802 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1803 	 * mask LPSP to avoid a dependency on other drivers that might block
1804 	 * runtime_pm, besides preventing other hw tracking issues, now that we
1805 	 * can rely on frontbuffer tracking.
1806 	 *
1807 	 * From bspec prior to LunarLake:
1808 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1809 	 * panel replay mode.
1810 	 *
1811 	 * From bspec beyond LunarLake:
1812 	 * Panel Replay on DP: No bits are applicable
1813 	 * Panel Replay on eDP: All bits are applicable
1814 	 */
1815 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1816 		mask = EDP_PSR_DEBUG_MASK_HPD;
1817 
1818 	if (intel_dp_is_edp(intel_dp)) {
1819 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1820 
1821 		/*
1822 		 * For some unknown reason on HSW non-ULT (or at least on
1823 		 * Dell Latitude E6540) external displays start to flicker
1824 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1825 		 * higher than should be possible with an external display.
1826 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1827 		 * when external displays are active.
1828 		 */
1829 		if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv))
1830 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1831 
1832 		if (DISPLAY_VER(display) < 20)
1833 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1834 
1835 		/*
1836 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1837 		 * registers in order to keep the CURSURFLIVE tricks working :(
1838 		 */
1839 		if (IS_DISPLAY_VER(display, 9, 10))
1840 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1841 
1842 		/* allow PSR with sprite enabled */
1843 		if (IS_HASWELL(dev_priv))
1844 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1845 	}
1846 
1847 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1848 
1849 	psr_irq_control(intel_dp);
1850 
1851 	/*
1852 	 * TODO: if future platforms support DC3CO in more than one
1853 	 * transcoder, EXITLINE will need to be unset when disabling PSR.
1854 	 */
1855 	if (intel_dp->psr.dc3co_exitline)
1856 		intel_de_rmw(display,
1857 			     TRANS_EXITLINE(display, cpu_transcoder),
1858 			     EXITLINE_MASK,
1859 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1860 
1861 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1862 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1863 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1864 			     IGNORE_PSR2_HW_TRACKING : 0);
1865 
1866 	if (intel_dp_is_edp(intel_dp))
1867 		intel_alpm_configure(intel_dp, crtc_state);
1868 
1869 	/*
1870 	 * Wa_16013835468
1871 	 * Wa_14015648006
1872 	 */
1873 	wm_optimization_wa(intel_dp, crtc_state);
1874 
1875 	if (intel_dp->psr.sel_update_enabled) {
1876 		if (DISPLAY_VER(display) == 9)
1877 			intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder), 0,
1878 				     PSR2_VSC_ENABLE_PROG_HEADER |
1879 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1880 
1881 		/*
1882 		 * Wa_16014451276:adlp,mtl[a0,b0]
1883 		 * All supported adlp panels have 1-based X granularity; this may
1884 		 * cause issues if non-supported panels are used.
1885 		 */
1886 		if (!intel_dp->psr.panel_replay_enabled &&
1887 		    (IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
1888 		     IS_ALDERLAKE_P(dev_priv)))
1889 			intel_de_rmw(display, hsw_chicken_trans_reg(dev_priv, cpu_transcoder),
1890 				     0, ADLP_1_BASED_X_GRANULARITY);
1891 
1892 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1893 		if (!intel_dp->psr.panel_replay_enabled &&
1894 		    IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
1895 			intel_de_rmw(display,
1896 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1897 				     0,
1898 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1899 		else if (IS_ALDERLAKE_P(dev_priv))
1900 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1901 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1902 	}
1903 }
1904 
1905 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1906 {
1907 	struct intel_display *display = to_intel_display(intel_dp);
1908 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1909 	u32 val;
1910 
1911 	if (intel_dp->psr.panel_replay_enabled)
1912 		goto no_err;
1913 
1914 	/*
1915 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1916 	 * will still keep the error set even after the reset done in the
1917 	 * irq_preinstall and irq_uninstall hooks.
1918 	 * Enabling PSR in this situation causes the screen to freeze the
1919 	 * first time the PSR HW tries to activate, so let's keep PSR disabled
1920 	 * to avoid any rendering problems.
1921 	 */
1922 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1923 	val &= psr_irq_psr_error_bit_get(intel_dp);
1924 	if (val) {
1925 		intel_dp->psr.sink_not_reliable = true;
1926 		drm_dbg_kms(display->drm,
1927 			    "PSR interruption error set, not enabling PSR\n");
1928 		return false;
1929 	}
1930 
1931 no_err:
1932 	return true;
1933 }
1934 
1935 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1936 				    const struct intel_crtc_state *crtc_state)
1937 {
1938 	struct intel_display *display = to_intel_display(intel_dp);
1939 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1940 	u32 val;
1941 
1942 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1943 
1944 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1945 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1946 	intel_dp->psr.busy_frontbuffer_bits = 0;
1947 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1948 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1949 	/* DC5/DC6 requires at least 6 idle frames */
1950 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1951 	intel_dp->psr.dc3co_exit_delay = val;
1952 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1953 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1954 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1955 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1956 	intel_dp->psr.req_psr2_sdp_prior_scanline =
1957 		crtc_state->req_psr2_sdp_prior_scanline;
1958 
1959 	if (!psr_interrupt_error_check(intel_dp))
1960 		return;
1961 
1962 	if (intel_dp->psr.panel_replay_enabled) {
1963 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
1964 	} else {
1965 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
1966 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
1967 
1968 		/*
1969 		 * Panel replay has to be enabled before link training: doing it
1970 		 * only for PSR here.
1971 		 */
1972 		intel_psr_enable_sink(intel_dp, crtc_state);
1973 	}
1974 
1975 	if (intel_dp_is_edp(intel_dp))
1976 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
1977 
1978 	intel_psr_enable_source(intel_dp, crtc_state);
1979 	intel_dp->psr.enabled = true;
1980 	intel_dp->psr.paused = false;
1981 
1982 	intel_psr_activate(intel_dp);
1983 }
1984 
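/*
 * Deactivate PSR/Panel Replay in the hardware. When PSR is not active this
 * only sanity checks that the enable bits are indeed clear.
 */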
1985 static void intel_psr_exit(struct intel_dp *intel_dp)
1986 {
1987 	struct intel_display *display = to_intel_display(intel_dp);
1988 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1989 	u32 val;
1990 
1991 	if (!intel_dp->psr.active) {
1992 		if (transcoder_has_psr2(display, cpu_transcoder)) {
1993 			val = intel_de_read(display,
1994 					    EDP_PSR2_CTL(display, cpu_transcoder));
1995 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
1996 		}
1997 
1998 		val = intel_de_read(display,
1999 				    psr_ctl_reg(display, cpu_transcoder));
2000 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2001 
2002 		return;
2003 	}
2004 
2005 	if (intel_dp->psr.panel_replay_enabled) {
2006 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2007 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2008 	} else if (intel_dp->psr.sel_update_enabled) {
2009 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2010 
2011 		val = intel_de_rmw(display,
2012 				   EDP_PSR2_CTL(display, cpu_transcoder),
2013 				   EDP_PSR2_ENABLE, 0);
2014 
2015 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2016 	} else {
2017 		val = intel_de_rmw(display,
2018 				   psr_ctl_reg(display, cpu_transcoder),
2019 				   EDP_PSR_ENABLE, 0);
2020 
2021 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2022 	}
2023 	intel_dp->psr.active = false;
2024 }
2025 
2026 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2027 {
2028 	struct intel_display *display = to_intel_display(intel_dp);
2029 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2030 	i915_reg_t psr_status;
2031 	u32 psr_status_mask;
2032 
2033 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2034 					  intel_dp->psr.panel_replay_enabled)) {
2035 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2036 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2037 	} else {
2038 		psr_status = psr_status_reg(display, cpu_transcoder);
2039 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2040 	}
2041 
2042 	/* Wait till PSR is idle */
2043 	if (intel_de_wait_for_clear(display, psr_status,
2044 				    psr_status_mask, 2000))
2045 		drm_err(display->drm, "Timed out waiting for PSR idle state\n");
2046 }
2047 
2048 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2049 {
2050 	struct intel_display *display = to_intel_display(intel_dp);
2051 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2052 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2053 
2054 	lockdep_assert_held(&intel_dp->psr.lock);
2055 
2056 	if (!intel_dp->psr.enabled)
2057 		return;
2058 
2059 	if (intel_dp->psr.panel_replay_enabled)
2060 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2061 	else
2062 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2063 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2064 
2065 	intel_psr_exit(intel_dp);
2066 	intel_psr_wait_exit_locked(intel_dp);
2067 
2068 	/*
2069 	 * Wa_16013835468
2070 	 * Wa_14015648006
2071 	 */
2072 	if (DISPLAY_VER(display) >= 11)
2073 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2074 			     wa_16013835468_bit_get(intel_dp), 0);
2075 
2076 	if (intel_dp->psr.sel_update_enabled) {
2077 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2078 		if (!intel_dp->psr.panel_replay_enabled &&
2079 		    IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
2080 			intel_de_rmw(display,
2081 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2082 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2083 		else if (IS_ALDERLAKE_P(dev_priv))
2084 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2085 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2086 	}
2087 
2088 	if (intel_dp_is_edp(intel_dp))
2089 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2090 
2091 	/* Panel Replay on eDP always uses AUX-less ALPM. */
2092 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2093 		intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2094 			     ALPM_CTL_ALPM_ENABLE |
2095 			     ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2096 
2097 		intel_de_rmw(display,
2098 			     PORT_ALPM_CTL(display, cpu_transcoder),
2099 			     PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2100 	}
2101 
2102 	/* Disable PSR on Sink */
2103 	if (!intel_dp->psr.panel_replay_enabled) {
2104 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2105 
2106 		if (intel_dp->psr.sel_update_enabled)
2107 			drm_dp_dpcd_writeb(&intel_dp->aux,
2108 					   DP_RECEIVER_ALPM_CONFIG, 0);
2109 	}
2110 
2111 	intel_dp->psr.enabled = false;
2112 	intel_dp->psr.panel_replay_enabled = false;
2113 	intel_dp->psr.sel_update_enabled = false;
2114 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2115 	intel_dp->psr.su_region_et_enabled = false;
2116 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2117 }
2118 
2119 /**
2120  * intel_psr_disable - Disable PSR
2121  * @intel_dp: Intel DP
2122  * @old_crtc_state: old CRTC state
2123  *
2124  * This function needs to be called before disabling pipe.
2125  */
2126 void intel_psr_disable(struct intel_dp *intel_dp,
2127 		       const struct intel_crtc_state *old_crtc_state)
2128 {
2129 	struct intel_display *display = to_intel_display(intel_dp);
2130 
2131 	if (!old_crtc_state->has_psr)
2132 		return;
2133 
2134 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp)))
2135 		return;
2136 
2137 	mutex_lock(&intel_dp->psr.lock);
2138 
2139 	intel_psr_disable_locked(intel_dp);
2140 
2141 	mutex_unlock(&intel_dp->psr.lock);
2142 	cancel_work_sync(&intel_dp->psr.work);
2143 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2144 }
2145 
2146 /**
2147  * intel_psr_pause - Pause PSR
2148  * @intel_dp: Intel DP
2149  *
2150  * This function needs to be called after enabling PSR.
2151  */
2152 void intel_psr_pause(struct intel_dp *intel_dp)
2153 {
2154 	struct intel_display *display = to_intel_display(intel_dp);
2155 	struct intel_psr *psr = &intel_dp->psr;
2156 
2157 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2158 		return;
2159 
2160 	mutex_lock(&psr->lock);
2161 
2162 	if (!psr->enabled) {
2163 		mutex_unlock(&psr->lock);
2164 		return;
2165 	}
2166 
2167 	/* If we ever hit this, we will need to add refcount to pause/resume */
2168 	drm_WARN_ON(display->drm, psr->paused);
2169 
2170 	intel_psr_exit(intel_dp);
2171 	intel_psr_wait_exit_locked(intel_dp);
2172 	psr->paused = true;
2173 
2174 	mutex_unlock(&psr->lock);
2175 
2176 	cancel_work_sync(&psr->work);
2177 	cancel_delayed_work_sync(&psr->dc3co_work);
2178 }
2179 
2180 /**
2181  * intel_psr_resume - Resume PSR
2182  * @intel_dp: Intel DP
2183  *
2184  * This function needs to be called after pausing PSR.
2185  */
2186 void intel_psr_resume(struct intel_dp *intel_dp)
2187 {
2188 	struct intel_psr *psr = &intel_dp->psr;
2189 
2190 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2191 		return;
2192 
2193 	mutex_lock(&psr->lock);
2194 
2195 	if (!psr->paused)
2196 		goto unlock;
2197 
2198 	psr->paused = false;
2199 	intel_psr_activate(intel_dp);
2200 
2201 unlock:
2202 	mutex_unlock(&psr->lock);
2203 }
2204 
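/*
 * PSR2_MAN_TRK_CTL has a different bit layout on ADL-P and display 14+, so
 * these helpers return the platform-appropriate bits (the enable bit is not
 * used there, hence 0).
 */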
2205 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2206 {
2207 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2208 
2209 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 :
2210 		PSR2_MAN_TRK_CTL_ENABLE;
2211 }
2212 
2213 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2214 {
2215 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2216 
2217 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2218 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2219 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2220 }
2221 
2222 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2223 {
2224 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2225 
2226 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2227 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2228 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2229 }
2230 
2231 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2232 {
2233 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2234 
2235 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2236 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2237 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2238 }
2239 
2240 static void psr_force_hw_tracking_exit(struct intel_dp *intel_dp)
2241 {
2242 	struct intel_display *display = to_intel_display(intel_dp);
2243 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2244 
2245 	if (intel_dp->psr.psr2_sel_fetch_enabled)
2246 		intel_de_write(display,
2247 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2248 			       man_trk_ctl_enable_bit_get(display) |
2249 			       man_trk_ctl_partial_frame_bit_get(display) |
2250 			       man_trk_ctl_single_full_frame_bit_get(display) |
2251 			       man_trk_ctl_continuos_full_frame(display));
2252 
2253 	/*
2254 	 * Display WA #0884: skl+
2255 	 * This documented WA for bxt can be safely applied
2256 	 * broadly so we can force HW tracking to exit PSR
2257 	 * instead of disabling and re-enabling.
2258 	 * The workaround tells us to write 0 to CUR_SURFLIVE_A,
2259 	 * but it makes more sense to write to the currently active
2260 	 * pipe.
2261 	 *
2262 	 * This workaround does not exist for platforms with display 10 or
2263 	 * newer, but testing proved that it works up to display 13; for
2264 	 * anything newer additional testing will be needed.
2265 	 */
2266 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2267 }
2268 
2269 void intel_psr2_program_trans_man_trk_ctl(const struct intel_crtc_state *crtc_state)
2270 {
2271 	struct intel_display *display = to_intel_display(crtc_state);
2272 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2273 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2274 	struct intel_encoder *encoder;
2275 
2276 	if (!crtc_state->enable_psr2_sel_fetch)
2277 		return;
2278 
2279 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2280 					     crtc_state->uapi.encoder_mask) {
2281 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2282 
2283 		lockdep_assert_held(&intel_dp->psr.lock);
2284 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
2285 			return;
2286 		break;
2287 	}
2288 
2289 	intel_de_write(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2290 		       crtc_state->psr2_man_track_ctl);
2291 
2292 	if (!crtc_state->enable_psr2_su_region_et)
2293 		return;
2294 
2295 	intel_de_write(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2296 		       crtc_state->pipe_srcsz_early_tpt);
2297 }
2298 
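/*
 * Compute the PSR2_MAN_TRK_CTL value for this commit: a single/continuous
 * full frame for full updates, otherwise the SU region derived from
 * psr2_su_area (the start/end address encoding differs between
 * ADL-P/display 14+ and older platforms).
 */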
2299 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2300 				  bool full_update)
2301 {
2302 	struct intel_display *display = to_intel_display(crtc_state);
2303 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2304 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2305 	u32 val = man_trk_ctl_enable_bit_get(display);
2306 
2307 	/* SF partial frame enable has to be set even on full update */
2308 	val |= man_trk_ctl_partial_frame_bit_get(display);
2309 
2310 	if (full_update) {
2311 		val |= man_trk_ctl_single_full_frame_bit_get(display);
2312 		val |= man_trk_ctl_continuos_full_frame(display);
2313 		goto exit;
2314 	}
2315 
2316 	if (crtc_state->psr2_su_area.y1 == -1)
2317 		goto exit;
2318 
2319 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) {
2320 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2321 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2322 	} else {
2323 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2324 			    crtc_state->psr2_su_area.y1 % 4 ||
2325 			    crtc_state->psr2_su_area.y2 % 4);
2326 
2327 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2328 			crtc_state->psr2_su_area.y1 / 4 + 1);
2329 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2330 			crtc_state->psr2_su_area.y2 / 4 + 1);
2331 	}
2332 exit:
2333 	crtc_state->psr2_man_track_ctl = val;
2334 }
2335 
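/*
 * Value for PIPE_SRCSZ_ERLY_TPT: the SU area size when early transport is
 * used for a selective update, 0 otherwise (including full updates).
 */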
2336 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2337 					  bool full_update)
2338 {
2339 	int width, height;
2340 
2341 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2342 		return 0;
2343 
2344 	width = drm_rect_width(&crtc_state->psr2_su_area);
2345 	height = drm_rect_height(&crtc_state->psr2_su_area);
2346 
2347 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2348 }
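/*
 * Grow the vertical extent of @overlap_damage_area to also cover
 * @damage_area, after clipping the latter to @pipe_src. y1 == -1 means the
 * overlap area is still empty.
 */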
2349 
2350 static void clip_area_update(struct drm_rect *overlap_damage_area,
2351 			     struct drm_rect *damage_area,
2352 			     struct drm_rect *pipe_src)
2353 {
2354 	if (!drm_rect_intersect(damage_area, pipe_src))
2355 		return;
2356 
2357 	if (overlap_damage_area->y1 == -1) {
2358 		overlap_damage_area->y1 = damage_area->y1;
2359 		overlap_damage_area->y2 = damage_area->y2;
2360 		return;
2361 	}
2362 
2363 	if (damage_area->y1 < overlap_damage_area->y1)
2364 		overlap_damage_area->y1 = damage_area->y1;
2365 
2366 	if (damage_area->y2 > overlap_damage_area->y2)
2367 		overlap_damage_area->y2 = damage_area->y2;
2368 }
2369 
2370 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2371 {
2372 	struct intel_display *display = to_intel_display(crtc_state);
2373 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2374 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2375 	u16 y_alignment;
2376 
2377 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2378 	if (crtc_state->dsc.compression_enable &&
2379 	    (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14))
2380 		y_alignment = vdsc_cfg->slice_height;
2381 	else
2382 		y_alignment = crtc_state->su_y_granularity;
2383 
2384 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2385 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2386 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2387 						y_alignment) + 1) * y_alignment;
2388 }
2389 
2390 /*
2391  * When early transport is in use we need to extend SU area to cover
2392  * cursor fully when cursor is in SU area.
2393  */
2394 static void
2395 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2396 				  struct intel_crtc *crtc,
2397 				  bool *cursor_in_su_area)
2398 {
2399 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2400 	struct intel_plane_state *new_plane_state;
2401 	struct intel_plane *plane;
2402 	int i;
2403 
2404 	if (!crtc_state->enable_psr2_su_region_et)
2405 		return;
2406 
2407 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2408 		struct drm_rect inter;
2409 
2410 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2411 			continue;
2412 
2413 		if (plane->id != PLANE_CURSOR)
2414 			continue;
2415 
2416 		if (!new_plane_state->uapi.visible)
2417 			continue;
2418 
2419 		inter = crtc_state->psr2_su_area;
2420 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2421 			continue;
2422 
2423 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2424 				 &crtc_state->pipe_src);
2425 		*cursor_in_su_area = true;
2426 	}
2427 }
2428 
2429 /*
2430  * TODO: Not clear how to handle planes with negative position;
2431  * also, planes are not updated if they have a negative X
2432  * position, so for now do a full update in these cases.
2433  *
2434  * Plane scaling and rotation are not supported by selective fetch and
2435  * both properties can change without a modeset, so they need to be
2436  * checked at every atomic commit.
2437  */
2438 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2439 {
2440 	if (plane_state->uapi.dst.y1 < 0 ||
2441 	    plane_state->uapi.dst.x1 < 0 ||
2442 	    plane_state->scaler_id >= 0 ||
2443 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2444 		return false;
2445 
2446 	return true;
2447 }
2448 
2449 /*
2450  * Check for pipe properties that are not supported by selective fetch.
2451  *
2452  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2453  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2454  * enabled and going to the full update path.
2455  */
2456 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2457 {
2458 	if (crtc_state->scaler_state.scaler_id >= 0)
2459 		return false;
2460 
2461 	return true;
2462 }
2463 
2464 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2465 				struct intel_crtc *crtc)
2466 {
2467 	struct intel_display *display = to_intel_display(state);
2468 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2469 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2470 	struct intel_plane_state *new_plane_state, *old_plane_state;
2471 	struct intel_plane *plane;
2472 	bool full_update = false, cursor_in_su_area = false;
2473 	int i, ret;
2474 
2475 	if (!crtc_state->enable_psr2_sel_fetch)
2476 		return 0;
2477 
2478 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2479 		full_update = true;
2480 		goto skip_sel_fetch_set_loop;
2481 	}
2482 
2483 	crtc_state->psr2_su_area.x1 = 0;
2484 	crtc_state->psr2_su_area.y1 = -1;
2485 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2486 	crtc_state->psr2_su_area.y2 = -1;
2487 
2488 	/*
2489 	 * Calculate the minimal selective fetch area of each plane and
2490 	 * calculate the pipe damaged area.
2491 	 * In the next loop the plane selective fetch area will actually be set
2492 	 * using the whole pipe damaged area.
2493 	 */
2494 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2495 					     new_plane_state, i) {
2496 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2497 						      .x2 = INT_MAX };
2498 
2499 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2500 			continue;
2501 
2502 		if (!new_plane_state->uapi.visible &&
2503 		    !old_plane_state->uapi.visible)
2504 			continue;
2505 
2506 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2507 			full_update = true;
2508 			break;
2509 		}
2510 
2511 		/*
2512 		 * If the visibility changed or the plane moved, mark the whole
2513 		 * plane area as damaged as it needs a complete redraw in both
2514 		 * the old and new positions.
2515 		 */
2516 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2517 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2518 				     &old_plane_state->uapi.dst)) {
2519 			if (old_plane_state->uapi.visible) {
2520 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2521 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2522 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2523 						 &crtc_state->pipe_src);
2524 			}
2525 
2526 			if (new_plane_state->uapi.visible) {
2527 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2528 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2529 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2530 						 &crtc_state->pipe_src);
2531 			}
2532 			continue;
2533 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2534 			/* If alpha changed mark the whole plane area as damaged */
2535 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2536 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2537 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2538 					 &crtc_state->pipe_src);
2539 			continue;
2540 		}
2541 
2542 		src = drm_plane_state_src(&new_plane_state->uapi);
2543 		drm_rect_fp_to_int(&src, &src);
2544 
2545 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2546 						     &new_plane_state->uapi, &damaged_area))
2547 			continue;
2548 
2549 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2550 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2551 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2552 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2553 
2554 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2555 	}
2556 
2557 	/*
2558 	 * TODO: For now we are just using full update in case
2559 	 * selective fetch area calculation fails. To optimize this we
2560 	 * should identify cases where this happens and fix the area
2561 	 * calculation for those.
2562 	 */
2563 	if (crtc_state->psr2_su_area.y1 == -1) {
2564 		drm_info_once(display->drm,
2565 			      "Selective fetch area calculation failed in pipe %c\n",
2566 			      pipe_name(crtc->pipe));
2567 		full_update = true;
2568 	}
2569 
2570 	if (full_update)
2571 		goto skip_sel_fetch_set_loop;
2572 
2573 	/* Wa_14014971492 */
2574 	if (!crtc_state->has_panel_replay &&
2575 	    ((IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
2576 	      IS_ALDERLAKE_P(dev_priv) || IS_TIGERLAKE(dev_priv))) &&
2577 	    crtc_state->splitter.enable)
2578 		crtc_state->psr2_su_area.y1 = 0;
2579 
2580 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2581 	if (ret)
2582 		return ret;
2583 
2584 	/*
2585 	 * Adjust the SU area to cover the cursor fully as necessary (early
2586 	 * transport). This needs to be done after
2587 	 * drm_atomic_add_affected_planes to ensure the visible cursor is added
2588 	 * into the affected planes even when the cursor is not updated by itself.
2589 	 */
2590 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2591 
2592 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2593 
2594 	/*
2595 	 * Now that we have the pipe damaged area, check if it intersects with
2596 	 * every plane; if it does, set the plane selective fetch area.
2597 	 */
2598 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2599 					     new_plane_state, i) {
2600 		struct drm_rect *sel_fetch_area, inter;
2601 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2602 
2603 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2604 		    !new_plane_state->uapi.visible)
2605 			continue;
2606 
2607 		inter = crtc_state->psr2_su_area;
2608 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2609 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2610 			sel_fetch_area->y1 = -1;
2611 			sel_fetch_area->y2 = -1;
2612 			/*
2613 			 * if plane sel fetch was previously enabled ->
2614 			 * disable it
2615 			 */
2616 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2617 				crtc_state->update_planes |= BIT(plane->id);
2618 
2619 			continue;
2620 		}
2621 
2622 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2623 			full_update = true;
2624 			break;
2625 		}
2626 
2627 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2628 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2629 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2630 		crtc_state->update_planes |= BIT(plane->id);
2631 
2632 		/*
2633 		 * Sel_fetch_area is calculated for UV plane. Use
2634 		 * same area for Y plane as well.
2635 		 */
2636 		if (linked) {
2637 			struct intel_plane_state *linked_new_plane_state;
2638 			struct drm_rect *linked_sel_fetch_area;
2639 
2640 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2641 			if (IS_ERR(linked_new_plane_state))
2642 				return PTR_ERR(linked_new_plane_state);
2643 
2644 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2645 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2646 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2647 			crtc_state->update_planes |= BIT(linked->id);
2648 		}
2649 	}
2650 
2651 skip_sel_fetch_set_loop:
2652 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2653 	crtc_state->pipe_srcsz_early_tpt =
2654 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2655 	return 0;
2656 }
2657 
2658 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2659 				struct intel_crtc *crtc)
2660 {
2661 	struct intel_display *display = to_intel_display(state);
2662 	struct drm_i915_private *i915 = to_i915(state->base.dev);
2663 	const struct intel_crtc_state *old_crtc_state =
2664 		intel_atomic_get_old_crtc_state(state, crtc);
2665 	const struct intel_crtc_state *new_crtc_state =
2666 		intel_atomic_get_new_crtc_state(state, crtc);
2667 	struct intel_encoder *encoder;
2668 
2669 	if (!HAS_PSR(display))
2670 		return;
2671 
2672 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2673 					     old_crtc_state->uapi.encoder_mask) {
2674 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2675 		struct intel_psr *psr = &intel_dp->psr;
2676 		bool needs_to_disable = false;
2677 
2678 		mutex_lock(&psr->lock);
2679 
2680 		/*
2681 		 * Reasons to disable:
2682 		 * - PSR disabled in new state
2683 		 * - All planes will go inactive
2684 		 * - Changing between PSR versions
2685 		 * - Region Early Transport changing
2686 		 * - Display WA #1136: skl, bxt
2687 		 */
2688 		needs_to_disable |= intel_crtc_needs_modeset(new_crtc_state);
2689 		needs_to_disable |= !new_crtc_state->has_psr;
2690 		needs_to_disable |= !new_crtc_state->active_planes;
2691 		needs_to_disable |= new_crtc_state->has_sel_update != psr->sel_update_enabled;
2692 		needs_to_disable |= new_crtc_state->enable_psr2_su_region_et !=
2693 			psr->su_region_et_enabled;
2694 		needs_to_disable |= DISPLAY_VER(i915) < 11 &&
2695 			new_crtc_state->wm_level_disabled;
2696 
2697 		if (psr->enabled && needs_to_disable)
2698 			intel_psr_disable_locked(intel_dp);
2699 		else if (psr->enabled && new_crtc_state->wm_level_disabled)
2700 			/* Wa_14015648006 */
2701 			wm_optimization_wa(intel_dp, new_crtc_state);
2702 
2703 		mutex_unlock(&psr->lock);
2704 	}
2705 }
2706 
2707 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2708 				 struct intel_crtc *crtc)
2709 {
2710 	struct intel_display *display = to_intel_display(state);
2711 	const struct intel_crtc_state *crtc_state =
2712 		intel_atomic_get_new_crtc_state(state, crtc);
2713 	struct intel_encoder *encoder;
2714 
2715 	if (!crtc_state->has_psr)
2716 		return;
2717 
2718 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2719 					     crtc_state->uapi.encoder_mask) {
2720 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2721 		struct intel_psr *psr = &intel_dp->psr;
2722 		bool keep_disabled = false;
2723 
2724 		mutex_lock(&psr->lock);
2725 
2726 		drm_WARN_ON(display->drm,
2727 			    psr->enabled && !crtc_state->active_planes);
2728 
2729 		keep_disabled |= psr->sink_not_reliable;
2730 		keep_disabled |= !crtc_state->active_planes;
2731 
2732 		/* Display WA #1136: skl, bxt */
2733 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2734 			crtc_state->wm_level_disabled;
2735 
2736 		if (!psr->enabled && !keep_disabled)
2737 			intel_psr_enable_locked(intel_dp, crtc_state);
2738 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2739 			/* Wa_14015648006 */
2740 			wm_optimization_wa(intel_dp, crtc_state);
2741 
2742 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2743 		if (crtc_state->crc_enabled && psr->enabled)
2744 			psr_force_hw_tracking_exit(intel_dp);
2745 
2746 		/*
2747 		 * Clear possible busy bits in case we have
2748 		 * invalidate -> flip -> flush sequence.
2749 		 */
2750 		intel_dp->psr.busy_frontbuffer_bits = 0;
2751 
2752 		mutex_unlock(&psr->lock);
2753 	}
2754 }
2755 
2756 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2757 {
2758 	struct intel_display *display = to_intel_display(intel_dp);
2759 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2760 
2761 	/*
2762 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
2763 	 * As all higher states have bit 4 of the PSR2 state set, we can just wait for
2764 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2765 	 */
2766 	return intel_de_wait_for_clear(display,
2767 				       EDP_PSR2_STATUS(display, cpu_transcoder),
2768 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2769 }
2770 
2771 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2772 {
2773 	struct intel_display *display = to_intel_display(intel_dp);
2774 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2775 
2776 	/*
2777 	 * From bspec: Panel Self Refresh (BDW+)
2778 	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2779 	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2780 	 * defensive enough to cover everything.
2781 	 */
2782 	return intel_de_wait_for_clear(display,
2783 				       psr_status_reg(display, cpu_transcoder),
2784 				       EDP_PSR_STATUS_STATE_MASK, 50);
2785 }
2786 
2787 static int _panel_replay_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2788 {
2789 	return intel_dp_is_edp(intel_dp) ?
2790 		_psr2_ready_for_pipe_update_locked(intel_dp) :
2791 		_psr1_ready_for_pipe_update_locked(intel_dp);
2792 }
2793 
2794 /**
2795  * intel_psr_wait_for_idle_locked - wait for PSR be ready for a pipe update
2796  * @new_crtc_state: new CRTC state
2797  *
2798  * This function is expected to be called from pipe_update_start() where it is
2799  * not expected to race with PSR enable or disable.
2800  */
2801 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2802 {
2803 	struct intel_display *display = to_intel_display(new_crtc_state);
2804 	struct intel_encoder *encoder;
2805 
2806 	if (!new_crtc_state->has_psr)
2807 		return;
2808 
2809 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2810 					     new_crtc_state->uapi.encoder_mask) {
2811 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2812 		int ret;
2813 
2814 		lockdep_assert_held(&intel_dp->psr.lock);
2815 
2816 		if (!intel_dp->psr.enabled)
2817 			continue;
2818 
2819 		if (intel_dp->psr.panel_replay_enabled)
2820 			ret = _panel_replay_ready_for_pipe_update_locked(intel_dp);
2821 		else if (intel_dp->psr.sel_update_enabled)
2822 			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2823 		else
2824 			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2825 
2826 		if (ret)
2827 			drm_err(display->drm,
2828 				"PSR wait timed out, atomic update may fail\n");
2829 	}
2830 }
2831 
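/*
 * Wait for the PSR status to become idle before re-activating PSR. The psr
 * lock is dropped while waiting and re-acquired afterwards, so callers must
 * re-check any state sampled before the call; the return value is true only
 * if the wait succeeded and PSR is still enabled.
 */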
2832 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2833 {
2834 	struct intel_display *display = to_intel_display(intel_dp);
2835 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2836 	i915_reg_t reg;
2837 	u32 mask;
2838 	int err;
2839 
2840 	if (!intel_dp->psr.enabled)
2841 		return false;
2842 
2843 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2844 					  intel_dp->psr.panel_replay_enabled)) {
2845 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2846 		mask = EDP_PSR2_STATUS_STATE_MASK;
2847 	} else {
2848 		reg = psr_status_reg(display, cpu_transcoder);
2849 		mask = EDP_PSR_STATUS_STATE_MASK;
2850 	}
2851 
2852 	mutex_unlock(&intel_dp->psr.lock);
2853 
2854 	err = intel_de_wait_for_clear(display, reg, mask, 50);
2855 	if (err)
2856 		drm_err(display->drm,
2857 			"Timed out waiting for PSR Idle for re-enable\n");
2858 
2859 	/* After the unlocked wait, verify that PSR is still wanted! */
2860 	mutex_lock(&intel_dp->psr.lock);
2861 	return err == 0 && intel_dp->psr.enabled;
2862 }
2863 
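/*
 * Force a commit with mode_changed set on every eDP connector so that a new
 * PSR debug setting gets re-evaluated through the normal compute config path.
 */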
2864 static int intel_psr_fastset_force(struct intel_display *display)
2865 {
2866 	struct drm_connector_list_iter conn_iter;
2867 	struct drm_modeset_acquire_ctx ctx;
2868 	struct drm_atomic_state *state;
2869 	struct drm_connector *conn;
2870 	int err = 0;
2871 
2872 	state = drm_atomic_state_alloc(display->drm);
2873 	if (!state)
2874 		return -ENOMEM;
2875 
2876 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
2877 
2878 	state->acquire_ctx = &ctx;
2879 	to_intel_atomic_state(state)->internal = true;
2880 
2881 retry:
2882 	drm_connector_list_iter_begin(display->drm, &conn_iter);
2883 	drm_for_each_connector_iter(conn, &conn_iter) {
2884 		struct drm_connector_state *conn_state;
2885 		struct drm_crtc_state *crtc_state;
2886 
2887 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
2888 			continue;
2889 
2890 		conn_state = drm_atomic_get_connector_state(state, conn);
2891 		if (IS_ERR(conn_state)) {
2892 			err = PTR_ERR(conn_state);
2893 			break;
2894 		}
2895 
2896 		if (!conn_state->crtc)
2897 			continue;
2898 
2899 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
2900 		if (IS_ERR(crtc_state)) {
2901 			err = PTR_ERR(crtc_state);
2902 			break;
2903 		}
2904 
2905 		/* Mark mode as changed to trigger a pipe->update() */
2906 		crtc_state->mode_changed = true;
2907 	}
2908 	drm_connector_list_iter_end(&conn_iter);
2909 
2910 	if (err == 0)
2911 		err = drm_atomic_commit(state);
2912 
2913 	if (err == -EDEADLK) {
2914 		drm_atomic_state_clear(state);
2915 		err = drm_modeset_backoff(&ctx);
2916 		if (!err)
2917 			goto retry;
2918 	}
2919 
2920 	drm_modeset_drop_locks(&ctx);
2921 	drm_modeset_acquire_fini(&ctx);
2922 	drm_atomic_state_put(state);
2923 
2924 	return err;
2925 }
2926 
2927 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
2928 {
2929 	struct intel_display *display = to_intel_display(intel_dp);
2930 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
2931 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2932 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
2933 	u32 old_mode, old_disable_bits;
2934 	int ret;
2935 
2936 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2937 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
2938 		    I915_PSR_DEBUG_MODE_MASK) ||
2939 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
2940 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
2941 		return -EINVAL;
2942 	}
2943 
2944 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
2945 	if (ret)
2946 		return ret;
2947 
2948 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
2949 	old_disable_bits = intel_dp->psr.debug &
2950 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2951 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
2952 
2953 	intel_dp->psr.debug = val;
2954 
2955 	/*
2956 	 * Do it right away if it's already enabled, otherwise it will be done
2957 	 * when enabling the source.
2958 	 */
2959 	if (intel_dp->psr.enabled)
2960 		psr_irq_control(intel_dp);
2961 
2962 	mutex_unlock(&intel_dp->psr.lock);
2963 
2964 	if (old_mode != mode || old_disable_bits != disable_bits)
2965 		ret = intel_psr_fastset_force(display);
2966 
2967 	return ret;
2968 }
2969 
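/*
 * Handle an AUX error reported via the PSR interrupt: disable PSR, mark the
 * sink as not reliable and make sure the sink is awake again.
 */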
2970 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
2971 {
2972 	struct intel_psr *psr = &intel_dp->psr;
2973 
2974 	intel_psr_disable_locked(intel_dp);
2975 	psr->sink_not_reliable = true;
2976 	/* let's make sure the sink is awake */
2977 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
2978 }
2979 
2980 static void intel_psr_work(struct work_struct *work)
2981 {
2982 	struct intel_dp *intel_dp =
2983 		container_of(work, typeof(*intel_dp), psr.work);
2984 
2985 	mutex_lock(&intel_dp->psr.lock);
2986 
2987 	if (!intel_dp->psr.enabled)
2988 		goto unlock;
2989 
2990 	if (READ_ONCE(intel_dp->psr.irq_aux_error))
2991 		intel_psr_handle_irq(intel_dp);
2992 
2993 	/*
2994 	 * We have to make sure PSR is ready for re-enable,
2995 	 * otherwise it stays disabled until the next full enable/disable cycle.
2996 	 * PSR might take some time to get fully disabled
2997 	 * and be ready for re-enable.
2998 	 */
2999 	if (!__psr_wait_for_idle_locked(intel_dp))
3000 		goto unlock;
3001 
3002 	/*
3003 	 * The delayed work can race with an invalidate hence we need to
3004 	 * recheck. Since psr_flush first clears this and then reschedules we
3005 	 * won't ever miss a flush when bailing out here.
3006 	 */
3007 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3008 		goto unlock;
3009 
3010 	intel_psr_activate(intel_dp);
3011 unlock:
3012 	mutex_unlock(&intel_dp->psr.lock);
3013 }
3014 
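/*
 * With selective fetch an invalidate switches the hardware to continuous
 * full frame fetching (plus one update via CURSURFLIVE) instead of fully
 * exiting PSR; without selective fetch PSR is simply exited.
 */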
3015 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3016 {
3017 	struct intel_display *display = to_intel_display(intel_dp);
3018 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3019 
3020 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3021 		u32 val;
3022 
3023 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3024 			/* Send one update, otherwise lag is observed on screen */
3025 			intel_de_write(display,
3026 				       CURSURFLIVE(display, intel_dp->psr.pipe),
3027 				       0);
3028 			return;
3029 		}
3030 
3031 		val = man_trk_ctl_enable_bit_get(display) |
3032 		      man_trk_ctl_partial_frame_bit_get(display) |
3033 		      man_trk_ctl_continuos_full_frame(display);
3034 		intel_de_write(display,
3035 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3036 			       val);
3037 		intel_de_write(display,
3038 			       CURSURFLIVE(display, intel_dp->psr.pipe), 0);
3039 		intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3040 	} else {
3041 		intel_psr_exit(intel_dp);
3042 	}
3043 }
3044 
3045 /**
3046  * intel_psr_invalidate - Invalidate PSR
3047  * @display: display device
3048  * @frontbuffer_bits: frontbuffer plane tracking bits
3049  * @origin: which operation caused the invalidate
3050  *
3051  * Since the hardware frontbuffer tracking has gaps we need to integrate
3052  * with the software frontbuffer tracking. This function gets called every
3053  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3054  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3055  *
3056  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3057  */
3058 void intel_psr_invalidate(struct intel_display *display,
3059 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3060 {
3061 	struct intel_encoder *encoder;
3062 
3063 	if (origin == ORIGIN_FLIP)
3064 		return;
3065 
3066 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3067 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3068 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3069 
3070 		mutex_lock(&intel_dp->psr.lock);
3071 		if (!intel_dp->psr.enabled) {
3072 			mutex_unlock(&intel_dp->psr.lock);
3073 			continue;
3074 		}
3075 
3076 		pipe_frontbuffer_bits &=
3077 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3078 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3079 
3080 		if (pipe_frontbuffer_bits)
3081 			_psr_invalidate_handle(intel_dp);
3082 
3083 		mutex_unlock(&intel_dp->psr.lock);
3084 	}
3085 }
3086 /*
3087  * When we completely rely on PSR2 S/W tracking in the future,
3088  * intel_psr_flush() will invalidate and flush PSR for the ORIGIN_FLIP
3089  * event as well, therefore tgl_dc3co_flush_locked() will need to be
3090  * changed accordingly in the future.
3091  */
3092 static void
3093 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3094 		       enum fb_op_origin origin)
3095 {
3096 	struct intel_display *display = to_intel_display(intel_dp);
3097 	struct drm_i915_private *i915 = to_i915(display->drm);
3098 
3099 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3100 	    !intel_dp->psr.active)
3101 		return;
3102 
3103 	/*
3104 	 * Every frontbuffer flush/flip event modifies the delay of the delayed
3105 	 * work; when the delayed work finally runs it means the display has been idle.
3106 	 */
3107 	if (!(frontbuffer_bits &
3108 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3109 		return;
3110 
3111 	tgl_psr2_enable_dc3co(intel_dp);
3112 	mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3113 			 intel_dp->psr.dc3co_exit_delay);
3114 }
3115 
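/*
 * With selective fetch a flush either drops back from continuous full frame
 * fetching to selective updates (once no frontbuffer bits are busy) or just
 * forces a single full frame; without selective fetch it forces a HW
 * tracking exit and schedules the work that re-activates PSR.
 */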
3116 static void _psr_flush_handle(struct intel_dp *intel_dp)
3117 {
3118 	struct intel_display *display = to_intel_display(intel_dp);
3119 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3120 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3121 
3122 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3123 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3124 			/* can we turn CFF off? */
3125 			if (intel_dp->psr.busy_frontbuffer_bits == 0) {
3126 				u32 val = man_trk_ctl_enable_bit_get(display) |
3127 					man_trk_ctl_partial_frame_bit_get(display) |
3128 					man_trk_ctl_single_full_frame_bit_get(display) |
3129 					man_trk_ctl_continuos_full_frame(display);
3130 
3131 				/*
3132 				 * Set psr2_sel_fetch_cff_enabled to false to allow selective
3133 				 * updates. Still keep the CFF bit enabled as we don't have a
3134 				 * proper SU configuration in case an update is sent for any
3135 				 * reason after the SFF bit gets cleared by the HW on the next vblank.
3136 				 */
3137 				intel_de_write(display,
3138 					       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3139 					       val);
3140 				intel_de_write(display,
3141 					       CURSURFLIVE(display, intel_dp->psr.pipe),
3142 					       0);
3143 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3144 			}
3145 		} else {
3146 			/*
3147 			 * Continuous full frame is disabled; only a single full
3148 			 * frame is required.
3149 			 */
3150 			psr_force_hw_tracking_exit(intel_dp);
3151 		}
3152 	} else {
3153 		psr_force_hw_tracking_exit(intel_dp);
3154 
3155 		if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3156 			queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3157 	}
3158 }
3159 
3160 /**
3161  * intel_psr_flush - Flush PSR
3162  * @display: display device
3163  * @frontbuffer_bits: frontbuffer plane tracking bits
3164  * @origin: which operation caused the flush
3165  *
3166  * Since the hardware frontbuffer tracking has gaps we need to integrate
3167  * with the software frontbuffer tracking. This function gets called every
3168  * time frontbuffer rendering has completed and flushed out to memory. PSR
3169  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3170  *
3171  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3172  */
3173 void intel_psr_flush(struct intel_display *display,
3174 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3175 {
3176 	struct intel_encoder *encoder;
3177 
3178 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3179 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3180 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3181 
3182 		mutex_lock(&intel_dp->psr.lock);
3183 		if (!intel_dp->psr.enabled) {
3184 			mutex_unlock(&intel_dp->psr.lock);
3185 			continue;
3186 		}
3187 
3188 		pipe_frontbuffer_bits &=
3189 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3190 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3191 
3192 		/*
3193 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3194 		 * we have to ensure that the PSR is not activated until
3195 		 * intel_psr_resume() is called.
3196 		 */
3197 		if (intel_dp->psr.paused)
3198 			goto unlock;
3199 
3200 		if (origin == ORIGIN_FLIP ||
3201 		    (origin == ORIGIN_CURSOR_UPDATE &&
3202 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3203 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3204 			goto unlock;
3205 		}
3206 
3207 		if (pipe_frontbuffer_bits == 0)
3208 			goto unlock;
3209 
3210 		/* By definition flush = invalidate + flush */
3211 		_psr_flush_handle(intel_dp);
3212 unlock:
3213 		mutex_unlock(&intel_dp->psr.lock);
3214 	}
3215 }
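
/*
 * Hedged usage sketch: the software frontbuffer tracking is expected to be
 * the only caller of intel_psr_invalidate()/intel_psr_flush(). Simplified
 * (names follow intel_frontbuffer.c, details may differ):
 *
 *	// rendering starts dirtying a frontbuffer -> PSR must exit
 *	intel_psr_invalidate(display, frontbuffer_bits, origin);
 *	...
 *	// rendering has completed and been flushed to memory
 *	intel_psr_flush(display, frontbuffer_bits, origin);
 *
 * intel_psr_flush() then either forces a single/continuous full frame
 * update or re-enables PSR from the deferred psr.work.
 */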
3216 
3217 /**
3218  * intel_psr_init - Init basic PSR work and mutex.
3219  * @intel_dp: Intel DP
3220  *
3221  * This function is called after the connector has been initialized
3222  * (connector initialization handles the connector capabilities) and sets
3223  * up the basic PSR work, delayed work and mutex for each DP encoder.
3224  */
3225 void intel_psr_init(struct intel_dp *intel_dp)
3226 {
3227 	struct intel_display *display = to_intel_display(intel_dp);
3228 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3229 	struct intel_connector *connector = intel_dp->attached_connector;
3230 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3231 
3232 	if (!(HAS_PSR(display) || HAS_DP20(dev_priv)))
3233 		return;
3234 
3235 	/*
3236 	 * The HSW spec explicitly says PSR is tied to port A.
3237 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3238 	 * but on BDW, GEN9 and GEN11 the HW team has only validated the eDP
3239 	 * transcoder.
3240 	 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3241 	 * so keep it hardcoded to PORT_A on those platforms.
3242 	 * GEN12 supports an instance of the PSR registers per transcoder.
3243 	 */
3244 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3245 		drm_dbg_kms(display->drm,
3246 			    "PSR condition failed: Port not supported\n");
3247 		return;
3248 	}
3249 
3250 	if ((HAS_DP20(dev_priv) && !intel_dp_is_edp(intel_dp)) ||
3251 	    DISPLAY_VER(display) >= 20)
3252 		intel_dp->psr.source_panel_replay_support = true;
3253 
3254 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3255 		intel_dp->psr.source_support = true;
3256 
3257 	/* Set link_standby x link_off defaults */
3258 	/* Set the link_standby / link_off defaults */
3259 		/* For new platforms up to TGL let's respect VBT back again */
3260 		/* Before TGL (display ver < 12), respect the VBT setting again */
3261 
3262 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3263 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3264 	mutex_init(&intel_dp->psr.lock);
3265 }
3266 
3267 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3268 					   u8 *status, u8 *error_status)
3269 {
3270 	struct drm_dp_aux *aux = &intel_dp->aux;
3271 	int ret;
3272 	unsigned int offset;
3273 
3274 	offset = intel_dp->psr.panel_replay_enabled ?
3275 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3276 
3277 	ret = drm_dp_dpcd_readb(aux, offset, status);
3278 	if (ret != 1)
3279 		return ret;
3280 
3281 	offset = intel_dp->psr.panel_replay_enabled ?
3282 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3283 
3284 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3285 	if (ret != 1)
3286 		return ret;
3287 
3288 	*status = *status & DP_PSR_SINK_STATE_MASK;
3289 
3290 	return 0;
3291 }
3292 
3293 static void psr_alpm_check(struct intel_dp *intel_dp)
3294 {
3295 	struct intel_display *display = to_intel_display(intel_dp);
3296 	struct drm_dp_aux *aux = &intel_dp->aux;
3297 	struct intel_psr *psr = &intel_dp->psr;
3298 	u8 val;
3299 	int r;
3300 
3301 	if (!psr->sel_update_enabled)
3302 		return;
3303 
3304 	r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3305 	if (r != 1) {
3306 		drm_err(display->drm, "Error reading ALPM status\n");
3307 		return;
3308 	}
3309 
3310 	if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3311 		intel_psr_disable_locked(intel_dp);
3312 		psr->sink_not_reliable = true;
3313 		drm_dbg_kms(display->drm,
3314 			    "ALPM lock timeout error, disabling PSR\n");
3315 
3316 		/* Clearing error */
3317 		drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3318 	}
3319 }
3320 
3321 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3322 {
3323 	struct intel_display *display = to_intel_display(intel_dp);
3324 	struct intel_psr *psr = &intel_dp->psr;
3325 	u8 val;
3326 	int r;
3327 
3328 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3329 	if (r != 1) {
3330 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3331 		return;
3332 	}
3333 
3334 	if (val & DP_PSR_CAPS_CHANGE) {
3335 		intel_psr_disable_locked(intel_dp);
3336 		psr->sink_not_reliable = true;
3337 		drm_dbg_kms(display->drm,
3338 			    "Sink PSR capability changed, disabling PSR\n");
3339 
3340 		/* Clearing it */
3341 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3342 	}
3343 }
3344 
3345 /*
3346  * The following error bits are common and have identical values:
3347  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3348  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3349  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3350  * so this function relies on the PSR definitions.
3351  */
3352 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3353 {
3354 	struct intel_display *display = to_intel_display(intel_dp);
3355 	struct intel_psr *psr = &intel_dp->psr;
3356 	u8 status, error_status;
3357 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3358 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3359 			  DP_PSR_LINK_CRC_ERROR;
3360 
3361 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3362 		return;
3363 
3364 	mutex_lock(&psr->lock);
3365 
3366 	if (!psr->enabled)
3367 		goto exit;
3368 
3369 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3370 		drm_err(display->drm,
3371 			"Error reading PSR status or error status\n");
3372 		goto exit;
3373 	}
3374 
3375 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3376 	    (error_status & errors)) {
3377 		intel_psr_disable_locked(intel_dp);
3378 		psr->sink_not_reliable = true;
3379 	}
3380 
3381 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3382 	    !error_status)
3383 		drm_dbg_kms(display->drm,
3384 			    "PSR sink internal error, disabling PSR\n");
3385 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3386 		drm_dbg_kms(display->drm,
3387 			    "PSR RFB storage error, disabling PSR\n");
3388 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3389 		drm_dbg_kms(display->drm,
3390 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3391 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3392 		drm_dbg_kms(display->drm,
3393 			    "PSR Link CRC error, disabling PSR\n");
3394 
3395 	if (error_status & ~errors)
3396 		drm_err(display->drm,
3397 			"PSR_ERROR_STATUS unhandled errors %x\n",
3398 			error_status & ~errors);
3399 	/* clear status register */
3400 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3401 
3402 	if (!psr->panel_replay_enabled) {
3403 		psr_alpm_check(intel_dp);
3404 		psr_capability_changed_check(intel_dp);
3405 	}
3406 
3407 exit:
3408 	mutex_unlock(&psr->lock);
3409 }
3410 
3411 bool intel_psr_enabled(struct intel_dp *intel_dp)
3412 {
3413 	bool ret;
3414 
3415 	if (!CAN_PSR(intel_dp))
3416 		return false;
3417 
3418 	mutex_lock(&intel_dp->psr.lock);
3419 	ret = intel_dp->psr.enabled;
3420 	mutex_unlock(&intel_dp->psr.lock);
3421 
3422 	return ret;
3423 }
3424 
3425 /**
3426  * intel_psr_lock - grab PSR lock
3427  * @crtc_state: the crtc state
3428  *
3429  * This is meant to be used around the CRTC update, when vblank-sensitive
3430  * registers are updated and we need to grab the lock before the vblank
3431  * evasion starts.
3432  */
3433 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3434 {
3435 	struct intel_display *display = to_intel_display(crtc_state);
3436 	struct intel_encoder *encoder;
3437 
3438 	if (!crtc_state->has_psr)
3439 		return;
3440 
3441 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3442 					     crtc_state->uapi.encoder_mask) {
3443 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3444 
3445 		mutex_lock(&intel_dp->psr.lock);
3446 		break;
3447 	}
3448 }
3449 
3450 /**
3451  * intel_psr_unlock - release PSR lock
3452  * @crtc_state: the crtc state
3453  *
3454  * Release the PSR lock that was held during pipe update.
3455  */
3456 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3457 {
3458 	struct intel_display *display = to_intel_display(crtc_state);
3459 	struct intel_encoder *encoder;
3460 
3461 	if (!crtc_state->has_psr)
3462 		return;
3463 
3464 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3465 					     crtc_state->uapi.encoder_mask) {
3466 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3467 
3468 		mutex_unlock(&intel_dp->psr.lock);
3469 		break;
3470 	}
3471 }
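
/*
 * Hedged usage sketch: intel_psr_lock()/intel_psr_unlock() are meant to
 * bracket the vblank-evasion critical section of the pipe update, roughly
 * (simplified; the real caller sits in the CRTC update path):
 *
 *	intel_psr_lock(new_crtc_state);
 *	intel_pipe_update_start(...);	// vblank evasion begins
 *	... write the double-buffered plane/pipe registers ...
 *	intel_pipe_update_end(...);	// vblank evasion ends
 *	intel_psr_unlock(new_crtc_state);
 */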
3472 
3473 static void
3474 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3475 {
3476 	struct intel_display *display = to_intel_display(intel_dp);
3477 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3478 	const char *status = "unknown";
3479 	u32 val, status_val;
3480 
3481 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3482 					  intel_dp->psr.panel_replay_enabled)) {
3483 		static const char * const live_status[] = {
3484 			"IDLE",
3485 			"CAPTURE",
3486 			"CAPTURE_FS",
3487 			"SLEEP",
3488 			"BUFON_FW",
3489 			"ML_UP",
3490 			"SU_STANDBY",
3491 			"FAST_SLEEP",
3492 			"DEEP_SLEEP",
3493 			"BUF_ON",
3494 			"TG_ON"
3495 		};
3496 		val = intel_de_read(display,
3497 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3498 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3499 		if (status_val < ARRAY_SIZE(live_status))
3500 			status = live_status[status_val];
3501 	} else {
3502 		static const char * const live_status[] = {
3503 			"IDLE",
3504 			"SRDONACK",
3505 			"SRDENT",
3506 			"BUFOFF",
3507 			"BUFON",
3508 			"AUXACK",
3509 			"SRDOFFACK",
3510 			"SRDENT_ON",
3511 		};
3512 		val = intel_de_read(display,
3513 				    psr_status_reg(display, cpu_transcoder));
3514 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3515 		if (status_val < ARRAY_SIZE(live_status))
3516 			status = live_status[status_val];
3517 	}
3518 
3519 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3520 }
3521 
3522 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3523 				      struct seq_file *m)
3524 {
3525 	struct intel_psr *psr = &intel_dp->psr;
3526 
3527 	seq_printf(m, "Sink support: PSR = %s",
3528 		   str_yes_no(psr->sink_support));
3529 
3530 	if (psr->sink_support)
3531 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3532 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3533 		seq_printf(m, " (Early Transport)");
3534 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3535 	seq_printf(m, ", Panel Replay Selective Update = %s",
3536 		   str_yes_no(psr->sink_panel_replay_su_support));
3537 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3538 		seq_printf(m, " (Early Transport)");
3539 	seq_printf(m, "\n");
3540 }
3541 
3542 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3543 				 struct seq_file *m)
3544 {
3545 	struct intel_psr *psr = &intel_dp->psr;
3546 	const char *status, *mode, *region_et;
3547 
3548 	if (psr->enabled)
3549 		status = " enabled";
3550 	else
3551 		status = "disabled";
3552 
3553 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
3554 		mode = "Panel Replay Selective Update";
3555 	else if (psr->panel_replay_enabled)
3556 		mode = "Panel Replay";
3557 	else if (psr->sel_update_enabled)
3558 		mode = "PSR2";
3559 	else if (psr->enabled)
3560 		mode = "PSR1";
3561 	else
3562 		mode = "";
3563 
3564 	if (psr->su_region_et_enabled)
3565 		region_et = " (Early Transport)";
3566 	else
3567 		region_et = "";
3568 
3569 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3570 }
3571 
3572 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3573 {
3574 	struct intel_display *display = to_intel_display(intel_dp);
3575 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3576 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3577 	struct intel_psr *psr = &intel_dp->psr;
3578 	intel_wakeref_t wakeref;
3579 	bool enabled;
3580 	u32 val, psr2_ctl;
3581 
3582 	intel_psr_sink_capability(intel_dp, m);
3583 
3584 	if (!(psr->sink_support || psr->sink_panel_replay_support))
3585 		return 0;
3586 
3587 	wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3588 	mutex_lock(&psr->lock);
3589 
3590 	intel_psr_print_mode(intel_dp, m);
3591 
3592 	if (!psr->enabled) {
3593 		seq_printf(m, "PSR sink not reliable: %s\n",
3594 			   str_yes_no(psr->sink_not_reliable));
3595 
3596 		goto unlock;
3597 	}
3598 
3599 	if (psr->panel_replay_enabled) {
3600 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3601 
3602 		if (intel_dp_is_edp(intel_dp))
3603 			psr2_ctl = intel_de_read(display,
3604 						 EDP_PSR2_CTL(display,
3605 							      cpu_transcoder));
3606 
3607 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3608 	} else if (psr->sel_update_enabled) {
3609 		val = intel_de_read(display,
3610 				    EDP_PSR2_CTL(display, cpu_transcoder));
3611 		enabled = val & EDP_PSR2_ENABLE;
3612 	} else {
3613 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3614 		enabled = val & EDP_PSR_ENABLE;
3615 	}
3616 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3617 		   str_enabled_disabled(enabled), val);
3618 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3619 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
3620 			   psr2_ctl);
3621 	psr_source_status(intel_dp, m);
3622 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3623 		   psr->busy_frontbuffer_bits);
3624 
3625 	/*
3626 	 * On SKL+ the perf counter is reset to 0 every time a DC state is entered
3627 	 */
3628 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3629 	seq_printf(m, "Performance counter: %u\n",
3630 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3631 
3632 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
3633 		seq_printf(m, "Last attempted entry at: %lld\n",
3634 			   psr->last_entry_attempt);
3635 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3636 	}
3637 
3638 	if (psr->sel_update_enabled) {
3639 		u32 su_frames_val[3];
3640 		int frame;
3641 
3642 		/*
3643 		 * Read all 3 registers beforehand to minimize the chance of
3644 		 * crossing a frame boundary between register reads.
3645 		 */
3646 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3647 			val = intel_de_read(display,
3648 					    PSR2_SU_STATUS(display, cpu_transcoder, frame));
3649 			su_frames_val[frame / 3] = val;
3650 		}
3651 
3652 		seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3653 
3654 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3655 			u32 su_blocks;
3656 
3657 			su_blocks = su_frames_val[frame / 3] &
3658 				    PSR2_SU_STATUS_MASK(frame);
3659 			su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3660 			seq_printf(m, "%d\t%d\n", frame, su_blocks);
3661 		}
3662 
3663 		seq_printf(m, "PSR2 selective fetch: %s\n",
3664 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3665 	}
3666 
3667 unlock:
3668 	mutex_unlock(&psr->lock);
3669 	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3670 
3671 	return 0;
3672 }
3673 
3674 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3675 {
3676 	struct intel_display *display = m->private;
3677 	struct intel_dp *intel_dp = NULL;
3678 	struct intel_encoder *encoder;
3679 
3680 	if (!HAS_PSR(display))
3681 		return -ENODEV;
3682 
3683 	/* Find the first EDP which supports PSR */
3684 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3685 		intel_dp = enc_to_intel_dp(encoder);
3686 		break;
3687 	}
3688 
3689 	if (!intel_dp)
3690 		return -ENODEV;
3691 
3692 	return intel_psr_status(m, intel_dp);
3693 }
3694 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3695 
3696 static int
3697 i915_edp_psr_debug_set(void *data, u64 val)
3698 {
3699 	struct intel_display *display = data;
3700 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3701 	struct intel_encoder *encoder;
3702 	intel_wakeref_t wakeref;
3703 	int ret = -ENODEV;
3704 
3705 	if (!HAS_PSR(display))
3706 		return ret;
3707 
3708 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3709 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3710 
3711 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3712 
3713 		wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3714 
3715 		// TODO: split to each transcoder's PSR debug state
3716 		ret = intel_psr_debug_set(intel_dp, val);
3717 
3718 		intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3719 	}
3720 
3721 	return ret;
3722 }
3723 
3724 static int
3725 i915_edp_psr_debug_get(void *data, u64 *val)
3726 {
3727 	struct intel_display *display = data;
3728 	struct intel_encoder *encoder;
3729 
3730 	if (!HAS_PSR(display))
3731 		return -ENODEV;
3732 
3733 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3734 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3735 
3736 		// TODO: split to each transcoder's PSR debug state
3737 		*val = READ_ONCE(intel_dp->psr.debug);
3738 		return 0;
3739 	}
3740 
3741 	return -ENODEV;
3742 }
3743 
3744 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3745 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3746 			"%llu\n");
3747 
3748 void intel_psr_debugfs_register(struct intel_display *display)
3749 {
3750 	struct drm_minor *minor = display->drm->primary;
3751 
3752 	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3753 			    display, &i915_edp_psr_debug_fops);
3754 
3755 	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3756 			    display, &i915_edp_psr_status_fops);
3757 }
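
/*
 * The two files registered above are expected to appear under the primary
 * DRM minor's debugfs directory (typically /sys/kernel/debug/dri/<minor>):
 * i915_edp_psr_debug (0644) feeds intel_psr_debug_set() with a new debug
 * mask, while i915_edp_psr_status (0444) dumps the state collected by
 * intel_psr_status(). The exact path depends on the system configuration.
 */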
3758 
3759 static const char *psr_mode_str(struct intel_dp *intel_dp)
3760 {
3761 	if (intel_dp->psr.panel_replay_enabled)
3762 		return "PANEL-REPLAY";
3763 	else if (intel_dp->psr.enabled)
3764 		return "PSR";
3765 
3766 	return "unknown";
3767 }
3768 
3769 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3770 {
3771 	struct intel_connector *connector = m->private;
3772 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3773 	static const char * const sink_status[] = {
3774 		"inactive",
3775 		"transition to active, capture and display",
3776 		"active, display from RFB",
3777 		"active, capture and display on sink device timings",
3778 		"transition to inactive, capture and display, timing re-sync",
3779 		"reserved",
3780 		"reserved",
3781 		"sink internal error",
3782 	};
3783 	const char *str;
3784 	int ret;
3785 	u8 status, error_status;
3786 
3787 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3788 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3789 		return -ENODEV;
3790 	}
3791 
3792 	if (connector->base.status != connector_status_connected)
3793 		return -ENODEV;
3794 
3795 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3796 	if (ret)
3797 		return ret;
3798 
3799 	status &= DP_PSR_SINK_STATE_MASK;
3800 	if (status < ARRAY_SIZE(sink_status))
3801 		str = sink_status[status];
3802 	else
3803 		str = "unknown";
3804 
3805 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3806 
3807 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3808 
3809 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3810 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3811 			    DP_PSR_LINK_CRC_ERROR))
3812 		seq_puts(m, ":\n");
3813 	else
3814 		seq_puts(m, "\n");
3815 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3816 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3817 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3818 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3819 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3820 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3821 
3822 	return ret;
3823 }
3824 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3825 
3826 static int i915_psr_status_show(struct seq_file *m, void *data)
3827 {
3828 	struct intel_connector *connector = m->private;
3829 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3830 
3831 	return intel_psr_status(m, intel_dp);
3832 }
3833 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3834 
3835 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3836 {
3837 	struct intel_display *display = to_intel_display(connector);
3838 	struct drm_i915_private *i915 = to_i915(connector->base.dev);
3839 	struct dentry *root = connector->base.debugfs_entry;
3840 
3841 	/* TODO: Add support for MST connectors as well. */
3842 	if ((connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3843 	     connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort) ||
3844 	    connector->mst_port)
3845 		return;
3846 
3847 	debugfs_create_file("i915_psr_sink_status", 0444, root,
3848 			    connector, &i915_psr_sink_status_fops);
3849 
3850 	if (HAS_PSR(display) || HAS_DP20(i915))
3851 		debugfs_create_file("i915_psr_status", 0444, root,
3852 				    connector, &i915_psr_status_fops);
3853 }
3854