xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision bcfe43f0ea77c42c2154fb79b99b7d1d82ac3231)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 
30 #include "i915_drv.h"
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_types.h"
40 #include "intel_dp.h"
41 #include "intel_dp_aux.h"
42 #include "intel_frontbuffer.h"
43 #include "intel_hdmi.h"
44 #include "intel_psr.h"
45 #include "intel_psr_regs.h"
46 #include "intel_snps_phy.h"
47 #include "skl_universal_plane.h"
48 
49 /**
50  * DOC: Panel Self Refresh (PSR/SRD)
51  *
 52  * Since Haswell the display controller supports Panel Self-Refresh on display
 53  * panels which have a remote frame buffer (RFB) implemented according to the PSR
 54  * spec in eDP 1.3. The PSR feature allows the display to go to lower standby states
 55  * when the system is idle but the display is on, as it eliminates display refresh
 56  * requests to DDR memory completely as long as the frame buffer for that
 57  * display is unchanged.
58  *
59  * Panel Self Refresh must be supported by both Hardware (source) and
60  * Panel (sink).
61  *
62  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
63  * to power down the link and memory controller. For DSI panels the same idea
64  * is called "manual mode".
65  *
66  * The implementation uses the hardware-based PSR support which automatically
67  * enters/exits self-refresh mode. The hardware takes care of sending the
68  * required DP aux message and could even retrain the link (that part isn't
69  * enabled yet though). The hardware also keeps track of any frontbuffer
70  * changes to know when to exit self-refresh mode again. Unfortunately that
 71  * part doesn't work too well, which is why the i915 PSR support uses the
72  * software frontbuffer tracking to make sure it doesn't miss a screen
73  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
74  * get called by the frontbuffer tracking code. Note that because of locking
75  * issues the self-refresh re-enable code is done from a work queue, which
 76  * must be correctly synchronized/cancelled when shutting down the pipe.
77  *
78  * DC3CO (DC3 clock off)
79  *
 80  * On top of PSR2, GEN12 adds an intermediate power-saving state that turns
 81  * the clock off automatically during PSR2 idle state.
82  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
83  * entry/exit allows the HW to enter a low-power state even when page flipping
84  * periodically (for instance a 30fps video playback scenario).
85  *
 86  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was in
 87  * it), DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run after 6
 88  * frames. If no other flip occurs and that function executes, DC3CO is
 89  * disabled and PSR2 is configured to enter deep sleep again, resetting the
 90  * cycle in case of another flip.
91  * Front buffer modifications do not trigger DC3CO activation on purpose as it
 92  * would bring a lot of complexity and most modern systems will only
93  * use page flips.
94  */
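
/*
 * Illustrative only, not part of the driver: a minimal sketch of how the
 * frontbuffer tracking code described above is expected to drive the two
 * PSR hooks. The real call sites live in intel_frontbuffer.c; the exact
 * prototypes are paraphrased here and may differ from the current tree.
 *
 *	static void example_frontbuffer_cpu_write(struct drm_i915_private *i915,
 *						  unsigned int frontbuffer_bits)
 *	{
 *		// Force a PSR exit before the panel's RFB copy goes stale.
 *		intel_psr_invalidate(i915, frontbuffer_bits, ORIGIN_CPU);
 *
 *		// ... CPU rendering into the frontbuffer happens here ...
 *
 *		// Writes done: flush lets PSR re-arm; the actual re-enable is
 *		// deferred to psr.work due to the locking constraints above.
 *		intel_psr_flush(i915, frontbuffer_bits, ORIGIN_CPU);
 *	}
 */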
95 
96 /*
97  * Description of PSR mask bits:
98  *
99  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
100  *
 101  *  When unmasked (nearly) all display register writes (e.g. even
102  *  SWF) trigger a PSR exit. Some registers are excluded from this
103  *  and they have a more specific mask (described below). On icl+
104  *  this bit no longer exists and is effectively always set.
105  *
106  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
107  *
108  *  When unmasked (nearly) all pipe/plane register writes
109  *  trigger a PSR exit. Some plane registers are excluded from this
110  *  and they have a more specific mask (described below).
111  *
112  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
113  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
114  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
115  *
116  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
117  *  SPR_SURF/CURBASE are not included in this and instead are
118  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
119  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
120  *
121  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
122  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
123  *
124  *  When unmasked PSR is blocked as long as the sprite
125  *  plane is enabled. skl+ with their universal planes no
126  *  longer have a mask bit like this, and no plane being
 127  *  enabled blocks PSR.
128  *
129  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
130  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
131  *
 132  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
 133  *  this bit doesn't exist, but CURPOS is included in the
134  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
135  *
136  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
137  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
138  *
139  *  When unmasked PSR is blocked as long as vblank and/or vsync
140  *  interrupt is unmasked in IMR *and* enabled in IER.
141  *
142  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
143  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
144  *
 145  *  Selects whether PSR exit generates an extra vblank before
146  *  the first frame is transmitted. Also note the opposite polarity
 147  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
148  *  unmasked==do not generate the extra vblank).
149  *
150  *  With DC states enabled the extra vblank happens after link training,
 151  *  with DC states disabled it happens immediately upon PSR exit trigger.
152  *  No idea as of now why there is a difference. HSW/BDW (which don't
153  *  even have DMC) always generate it after link training. Go figure.
154  *
155  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
156  *  and thus won't latch until the first vblank. So with DC states
 157  *  enabled the register effectively uses the reset value during DC5
158  *  exit+PSR exit sequence, and thus the bit does nothing until
159  *  latched by the vblank that it was trying to prevent from being
160  *  generated in the first place. So we should probably call this
161  *  one a chicken/egg bit instead on skl+.
162  *
163  *  In standby mode (as opposed to link-off) this makes no difference
164  *  as the timing generator keeps running the whole time generating
165  *  normal periodic vblanks.
166  *
167  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
168  *  and doing so makes the behaviour match the skl+ reset value.
169  *
170  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
171  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
172  *
 173  *  On BDW without this bit set no vblanks whatsoever are
 174  *  generated after PSR exit. On HSW this has no apparent effect.
175  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
176  *
177  * The rest of the bits are more self-explanatory and/or
178  * irrelevant for normal operation.
179  *
 180  * Description of the intel_crtc_state variables has_psr, has_panel_replay and
181  * has_sel_update:
182  *
183  *  has_psr (alone):					PSR1
184  *  has_psr + has_sel_update:				PSR2
185  *  has_psr + has_panel_replay:				Panel Replay
186  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
187  *
 188  * Description of some intel_psr variables: enabled, panel_replay_enabled,
189  * sel_update_enabled
190  *
191  *  enabled (alone):						PSR1
192  *  enabled + sel_update_enabled:				PSR2
193  *  enabled + panel_replay_enabled:				Panel Replay
194  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
195  */
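
/*
 * Purely illustrative and not used anywhere in the driver: a hypothetical
 * helper showing how the intel_crtc_state flag combinations listed above
 * map onto the four operating modes.
 *
 *	static const char *example_psr_mode_name(const struct intel_crtc_state *crtc_state)
 *	{
 *		if (!crtc_state->has_psr)
 *			return "off";
 *		if (crtc_state->has_panel_replay)
 *			return crtc_state->has_sel_update ?
 *				"Panel Replay Selective Update" : "Panel Replay";
 *		return crtc_state->has_sel_update ? "PSR2" : "PSR1";
 *	}
 */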
196 
197 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
198 			   (intel_dp)->psr.source_support)
199 
200 bool intel_encoder_can_psr(struct intel_encoder *encoder)
201 {
202 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
203 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
204 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
205 	else
206 		return false;
207 }
208 
209 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
210 				  const struct intel_crtc_state *crtc_state)
211 {
212 	/*
213 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
214 	 * the output is enabled. For non-eDP outputs the main link is always
215 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
216 	 * for eDP.
217 	 *
218 	 * TODO:
219 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
220 	 *   the ALPM with main-link off mode is not enabled.
221 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
222 	 *   main-link off mode is added for it and this mode gets enabled.
223 	 */
224 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
225 	       intel_encoder_can_psr(encoder);
226 }
227 
228 static bool psr_global_enabled(struct intel_dp *intel_dp)
229 {
230 	struct intel_display *display = to_intel_display(intel_dp);
231 	struct intel_connector *connector = intel_dp->attached_connector;
232 
233 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
234 	case I915_PSR_DEBUG_DEFAULT:
235 		if (display->params.enable_psr == -1)
236 			return connector->panel.vbt.psr.enable;
237 		return display->params.enable_psr;
238 	case I915_PSR_DEBUG_DISABLE:
239 		return false;
240 	default:
241 		return true;
242 	}
243 }
244 
245 static bool psr2_global_enabled(struct intel_dp *intel_dp)
246 {
247 	struct intel_display *display = to_intel_display(intel_dp);
248 
249 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
250 	case I915_PSR_DEBUG_DISABLE:
251 	case I915_PSR_DEBUG_FORCE_PSR1:
252 		return false;
253 	default:
254 		if (display->params.enable_psr == 1)
255 			return false;
256 		return true;
257 	}
258 }
259 
260 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
261 {
262 	struct intel_display *display = to_intel_display(intel_dp);
263 
264 	if (display->params.enable_psr != -1)
265 		return false;
266 
267 	return true;
268 }
269 
270 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
271 {
272 	struct intel_display *display = to_intel_display(intel_dp);
273 
274 	if ((display->params.enable_psr != -1) ||
275 	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
276 		return false;
277 	return true;
278 }
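
/*
 * Summary of how the enable_psr module parameter is interpreted by the
 * *_global_enabled() helpers above (the psr.debug debugfs knob can further
 * restrict the result):
 *
 *  -1 (default): PSR follows the VBT; PSR2, Panel Replay and SU region
 *                early transport all remain eligible.
 *   0:           PSR, and with it Panel Replay, is disabled.
 *   1:           only PSR1 is allowed.
 *   other:       PSR1/PSR2 are allowed, but Panel Replay and SU region
 *                early transport are not.
 */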
279 
280 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
281 {
282 	struct intel_display *display = to_intel_display(intel_dp);
283 
284 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
285 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
286 }
287 
288 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
289 {
290 	struct intel_display *display = to_intel_display(intel_dp);
291 
292 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
293 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
294 }
295 
296 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
297 {
298 	struct intel_display *display = to_intel_display(intel_dp);
299 
300 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
301 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
302 }
303 
304 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
305 {
306 	struct intel_display *display = to_intel_display(intel_dp);
307 
308 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
309 		EDP_PSR_MASK(intel_dp->psr.transcoder);
310 }
311 
312 static i915_reg_t psr_ctl_reg(struct intel_display *display,
313 			      enum transcoder cpu_transcoder)
314 {
315 	if (DISPLAY_VER(display) >= 8)
316 		return EDP_PSR_CTL(display, cpu_transcoder);
317 	else
318 		return HSW_SRD_CTL;
319 }
320 
321 static i915_reg_t psr_debug_reg(struct intel_display *display,
322 				enum transcoder cpu_transcoder)
323 {
324 	if (DISPLAY_VER(display) >= 8)
325 		return EDP_PSR_DEBUG(display, cpu_transcoder);
326 	else
327 		return HSW_SRD_DEBUG;
328 }
329 
330 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
331 				   enum transcoder cpu_transcoder)
332 {
333 	if (DISPLAY_VER(display) >= 8)
334 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
335 	else
336 		return HSW_SRD_PERF_CNT;
337 }
338 
339 static i915_reg_t psr_status_reg(struct intel_display *display,
340 				 enum transcoder cpu_transcoder)
341 {
342 	if (DISPLAY_VER(display) >= 8)
343 		return EDP_PSR_STATUS(display, cpu_transcoder);
344 	else
345 		return HSW_SRD_STATUS;
346 }
347 
348 static i915_reg_t psr_imr_reg(struct intel_display *display,
349 			      enum transcoder cpu_transcoder)
350 {
351 	if (DISPLAY_VER(display) >= 12)
352 		return TRANS_PSR_IMR(display, cpu_transcoder);
353 	else
354 		return EDP_PSR_IMR;
355 }
356 
357 static i915_reg_t psr_iir_reg(struct intel_display *display,
358 			      enum transcoder cpu_transcoder)
359 {
360 	if (DISPLAY_VER(display) >= 12)
361 		return TRANS_PSR_IIR(display, cpu_transcoder);
362 	else
363 		return EDP_PSR_IIR;
364 }
365 
366 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
367 				  enum transcoder cpu_transcoder)
368 {
369 	if (DISPLAY_VER(display) >= 8)
370 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
371 	else
372 		return HSW_SRD_AUX_CTL;
373 }
374 
375 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
376 				   enum transcoder cpu_transcoder, int i)
377 {
378 	if (DISPLAY_VER(display) >= 8)
379 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
380 	else
381 		return HSW_SRD_AUX_DATA(i);
382 }
383 
384 static void psr_irq_control(struct intel_dp *intel_dp)
385 {
386 	struct intel_display *display = to_intel_display(intel_dp);
387 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
388 	u32 mask;
389 
390 	if (intel_dp->psr.panel_replay_enabled)
391 		return;
392 
393 	mask = psr_irq_psr_error_bit_get(intel_dp);
394 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
395 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
396 			psr_irq_pre_entry_bit_get(intel_dp);
397 
398 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
399 		     psr_irq_mask_get(intel_dp), ~mask);
400 }
401 
402 static void psr_event_print(struct intel_display *display,
403 			    u32 val, bool sel_update_enabled)
404 {
405 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
406 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
407 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
408 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
409 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
410 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
411 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
412 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
413 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
414 	if (val & PSR_EVENT_GRAPHICS_RESET)
415 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
416 	if (val & PSR_EVENT_PCH_INTERRUPT)
417 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
418 	if (val & PSR_EVENT_MEMORY_UP)
419 		drm_dbg_kms(display->drm, "\tMemory up\n");
420 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
421 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
422 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
423 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
424 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
425 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
426 	if (val & PSR_EVENT_REGISTER_UPDATE)
427 		drm_dbg_kms(display->drm, "\tRegister updated\n");
428 	if (val & PSR_EVENT_HDCP_ENABLE)
429 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
430 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
431 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
432 	if (val & PSR_EVENT_VBI_ENABLE)
433 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
434 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
435 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
436 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
437 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
438 }
439 
440 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
441 {
442 	struct intel_display *display = to_intel_display(intel_dp);
443 	struct drm_i915_private *dev_priv = to_i915(display->drm);
444 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
445 	ktime_t time_ns =  ktime_get();
446 
447 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
448 		intel_dp->psr.last_entry_attempt = time_ns;
449 		drm_dbg_kms(display->drm,
450 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
451 			    transcoder_name(cpu_transcoder));
452 	}
453 
454 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
455 		intel_dp->psr.last_exit = time_ns;
456 		drm_dbg_kms(display->drm,
457 			    "[transcoder %s] PSR exit completed\n",
458 			    transcoder_name(cpu_transcoder));
459 
460 		if (DISPLAY_VER(display) >= 9) {
461 			u32 val;
462 
463 			val = intel_de_rmw(dev_priv,
464 					   PSR_EVENT(dev_priv, cpu_transcoder),
465 					   0, 0);
466 
467 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
468 		}
469 	}
470 
471 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
472 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
473 			 transcoder_name(cpu_transcoder));
474 
475 		intel_dp->psr.irq_aux_error = true;
476 
477 		/*
478 		 * If this interrupt is not masked it will keep firing
479 		 * so fast that it prevents the scheduled work from
480 		 * running.
481 		 * Also, after a PSR error we don't want to arm PSR
482 		 * again, so we don't care about unmasking the interrupt
483 		 * or clearing irq_aux_error.
484 		 */
485 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
486 			     0, psr_irq_psr_error_bit_get(intel_dp));
487 
488 		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
489 	}
490 }
491 
492 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
493 {
494 	struct intel_display *display = to_intel_display(intel_dp);
495 	u8 val = 8; /* assume the worst if we can't read the value */
496 
497 	if (drm_dp_dpcd_readb(&intel_dp->aux,
498 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
499 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
500 	else
501 		drm_dbg_kms(display->drm,
502 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
503 	return val;
504 }
505 
506 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
507 {
508 	u8 su_capability = 0;
509 
510 	if (intel_dp->psr.sink_panel_replay_su_support)
511 		drm_dp_dpcd_readb(&intel_dp->aux,
512 				  DP_PANEL_PANEL_REPLAY_CAPABILITY,
513 				  &su_capability);
514 	else
515 		su_capability = intel_dp->psr_dpcd[1];
516 
517 	return su_capability;
518 }
519 
520 static unsigned int
521 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
522 {
523 	return intel_dp->psr.sink_panel_replay_su_support ?
524 		DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
525 		DP_PSR2_SU_X_GRANULARITY;
526 }
527 
528 static unsigned int
529 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
530 {
531 	return intel_dp->psr.sink_panel_replay_su_support ?
532 		DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
533 		DP_PSR2_SU_Y_GRANULARITY;
534 }
535 
536 /*
537  * Note: Bits related to granularity are the same in the panel replay and PSR
538  * registers. Rely on the PSR definitions for these "common" bits.
539  */
540 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
541 {
542 	struct intel_display *display = to_intel_display(intel_dp);
543 	ssize_t r;
544 	u16 w;
545 	u8 y;
546 
547 	/*
548 	 * TODO: Do we need to take into account panels supporting both PSR and
549 	 * Panel Replay?
550 	 */
551 
552 	/*
553 	 * If the sink doesn't have specific granularity requirements, set the
554 	 * legacy ones.
555 	 */
556 	if (!(intel_dp_get_su_capability(intel_dp) &
557 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
558 		/* As PSR2 HW sends full lines, we do not care about x granularity */
559 		w = 4;
560 		y = 4;
561 		goto exit;
562 	}
563 
564 	r = drm_dp_dpcd_read(&intel_dp->aux,
565 			     intel_dp_get_su_x_granularity_offset(intel_dp),
566 			     &w, 2);
567 	if (r != 2)
568 		drm_dbg_kms(display->drm,
569 			    "Unable to read selective update x granularity\n");
570 	/*
571 	 * Spec says that if the value read is 0 the default granularity should
572 	 * be used instead.
573 	 */
574 	if (r != 2 || w == 0)
575 		w = 4;
576 
577 	r = drm_dp_dpcd_read(&intel_dp->aux,
578 			     intel_dp_get_su_y_granularity_offset(intel_dp),
579 			     &y, 1);
580 	if (r != 1) {
581 		drm_dbg_kms(display->drm,
582 			    "Unable to read selective update y granularity\n");
583 		y = 4;
584 	}
585 	if (y == 0)
586 		y = 1;
587 
588 exit:
589 	intel_dp->psr.su_w_granularity = w;
590 	intel_dp->psr.su_y_granularity = y;
591 }
592 
593 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
594 {
595 	struct intel_display *display = to_intel_display(intel_dp);
596 
597 	if (intel_dp_is_edp(intel_dp)) {
598 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
599 			drm_dbg_kms(display->drm,
600 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
601 			return;
602 		}
603 
604 		if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
605 			drm_dbg_kms(display->drm,
606 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
607 			return;
608 		}
609 	}
610 
611 	intel_dp->psr.sink_panel_replay_support = true;
612 
613 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
614 		intel_dp->psr.sink_panel_replay_su_support = true;
615 
616 	drm_dbg_kms(display->drm,
617 		    "Panel replay %sis supported by panel\n",
618 		    intel_dp->psr.sink_panel_replay_su_support ?
619 		    "selective_update " : "");
620 }
621 
622 static void _psr_init_dpcd(struct intel_dp *intel_dp)
623 {
624 	struct intel_display *display = to_intel_display(intel_dp);
625 
626 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
627 		    intel_dp->psr_dpcd[0]);
628 
629 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
630 		drm_dbg_kms(display->drm,
631 			    "PSR support not currently available for this panel\n");
632 		return;
633 	}
634 
635 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
636 		drm_dbg_kms(display->drm,
637 			    "Panel lacks power state control, PSR cannot be enabled\n");
638 		return;
639 	}
640 
641 	intel_dp->psr.sink_support = true;
642 	intel_dp->psr.sink_sync_latency =
643 		intel_dp_get_sink_sync_latency(intel_dp);
644 
645 	if (DISPLAY_VER(display) >= 9 &&
646 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
647 		bool y_req = intel_dp->psr_dpcd[1] &
648 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
649 
650 		/*
651 		 * All panels that support PSR version 03h (PSR2 +
652 		 * Y-coordinate) can handle Y-coordinates in the VSC, but we
653 		 * are only sure that they are going to be used when required
654 		 * by the panel. This way the panel is capable of doing a
655 		 * selective update without an AUX frame sync.
656 		 *
657 		 * To support panels with PSR version 02h, or version 03h
658 		 * without the Y-coordinate requirement, we would need to
659 		 * enable GTC first.
660 		 */
661 		intel_dp->psr.sink_psr2_support = y_req &&
662 			intel_alpm_aux_wake_supported(intel_dp);
663 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
664 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
665 	}
666 }
667 
668 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
669 {
670 	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
671 			 sizeof(intel_dp->psr_dpcd));
672 	drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
673 			  &intel_dp->pr_dpcd);
674 
675 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
676 		_panel_replay_init_dpcd(intel_dp);
677 
678 	if (intel_dp->psr_dpcd[0])
679 		_psr_init_dpcd(intel_dp);
680 
681 	if (intel_dp->psr.sink_psr2_support ||
682 	    intel_dp->psr.sink_panel_replay_su_support)
683 		intel_dp_get_su_granularity(intel_dp);
684 }
685 
686 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
687 {
688 	struct intel_display *display = to_intel_display(intel_dp);
689 	struct drm_i915_private *dev_priv = to_i915(display->drm);
690 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
691 	u32 aux_clock_divider, aux_ctl;
692 	/* write DP_SET_POWER=D0 */
693 	static const u8 aux_msg[] = {
694 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
695 		[1] = (DP_SET_POWER >> 8) & 0xff,
696 		[2] = DP_SET_POWER & 0xff,
697 		[3] = 1 - 1,
698 		[4] = DP_SET_POWER_D0,
699 	};
700 	int i;
701 
702 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
703 	for (i = 0; i < sizeof(aux_msg); i += 4)
704 		intel_de_write(dev_priv,
705 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
706 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
707 
708 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
709 
710 	/* Start with bits set for DDI_AUX_CTL register */
711 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
712 					     aux_clock_divider);
713 
714 	/* Select only valid bits for SRD_AUX_CTL */
715 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
716 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
717 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
718 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
719 
720 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
721 		       aux_ctl);
722 }
723 
724 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
725 {
726 	struct intel_display *display = to_intel_display(intel_dp);
727 
728 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
729 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
730 		return false;
731 
732 	return panel_replay ?
733 		intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
734 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
735 		psr2_su_region_et_global_enabled(intel_dp);
736 }
737 
738 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
739 				      const struct intel_crtc_state *crtc_state)
740 {
741 	u8 val = DP_PANEL_REPLAY_ENABLE |
742 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
743 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
744 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
745 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
746 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
747 
748 	if (crtc_state->has_sel_update)
749 		val |= DP_PANEL_REPLAY_SU_ENABLE;
750 
751 	if (crtc_state->enable_psr2_su_region_et)
752 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
753 
754 	if (crtc_state->req_psr2_sdp_prior_scanline)
755 		panel_replay_config2 |=
756 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
757 
758 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
759 
760 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
761 			   panel_replay_config2);
762 }
763 
764 static void _psr_enable_sink(struct intel_dp *intel_dp,
765 			     const struct intel_crtc_state *crtc_state)
766 {
767 	struct intel_display *display = to_intel_display(intel_dp);
768 	u8 val = 0;
769 
770 	if (crtc_state->has_sel_update) {
771 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
772 	} else {
773 		if (intel_dp->psr.link_standby)
774 			val |= DP_PSR_MAIN_LINK_ACTIVE;
775 
776 		if (DISPLAY_VER(display) >= 8)
777 			val |= DP_PSR_CRC_VERIFICATION;
778 	}
779 
780 	if (crtc_state->req_psr2_sdp_prior_scanline)
781 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
782 
783 	if (crtc_state->enable_psr2_su_region_et)
784 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
785 
786 	if (intel_dp->psr.entry_setup_frames > 0)
787 		val |= DP_PSR_FRAME_CAPTURE;
788 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
789 
790 	val |= DP_PSR_ENABLE;
791 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
792 }
793 
794 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
795 				       const struct intel_crtc_state *crtc_state)
796 {
797 	u8 val;
798 
799 	/*
800 	 * eDP Panel Replay always uses ALPM.
801 	 * PSR2 uses ALPM but PSR1 doesn't.
802 	 */
803 	if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
804 					   !crtc_state->has_sel_update))
805 		return;
806 
807 	val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
808 
809 	if (crtc_state->has_panel_replay)
810 		val |= DP_ALPM_MODE_AUX_LESS;
811 
812 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
813 }
814 
815 void intel_psr_enable_sink(struct intel_dp *intel_dp,
816 			   const struct intel_crtc_state *crtc_state)
817 {
818 	intel_psr_enable_sink_alpm(intel_dp, crtc_state);
819 
820 	crtc_state->has_panel_replay ?
821 		_panel_replay_enable_sink(intel_dp, crtc_state) :
822 		_psr_enable_sink(intel_dp, crtc_state);
823 
824 	if (intel_dp_is_edp(intel_dp))
825 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
826 }
827 
828 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
829 {
830 	struct intel_display *display = to_intel_display(intel_dp);
831 	struct intel_connector *connector = intel_dp->attached_connector;
832 	struct drm_i915_private *dev_priv = to_i915(display->drm);
833 	u32 val = 0;
834 
835 	if (DISPLAY_VER(display) >= 11)
836 		val |= EDP_PSR_TP4_TIME_0us;
837 
838 	if (display->params.psr_safest_params) {
839 		val |= EDP_PSR_TP1_TIME_2500us;
840 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
841 		goto check_tp3_sel;
842 	}
843 
844 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
845 		val |= EDP_PSR_TP1_TIME_0us;
846 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
847 		val |= EDP_PSR_TP1_TIME_100us;
848 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
849 		val |= EDP_PSR_TP1_TIME_500us;
850 	else
851 		val |= EDP_PSR_TP1_TIME_2500us;
852 
853 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
854 		val |= EDP_PSR_TP2_TP3_TIME_0us;
855 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
856 		val |= EDP_PSR_TP2_TP3_TIME_100us;
857 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
858 		val |= EDP_PSR_TP2_TP3_TIME_500us;
859 	else
860 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
861 
862 	/*
863 	 * WA 0479: hsw,bdw
864 	 * "Do not skip both TP1 and TP2/TP3"
865 	 */
866 	if (DISPLAY_VER(dev_priv) < 9 &&
867 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
868 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
869 		val |= EDP_PSR_TP2_TP3_TIME_100us;
870 
871 check_tp3_sel:
872 	if (intel_dp_source_supports_tps3(dev_priv) &&
873 	    drm_dp_tps3_supported(intel_dp->dpcd))
874 		val |= EDP_PSR_TP_TP1_TP3;
875 	else
876 		val |= EDP_PSR_TP_TP1_TP2;
877 
878 	return val;
879 }
880 
881 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
882 {
883 	struct intel_display *display = to_intel_display(intel_dp);
884 	struct intel_connector *connector = intel_dp->attached_connector;
885 	int idle_frames;
886 
887 	/* Let's use 6 as the minimum to cover all known cases including the
888 	 * off-by-one issue that HW has in some cases.
889 	 */
890 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
891 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
892 
893 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
894 		idle_frames = 0xf;
895 
896 	return idle_frames;
897 }
898 
899 static void hsw_activate_psr1(struct intel_dp *intel_dp)
900 {
901 	struct intel_display *display = to_intel_display(intel_dp);
902 	struct drm_i915_private *dev_priv = to_i915(display->drm);
903 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
904 	u32 max_sleep_time = 0x1f;
905 	u32 val = EDP_PSR_ENABLE;
906 
907 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
908 
909 	if (DISPLAY_VER(display) < 20)
910 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
911 
912 	if (IS_HASWELL(dev_priv))
913 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
914 
915 	if (intel_dp->psr.link_standby)
916 		val |= EDP_PSR_LINK_STANDBY;
917 
918 	val |= intel_psr1_get_tp_time(intel_dp);
919 
920 	if (DISPLAY_VER(display) >= 8)
921 		val |= EDP_PSR_CRC_ENABLE;
922 
923 	if (DISPLAY_VER(display) >= 20)
924 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
925 
926 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
927 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
928 }
929 
930 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
931 {
932 	struct intel_display *display = to_intel_display(intel_dp);
933 	struct intel_connector *connector = intel_dp->attached_connector;
934 	u32 val = 0;
935 
936 	if (display->params.psr_safest_params)
937 		return EDP_PSR2_TP2_TIME_2500us;
938 
939 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
940 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
941 		val |= EDP_PSR2_TP2_TIME_50us;
942 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
943 		val |= EDP_PSR2_TP2_TIME_100us;
944 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
945 		val |= EDP_PSR2_TP2_TIME_500us;
946 	else
947 		val |= EDP_PSR2_TP2_TIME_2500us;
948 
949 	return val;
950 }
951 
952 static int psr2_block_count_lines(struct intel_dp *intel_dp)
953 {
954 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
955 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
956 }
957 
958 static int psr2_block_count(struct intel_dp *intel_dp)
959 {
960 	return psr2_block_count_lines(intel_dp) / 4;
961 }
962 
963 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
964 {
965 	u8 frames_before_su_entry;
966 
967 	frames_before_su_entry = max_t(u8,
968 				       intel_dp->psr.sink_sync_latency + 1,
969 				       2);
970 
971 	/* Entry setup frames must be at least 1 less than frames before SU entry */
972 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
973 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
974 
975 	return frames_before_su_entry;
976 }
977 
978 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
979 {
980 	struct intel_display *display = to_intel_display(intel_dp);
981 	struct intel_psr *psr = &intel_dp->psr;
982 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
983 
984 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
985 		u32 val = psr->su_region_et_enabled ?
986 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
987 
988 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
989 			val |= EDP_PSR2_SU_SDP_SCANLINE;
990 
991 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
992 			       val);
993 	}
994 
995 	intel_de_rmw(display,
996 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
997 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
998 
999 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1000 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1001 }
1002 
1003 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1004 {
1005 	struct intel_display *display = to_intel_display(intel_dp);
1006 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1007 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1008 	u32 val = EDP_PSR2_ENABLE;
1009 	u32 psr_val = 0;
1010 
1011 	val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
1012 
1013 	if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))
1014 		val |= EDP_SU_TRACK_ENABLE;
1015 
1016 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1017 		val |= EDP_Y_COORDINATE_ENABLE;
1018 
1019 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1020 
1021 	val |= intel_psr2_get_tp_time(intel_dp);
1022 
1023 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1024 		if (psr2_block_count(intel_dp) > 2)
1025 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1026 		else
1027 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1028 	}
1029 
1030 	/* Wa_22012278275:adl-p */
1031 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1032 		static const u8 map[] = {
1033 			2, /* 5 lines */
1034 			1, /* 6 lines */
1035 			0, /* 7 lines */
1036 			3, /* 8 lines */
1037 			6, /* 9 lines */
1038 			5, /* 10 lines */
1039 			4, /* 11 lines */
1040 			7, /* 12 lines */
1041 		};
1042 		/*
1043 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1044 		 * comments below for more information
1045 		 */
1046 		int tmp;
1047 
1048 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1049 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1050 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1051 
1052 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1053 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1054 	} else if (DISPLAY_VER(display) >= 20) {
1055 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1056 	} else if (DISPLAY_VER(display) >= 12) {
1057 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1058 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1059 	} else if (DISPLAY_VER(display) >= 9) {
1060 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1061 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1062 	}
1063 
1064 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1065 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1066 
1067 	if (DISPLAY_VER(display) >= 20)
1068 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1069 
1070 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1071 		u32 tmp;
1072 
1073 		tmp = intel_de_read(display,
1074 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1075 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1076 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1077 		intel_de_write(display,
1078 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1079 	}
1080 
1081 	if (intel_dp->psr.su_region_et_enabled)
1082 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1083 
1084 	/*
1085 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
1086 	 * recommends keeping this bit unset while PSR2 is enabled.
1087 	 */
1088 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1089 
1090 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1091 }
1092 
1093 static bool
1094 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1095 {
1096 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1097 
1098 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1099 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1100 	else if (DISPLAY_VER(display) >= 12)
1101 		return cpu_transcoder == TRANSCODER_A;
1102 	else if (DISPLAY_VER(display) >= 9)
1103 		return cpu_transcoder == TRANSCODER_EDP;
1104 	else
1105 		return false;
1106 }
1107 
1108 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1109 {
1110 	if (!crtc_state->hw.active)
1111 		return 0;
1112 
1113 	return DIV_ROUND_UP(1000 * 1000,
1114 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1115 }
1116 
1117 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1118 				     u32 idle_frames)
1119 {
1120 	struct intel_display *display = to_intel_display(intel_dp);
1121 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1122 
1123 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1124 		     EDP_PSR2_IDLE_FRAMES_MASK,
1125 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1126 }
1127 
1128 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1129 {
1130 	struct intel_display *display = to_intel_display(intel_dp);
1131 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1132 
1133 	psr2_program_idle_frames(intel_dp, 0);
1134 	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO);
1135 }
1136 
1137 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1138 {
1139 	struct intel_display *display = to_intel_display(intel_dp);
1140 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1141 
1142 	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
1143 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1144 }
1145 
1146 static void tgl_dc3co_disable_work(struct work_struct *work)
1147 {
1148 	struct intel_dp *intel_dp =
1149 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1150 
1151 	mutex_lock(&intel_dp->psr.lock);
1152 	/* If delayed work is pending, it is not idle */
1153 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1154 		goto unlock;
1155 
1156 	tgl_psr2_disable_dc3co(intel_dp);
1157 unlock:
1158 	mutex_unlock(&intel_dp->psr.lock);
1159 }
1160 
1161 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1162 {
1163 	if (!intel_dp->psr.dc3co_exitline)
1164 		return;
1165 
1166 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1167 	/* Before PSR2 exit disallow dc3co */
1168 	tgl_psr2_disable_dc3co(intel_dp);
1169 }
1170 
1171 static bool
1172 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1173 			      struct intel_crtc_state *crtc_state)
1174 {
1175 	struct intel_display *display = to_intel_display(intel_dp);
1176 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1177 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1178 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1179 	enum port port = dig_port->base.port;
1180 
1181 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1182 		return pipe <= PIPE_B && port <= PORT_B;
1183 	else
1184 		return pipe == PIPE_A && port == PORT_A;
1185 }
1186 
1187 static void
1188 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1189 				  struct intel_crtc_state *crtc_state)
1190 {
1191 	struct intel_display *display = to_intel_display(intel_dp);
1192 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1193 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1194 	struct i915_power_domains *power_domains = &display->power.domains;
1195 	u32 exit_scanlines;
1196 
1197 	/*
1198 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1199 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1200 	 * is applied. B.Specs:49196
1201 	 */
1202 	return;
1203 
1204 	/*
1205 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1206 	 * TODO: when the issue is addressed, this restriction should be removed.
1207 	 */
1208 	if (crtc_state->enable_psr2_sel_fetch)
1209 		return;
1210 
1211 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1212 		return;
1213 
1214 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1215 		return;
1216 
1217 	/* Wa_16011303918:adl-p */
1218 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1219 		return;
1220 
1221 	/*
1222 	 * DC3CO Exit time 200us B.Spec 49196
1223 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1224 	 */
1225 	exit_scanlines =
1226 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1227 
1228 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1229 		return;
1230 
1231 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1232 }
1233 
1234 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1235 					      struct intel_crtc_state *crtc_state)
1236 {
1237 	struct intel_display *display = to_intel_display(intel_dp);
1238 
1239 	if (!display->params.enable_psr2_sel_fetch &&
1240 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1241 		drm_dbg_kms(display->drm,
1242 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1243 		return false;
1244 	}
1245 
1246 	if (crtc_state->uapi.async_flip) {
1247 		drm_dbg_kms(display->drm,
1248 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1249 		return false;
1250 	}
1251 
1252 	return crtc_state->enable_psr2_sel_fetch = true;
1253 }
1254 
1255 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1256 				   struct intel_crtc_state *crtc_state)
1257 {
1258 	struct intel_display *display = to_intel_display(intel_dp);
1259 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1260 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1261 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1262 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1263 	u16 y_granularity = 0;
1264 
1265 	/* PSR2 HW only sends full lines so we only need to validate the width */
1266 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1267 		return false;
1268 
1269 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1270 		return false;
1271 
1272 	/* HW tracking is only aligned to 4 lines */
1273 	if (!crtc_state->enable_psr2_sel_fetch)
1274 		return intel_dp->psr.su_y_granularity == 4;
1275 
1276 	/*
1277 	 * adl_p and mtl platforms have 1 line granularity.
1278 	 * For other platforms with SW tracking we can adjust the y coordinates
1279 	 * to match the sink requirement if it is a multiple of 4.
1280 	 */
1281 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1282 		y_granularity = intel_dp->psr.su_y_granularity;
1283 	else if (intel_dp->psr.su_y_granularity <= 2)
1284 		y_granularity = 4;
1285 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1286 		y_granularity = intel_dp->psr.su_y_granularity;
1287 
1288 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1289 		return false;
1290 
1291 	if (crtc_state->dsc.compression_enable &&
1292 	    vdsc_cfg->slice_height % y_granularity)
1293 		return false;
1294 
1295 	crtc_state->su_y_granularity = y_granularity;
1296 	return true;
1297 }
1298 
1299 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1300 							struct intel_crtc_state *crtc_state)
1301 {
1302 	struct intel_display *display = to_intel_display(intel_dp);
1303 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1304 	u32 hblank_total, hblank_ns, req_ns;
1305 
1306 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1307 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1308 
1309 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1310 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1311 
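	/*
	 * Hypothetical numbers, purely to illustrate the check below: with 4
	 * lanes and a 540000 kHz HBR2 port clock (540 MHz symbol clock),
	 * req_ns = ((60 / 4) + 11) * 1000 / 540 ~= 48 ns. A 160 pixel hblank
	 * at a 300000 kHz pixel clock gives hblank_ns ~= 533 ns, leaving a
	 * margin well above 100 ns, so no "SDP prior scanline" indication is
	 * required.
	 */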
1312 	if ((hblank_ns - req_ns) > 100)
1313 		return true;
1314 
1315 	/* Not supported <13 / Wa_22012279113:adl-p */
1316 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1317 		return false;
1318 
1319 	crtc_state->req_psr2_sdp_prior_scanline = true;
1320 	return true;
1321 }
1322 
1323 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1324 					const struct drm_display_mode *adjusted_mode)
1325 {
1326 	struct intel_display *display = to_intel_display(intel_dp);
1327 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1328 	int entry_setup_frames = 0;
1329 
1330 	if (psr_setup_time < 0) {
1331 		drm_dbg_kms(display->drm,
1332 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1333 			    intel_dp->psr_dpcd[1]);
1334 		return -ETIME;
1335 	}
1336 
1337 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1338 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1339 		if (DISPLAY_VER(display) >= 20) {
1340 			/* setup entry frames can be up to 3 frames */
1341 			entry_setup_frames = 1;
1342 			drm_dbg_kms(display->drm,
1343 				    "PSR setup entry frames %d\n",
1344 				    entry_setup_frames);
1345 		} else {
1346 			drm_dbg_kms(display->drm,
1347 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1348 				    psr_setup_time);
1349 			return -ETIME;
1350 		}
1351 	}
1352 
1353 	return entry_setup_frames;
1354 }
1355 
1356 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1357 				       const struct intel_crtc_state *crtc_state,
1358 				       bool aux_less)
1359 {
1360 	struct intel_display *display = to_intel_display(intel_dp);
1361 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1362 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1363 	int wake_lines;
1364 
1365 	if (aux_less)
1366 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1367 	else
1368 		wake_lines = DISPLAY_VER(display) < 20 ?
1369 			psr2_block_count_lines(intel_dp) :
1370 			intel_dp->alpm_parameters.io_wake_lines;
1371 
1372 	if (crtc_state->req_psr2_sdp_prior_scanline)
1373 		vblank -= 1;
1374 
1375 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1376 	if (vblank < wake_lines)
1377 		return false;
1378 
1379 	return true;
1380 }
1381 
1382 static bool alpm_config_valid(struct intel_dp *intel_dp,
1383 			      const struct intel_crtc_state *crtc_state,
1384 			      bool aux_less)
1385 {
1386 	struct intel_display *display = to_intel_display(intel_dp);
1387 
1388 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1389 		drm_dbg_kms(display->drm,
1390 			    "PSR2/Panel Replay not enabled, unable to use long enough wake times\n");
1391 		return false;
1392 	}
1393 
1394 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1395 		drm_dbg_kms(display->drm,
1396 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1397 		return false;
1398 	}
1399 
1400 	return true;
1401 }
1402 
1403 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1404 				    struct intel_crtc_state *crtc_state)
1405 {
1406 	struct intel_display *display = to_intel_display(intel_dp);
1407 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1408 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1409 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1410 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1411 
1412 	if (!intel_dp->psr.sink_psr2_support)
1413 		return false;
1414 
1415 	/* JSL and EHL only support eDP 1.3 */
1416 	if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
1417 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1418 		return false;
1419 	}
1420 
1421 	/* Wa_16011181250 */
1422 	if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
1423 	    IS_DG2(dev_priv)) {
1424 		drm_dbg_kms(display->drm,
1425 			    "PSR2 is defeatured for this platform\n");
1426 		return false;
1427 	}
1428 
1429 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1430 		drm_dbg_kms(display->drm,
1431 			    "PSR2 not completely functional in this stepping\n");
1432 		return false;
1433 	}
1434 
1435 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1436 		drm_dbg_kms(display->drm,
1437 			    "PSR2 not supported in transcoder %s\n",
1438 			    transcoder_name(crtc_state->cpu_transcoder));
1439 		return false;
1440 	}
1441 
1442 	/*
1443 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1444 	 * resolution requires DSC to be enabled, priority is given to DSC
1445 	 * over PSR2.
1446 	 */
1447 	if (crtc_state->dsc.compression_enable &&
1448 	    (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) {
1449 		drm_dbg_kms(display->drm,
1450 			    "PSR2 cannot be enabled since DSC is enabled\n");
1451 		return false;
1452 	}
1453 
1454 	if (DISPLAY_VER(display) >= 12) {
1455 		psr_max_h = 5120;
1456 		psr_max_v = 3200;
1457 		max_bpp = 30;
1458 	} else if (DISPLAY_VER(display) >= 10) {
1459 		psr_max_h = 4096;
1460 		psr_max_v = 2304;
1461 		max_bpp = 24;
1462 	} else if (DISPLAY_VER(display) == 9) {
1463 		psr_max_h = 3640;
1464 		psr_max_v = 2304;
1465 		max_bpp = 24;
1466 	}
1467 
1468 	if (crtc_state->pipe_bpp > max_bpp) {
1469 		drm_dbg_kms(display->drm,
1470 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1471 			    crtc_state->pipe_bpp, max_bpp);
1472 		return false;
1473 	}
1474 
1475 	/* Wa_16011303918:adl-p */
1476 	if (crtc_state->vrr.enable &&
1477 	    IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1478 		drm_dbg_kms(display->drm,
1479 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1480 		return false;
1481 	}
1482 
1483 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1484 		return false;
1485 
1486 	if (!crtc_state->enable_psr2_sel_fetch &&
1487 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1488 		drm_dbg_kms(display->drm,
1489 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1490 			    crtc_hdisplay, crtc_vdisplay,
1491 			    psr_max_h, psr_max_v);
1492 		return false;
1493 	}
1494 
1495 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1496 
1497 	return true;
1498 }
1499 
1500 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1501 					  struct intel_crtc_state *crtc_state)
1502 {
1503 	struct intel_display *display = to_intel_display(intel_dp);
1504 
1505 	if (HAS_PSR2_SEL_FETCH(display) &&
1506 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1507 	    !HAS_PSR_HW_TRACKING(display)) {
1508 		drm_dbg_kms(display->drm,
1509 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1510 		goto unsupported;
1511 	}
1512 
1513 	if (!psr2_global_enabled(intel_dp)) {
1514 		drm_dbg_kms(display->drm,
1515 			    "Selective update disabled by flag\n");
1516 		goto unsupported;
1517 	}
1518 
1519 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1520 		goto unsupported;
1521 
1522 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1523 		drm_dbg_kms(display->drm,
1524 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1525 		goto unsupported;
1526 	}
1527 
1528 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1529 					     !intel_dp->psr.sink_panel_replay_su_support))
1530 		goto unsupported;
1531 
1532 	if (crtc_state->crc_enabled) {
1533 		drm_dbg_kms(display->drm,
1534 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1535 		goto unsupported;
1536 	}
1537 
1538 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1539 		drm_dbg_kms(display->drm,
1540 			    "Selective update not enabled, SU granularity not compatible\n");
1541 		goto unsupported;
1542 	}
1543 
1544 	crtc_state->enable_psr2_su_region_et =
1545 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1546 
1547 	return true;
1548 
1549 unsupported:
1550 	crtc_state->enable_psr2_sel_fetch = false;
1551 	return false;
1552 }
1553 
1554 static bool _psr_compute_config(struct intel_dp *intel_dp,
1555 				struct intel_crtc_state *crtc_state)
1556 {
1557 	struct intel_display *display = to_intel_display(intel_dp);
1558 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1559 	int entry_setup_frames;
1560 
1561 	/*
1562 	 * Current PSR panels don't work reliably with VRR enabled.
1563 	 * So if VRR is enabled, do not enable PSR.
1564 	 */
1565 	if (crtc_state->vrr.enable)
1566 		return false;
1567 
1568 	if (!CAN_PSR(intel_dp))
1569 		return false;
1570 
1571 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1572 
1573 	if (entry_setup_frames >= 0) {
1574 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1575 	} else {
1576 		drm_dbg_kms(display->drm,
1577 			    "PSR condition failed: PSR setup timing not met\n");
1578 		return false;
1579 	}
1580 
1581 	return true;
1582 }
1583 
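/*
 * Panel Replay eligibility: sink/source support and the global enable flag
 * are checked for all outputs; eDP additionally requires pipe A/B, a
 * non-128b/132b link, no HDCP, a valid ALPM configuration and no pipe CRC.
 */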
1584 static bool
1585 _panel_replay_compute_config(struct intel_dp *intel_dp,
1586 			     const struct intel_crtc_state *crtc_state,
1587 			     const struct drm_connector_state *conn_state)
1588 {
1589 	struct intel_display *display = to_intel_display(intel_dp);
1590 	struct intel_connector *connector =
1591 		to_intel_connector(conn_state->connector);
1592 	struct intel_hdcp *hdcp = &connector->hdcp;
1593 
1594 	if (!CAN_PANEL_REPLAY(intel_dp))
1595 		return false;
1596 
1597 	if (!panel_replay_global_enabled(intel_dp)) {
1598 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1599 		return false;
1600 	}
1601 
1602 	if (!intel_dp_is_edp(intel_dp))
1603 		return true;
1604 
1605 	/* Remaining checks are for eDP only */
1606 
1607 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1608 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1609 		return false;
1610 
1611 	/* 128b/132b Panel Replay is not supported on eDP */
1612 	if (intel_dp_is_uhbr(crtc_state)) {
1613 		drm_dbg_kms(display->drm,
1614 			    "Panel Replay is not supported with 128b/132b\n");
1615 		return false;
1616 	}
1617 
1618 	/* HW will not allow Panel Replay on eDP when HDCP enabled */
1619 	if (conn_state->content_protection ==
1620 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1621 	    (conn_state->content_protection ==
1622 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1623 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1624 		drm_dbg_kms(display->drm,
1625 			    "Panel Replay is not supported with HDCP\n");
1626 		return false;
1627 	}
1628 
1629 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1630 		return false;
1631 
1632 	if (crtc_state->crc_enabled) {
1633 		drm_dbg_kms(display->drm,
1634 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1635 		return false;
1636 	}
1637 
1638 	return true;
1639 }
1640 
1641 void intel_psr_compute_config(struct intel_dp *intel_dp,
1642 			      struct intel_crtc_state *crtc_state,
1643 			      struct drm_connector_state *conn_state)
1644 {
1645 	struct intel_display *display = to_intel_display(intel_dp);
1646 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1647 
1648 	if (!psr_global_enabled(intel_dp)) {
1649 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1650 		return;
1651 	}
1652 
1653 	if (intel_dp->psr.sink_not_reliable) {
1654 		drm_dbg_kms(display->drm,
1655 			    "PSR sink implementation is not reliable\n");
1656 		return;
1657 	}
1658 
1659 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1660 		drm_dbg_kms(display->drm,
1661 			    "PSR condition failed: Interlaced mode enabled\n");
1662 		return;
1663 	}
1664 
1665 	/*
1666 	 * FIXME figure out what is wrong with PSR+joiner and
1667 	 * fix it. Presumably something related to the fact that
1668 	 * PSR is a transcoder level feature.
1669 	 */
1670 	if (crtc_state->joiner_pipes) {
1671 		drm_dbg_kms(display->drm,
1672 			    "PSR disabled due to joiner\n");
1673 		return;
1674 	}
1675 
1676 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1677 								    crtc_state,
1678 								    conn_state);
1679 
1680 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1681 		_psr_compute_config(intel_dp, crtc_state);
1682 
1683 	if (!crtc_state->has_psr)
1684 		return;
1685 
1686 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1687 }
1688 
1689 void intel_psr_get_config(struct intel_encoder *encoder,
1690 			  struct intel_crtc_state *pipe_config)
1691 {
1692 	struct intel_display *display = to_intel_display(encoder);
1693 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1694 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1695 	struct intel_dp *intel_dp;
1696 	u32 val;
1697 
1698 	if (!dig_port)
1699 		return;
1700 
1701 	intel_dp = &dig_port->dp;
1702 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1703 		return;
1704 
1705 	mutex_lock(&intel_dp->psr.lock);
1706 	if (!intel_dp->psr.enabled)
1707 		goto unlock;
1708 
1709 	if (intel_dp->psr.panel_replay_enabled) {
1710 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1711 	} else {
		/*
		 * Not possible to read back the EDP_PSR/PSR2_CTL registers here,
		 * as they may be transiently enabled/disabled by frontbuffer
		 * tracking and other mechanisms.
		 */
1716 		pipe_config->has_psr = true;
1717 	}
1718 
1719 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1720 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1721 
1722 	if (!intel_dp->psr.sel_update_enabled)
1723 		goto unlock;
1724 
1725 	if (HAS_PSR2_SEL_FETCH(display)) {
1726 		val = intel_de_read(display,
1727 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1728 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1729 			pipe_config->enable_psr2_sel_fetch = true;
1730 	}
1731 
1732 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1733 
1734 	if (DISPLAY_VER(display) >= 12) {
1735 		val = intel_de_read(display,
1736 				    TRANS_EXITLINE(display, cpu_transcoder));
1737 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1738 	}
1739 unlock:
1740 	mutex_unlock(&intel_dp->psr.lock);
1741 }
1742 
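/*
 * Activate the already configured PSR1/PSR2/Panel Replay mode on the source
 * hardware. Caller must hold psr.lock and PSR must not be active yet.
 */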
1743 static void intel_psr_activate(struct intel_dp *intel_dp)
1744 {
1745 	struct intel_display *display = to_intel_display(intel_dp);
1746 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1747 
1748 	drm_WARN_ON(display->drm,
1749 		    transcoder_has_psr2(display, cpu_transcoder) &&
1750 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1751 
1752 	drm_WARN_ON(display->drm,
1753 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1754 
1755 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1756 
1757 	lockdep_assert_held(&intel_dp->psr.lock);
1758 
	/* psr1, psr2 and panel-replay are mutually exclusive. */
1760 	if (intel_dp->psr.panel_replay_enabled)
1761 		dg2_activate_panel_replay(intel_dp);
1762 	else if (intel_dp->psr.sel_update_enabled)
1763 		hsw_activate_psr2(intel_dp);
1764 	else
1765 		hsw_activate_psr1(intel_dp);
1766 
1767 	intel_dp->psr.active = true;
1768 }
1769 
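/* Per-pipe LATENCY_REPORTING_REMOVED bit for Wa_16013835468/Wa_14015648006 */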
1770 static u32 wa_16013835468_bit_get(struct intel_dp *intel_dp)
1771 {
1772 	switch (intel_dp->psr.pipe) {
1773 	case PIPE_A:
1774 		return LATENCY_REPORTING_REMOVED_PIPE_A;
1775 	case PIPE_B:
1776 		return LATENCY_REPORTING_REMOVED_PIPE_B;
1777 	case PIPE_C:
1778 		return LATENCY_REPORTING_REMOVED_PIPE_C;
1779 	case PIPE_D:
1780 		return LATENCY_REPORTING_REMOVED_PIPE_D;
1781 	default:
1782 		MISSING_CASE(intel_dp->psr.pipe);
1783 		return 0;
1784 	}
1785 }
1786 
1787 /*
1788  * Wa_16013835468
1789  * Wa_14015648006
1790  */
1791 static void wm_optimization_wa(struct intel_dp *intel_dp,
1792 			       const struct intel_crtc_state *crtc_state)
1793 {
1794 	struct intel_display *display = to_intel_display(intel_dp);
1795 	bool set_wa_bit = false;
1796 
1797 	/* Wa_14015648006 */
1798 	if (IS_DISPLAY_VER(display, 11, 14))
1799 		set_wa_bit |= crtc_state->wm_level_disabled;
1800 
1801 	/* Wa_16013835468 */
1802 	if (DISPLAY_VER(display) == 12)
1803 		set_wa_bit |= crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1804 			crtc_state->hw.adjusted_mode.crtc_vdisplay;
1805 
1806 	if (set_wa_bit)
1807 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1808 			     0, wa_16013835468_bit_get(intel_dp));
1809 	else
1810 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1811 			     wa_16013835468_bit_get(intel_dp), 0);
1812 }
1813 
1814 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1815 				    const struct intel_crtc_state *crtc_state)
1816 {
1817 	struct intel_display *display = to_intel_display(intel_dp);
1818 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1819 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1820 	u32 mask = 0;
1821 
	/*
	 * Only HSW and BDW have PSR AUX registers that need to be set up.
	 * SKL+ use hardcoded values for PSR AUX transactions.
	 */
1826 	if (DISPLAY_VER(display) < 9)
1827 		hsw_psr_setup_aux(intel_dp);
1828 
	/*
	 * Per spec: avoid continuous PSR exit by masking MEMUP and HPD.
	 * Also mask LPSP to avoid a dependency on other drivers that might
	 * block runtime_pm, besides preventing other HW tracking issues,
	 * now that we can rely on frontbuffer tracking.
	 *
	 * From bspec prior to LunarLake:
	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used
	 * in panel replay mode.
	 *
	 * From bspec for LunarLake and beyond:
	 * Panel Replay on DP: no bits are applicable
	 * Panel Replay on eDP: all bits are applicable
	 */
1843 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1844 		mask = EDP_PSR_DEBUG_MASK_HPD;
1845 
1846 	if (intel_dp_is_edp(intel_dp)) {
1847 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1848 
1849 		/*
1850 		 * For some unknown reason on HSW non-ULT (or at least on
1851 		 * Dell Latitude E6540) external displays start to flicker
1852 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1853 		 * higher than should be possible with an external display.
1854 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1855 		 * when external displays are active.
1856 		 */
1857 		if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv))
1858 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1859 
1860 		if (DISPLAY_VER(display) < 20)
1861 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1862 
1863 		/*
1864 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1865 		 * registers in order to keep the CURSURFLIVE tricks working :(
1866 		 */
1867 		if (IS_DISPLAY_VER(display, 9, 10))
1868 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1869 
1870 		/* allow PSR with sprite enabled */
1871 		if (IS_HASWELL(dev_priv))
1872 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1873 	}
1874 
1875 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1876 
1877 	psr_irq_control(intel_dp);
1878 
1879 	/*
1880 	 * TODO: if future platforms supports DC3CO in more than one
1881 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1882 	 */
1883 	if (intel_dp->psr.dc3co_exitline)
1884 		intel_de_rmw(display,
1885 			     TRANS_EXITLINE(display, cpu_transcoder),
1886 			     EXITLINE_MASK,
1887 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1888 
1889 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1890 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1891 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1892 			     IGNORE_PSR2_HW_TRACKING : 0);
1893 
1894 	if (intel_dp_is_edp(intel_dp))
1895 		intel_alpm_configure(intel_dp, crtc_state);
1896 
1897 	/*
1898 	 * Wa_16013835468
1899 	 * Wa_14015648006
1900 	 */
1901 	wm_optimization_wa(intel_dp, crtc_state);
1902 
1903 	if (intel_dp->psr.sel_update_enabled) {
1904 		if (DISPLAY_VER(display) == 9)
1905 			intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder), 0,
1906 				     PSR2_VSC_ENABLE_PROG_HEADER |
1907 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1908 
1909 		/*
1910 		 * Wa_16014451276:adlp,mtl[a0,b0]
		 * All supported adlp panels have 1-based X granularity; this may
		 * cause issues if unsupported panels are used.
1913 		 */
1914 		if (!intel_dp->psr.panel_replay_enabled &&
1915 		    (IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
1916 		     IS_ALDERLAKE_P(dev_priv)))
1917 			intel_de_rmw(display, hsw_chicken_trans_reg(dev_priv, cpu_transcoder),
1918 				     0, ADLP_1_BASED_X_GRANULARITY);
1919 
1920 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1921 		if (!intel_dp->psr.panel_replay_enabled &&
1922 		    IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
1923 			intel_de_rmw(display,
1924 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1925 				     0,
1926 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1927 		else if (IS_ALDERLAKE_P(dev_priv))
1928 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1929 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1930 	}
1931 }
1932 
1933 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1934 {
1935 	struct intel_display *display = to_intel_display(intel_dp);
1936 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1937 	u32 val;
1938 
1939 	if (intel_dp->psr.panel_replay_enabled)
1940 		goto no_err;
1941 
1942 	/*
1943 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1944 	 * will still keep the error set even after the reset done in the
1945 	 * irq_preinstall and irq_uninstall hooks.
	 * Enabling PSR in this situation causes the screen to freeze the
	 * first time the PSR HW tries to activate, so let's keep PSR
	 * disabled to avoid any rendering problems.
1949 	 */
1950 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1951 	val &= psr_irq_psr_error_bit_get(intel_dp);
1952 	if (val) {
1953 		intel_dp->psr.sink_not_reliable = true;
1954 		drm_dbg_kms(display->drm,
1955 			    "PSR interruption error set, not enabling PSR\n");
1956 		return false;
1957 	}
1958 
1959 no_err:
1960 	return true;
1961 }
1962 
1963 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1964 				    const struct intel_crtc_state *crtc_state)
1965 {
1966 	struct intel_display *display = to_intel_display(intel_dp);
1967 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1968 	u32 val;
1969 
1970 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1971 
1972 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1973 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1974 	intel_dp->psr.busy_frontbuffer_bits = 0;
1975 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1976 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1977 	/* DC5/DC6 requires at least 6 idle frames */
1978 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1979 	intel_dp->psr.dc3co_exit_delay = val;
1980 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1981 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1982 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1983 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1984 	intel_dp->psr.req_psr2_sdp_prior_scanline =
1985 		crtc_state->req_psr2_sdp_prior_scanline;
1986 
1987 	if (!psr_interrupt_error_check(intel_dp))
1988 		return;
1989 
1990 	if (intel_dp->psr.panel_replay_enabled) {
1991 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
1992 	} else {
1993 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
1994 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
1995 
1996 		/*
		 * Panel Replay has to be enabled before link training: here the
		 * sink is enabled only for PSR.
1999 		 */
2000 		intel_psr_enable_sink(intel_dp, crtc_state);
2001 	}
2002 
2003 	if (intel_dp_is_edp(intel_dp))
2004 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2005 
2006 	intel_psr_enable_source(intel_dp, crtc_state);
2007 	intel_dp->psr.enabled = true;
2008 	intel_dp->psr.paused = false;
2009 
2010 	intel_psr_activate(intel_dp);
2011 }
2012 
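/*
 * Deactivate the currently active PSR1/PSR2/Panel Replay mode on the source.
 * This only clears the enable bit; waiting for the hardware to go idle is
 * done separately in intel_psr_wait_exit_locked().
 */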
2013 static void intel_psr_exit(struct intel_dp *intel_dp)
2014 {
2015 	struct intel_display *display = to_intel_display(intel_dp);
2016 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2017 	u32 val;
2018 
2019 	if (!intel_dp->psr.active) {
2020 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2021 			val = intel_de_read(display,
2022 					    EDP_PSR2_CTL(display, cpu_transcoder));
2023 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2024 		}
2025 
2026 		val = intel_de_read(display,
2027 				    psr_ctl_reg(display, cpu_transcoder));
2028 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2029 
2030 		return;
2031 	}
2032 
2033 	if (intel_dp->psr.panel_replay_enabled) {
2034 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2035 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2036 	} else if (intel_dp->psr.sel_update_enabled) {
2037 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2038 
2039 		val = intel_de_rmw(display,
2040 				   EDP_PSR2_CTL(display, cpu_transcoder),
2041 				   EDP_PSR2_ENABLE, 0);
2042 
2043 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2044 	} else {
2045 		val = intel_de_rmw(display,
2046 				   psr_ctl_reg(display, cpu_transcoder),
2047 				   EDP_PSR_ENABLE, 0);
2048 
2049 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2050 	}
2051 	intel_dp->psr.active = false;
2052 }
2053 
2054 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2055 {
2056 	struct intel_display *display = to_intel_display(intel_dp);
2057 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2058 	i915_reg_t psr_status;
2059 	u32 psr_status_mask;
2060 
2061 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2062 					  intel_dp->psr.panel_replay_enabled)) {
2063 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2064 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2065 	} else {
2066 		psr_status = psr_status_reg(display, cpu_transcoder);
2067 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2068 	}
2069 
2070 	/* Wait till PSR is idle */
2071 	if (intel_de_wait_for_clear(display, psr_status,
2072 				    psr_status_mask, 2000))
2073 		drm_err(display->drm, "Timed out waiting PSR idle state\n");
2074 }
2075 
2076 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2077 {
2078 	struct intel_display *display = to_intel_display(intel_dp);
2079 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2080 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2081 
2082 	lockdep_assert_held(&intel_dp->psr.lock);
2083 
2084 	if (!intel_dp->psr.enabled)
2085 		return;
2086 
2087 	if (intel_dp->psr.panel_replay_enabled)
2088 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2089 	else
2090 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2091 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2092 
2093 	intel_psr_exit(intel_dp);
2094 	intel_psr_wait_exit_locked(intel_dp);
2095 
2096 	/*
2097 	 * Wa_16013835468
2098 	 * Wa_14015648006
2099 	 */
2100 	if (DISPLAY_VER(display) >= 11)
2101 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2102 			     wa_16013835468_bit_get(intel_dp), 0);
2103 
2104 	if (intel_dp->psr.sel_update_enabled) {
2105 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2106 		if (!intel_dp->psr.panel_replay_enabled &&
2107 		    IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
2108 			intel_de_rmw(display,
2109 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2110 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2111 		else if (IS_ALDERLAKE_P(dev_priv))
2112 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2113 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2114 	}
2115 
2116 	if (intel_dp_is_edp(intel_dp))
2117 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2118 
	/* Panel Replay on eDP always uses AUX-less ALPM. */
2120 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2121 		intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2122 			     ALPM_CTL_ALPM_ENABLE |
2123 			     ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2124 
2125 		intel_de_rmw(display,
2126 			     PORT_ALPM_CTL(cpu_transcoder),
2127 			     PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2128 	}
2129 
2130 	/* Disable PSR on Sink */
2131 	if (!intel_dp->psr.panel_replay_enabled) {
2132 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2133 
2134 		if (intel_dp->psr.sel_update_enabled)
2135 			drm_dp_dpcd_writeb(&intel_dp->aux,
2136 					   DP_RECEIVER_ALPM_CONFIG, 0);
2137 	}
2138 
2139 	intel_dp->psr.enabled = false;
2140 	intel_dp->psr.panel_replay_enabled = false;
2141 	intel_dp->psr.sel_update_enabled = false;
2142 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2143 	intel_dp->psr.su_region_et_enabled = false;
2144 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2145 }
2146 
2147 /**
2148  * intel_psr_disable - Disable PSR
2149  * @intel_dp: Intel DP
2150  * @old_crtc_state: old CRTC state
2151  *
 * This function needs to be called before the pipe is disabled.
2153  */
2154 void intel_psr_disable(struct intel_dp *intel_dp,
2155 		       const struct intel_crtc_state *old_crtc_state)
2156 {
2157 	struct intel_display *display = to_intel_display(intel_dp);
2158 
2159 	if (!old_crtc_state->has_psr)
2160 		return;
2161 
2162 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp)))
2163 		return;
2164 
2165 	mutex_lock(&intel_dp->psr.lock);
2166 
2167 	intel_psr_disable_locked(intel_dp);
2168 
2169 	mutex_unlock(&intel_dp->psr.lock);
2170 	cancel_work_sync(&intel_dp->psr.work);
2171 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2172 }
2173 
2174 /**
2175  * intel_psr_pause - Pause PSR
2176  * @intel_dp: Intel DP
2177  *
 * This function needs to be called after enabling PSR.
2179  */
2180 void intel_psr_pause(struct intel_dp *intel_dp)
2181 {
2182 	struct intel_display *display = to_intel_display(intel_dp);
2183 	struct intel_psr *psr = &intel_dp->psr;
2184 
2185 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2186 		return;
2187 
2188 	mutex_lock(&psr->lock);
2189 
2190 	if (!psr->enabled) {
2191 		mutex_unlock(&psr->lock);
2192 		return;
2193 	}
2194 
2195 	/* If we ever hit this, we will need to add refcount to pause/resume */
2196 	drm_WARN_ON(display->drm, psr->paused);
2197 
2198 	intel_psr_exit(intel_dp);
2199 	intel_psr_wait_exit_locked(intel_dp);
2200 	psr->paused = true;
2201 
2202 	mutex_unlock(&psr->lock);
2203 
2204 	cancel_work_sync(&psr->work);
2205 	cancel_delayed_work_sync(&psr->dc3co_work);
2206 }
2207 
2208 /**
2209  * intel_psr_resume - Resume PSR
2210  * @intel_dp: Intel DP
2211  *
 * This function needs to be called after pausing PSR.
2213  */
2214 void intel_psr_resume(struct intel_dp *intel_dp)
2215 {
2216 	struct intel_psr *psr = &intel_dp->psr;
2217 
2218 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2219 		return;
2220 
2221 	mutex_lock(&psr->lock);
2222 
2223 	if (!psr->paused)
2224 		goto unlock;
2225 
2226 	psr->paused = false;
2227 	intel_psr_activate(intel_dp);
2228 
2229 unlock:
2230 	mutex_unlock(&psr->lock);
2231 }
2232 
2233 /**
 * intel_psr_needs_block_dc_vblank - Check if blocking DC entry is needed
 * @crtc_state: CRTC state
 *
 * We need to block DC6 entry in case of Panel Replay, as enabling the vblank
 * interrupt doesn't prevent it when Panel Replay is used. Panel Replay
 * switches the main link off on DC entry, which means vblank interrupts are
 * not fired and is a problem if user-space is polling for vblank events.
2241  */
2242 bool intel_psr_needs_block_dc_vblank(const struct intel_crtc_state *crtc_state)
2243 {
2244 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2245 	struct intel_encoder *encoder;
2246 
2247 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2248 		struct intel_dp *intel_dp;
2249 
2250 		if (!intel_encoder_is_dp(encoder))
2251 			continue;
2252 
2253 		intel_dp = enc_to_intel_dp(encoder);
2254 
2255 		if (intel_dp_is_edp(intel_dp) &&
2256 		    CAN_PANEL_REPLAY(intel_dp))
2257 			return true;
2258 	}
2259 
2260 	return false;
2261 }
2262 
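/*
 * The PSR2_MAN_TRK_CTL bit layout differs on ADL-P and display version 14+;
 * these helpers return the bits appropriate for the running platform.
 */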
2263 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2264 {
2265 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2266 
2267 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 :
2268 		PSR2_MAN_TRK_CTL_ENABLE;
2269 }
2270 
2271 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2272 {
2273 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2274 
2275 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2276 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2277 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2278 }
2279 
2280 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2281 {
2282 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2283 
2284 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2285 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2286 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2287 }
2288 
2289 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2290 {
2291 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2292 
2293 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2294 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2295 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2296 }
2297 
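/*
 * Force a PSR exit through HW tracking instead of disabling and re-enabling
 * PSR; with selective fetch enabled a single + continuous full frame update
 * is requested first.
 */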
2298 static void psr_force_hw_tracking_exit(struct intel_dp *intel_dp)
2299 {
2300 	struct intel_display *display = to_intel_display(intel_dp);
2301 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2302 
2303 	if (intel_dp->psr.psr2_sel_fetch_enabled)
2304 		intel_de_write(display,
2305 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2306 			       man_trk_ctl_enable_bit_get(display) |
2307 			       man_trk_ctl_partial_frame_bit_get(display) |
2308 			       man_trk_ctl_single_full_frame_bit_get(display) |
2309 			       man_trk_ctl_continuos_full_frame(display));
2310 
	/*
	 * Display WA #0884: skl+
	 * This documented WA for bxt can be safely applied broadly, so we
	 * can force HW tracking to exit PSR instead of disabling and
	 * re-enabling it. The workaround tells us to write 0 to
	 * CUR_SURFLIVE_A, but it makes more sense to write to the currently
	 * active pipe.
	 *
	 * This workaround is not documented for platforms with display 10 or
	 * newer, but testing proved that it works up to display 13; anything
	 * newer than that will need further testing.
	 */
2324 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2325 }
2326 
2327 void intel_psr2_program_trans_man_trk_ctl(const struct intel_crtc_state *crtc_state)
2328 {
2329 	struct intel_display *display = to_intel_display(crtc_state);
2330 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2331 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2332 	struct intel_encoder *encoder;
2333 
2334 	if (!crtc_state->enable_psr2_sel_fetch)
2335 		return;
2336 
2337 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2338 					     crtc_state->uapi.encoder_mask) {
2339 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2340 
2341 		lockdep_assert_held(&intel_dp->psr.lock);
2342 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
2343 			return;
2344 		break;
2345 	}
2346 
2347 	intel_de_write(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2348 		       crtc_state->psr2_man_track_ctl);
2349 
2350 	if (!crtc_state->enable_psr2_su_region_et)
2351 		return;
2352 
2353 	intel_de_write(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2354 		       crtc_state->pipe_srcsz_early_tpt);
2355 }
2356 
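/*
 * Translate the computed SU area into a PSR2_MAN_TRK_CTL value: single +
 * continuous full frame for full updates, otherwise the SU region start/end
 * addresses (in 4-line blocks on pre-ADL-P hardware).
 */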
2357 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2358 				  bool full_update)
2359 {
2360 	struct intel_display *display = to_intel_display(crtc_state);
2361 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2362 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2363 	u32 val = man_trk_ctl_enable_bit_get(display);
2364 
2365 	/* SF partial frame enable has to be set even on full update */
2366 	val |= man_trk_ctl_partial_frame_bit_get(display);
2367 
2368 	if (full_update) {
2369 		val |= man_trk_ctl_single_full_frame_bit_get(display);
2370 		val |= man_trk_ctl_continuos_full_frame(display);
2371 		goto exit;
2372 	}
2373 
2374 	if (crtc_state->psr2_su_area.y1 == -1)
2375 		goto exit;
2376 
2377 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) {
2378 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2379 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2380 	} else {
2381 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2382 			    crtc_state->psr2_su_area.y1 % 4 ||
2383 			    crtc_state->psr2_su_area.y2 % 4);
2384 
2385 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2386 			crtc_state->psr2_su_area.y1 / 4 + 1);
2387 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2388 			crtc_state->psr2_su_area.y2 / 4 + 1);
2389 	}
2390 exit:
2391 	crtc_state->psr2_man_track_ctl = val;
2392 }
2393 
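/*
 * Early transport needs PIPE_SRCSZ_ERLY_TPT programmed with the SU region
 * size; return 0 when early transport is not used or a full update is done.
 */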
2394 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2395 					  bool full_update)
2396 {
2397 	int width, height;
2398 
2399 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2400 		return 0;
2401 
2402 	width = drm_rect_width(&crtc_state->psr2_su_area);
2403 	height = drm_rect_height(&crtc_state->psr2_su_area);
2404 
2405 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2406 }
2407 
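/*
 * Extend overlap_damage_area vertically so that it also covers damage_area,
 * after clipping the latter against the pipe source rectangle.
 */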
2408 static void clip_area_update(struct drm_rect *overlap_damage_area,
2409 			     struct drm_rect *damage_area,
2410 			     struct drm_rect *pipe_src)
2411 {
2412 	if (!drm_rect_intersect(damage_area, pipe_src))
2413 		return;
2414 
2415 	if (overlap_damage_area->y1 == -1) {
2416 		overlap_damage_area->y1 = damage_area->y1;
2417 		overlap_damage_area->y2 = damage_area->y2;
2418 		return;
2419 	}
2420 
2421 	if (damage_area->y1 < overlap_damage_area->y1)
2422 		overlap_damage_area->y1 = damage_area->y1;
2423 
2424 	if (damage_area->y2 > overlap_damage_area->y2)
2425 		overlap_damage_area->y2 = damage_area->y2;
2426 }
2427 
2428 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2429 {
2430 	struct intel_display *display = to_intel_display(crtc_state);
2431 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2432 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2433 	u16 y_alignment;
2434 
2435 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2436 	if (crtc_state->dsc.compression_enable &&
2437 	    (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14))
2438 		y_alignment = vdsc_cfg->slice_height;
2439 	else
2440 		y_alignment = crtc_state->su_y_granularity;
2441 
2442 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2443 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2444 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2445 						y_alignment) + 1) * y_alignment;
2446 }
2447 
/*
 * When early transport is in use we need to extend the SU area to fully
 * cover the cursor when the cursor is within the SU area.
 */
2452 static void
2453 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2454 				  struct intel_crtc *crtc,
2455 				  bool *cursor_in_su_area)
2456 {
2457 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2458 	struct intel_plane_state *new_plane_state;
2459 	struct intel_plane *plane;
2460 	int i;
2461 
2462 	if (!crtc_state->enable_psr2_su_region_et)
2463 		return;
2464 
2465 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2466 		struct drm_rect inter;
2467 
2468 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2469 			continue;
2470 
2471 		if (plane->id != PLANE_CURSOR)
2472 			continue;
2473 
2474 		if (!new_plane_state->uapi.visible)
2475 			continue;
2476 
2477 		inter = crtc_state->psr2_su_area;
2478 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2479 			continue;
2480 
2481 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2482 				 &crtc_state->pipe_src);
2483 		*cursor_in_su_area = true;
2484 	}
2485 }
2486 
/*
 * TODO: It is not clear how to handle planes with a negative position;
 * also, planes are not updated if they have a negative X position, so for
 * now do a full update in these cases.
 *
 * Plane scaling and rotation are not supported by selective fetch, and both
 * properties can change without a modeset, so they need to be checked at
 * every atomic commit.
 */
2496 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2497 {
2498 	if (plane_state->uapi.dst.y1 < 0 ||
2499 	    plane_state->uapi.dst.x1 < 0 ||
2500 	    plane_state->scaler_id >= 0 ||
2501 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2502 		return false;
2503 
2504 	return true;
2505 }
2506 
/*
 * Check for pipe properties that are not supported by selective fetch.
 *
 * TODO: pipe scaling causes a modeset, but skl_update_scaler_crtc() is
 * executed after intel_psr_compute_config(), so for now keep PSR2 selective
 * fetch enabled and go down the full update path.
 */
2514 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2515 {
2516 	if (crtc_state->scaler_state.scaler_id >= 0)
2517 		return false;
2518 
2519 	return true;
2520 }
2521 
2522 /* Wa 14019834836 */
2523 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2524 {
2525 	struct intel_display *display = to_intel_display(crtc_state);
2526 	struct intel_encoder *encoder;
2527 	int hactive_limit;
2528 
2529 	if (crtc_state->psr2_su_area.y1 != 0 ||
2530 	    crtc_state->psr2_su_area.y2 != 0)
2531 		return;
2532 
2533 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2534 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2535 	else
2536 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2537 
2538 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2539 		return;
2540 
2541 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2542 					     crtc_state->uapi.encoder_mask) {
2543 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2544 
2545 		if (!intel_dp_is_edp(intel_dp) &&
2546 		    intel_dp->psr.panel_replay_enabled &&
2547 		    intel_dp->psr.sel_update_enabled) {
2548 			crtc_state->psr2_su_area.y2++;
2549 			return;
2550 		}
2551 	}
2552 }
2553 
2554 static void
2555 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2556 {
2557 	struct intel_display *display = to_intel_display(crtc_state);
2558 	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2559 
2560 	/* Wa_14014971492 */
2561 	if (!crtc_state->has_panel_replay &&
2562 	    ((IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
2563 	      IS_ALDERLAKE_P(i915) || IS_TIGERLAKE(i915))) &&
2564 	    crtc_state->splitter.enable)
2565 		crtc_state->psr2_su_area.y1 = 0;
2566 
2567 	/* Wa 14019834836 */
2568 	if (DISPLAY_VER(display) == 30)
2569 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2570 }
2571 
2572 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2573 				struct intel_crtc *crtc)
2574 {
2575 	struct intel_display *display = to_intel_display(state);
2576 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2577 	struct intel_plane_state *new_plane_state, *old_plane_state;
2578 	struct intel_plane *plane;
2579 	bool full_update = false, cursor_in_su_area = false;
2580 	int i, ret;
2581 
2582 	if (!crtc_state->enable_psr2_sel_fetch)
2583 		return 0;
2584 
2585 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2586 		full_update = true;
2587 		goto skip_sel_fetch_set_loop;
2588 	}
2589 
2590 	crtc_state->psr2_su_area.x1 = 0;
2591 	crtc_state->psr2_su_area.y1 = -1;
2592 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2593 	crtc_state->psr2_su_area.y2 = -1;
2594 
	/*
	 * Calculate the minimal selective fetch area of each plane and the
	 * pipe damaged area.
	 * In the next loop the plane selective fetch area will actually be
	 * set using the whole pipe damaged area.
	 */
2601 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2602 					     new_plane_state, i) {
2603 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2604 						      .x2 = INT_MAX };
2605 
2606 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2607 			continue;
2608 
2609 		if (!new_plane_state->uapi.visible &&
2610 		    !old_plane_state->uapi.visible)
2611 			continue;
2612 
2613 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2614 			full_update = true;
2615 			break;
2616 		}
2617 
		/*
		 * If the visibility changed or the plane moved, mark the whole
		 * plane area as damaged, as it needs a complete redraw in both
		 * the old and new positions.
		 */
2623 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2624 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2625 				     &old_plane_state->uapi.dst)) {
2626 			if (old_plane_state->uapi.visible) {
2627 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2628 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2629 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2630 						 &crtc_state->pipe_src);
2631 			}
2632 
2633 			if (new_plane_state->uapi.visible) {
2634 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2635 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2636 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2637 						 &crtc_state->pipe_src);
2638 			}
2639 			continue;
2640 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2641 			/* If alpha changed mark the whole plane area as damaged */
2642 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2643 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2644 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2645 					 &crtc_state->pipe_src);
2646 			continue;
2647 		}
2648 
2649 		src = drm_plane_state_src(&new_plane_state->uapi);
2650 		drm_rect_fp_to_int(&src, &src);
2651 
2652 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2653 						     &new_plane_state->uapi, &damaged_area))
2654 			continue;
2655 
2656 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2657 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2658 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2659 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2660 
2661 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2662 	}
2663 
2664 	/*
2665 	 * TODO: For now we are just using full update in case
2666 	 * selective fetch area calculation fails. To optimize this we
2667 	 * should identify cases where this happens and fix the area
2668 	 * calculation for those.
2669 	 */
2670 	if (crtc_state->psr2_su_area.y1 == -1) {
2671 		drm_info_once(display->drm,
2672 			      "Selective fetch area calculation failed in pipe %c\n",
2673 			      pipe_name(crtc->pipe));
2674 		full_update = true;
2675 	}
2676 
2677 	if (full_update)
2678 		goto skip_sel_fetch_set_loop;
2679 
2680 	intel_psr_apply_su_area_workarounds(crtc_state);
2681 
2682 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2683 	if (ret)
2684 		return ret;
2685 
	/*
	 * Adjust the SU area to cover the cursor fully as necessary (early
	 * transport). This needs to be done after
	 * drm_atomic_add_affected_planes() to ensure a visible cursor is added
	 * to the affected planes even when the cursor itself is not updated.
	 */
2692 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2693 
2694 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2695 
	/*
	 * Now that we have the pipe damaged area, check if it intersects with
	 * every plane; if it does, set the plane selective fetch area.
	 */
2700 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2701 					     new_plane_state, i) {
2702 		struct drm_rect *sel_fetch_area, inter;
2703 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2704 
2705 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2706 		    !new_plane_state->uapi.visible)
2707 			continue;
2708 
2709 		inter = crtc_state->psr2_su_area;
2710 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2711 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2712 			sel_fetch_area->y1 = -1;
2713 			sel_fetch_area->y2 = -1;
2714 			/*
2715 			 * if plane sel fetch was previously enabled ->
2716 			 * disable it
2717 			 */
2718 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2719 				crtc_state->update_planes |= BIT(plane->id);
2720 
2721 			continue;
2722 		}
2723 
2724 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2725 			full_update = true;
2726 			break;
2727 		}
2728 
2729 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2730 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2731 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2732 		crtc_state->update_planes |= BIT(plane->id);
2733 
		/*
		 * Sel_fetch_area is calculated for the UV plane. Use the
		 * same area for the Y plane as well.
		 */
2738 		if (linked) {
2739 			struct intel_plane_state *linked_new_plane_state;
2740 			struct drm_rect *linked_sel_fetch_area;
2741 
2742 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2743 			if (IS_ERR(linked_new_plane_state))
2744 				return PTR_ERR(linked_new_plane_state);
2745 
2746 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2747 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2748 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2749 			crtc_state->update_planes |= BIT(linked->id);
2750 		}
2751 	}
2752 
2753 skip_sel_fetch_set_loop:
2754 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2755 	crtc_state->pipe_srcsz_early_tpt =
2756 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2757 	return 0;
2758 }
2759 
2760 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2761 				struct intel_crtc *crtc)
2762 {
2763 	struct intel_display *display = to_intel_display(state);
2764 	struct drm_i915_private *i915 = to_i915(state->base.dev);
2765 	const struct intel_crtc_state *old_crtc_state =
2766 		intel_atomic_get_old_crtc_state(state, crtc);
2767 	const struct intel_crtc_state *new_crtc_state =
2768 		intel_atomic_get_new_crtc_state(state, crtc);
2769 	struct intel_encoder *encoder;
2770 
2771 	if (!HAS_PSR(display))
2772 		return;
2773 
2774 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2775 					     old_crtc_state->uapi.encoder_mask) {
2776 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2777 		struct intel_psr *psr = &intel_dp->psr;
2778 		bool needs_to_disable = false;
2779 
2780 		mutex_lock(&psr->lock);
2781 
2782 		/*
2783 		 * Reasons to disable:
2784 		 * - PSR disabled in new state
2785 		 * - All planes will go inactive
2786 		 * - Changing between PSR versions
2787 		 * - Region Early Transport changing
2788 		 * - Display WA #1136: skl, bxt
2789 		 */
2790 		needs_to_disable |= intel_crtc_needs_modeset(new_crtc_state);
2791 		needs_to_disable |= !new_crtc_state->has_psr;
2792 		needs_to_disable |= !new_crtc_state->active_planes;
2793 		needs_to_disable |= new_crtc_state->has_sel_update != psr->sel_update_enabled;
2794 		needs_to_disable |= new_crtc_state->enable_psr2_su_region_et !=
2795 			psr->su_region_et_enabled;
2796 		needs_to_disable |= DISPLAY_VER(i915) < 11 &&
2797 			new_crtc_state->wm_level_disabled;
2798 
2799 		if (psr->enabled && needs_to_disable)
2800 			intel_psr_disable_locked(intel_dp);
2801 		else if (psr->enabled && new_crtc_state->wm_level_disabled)
2802 			/* Wa_14015648006 */
2803 			wm_optimization_wa(intel_dp, new_crtc_state);
2804 
2805 		mutex_unlock(&psr->lock);
2806 	}
2807 }
2808 
2809 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2810 				 struct intel_crtc *crtc)
2811 {
2812 	struct intel_display *display = to_intel_display(state);
2813 	const struct intel_crtc_state *crtc_state =
2814 		intel_atomic_get_new_crtc_state(state, crtc);
2815 	struct intel_encoder *encoder;
2816 
2817 	if (!crtc_state->has_psr)
2818 		return;
2819 
2820 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2821 					     crtc_state->uapi.encoder_mask) {
2822 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2823 		struct intel_psr *psr = &intel_dp->psr;
2824 		bool keep_disabled = false;
2825 
2826 		mutex_lock(&psr->lock);
2827 
2828 		drm_WARN_ON(display->drm,
2829 			    psr->enabled && !crtc_state->active_planes);
2830 
2831 		keep_disabled |= psr->sink_not_reliable;
2832 		keep_disabled |= !crtc_state->active_planes;
2833 
2834 		/* Display WA #1136: skl, bxt */
2835 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2836 			crtc_state->wm_level_disabled;
2837 
2838 		if (!psr->enabled && !keep_disabled)
2839 			intel_psr_enable_locked(intel_dp, crtc_state);
2840 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2841 			/* Wa_14015648006 */
2842 			wm_optimization_wa(intel_dp, crtc_state);
2843 
2844 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2845 		if (crtc_state->crc_enabled && psr->enabled)
2846 			psr_force_hw_tracking_exit(intel_dp);
2847 
2848 		/*
		 * Clear possible busy bits in case we have an
		 * invalidate -> flip -> flush sequence.
2851 		 */
2852 		intel_dp->psr.busy_frontbuffer_bits = 0;
2853 
2854 		mutex_unlock(&psr->lock);
2855 	}
2856 }
2857 
2858 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2859 {
2860 	struct intel_display *display = to_intel_display(intel_dp);
2861 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2862 
2863 	/*
2864 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
	 * As all higher states have bit 4 of the PSR2 state set, we can just wait for
2866 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2867 	 */
2868 	return intel_de_wait_for_clear(display,
2869 				       EDP_PSR2_STATUS(display, cpu_transcoder),
2870 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2871 }
2872 
2873 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2874 {
2875 	struct intel_display *display = to_intel_display(intel_dp);
2876 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2877 
2878 	/*
2879 	 * From bspec: Panel Self Refresh (BDW+)
2880 	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2881 	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2882 	 * defensive enough to cover everything.
2883 	 */
2884 	return intel_de_wait_for_clear(display,
2885 				       psr_status_reg(display, cpu_transcoder),
2886 				       EDP_PSR_STATUS_STATE_MASK, 50);
2887 }
2888 
2889 /**
 * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
2891  * @new_crtc_state: new CRTC state
2892  *
2893  * This function is expected to be called from pipe_update_start() where it is
2894  * not expected to race with PSR enable or disable.
2895  */
2896 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2897 {
2898 	struct intel_display *display = to_intel_display(new_crtc_state);
2899 	struct intel_encoder *encoder;
2900 
2901 	if (!new_crtc_state->has_psr)
2902 		return;
2903 
2904 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2905 					     new_crtc_state->uapi.encoder_mask) {
2906 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2907 		int ret;
2908 
2909 		lockdep_assert_held(&intel_dp->psr.lock);
2910 
2911 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
2912 			continue;
2913 
2914 		if (intel_dp->psr.sel_update_enabled)
2915 			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2916 		else
2917 			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2918 
2919 		if (ret)
2920 			drm_err(display->drm,
2921 				"PSR wait timed out, atomic update may fail\n");
2922 	}
2923 }
2924 
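/*
 * Wait for the PSR status to go idle, dropping psr.lock around the wait.
 * Returns true only if the wait succeeded and PSR is still enabled once the
 * lock has been re-taken.
 */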
2925 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2926 {
2927 	struct intel_display *display = to_intel_display(intel_dp);
2928 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2929 	i915_reg_t reg;
2930 	u32 mask;
2931 	int err;
2932 
2933 	if (!intel_dp->psr.enabled)
2934 		return false;
2935 
2936 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2937 					  intel_dp->psr.panel_replay_enabled)) {
2938 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2939 		mask = EDP_PSR2_STATUS_STATE_MASK;
2940 	} else {
2941 		reg = psr_status_reg(display, cpu_transcoder);
2942 		mask = EDP_PSR_STATUS_STATE_MASK;
2943 	}
2944 
2945 	mutex_unlock(&intel_dp->psr.lock);
2946 
2947 	err = intel_de_wait_for_clear(display, reg, mask, 50);
2948 	if (err)
2949 		drm_err(display->drm,
2950 			"Timed out waiting for PSR Idle for re-enable\n");
2951 
2952 	/* After the unlocked wait, verify that PSR is still wanted! */
2953 	mutex_lock(&intel_dp->psr.lock);
2954 	return err == 0 && intel_dp->psr.enabled;
2955 }
2956 
2957 static int intel_psr_fastset_force(struct intel_display *display)
2958 {
2959 	struct drm_connector_list_iter conn_iter;
2960 	struct drm_modeset_acquire_ctx ctx;
2961 	struct drm_atomic_state *state;
2962 	struct drm_connector *conn;
2963 	int err = 0;
2964 
2965 	state = drm_atomic_state_alloc(display->drm);
2966 	if (!state)
2967 		return -ENOMEM;
2968 
2969 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
2970 
2971 	state->acquire_ctx = &ctx;
2972 	to_intel_atomic_state(state)->internal = true;
2973 
2974 retry:
2975 	drm_connector_list_iter_begin(display->drm, &conn_iter);
2976 	drm_for_each_connector_iter(conn, &conn_iter) {
2977 		struct drm_connector_state *conn_state;
2978 		struct drm_crtc_state *crtc_state;
2979 
2980 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
2981 			continue;
2982 
2983 		conn_state = drm_atomic_get_connector_state(state, conn);
2984 		if (IS_ERR(conn_state)) {
2985 			err = PTR_ERR(conn_state);
2986 			break;
2987 		}
2988 
2989 		if (!conn_state->crtc)
2990 			continue;
2991 
2992 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
2993 		if (IS_ERR(crtc_state)) {
2994 			err = PTR_ERR(crtc_state);
2995 			break;
2996 		}
2997 
2998 		/* Mark mode as changed to trigger a pipe->update() */
2999 		crtc_state->mode_changed = true;
3000 	}
3001 	drm_connector_list_iter_end(&conn_iter);
3002 
3003 	if (err == 0)
3004 		err = drm_atomic_commit(state);
3005 
3006 	if (err == -EDEADLK) {
3007 		drm_atomic_state_clear(state);
3008 		err = drm_modeset_backoff(&ctx);
3009 		if (!err)
3010 			goto retry;
3011 	}
3012 
3013 	drm_modeset_drop_locks(&ctx);
3014 	drm_modeset_acquire_fini(&ctx);
3015 	drm_atomic_state_put(state);
3016 
3017 	return err;
3018 }
3019 
3020 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3021 {
3022 	struct intel_display *display = to_intel_display(intel_dp);
3023 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3024 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3025 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3026 	u32 old_mode, old_disable_bits;
3027 	int ret;
3028 
3029 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3030 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3031 		    I915_PSR_DEBUG_MODE_MASK) ||
3032 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3033 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3034 		return -EINVAL;
3035 	}
3036 
3037 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3038 	if (ret)
3039 		return ret;
3040 
3041 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3042 	old_disable_bits = intel_dp->psr.debug &
3043 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3044 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3045 
3046 	intel_dp->psr.debug = val;
3047 
3048 	/*
3049 	 * Do it right away if it's already enabled, otherwise it will be done
3050 	 * when enabling the source.
3051 	 */
3052 	if (intel_dp->psr.enabled)
3053 		psr_irq_control(intel_dp);
3054 
3055 	mutex_unlock(&intel_dp->psr.lock);
3056 
3057 	if (old_mode != mode || old_disable_bits != disable_bits)
3058 		ret = intel_psr_fastset_force(display);
3059 
3060 	return ret;
3061 }
3062 
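/*
 * Handle an AUX error reported by the PSR interrupt: disable PSR, mark the
 * sink as not reliable and make sure the sink is awake.
 */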
3063 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3064 {
3065 	struct intel_psr *psr = &intel_dp->psr;
3066 
3067 	intel_psr_disable_locked(intel_dp);
3068 	psr->sink_not_reliable = true;
	/* let's make sure that the sink is awake */
3070 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3071 }
3072 
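/*
 * Deferred work that handles AUX errors and re-activates PSR once the
 * hardware has fully idled and no frontbuffer bits are busy.
 */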
3073 static void intel_psr_work(struct work_struct *work)
3074 {
3075 	struct intel_dp *intel_dp =
3076 		container_of(work, typeof(*intel_dp), psr.work);
3077 
3078 	mutex_lock(&intel_dp->psr.lock);
3079 
3080 	if (!intel_dp->psr.enabled)
3081 		goto unlock;
3082 
3083 	if (READ_ONCE(intel_dp->psr.irq_aux_error))
3084 		intel_psr_handle_irq(intel_dp);
3085 
	/*
	 * We have to make sure PSR is ready for re-enable, otherwise it stays
	 * disabled until the next full enable/disable cycle.
	 * PSR might take some time to get fully disabled and be ready for
	 * re-enable.
	 */
3092 	if (!__psr_wait_for_idle_locked(intel_dp))
3093 		goto unlock;
3094 
3095 	/*
3096 	 * The delayed work can race with an invalidate hence we need to
3097 	 * recheck. Since psr_flush first clears this and then reschedules we
3098 	 * won't ever miss a flush when bailing out here.
3099 	 */
3100 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3101 		goto unlock;
3102 
3103 	intel_psr_activate(intel_dp);
3104 unlock:
3105 	mutex_unlock(&intel_dp->psr.lock);
3106 }
3107 
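/*
 * On invalidate, switch selective fetch to continuous full frame updates
 * until the matching flush, or exit PSR completely when selective fetch is
 * not in use.
 */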
3108 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3109 {
3110 	struct intel_display *display = to_intel_display(intel_dp);
3111 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3112 
3113 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3114 		u32 val;
3115 
3116 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
			/* Send one update, otherwise lag is observed on screen */
3118 			intel_de_write(display,
3119 				       CURSURFLIVE(display, intel_dp->psr.pipe),
3120 				       0);
3121 			return;
3122 		}
3123 
3124 		val = man_trk_ctl_enable_bit_get(display) |
3125 		      man_trk_ctl_partial_frame_bit_get(display) |
3126 		      man_trk_ctl_continuos_full_frame(display);
3127 		intel_de_write(display,
3128 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3129 			       val);
3130 		intel_de_write(display,
3131 			       CURSURFLIVE(display, intel_dp->psr.pipe), 0);
3132 		intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3133 	} else {
3134 		intel_psr_exit(intel_dp);
3135 	}
3136 }
3137 
3138 /**
3139  * intel_psr_invalidate - Invalidate PSR
3140  * @display: display device
3141  * @frontbuffer_bits: frontbuffer plane tracking bits
3142  * @origin: which operation caused the invalidate
3143  *
3144  * Since the hardware frontbuffer tracking has gaps we need to integrate
3145  * with the software frontbuffer tracking. This function gets called every
3146  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3147  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3148  *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3150  */
3151 void intel_psr_invalidate(struct intel_display *display,
3152 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3153 {
3154 	struct intel_encoder *encoder;
3155 
3156 	if (origin == ORIGIN_FLIP)
3157 		return;
3158 
3159 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3160 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3161 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3162 
3163 		mutex_lock(&intel_dp->psr.lock);
3164 		if (!intel_dp->psr.enabled) {
3165 			mutex_unlock(&intel_dp->psr.lock);
3166 			continue;
3167 		}
3168 
3169 		pipe_frontbuffer_bits &=
3170 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3171 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3172 
3173 		if (pipe_frontbuffer_bits)
3174 			_psr_invalidate_handle(intel_dp);
3175 
3176 		mutex_unlock(&intel_dp->psr.lock);
3177 	}
3178 }

/*
 * When we completely rely on PSR2 S/W tracking in the future,
 * intel_psr_flush() will also invalidate and flush the PSR for the
 * ORIGIN_FLIP event, therefore tgl_dc3co_flush_locked() will need to be
 * changed accordingly.
 */
3185 static void
3186 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3187 		       enum fb_op_origin origin)
3188 {
3189 	struct intel_display *display = to_intel_display(intel_dp);
3190 	struct drm_i915_private *i915 = to_i915(display->drm);
3191 
3192 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3193 	    !intel_dp->psr.active)
3194 		return;
3195 
3196 	/*
3197 	 * Every frontbuffer flush (e.g. for a flip) pushes back the delayed
3198 	 * work; when the delayed work finally runs, the display has been idle.
3199 	 */
3200 	if (!(frontbuffer_bits &
3201 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3202 		return;
3203 
3204 	tgl_psr2_enable_dc3co(intel_dp);
3205 	mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3206 			 intel_dp->psr.dc3co_exit_delay);
3207 }
3208 
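/*
 * Flush handling: with selective fetch, drop from continuous full frame
 * back to selective updates once no frontbuffer bits are busy anymore;
 * if CFF was never enabled, a single full frame is requested instead.
 * Without selective fetch, force a HW tracking exit and, when PSR is
 * completely idle, queue the work that re-activates it.
 */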
3209 static void _psr_flush_handle(struct intel_dp *intel_dp)
3210 {
3211 	struct intel_display *display = to_intel_display(intel_dp);
3212 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3213 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3214 
3215 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3216 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3217 			/* can we turn CFF off? */
3218 			if (intel_dp->psr.busy_frontbuffer_bits == 0) {
3219 				u32 val = man_trk_ctl_enable_bit_get(display) |
3220 					man_trk_ctl_partial_frame_bit_get(display) |
3221 					man_trk_ctl_single_full_frame_bit_get(display) |
3222 					man_trk_ctl_continuos_full_frame(display);
3223 
3224 				/*
3225 				 * Set psr2_sel_fetch_cff_enabled to false to allow selective
3226 				 * updates. Still keep the CFF bit enabled, as we don't have a
3227 				 * proper SU configuration in case an update is sent for any
3228 				 * reason after the HW clears the SFF bit on the next vblank.
3229 				 */
3230 				intel_de_write(display,
3231 					       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3232 					       val);
3233 				intel_de_write(display,
3234 					       CURSURFLIVE(display, intel_dp->psr.pipe),
3235 					       0);
3236 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3237 			}
3238 		} else {
3239 			/*
3240 			 * continuous full frame is disabled, only a single full
3241 			 * frame is required
3242 			 */
3243 			psr_force_hw_tracking_exit(intel_dp);
3244 		}
3245 	} else {
3246 		psr_force_hw_tracking_exit(intel_dp);
3247 
3248 		if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3249 			queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3250 	}
3251 }
3252 
3253 /**
3254  * intel_psr_flush - Flush PSR
3255  * @display: display device
3256  * @frontbuffer_bits: frontbuffer plane tracking bits
3257  * @origin: which operation caused the flush
3258  *
3259  * Since the hardware frontbuffer tracking has gaps we need to integrate
3260  * with the software frontbuffer tracking. This function gets called every
3261  * time frontbuffer rendering has completed and flushed out to memory. PSR
3262  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3263  *
3264  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
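 *
 * As a rough illustration (again driven by the frontbuffer tracking code),
 * a flush ends up here along the lines of:
 *
 *	intel_psr_flush(display, frontbuffer_bits, ORIGIN_FLIP);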
3265  */
3266 void intel_psr_flush(struct intel_display *display,
3267 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3268 {
3269 	struct intel_encoder *encoder;
3270 
3271 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3272 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3273 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3274 
3275 		mutex_lock(&intel_dp->psr.lock);
3276 		if (!intel_dp->psr.enabled) {
3277 			mutex_unlock(&intel_dp->psr.lock);
3278 			continue;
3279 		}
3280 
3281 		pipe_frontbuffer_bits &=
3282 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3283 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3284 
3285 		/*
3286 		 * If PSR is paused by an explicit intel_psr_pause() call,
3287 		 * we have to ensure that PSR is not activated until
3288 		 * intel_psr_resume() is called.
3289 		 */
3290 		if (intel_dp->psr.paused)
3291 			goto unlock;
3292 
3293 		if (origin == ORIGIN_FLIP ||
3294 		    (origin == ORIGIN_CURSOR_UPDATE &&
3295 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3296 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3297 			goto unlock;
3298 		}
3299 
3300 		if (pipe_frontbuffer_bits == 0)
3301 			goto unlock;
3302 
3303 		/* By definition flush = invalidate + flush */
3304 		_psr_flush_handle(intel_dp);
3305 unlock:
3306 		mutex_unlock(&intel_dp->psr.lock);
3307 	}
3308 }
3309 
3310 /**
3311  * intel_psr_init - Init basic PSR work and mutex.
3312  * @intel_dp: Intel DP
3313  *
3314  * This function is called after the connector has been initialized
3315  * (connector initialization handles the connector capabilities) and
3316  * it sets up the basic PSR state for each DP encoder.
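 *
 * It is typically invoked from the DP connector initialization path
 * (e.g. intel_dp_init_connector()).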
3317  */
3318 void intel_psr_init(struct intel_dp *intel_dp)
3319 {
3320 	struct intel_display *display = to_intel_display(intel_dp);
3321 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3322 	struct intel_connector *connector = intel_dp->attached_connector;
3323 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3324 
3325 	if (!(HAS_PSR(display) || HAS_DP20(dev_priv)))
3326 		return;
3327 
3328 	/*
3329 	 * HSW spec explicitly says PSR is tied to port A.
3330 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3331 	 * but BDW, GEN9 and GEN11 have not been validated by the HW team on
3332 	 * any transcoder other than the eDP one.
3333 	 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3334 	 * so let's keep it hardcoded to PORT_A for those platforms.
3335 	 * GEN12, however, supports an instance of the PSR registers per transcoder.
3336 	 */
3337 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3338 		drm_dbg_kms(display->drm,
3339 			    "PSR condition failed: Port not supported\n");
3340 		return;
3341 	}
3342 
3343 	if ((HAS_DP20(dev_priv) && !intel_dp_is_edp(intel_dp)) ||
3344 	    DISPLAY_VER(display) >= 20)
3345 		intel_dp->psr.source_panel_replay_support = true;
3346 
3347 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3348 		intel_dp->psr.source_support = true;
3349 
3350 	/* Set link_standby vs. link_off defaults */
3351 	if (DISPLAY_VER(display) < 12)
3352 		/* For platforms up to TGL let's respect the VBT again */
3353 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3354 
3355 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3356 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3357 	mutex_init(&intel_dp->psr.lock);
3358 }
3359 
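/*
 * Read the sink status and error status over DPCD, using the Panel Replay
 * registers when Panel Replay is enabled and the PSR ones otherwise. The
 * returned status is already masked with DP_PSR_SINK_STATE_MASK.
 */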
3360 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3361 					   u8 *status, u8 *error_status)
3362 {
3363 	struct drm_dp_aux *aux = &intel_dp->aux;
3364 	int ret;
3365 	unsigned int offset;
3366 
3367 	offset = intel_dp->psr.panel_replay_enabled ?
3368 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3369 
3370 	ret = drm_dp_dpcd_readb(aux, offset, status);
3371 	if (ret != 1)
3372 		return ret;
3373 
3374 	offset = intel_dp->psr.panel_replay_enabled ?
3375 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3376 
3377 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3378 	if (ret != 1)
3379 		return ret;
3380 
3381 	*status = *status & DP_PSR_SINK_STATE_MASK;
3382 
3383 	return 0;
3384 }
3385 
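/*
 * With selective update enabled the sink may report an ALPM lock timeout
 * error; in that case disable PSR, mark the sink as not reliable and
 * clear the error.
 */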
3386 static void psr_alpm_check(struct intel_dp *intel_dp)
3387 {
3388 	struct intel_display *display = to_intel_display(intel_dp);
3389 	struct drm_dp_aux *aux = &intel_dp->aux;
3390 	struct intel_psr *psr = &intel_dp->psr;
3391 	u8 val;
3392 	int r;
3393 
3394 	if (!psr->sel_update_enabled)
3395 		return;
3396 
3397 	r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3398 	if (r != 1) {
3399 		drm_err(display->drm, "Error reading ALPM status\n");
3400 		return;
3401 	}
3402 
3403 	if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3404 		intel_psr_disable_locked(intel_dp);
3405 		psr->sink_not_reliable = true;
3406 		drm_dbg_kms(display->drm,
3407 			    "ALPM lock timeout error, disabling PSR\n");
3408 
3409 		/* Clearing error */
3410 		drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3411 	}
3412 }
3413 
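/*
 * If the sink signals a PSR capability change through DP_PSR_ESI, disable
 * PSR, mark the sink as not reliable and acknowledge the change.
 */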
3414 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3415 {
3416 	struct intel_display *display = to_intel_display(intel_dp);
3417 	struct intel_psr *psr = &intel_dp->psr;
3418 	u8 val;
3419 	int r;
3420 
3421 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3422 	if (r != 1) {
3423 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3424 		return;
3425 	}
3426 
3427 	if (val & DP_PSR_CAPS_CHANGE) {
3428 		intel_psr_disable_locked(intel_dp);
3429 		psr->sink_not_reliable = true;
3430 		drm_dbg_kms(display->drm,
3431 			    "Sink PSR capability changed, disabling PSR\n");
3432 
3433 		/* Clearing it */
3434 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3435 	}
3436 }
3437 
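/*
 * Short HPD pulse handler: check the sink PSR/Panel Replay status and
 * error bits, and disable PSR if the sink reports an internal error or
 * any of the errors handled below.
 */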
3438 /*
3439  * As the following error bits are common between PSR and Panel Replay:
3440  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3441  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3442  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3443  * this function relies on the PSR definitions.
3444  */
3445 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3446 {
3447 	struct intel_display *display = to_intel_display(intel_dp);
3448 	struct intel_psr *psr = &intel_dp->psr;
3449 	u8 status, error_status;
3450 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3451 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3452 			  DP_PSR_LINK_CRC_ERROR;
3453 
3454 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3455 		return;
3456 
3457 	mutex_lock(&psr->lock);
3458 
3459 	if (!psr->enabled)
3460 		goto exit;
3461 
3462 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3463 		drm_err(display->drm,
3464 			"Error reading PSR status or error status\n");
3465 		goto exit;
3466 	}
3467 
3468 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3469 	    (error_status & errors)) {
3470 		intel_psr_disable_locked(intel_dp);
3471 		psr->sink_not_reliable = true;
3472 	}
3473 
3474 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3475 	    !error_status)
3476 		drm_dbg_kms(display->drm,
3477 			    "PSR sink internal error, disabling PSR\n");
3478 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3479 		drm_dbg_kms(display->drm,
3480 			    "PSR RFB storage error, disabling PSR\n");
3481 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3482 		drm_dbg_kms(display->drm,
3483 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3484 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3485 		drm_dbg_kms(display->drm,
3486 			    "PSR Link CRC error, disabling PSR\n");
3487 
3488 	if (error_status & ~errors)
3489 		drm_err(display->drm,
3490 			"PSR_ERROR_STATUS unhandled errors %x\n",
3491 			error_status & ~errors);
3492 	/* clear status register */
3493 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3494 
3495 	if (!psr->panel_replay_enabled) {
3496 		psr_alpm_check(intel_dp);
3497 		psr_capability_changed_check(intel_dp);
3498 	}
3499 
3500 exit:
3501 	mutex_unlock(&psr->lock);
3502 }
3503 
3504 bool intel_psr_enabled(struct intel_dp *intel_dp)
3505 {
3506 	bool ret;
3507 
3508 	if (!CAN_PSR(intel_dp))
3509 		return false;
3510 
3511 	mutex_lock(&intel_dp->psr.lock);
3512 	ret = intel_dp->psr.enabled;
3513 	mutex_unlock(&intel_dp->psr.lock);
3514 
3515 	return ret;
3516 }
3517 
3518 /**
3519  * intel_psr_lock - grab PSR lock
3520  * @crtc_state: the crtc state
3521  *
3522  * This is initially meant to be used around the CRTC update, when
3523  * vblank sensitive registers are updated and we need to grab the lock
3524  * before it to avoid vblank evasion.
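 *
 * Illustrative usage around a pipe update (the real call sites are in the
 * pipe update start/end paths):
 *
 *	intel_psr_lock(new_crtc_state);
 *	... write vblank sensitive registers ...
 *	intel_psr_unlock(new_crtc_state);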
3525  */
3526 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3527 {
3528 	struct intel_display *display = to_intel_display(crtc_state);
3529 	struct intel_encoder *encoder;
3530 
3531 	if (!crtc_state->has_psr)
3532 		return;
3533 
3534 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3535 					     crtc_state->uapi.encoder_mask) {
3536 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3537 
3538 		mutex_lock(&intel_dp->psr.lock);
3539 		break;
3540 	}
3541 }
3542 
3543 /**
3544  * intel_psr_unlock - release PSR lock
3545  * @crtc_state: the crtc state
3546  *
3547  * Release the PSR lock that was held during pipe update.
3548  */
3549 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3550 {
3551 	struct intel_display *display = to_intel_display(crtc_state);
3552 	struct intel_encoder *encoder;
3553 
3554 	if (!crtc_state->has_psr)
3555 		return;
3556 
3557 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3558 					     crtc_state->uapi.encoder_mask) {
3559 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3560 
3561 		mutex_unlock(&intel_dp->psr.lock);
3562 		break;
3563 	}
3564 }
3565 
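/*
 * Decode the live source PSR/Panel Replay state from the hardware status
 * register (the PSR2 status register when eDP selective update or panel
 * replay is enabled, the PSR1 status register otherwise) and print it to
 * debugfs.
 */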
3566 static void
3567 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3568 {
3569 	struct intel_display *display = to_intel_display(intel_dp);
3570 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3571 	const char *status = "unknown";
3572 	u32 val, status_val;
3573 
3574 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3575 					  intel_dp->psr.panel_replay_enabled)) {
3576 		static const char * const live_status[] = {
3577 			"IDLE",
3578 			"CAPTURE",
3579 			"CAPTURE_FS",
3580 			"SLEEP",
3581 			"BUFON_FW",
3582 			"ML_UP",
3583 			"SU_STANDBY",
3584 			"FAST_SLEEP",
3585 			"DEEP_SLEEP",
3586 			"BUF_ON",
3587 			"TG_ON"
3588 		};
3589 		val = intel_de_read(display,
3590 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3591 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3592 		if (status_val < ARRAY_SIZE(live_status))
3593 			status = live_status[status_val];
3594 	} else {
3595 		static const char * const live_status[] = {
3596 			"IDLE",
3597 			"SRDONACK",
3598 			"SRDENT",
3599 			"BUFOFF",
3600 			"BUFON",
3601 			"AUXACK",
3602 			"SRDOFFACK",
3603 			"SRDENT_ON",
3604 		};
3605 		val = intel_de_read(display,
3606 				    psr_status_reg(display, cpu_transcoder));
3607 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3608 		if (status_val < ARRAY_SIZE(live_status))
3609 			status = live_status[status_val];
3610 	}
3611 
3612 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3613 }
3614 
3615 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3616 				      struct seq_file *m)
3617 {
3618 	struct intel_psr *psr = &intel_dp->psr;
3619 
3620 	seq_printf(m, "Sink support: PSR = %s",
3621 		   str_yes_no(psr->sink_support));
3622 
3623 	if (psr->sink_support)
3624 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3625 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3626 		seq_printf(m, " (Early Transport)");
3627 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3628 	seq_printf(m, ", Panel Replay Selective Update = %s",
3629 		   str_yes_no(psr->sink_panel_replay_su_support));
3630 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3631 		seq_printf(m, " (Early Transport)");
3632 	seq_printf(m, "\n");
3633 }
3634 
3635 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3636 				 struct seq_file *m)
3637 {
3638 	struct intel_psr *psr = &intel_dp->psr;
3639 	const char *status, *mode, *region_et;
3640 
3641 	if (psr->enabled)
3642 		status = " enabled";
3643 	else
3644 		status = "disabled";
3645 
3646 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
3647 		mode = "Panel Replay Selective Update";
3648 	else if (psr->panel_replay_enabled)
3649 		mode = "Panel Replay";
3650 	else if (psr->sel_update_enabled)
3651 		mode = "PSR2";
3652 	else if (psr->enabled)
3653 		mode = "PSR1";
3654 	else
3655 		mode = "";
3656 
3657 	if (psr->su_region_et_enabled)
3658 		region_et = " (Early Transport)";
3659 	else
3660 		region_et = "";
3661 
3662 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3663 }
3664 
3665 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3666 {
3667 	struct intel_display *display = to_intel_display(intel_dp);
3668 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3669 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3670 	struct intel_psr *psr = &intel_dp->psr;
3671 	intel_wakeref_t wakeref;
3672 	bool enabled;
3673 	u32 val, psr2_ctl;
3674 
3675 	intel_psr_sink_capability(intel_dp, m);
3676 
3677 	if (!(psr->sink_support || psr->sink_panel_replay_support))
3678 		return 0;
3679 
3680 	wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3681 	mutex_lock(&psr->lock);
3682 
3683 	intel_psr_print_mode(intel_dp, m);
3684 
3685 	if (!psr->enabled) {
3686 		seq_printf(m, "PSR sink not reliable: %s\n",
3687 			   str_yes_no(psr->sink_not_reliable));
3688 
3689 		goto unlock;
3690 	}
3691 
3692 	if (psr->panel_replay_enabled) {
3693 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3694 
3695 		if (intel_dp_is_edp(intel_dp))
3696 			psr2_ctl = intel_de_read(display,
3697 						 EDP_PSR2_CTL(display,
3698 							      cpu_transcoder));
3699 
3700 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3701 	} else if (psr->sel_update_enabled) {
3702 		val = intel_de_read(display,
3703 				    EDP_PSR2_CTL(display, cpu_transcoder));
3704 		enabled = val & EDP_PSR2_ENABLE;
3705 	} else {
3706 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3707 		enabled = val & EDP_PSR_ENABLE;
3708 	}
3709 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3710 		   str_enabled_disabled(enabled), val);
3711 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3712 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
3713 			   psr2_ctl);
3714 	psr_source_status(intel_dp, m);
3715 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3716 		   psr->busy_frontbuffer_bits);
3717 
3718 	/*
3719 	 * SKL+ Perf counter is reset to 0 every time a DC state is entered
3720 	 */
3721 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3722 	seq_printf(m, "Performance counter: %u\n",
3723 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3724 
3725 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
3726 		seq_printf(m, "Last attempted entry at: %lld\n",
3727 			   psr->last_entry_attempt);
3728 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3729 	}
3730 
3731 	if (psr->sel_update_enabled) {
3732 		u32 su_frames_val[3];
3733 		int frame;
3734 
3735 		/*
3736 		 * Read all 3 registers beforehand to minimize the chance of
3737 		 * crossing a frame boundary between register reads.
3738 		 */
3739 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3740 			val = intel_de_read(display,
3741 					    PSR2_SU_STATUS(display, cpu_transcoder, frame));
3742 			su_frames_val[frame / 3] = val;
3743 		}
3744 
3745 		seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3746 
3747 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3748 			u32 su_blocks;
3749 
3750 			su_blocks = su_frames_val[frame / 3] &
3751 				    PSR2_SU_STATUS_MASK(frame);
3752 			su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3753 			seq_printf(m, "%d\t%d\n", frame, su_blocks);
3754 		}
3755 
3756 		seq_printf(m, "PSR2 selective fetch: %s\n",
3757 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3758 	}
3759 
3760 unlock:
3761 	mutex_unlock(&psr->lock);
3762 	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3763 
3764 	return 0;
3765 }
3766 
3767 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3768 {
3769 	struct intel_display *display = m->private;
3770 	struct intel_dp *intel_dp = NULL;
3771 	struct intel_encoder *encoder;
3772 
3773 	if (!HAS_PSR(display))
3774 		return -ENODEV;
3775 
3776 	/* Find the first eDP which supports PSR */
3777 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3778 		intel_dp = enc_to_intel_dp(encoder);
3779 		break;
3780 	}
3781 
3782 	if (!intel_dp)
3783 		return -ENODEV;
3784 
3785 	return intel_psr_status(m, intel_dp);
3786 }
3787 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3788 
3789 static int
3790 i915_edp_psr_debug_set(void *data, u64 val)
3791 {
3792 	struct intel_display *display = data;
3793 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3794 	struct intel_encoder *encoder;
3795 	intel_wakeref_t wakeref;
3796 	int ret = -ENODEV;
3797 
3798 	if (!HAS_PSR(display))
3799 		return ret;
3800 
3801 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3802 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3803 
3804 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3805 
3806 		wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3807 
3808 		// TODO: split to each transcoder's PSR debug state
3809 		ret = intel_psr_debug_set(intel_dp, val);
3810 
3811 		intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3812 	}
3813 
3814 	return ret;
3815 }
3816 
3817 static int
3818 i915_edp_psr_debug_get(void *data, u64 *val)
3819 {
3820 	struct intel_display *display = data;
3821 	struct intel_encoder *encoder;
3822 
3823 	if (!HAS_PSR(display))
3824 		return -ENODEV;
3825 
3826 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3827 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3828 
3829 		// TODO: split to each transcoder's PSR debug state
3830 		*val = READ_ONCE(intel_dp->psr.debug);
3831 		return 0;
3832 	}
3833 
3834 	return -ENODEV;
3835 }
3836 
3837 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3838 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3839 			"%llu\n");
3840 
3841 void intel_psr_debugfs_register(struct intel_display *display)
3842 {
3843 	struct drm_minor *minor = display->drm->primary;
3844 
3845 	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3846 			    display, &i915_edp_psr_debug_fops);
3847 
3848 	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3849 			    display, &i915_edp_psr_status_fops);
3850 }
3851 
3852 static const char *psr_mode_str(struct intel_dp *intel_dp)
3853 {
3854 	if (intel_dp->psr.panel_replay_enabled)
3855 		return "PANEL-REPLAY";
3856 	else if (intel_dp->psr.enabled)
3857 		return "PSR";
3858 
3859 	return "unknown";
3860 }
3861 
3862 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3863 {
3864 	struct intel_connector *connector = m->private;
3865 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3866 	static const char * const sink_status[] = {
3867 		"inactive",
3868 		"transition to active, capture and display",
3869 		"active, display from RFB",
3870 		"active, capture and display on sink device timings",
3871 		"transition to inactive, capture and display, timing re-sync",
3872 		"reserved",
3873 		"reserved",
3874 		"sink internal error",
3875 	};
3876 	const char *str;
3877 	int ret;
3878 	u8 status, error_status;
3879 
3880 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3881 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3882 		return -ENODEV;
3883 	}
3884 
3885 	if (connector->base.status != connector_status_connected)
3886 		return -ENODEV;
3887 
3888 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3889 	if (ret)
3890 		return ret;
3891 
3892 	status &= DP_PSR_SINK_STATE_MASK;
3893 	if (status < ARRAY_SIZE(sink_status))
3894 		str = sink_status[status];
3895 	else
3896 		str = "unknown";
3897 
3898 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3899 
3900 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3901 
3902 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3903 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3904 			    DP_PSR_LINK_CRC_ERROR))
3905 		seq_puts(m, ":\n");
3906 	else
3907 		seq_puts(m, "\n");
3908 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3909 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3910 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3911 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3912 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3913 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3914 
3915 	return ret;
3916 }
3917 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3918 
3919 static int i915_psr_status_show(struct seq_file *m, void *data)
3920 {
3921 	struct intel_connector *connector = m->private;
3922 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3923 
3924 	return intel_psr_status(m, intel_dp);
3925 }
3926 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3927 
3928 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3929 {
3930 	struct intel_display *display = to_intel_display(connector);
3931 	struct drm_i915_private *i915 = to_i915(connector->base.dev);
3932 	struct dentry *root = connector->base.debugfs_entry;
3933 
3934 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3935 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
3936 		return;
3937 
3938 	debugfs_create_file("i915_psr_sink_status", 0444, root,
3939 			    connector, &i915_psr_sink_status_fops);
3940 
3941 	if (HAS_PSR(display) || HAS_DP20(i915))
3942 		debugfs_create_file("i915_psr_status", 0444, root,
3943 				    connector, &i915_psr_status_fops);
3944 }
3945