xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision 69f83f167463bad26104af7fbc114ce1f80366b0)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 #include <drm/drm_print.h>
30 #include <drm/drm_vblank.h>
31 
32 #include "i915_reg.h"
33 #include "intel_alpm.h"
34 #include "intel_atomic.h"
35 #include "intel_crtc.h"
36 #include "intel_cursor_regs.h"
37 #include "intel_ddi.h"
38 #include "intel_de.h"
39 #include "intel_display_irq.h"
40 #include "intel_display_regs.h"
41 #include "intel_display_rpm.h"
42 #include "intel_display_types.h"
43 #include "intel_display_utils.h"
44 #include "intel_dmc.h"
45 #include "intel_dp.h"
46 #include "intel_dp_aux.h"
47 #include "intel_dsb.h"
48 #include "intel_frontbuffer.h"
49 #include "intel_hdmi.h"
50 #include "intel_psr.h"
51 #include "intel_psr_regs.h"
52 #include "intel_snps_phy.h"
53 #include "intel_step.h"
54 #include "intel_vblank.h"
55 #include "intel_vdsc.h"
56 #include "intel_vrr.h"
57 #include "skl_universal_plane.h"
58 
59 /**
60  * DOC: Panel Self Refresh (PSR/SRD)
61  *
62  * Since Haswell the display controller supports Panel Self-Refresh on display
63  * panels which have a remote frame buffer (RFB) implemented according to the
64  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
65  * standby states when the system is idle but the display is on, as it
66  * eliminates display refresh requests to DDR memory completely as long as the
67  * frame buffer for that display is unchanged.
68  *
69  * Panel Self Refresh must be supported by both Hardware (source) and
70  * Panel (sink).
71  *
72  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
73  * to power down the link and memory controller. For DSI panels the same idea
74  * is called "manual mode".
75  *
76  * The implementation uses the hardware-based PSR support which automatically
77  * enters/exits self-refresh mode. The hardware takes care of sending the
78  * required DP aux message and could even retrain the link (that part isn't
79  * enabled yet though). The hardware also keeps track of any frontbuffer
80  * changes to know when to exit self-refresh mode again. Unfortunately that
81  * part doesn't work too well, which is why the i915 PSR support uses the
82  * software frontbuffer tracking to make sure it doesn't miss a screen
83  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
84  * get called by the frontbuffer tracking code. Note that because of locking
85  * issues the self-refresh re-enable code is done from a work queue, which
86  * must be correctly synchronized/cancelled when shutting down the pipe.
87  *
88  * DC3CO (DC3 clock off)
89  *
90  * On top of PSR2, GEN12 adds an intermediate power savings state that turns
91  * clock off automatically during PSR2 idle state.
92  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
93  * entry/exit allows the HW to enter a low-power state even when page flipping
94  * periodically (for instance a 30fps video playback scenario).
95  *
96  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was),
97  * so DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run after 6
98  * frames. If no other flip occurs and that work function runs, DC3CO is
99  * disabled and PSR2 is configured to enter deep sleep; the cycle restarts in
100  * case of another flip.
101  * Front buffer modifications do not trigger DC3CO activation on purpose, as it
102  * would bring a lot of complexity and most modern systems will only
103  * use page flips.
104  */
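
/*
 * Editor's illustrative sketch (not part of the driver): the frontbuffer
 * tracking integration described above, reduced to its call ordering. Only
 * intel_psr_invalidate() and intel_psr_flush() are real entry points named
 * in this file; the surrounding pseudo-calls and argument names are
 * hypothetical and only show when each hook is expected to run.
 *
 *	// a CPU/GT write to the frontbuffer is about to start:
 *	// force the hardware out of self-refresh
 *	intel_psr_invalidate(display, frontbuffer_bits, origin);
 *
 *	// ... frontbuffer contents are modified ...
 *
 *	// the update is done: self-refresh re-enable is deferred to a work
 *	// queue because of the locking issues mentioned above
 *	intel_psr_flush(display, frontbuffer_bits, origin);
 */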
105 
106 /*
107  * Description of PSR mask bits:
108  *
109  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
110  *
111  *  When unmasked (nearly) all display register writes (e.g. even
112  *  SWF) trigger a PSR exit. Some registers are excluded from this
113  *  and they have a more specific mask (described below). On icl+
114  *  this bit no longer exists and is effectively always set.
115  *
116  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
117  *
118  *  When unmasked (nearly) all pipe/plane register writes
119  *  trigger a PSR exit. Some plane registers are excluded from this
120  *  and they have a more specific mask (described below).
121  *
122  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
123  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
124  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
125  *
126  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
127  *  SPR_SURF/CURBASE are not included in this and instead are
128  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
129  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
130  *
131  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
132  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
133  *
134  *  When unmasked PSR is blocked as long as the sprite
135  *  plane is enabled. skl+ with their universal planes no
136  *  longer have a mask bit like this, and no plane being
137  *  enabled blocks PSR.
138  *
139  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
140  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
141  *
142  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
143  *  this doesn't exist but CURPOS is included in the
144  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
145  *
146  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
147  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
148  *
149  *  When unmasked PSR is blocked as long as vblank and/or vsync
150  *  interrupt is unmasked in IMR *and* enabled in IER.
151  *
152  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
153  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
154  *
155  *  Selects whether PSR exit generates an extra vblank before
156  *  the first frame is transmitted. Also note the opposite polarity
157  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
158  *  unmasked==do not generate the extra vblank).
159  *
160  *  With DC states enabled the extra vblank happens after link training,
161  *  with DC states disabled it happens immediately upon PSR exit trigger.
162  *  No idea as of now why there is a difference. HSW/BDW (which don't
163  *  even have DMC) always generate it after link training. Go figure.
164  *
165  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
166  *  and thus won't latch until the first vblank. So with DC states
167  *  enabled the register effectively uses the reset value during DC5
168  *  exit+PSR exit sequence, and thus the bit does nothing until
169  *  latched by the vblank that it was trying to prevent from being
170  *  generated in the first place. So we should probably call this
171  *  one a chicken/egg bit instead on skl+.
172  *
173  *  In standby mode (as opposed to link-off) this makes no difference
174  *  as the timing generator keeps running the whole time generating
175  *  normal periodic vblanks.
176  *
177  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
178  *  and doing so makes the behaviour match the skl+ reset value.
179  *
180  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
181  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
182  *
183  *  On BDW without this bit set no vblanks whatsoever are
184  *  generated after PSR exit. On HSW this has no apparent effect.
185  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
186  *
187  * The rest of the bits are more self-explanatory and/or
188  * irrelevant for normal operation.
189  *
190  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
191  * has_sel_update:
192  *
193  *  has_psr (alone):					PSR1
194  *  has_psr + has_sel_update:				PSR2
195  *  has_psr + has_panel_replay:				Panel Replay
196  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
197  *
198  * Description of some intel_psr variables. enabled, panel_replay_enabled,
199  * sel_update_enabled
200  *
201  *  enabled (alone):						PSR1
202  *  enabled + sel_update_enabled:				PSR2
203  *  enabled + panel_replay_enabled:				Panel Replay
204  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
205  */
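
/*
 * Editor's illustrative sketch (not part of the driver): the crtc_state flag
 * combinations from the table above folded into one helper. The function name
 * is hypothetical; the mapping itself is exactly the table.
 *
 *	static const char *psr_mode_name(const struct intel_crtc_state *crtc_state)
 *	{
 *		if (!crtc_state->has_psr)
 *			return "none";
 *		if (crtc_state->has_panel_replay)
 *			return crtc_state->has_sel_update ?
 *				"Panel Replay Selective Update" : "Panel Replay";
 *		return crtc_state->has_sel_update ? "PSR2" : "PSR1";
 *	}
 */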
206 
207 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
208 			   (intel_dp)->psr.source_support)
209 
210 bool intel_encoder_can_psr(struct intel_encoder *encoder)
211 {
212 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
213 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
214 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
215 	else
216 		return false;
217 }
218 
219 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
220 				  const struct intel_crtc_state *crtc_state)
221 {
222 	/*
223 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
224 	 * the output is enabled. For non-eDP outputs the main link is always
225 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
226 	 * for eDP.
227 	 *
228 	 * TODO:
229 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
230 	 *   the ALPM with main-link off mode is not enabled.
231 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
232 	 *   main-link off mode is added for it and this mode gets enabled.
233 	 */
234 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
235 	       intel_encoder_can_psr(encoder);
236 }
237 
238 static bool psr_global_enabled(struct intel_dp *intel_dp)
239 {
240 	struct intel_connector *connector = intel_dp->attached_connector;
241 
242 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
243 	case I915_PSR_DEBUG_DEFAULT:
244 		return intel_dp_is_edp(intel_dp) ?
245 			connector->panel.vbt.psr.enable : true;
246 	case I915_PSR_DEBUG_DISABLE:
247 		return false;
248 	default:
249 		return true;
250 	}
251 }
252 
253 static bool sel_update_global_enabled(struct intel_dp *intel_dp)
254 {
255 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
256 	case I915_PSR_DEBUG_DISABLE:
257 	case I915_PSR_DEBUG_FORCE_PSR1:
258 		return false;
259 	default:
260 		return true;
261 	}
262 }
263 
264 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
265 {
266 	struct intel_display *display = to_intel_display(intel_dp);
267 
268 	return !(intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE) &&
269 		display->params.enable_panel_replay;
270 }
271 
272 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
273 {
274 	struct intel_display *display = to_intel_display(intel_dp);
275 
276 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
277 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
278 }
279 
280 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
281 {
282 	struct intel_display *display = to_intel_display(intel_dp);
283 
284 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
285 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
286 }
287 
288 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
289 {
290 	struct intel_display *display = to_intel_display(intel_dp);
291 
292 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
293 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
294 }
295 
296 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
297 {
298 	struct intel_display *display = to_intel_display(intel_dp);
299 
300 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
301 		EDP_PSR_MASK(intel_dp->psr.transcoder);
302 }
303 
304 static i915_reg_t psr_ctl_reg(struct intel_display *display,
305 			      enum transcoder cpu_transcoder)
306 {
307 	if (DISPLAY_VER(display) >= 8)
308 		return EDP_PSR_CTL(display, cpu_transcoder);
309 	else
310 		return HSW_SRD_CTL;
311 }
312 
313 static i915_reg_t psr_debug_reg(struct intel_display *display,
314 				enum transcoder cpu_transcoder)
315 {
316 	if (DISPLAY_VER(display) >= 8)
317 		return EDP_PSR_DEBUG(display, cpu_transcoder);
318 	else
319 		return HSW_SRD_DEBUG;
320 }
321 
322 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
323 				   enum transcoder cpu_transcoder)
324 {
325 	if (DISPLAY_VER(display) >= 8)
326 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
327 	else
328 		return HSW_SRD_PERF_CNT;
329 }
330 
331 static i915_reg_t psr_status_reg(struct intel_display *display,
332 				 enum transcoder cpu_transcoder)
333 {
334 	if (DISPLAY_VER(display) >= 8)
335 		return EDP_PSR_STATUS(display, cpu_transcoder);
336 	else
337 		return HSW_SRD_STATUS;
338 }
339 
340 static i915_reg_t psr_imr_reg(struct intel_display *display,
341 			      enum transcoder cpu_transcoder)
342 {
343 	if (DISPLAY_VER(display) >= 12)
344 		return TRANS_PSR_IMR(display, cpu_transcoder);
345 	else
346 		return EDP_PSR_IMR;
347 }
348 
349 static i915_reg_t psr_iir_reg(struct intel_display *display,
350 			      enum transcoder cpu_transcoder)
351 {
352 	if (DISPLAY_VER(display) >= 12)
353 		return TRANS_PSR_IIR(display, cpu_transcoder);
354 	else
355 		return EDP_PSR_IIR;
356 }
357 
358 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
359 				  enum transcoder cpu_transcoder)
360 {
361 	if (DISPLAY_VER(display) >= 8)
362 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
363 	else
364 		return HSW_SRD_AUX_CTL;
365 }
366 
367 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
368 				   enum transcoder cpu_transcoder, int i)
369 {
370 	if (DISPLAY_VER(display) >= 8)
371 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
372 	else
373 		return HSW_SRD_AUX_DATA(i);
374 }
375 
376 static void psr_irq_control(struct intel_dp *intel_dp)
377 {
378 	struct intel_display *display = to_intel_display(intel_dp);
379 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
380 	u32 mask;
381 
382 	if (intel_dp->psr.panel_replay_enabled)
383 		return;
384 
385 	mask = psr_irq_psr_error_bit_get(intel_dp);
386 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
387 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
388 			psr_irq_pre_entry_bit_get(intel_dp);
389 
390 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
391 		     psr_irq_mask_get(intel_dp), ~mask);
392 }
393 
394 static void psr_event_print(struct intel_display *display,
395 			    u32 val, bool sel_update_enabled)
396 {
397 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
398 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
399 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
400 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
401 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
402 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
403 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
404 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
405 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
406 	if (val & PSR_EVENT_GRAPHICS_RESET)
407 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
408 	if (val & PSR_EVENT_PCH_INTERRUPT)
409 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
410 	if (val & PSR_EVENT_MEMORY_UP)
411 		drm_dbg_kms(display->drm, "\tMemory up\n");
412 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
413 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
414 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
415 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
416 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
417 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
418 	if (val & PSR_EVENT_REGISTER_UPDATE)
419 		drm_dbg_kms(display->drm, "\tRegister updated\n");
420 	if (val & PSR_EVENT_HDCP_ENABLE)
421 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
422 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
423 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
424 	if (val & PSR_EVENT_VBI_ENABLE)
425 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
426 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
427 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
428 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
429 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
430 }
431 
432 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
433 {
434 	struct intel_display *display = to_intel_display(intel_dp);
435 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
436 	ktime_t time_ns =  ktime_get();
437 
438 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
439 		intel_dp->psr.last_entry_attempt = time_ns;
440 		drm_dbg_kms(display->drm,
441 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
442 			    transcoder_name(cpu_transcoder));
443 	}
444 
445 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
446 		intel_dp->psr.last_exit = time_ns;
447 		drm_dbg_kms(display->drm,
448 			    "[transcoder %s] PSR exit completed\n",
449 			    transcoder_name(cpu_transcoder));
450 
451 		if (DISPLAY_VER(display) >= 9) {
452 			u32 val;
453 
454 			val = intel_de_rmw(display,
455 					   PSR_EVENT(display, cpu_transcoder),
456 					   0, 0);
457 
458 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
459 		}
460 	}
461 
462 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
463 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
464 			 transcoder_name(cpu_transcoder));
465 
466 		intel_dp->psr.irq_aux_error = true;
467 
468 		/*
469 		 * If this interrupt is not masked it will keep
470 		 * firing so fast that it prevents the scheduled
471 		 * work from running.
472 		 * Also, after a PSR error we don't want to arm PSR
473 		 * again, so we don't care about unmasking the interrupt
474 		 * or unsetting irq_aux_error.
475 		 */
476 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
477 			     0, psr_irq_psr_error_bit_get(intel_dp));
478 
479 		queue_work(display->wq.unordered, &intel_dp->psr.work);
480 	}
481 }
482 
483 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
484 {
485 	struct intel_display *display = to_intel_display(intel_dp);
486 	u8 val = 8; /* assume the worst if we can't read the value */
487 
488 	if (drm_dp_dpcd_readb(&intel_dp->aux,
489 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
490 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
491 	else
492 		drm_dbg_kms(display->drm,
493 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
494 	return val;
495 }
496 
497 static void _psr_compute_su_granularity(struct intel_dp *intel_dp,
498 					struct intel_connector *connector)
499 {
500 	struct intel_display *display = to_intel_display(intel_dp);
501 	ssize_t r;
502 	__le16 w;
503 	u8 y;
504 
505 	/*
506 	 * If the sink doesn't have specific granularity requirements, set
507 	 * legacy ones.
508 	 */
509 	if (!(connector->dp.psr_caps.dpcd[1] & DP_PSR2_SU_GRANULARITY_REQUIRED)) {
510 		/* As PSR2 HW sends full lines, we do not care about x granularity */
511 		w = cpu_to_le16(4);
512 		y = 4;
513 		goto exit;
514 	}
515 
516 	r = drm_dp_dpcd_read(&intel_dp->aux, DP_PSR2_SU_X_GRANULARITY, &w, sizeof(w));
517 	if (r != sizeof(w))
518 		drm_dbg_kms(display->drm,
519 			    "Unable to read selective update x granularity\n");
520 	/*
521 	 * Spec says that if the value read is 0 the default granularity should
522 	 * be used instead.
523 	 */
524 	if (r != sizeof(w) || w == 0)
525 		w = cpu_to_le16(4);
526 
527 	r = drm_dp_dpcd_read(&intel_dp->aux, DP_PSR2_SU_Y_GRANULARITY, &y, 1);
528 	if (r != 1) {
529 		drm_dbg_kms(display->drm,
530 			    "Unable to read selective update y granularity\n");
531 		y = 4;
532 	}
533 	if (y == 0)
534 		y = 1;
535 
536 exit:
537 	connector->dp.psr_caps.su_w_granularity = le16_to_cpu(w);
538 	connector->dp.psr_caps.su_y_granularity = y;
539 }
540 
541 static enum intel_panel_replay_dsc_support
542 compute_pr_dsc_support(struct intel_connector *connector)
543 {
544 	u8 pr_dsc_mode;
545 	u8 val;
546 
547 	val = connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_CAPABILITY)];
548 	pr_dsc_mode = REG_FIELD_GET8(DP_PANEL_REPLAY_DSC_DECODE_CAPABILITY_IN_PR_MASK, val);
549 
550 	switch (pr_dsc_mode) {
551 	case DP_DSC_DECODE_CAPABILITY_IN_PR_FULL_FRAME_ONLY:
552 		return INTEL_DP_PANEL_REPLAY_DSC_FULL_FRAME_ONLY;
553 	case DP_DSC_DECODE_CAPABILITY_IN_PR_SUPPORTED:
554 		return INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE;
555 	default:
556 		MISSING_CASE(pr_dsc_mode);
557 		fallthrough;
558 	case DP_DSC_DECODE_CAPABILITY_IN_PR_NOT_SUPPORTED:
559 	case DP_DSC_DECODE_CAPABILITY_IN_PR_RESERVED:
560 		return INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED;
561 	}
562 }
563 
564 static const char *panel_replay_dsc_support_str(enum intel_panel_replay_dsc_support dsc_support)
565 {
566 	switch (dsc_support) {
567 	case INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED:
568 		return "not supported";
569 	case INTEL_DP_PANEL_REPLAY_DSC_FULL_FRAME_ONLY:
570 		return "full frame only";
571 	case INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE:
572 		return "selective update";
573 	default:
574 		MISSING_CASE(dsc_support);
575 		return "n/a";
576 	};
577 }
578 
579 static void _panel_replay_compute_su_granularity(struct intel_connector *connector)
580 {
581 	u16 w;
582 	u8 y;
583 
584 	if (!(connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_CAPABILITY)] &
585 	       DP_PANEL_REPLAY_SU_GRANULARITY_REQUIRED)) {
586 		w = 4;
587 		y = 4;
588 		goto exit;
589 	}
590 
591 	/*
592 	 * Spec says that if the value read is 0 the default granularity should
593 	 * be used instead.
594 	 */
595 	w = le16_to_cpu(*(__le16 *)&connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_X_GRANULARITY)]) ? : 4;
596 	y = connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_Y_GRANULARITY)] ? : 1;
597 
598 exit:
599 	connector->dp.panel_replay_caps.su_w_granularity = w;
600 	connector->dp.panel_replay_caps.su_y_granularity = y;
601 }
602 
603 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector)
604 {
605 	struct intel_display *display = to_intel_display(intel_dp);
606 	int ret;
607 
608 	/* TODO: Enable Panel Replay on MST once it's properly implemented. */
609 	if (intel_dp->mst_detect == DRM_DP_MST)
610 		return;
611 
612 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
613 				    &connector->dp.panel_replay_caps.dpcd,
614 				    sizeof(connector->dp.panel_replay_caps.dpcd));
615 	if (ret < 0)
616 		return;
617 
618 	if (!(connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
619 	      DP_PANEL_REPLAY_SUPPORT))
620 		return;
621 
622 	if (intel_dp_is_edp(intel_dp)) {
623 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
624 			drm_dbg_kms(display->drm,
625 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
626 			return;
627 		}
628 
629 		if (!(connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
630 		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
631 			drm_dbg_kms(display->drm,
632 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
633 			return;
634 		}
635 	}
636 
637 	connector->dp.panel_replay_caps.support = true;
638 	intel_dp->psr.sink_panel_replay_support = true;
639 
640 	if (connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
641 	    DP_PANEL_REPLAY_SU_SUPPORT) {
642 		connector->dp.panel_replay_caps.su_support = true;
643 
644 		_panel_replay_compute_su_granularity(connector);
645 	}
646 
647 	connector->dp.panel_replay_caps.dsc_support = compute_pr_dsc_support(connector);
648 
649 	drm_dbg_kms(display->drm,
650 		    "Panel replay %sis supported by panel (in DSC mode: %s)\n",
651 		    connector->dp.panel_replay_caps.su_support ?
652 		    "selective_update " : "",
653 		    panel_replay_dsc_support_str(connector->dp.panel_replay_caps.dsc_support));
654 }
655 
656 static void _psr_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector)
657 {
658 	struct intel_display *display = to_intel_display(intel_dp);
659 	int ret;
660 
661 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PSR_SUPPORT, connector->dp.psr_caps.dpcd,
662 				    sizeof(connector->dp.psr_caps.dpcd));
663 	if (ret < 0)
664 		return;
665 
666 	if (!connector->dp.psr_caps.dpcd[0])
667 		return;
668 
669 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
670 		    connector->dp.psr_caps.dpcd[0]);
671 
672 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
673 		drm_dbg_kms(display->drm,
674 			    "PSR support not currently available for this panel\n");
675 		return;
676 	}
677 
678 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
679 		drm_dbg_kms(display->drm,
680 			    "Panel lacks power state control, PSR cannot be enabled\n");
681 		return;
682 	}
683 
684 	connector->dp.psr_caps.support = true;
685 	intel_dp->psr.sink_support = true;
686 
687 	connector->dp.psr_caps.sync_latency = intel_dp_get_sink_sync_latency(intel_dp);
688 
689 	if (DISPLAY_VER(display) >= 9 &&
690 	    connector->dp.psr_caps.dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
691 		bool y_req = connector->dp.psr_caps.dpcd[1] &
692 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
693 
694 		/*
695 		 * All panels that support PSR version 03h (PSR2 +
696 		 * Y-coordinate) can handle Y-coordinates in VSC, but we are
697 		 * only sure that it is going to be used when required by the
698 		 * panel. This way the panel is capable of doing selective
699 		 * updates without an aux frame sync.
700 		 *
701 		 * To support PSR version 02h and PSR version 03h panels
702 		 * without the Y-coordinate requirement we would need to enable
703 		 * GTC first.
704 		 */
705 		connector->dp.psr_caps.su_support = y_req &&
706 			intel_alpm_aux_wake_supported(intel_dp);
707 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
708 			    connector->dp.psr_caps.su_support ? "" : "not ");
709 	}
710 
711 	if (connector->dp.psr_caps.su_support)
712 		_psr_compute_su_granularity(intel_dp, connector);
713 }
714 
715 void intel_psr_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector)
716 {
717 	_psr_init_dpcd(intel_dp, connector);
718 
719 	_panel_replay_init_dpcd(intel_dp, connector);
720 }
721 
722 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
723 {
724 	struct intel_display *display = to_intel_display(intel_dp);
725 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
726 	u32 aux_clock_divider, aux_ctl;
727 	/* write DP_SET_POWER=D0 */
728 	static const u8 aux_msg[] = {
729 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
730 		[1] = (DP_SET_POWER >> 8) & 0xff,
731 		[2] = DP_SET_POWER & 0xff,
732 		[3] = 1 - 1,
733 		[4] = DP_SET_POWER_D0,
734 	};
735 	int i;
736 
737 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
738 	for (i = 0; i < sizeof(aux_msg); i += 4)
739 		intel_de_write(display,
740 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
741 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
742 
743 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
744 
745 	/* Start with bits set for DDI_AUX_CTL register */
746 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
747 					     aux_clock_divider);
748 
749 	/* Select only valid bits for SRD_AUX_CTL */
750 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
751 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
752 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
753 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
754 
755 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
756 		       aux_ctl);
757 }
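
/*
 * Editor's note (illustrative, not part of the driver): with DP_SET_POWER at
 * DPCD address 0x600 and DP_SET_POWER_D0 == 0x1, aux_msg[] above works out to
 * { 0x80, 0x06, 0x00, 0x00, 0x01 }. Assuming the MSB-first packing done by
 * intel_dp_aux_pack(), the loop then programs roughly:
 *
 *	AUX_DATA[0] = 0x80060000;	// header: native write, addr 0x600, len-1 = 0
 *	AUX_DATA[1] = 0x01000000;	// payload: DP_SET_POWER_D0
 *
 * so the hardware can replay this "wake to D0" write on its own when exiting
 * self-refresh.
 */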
758 
759 static bool psr2_su_region_et_valid(struct intel_connector *connector, bool panel_replay)
760 {
761 	struct intel_dp *intel_dp = intel_attached_dp(connector);
762 	struct intel_display *display = to_intel_display(intel_dp);
763 
764 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
765 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
766 		return false;
767 
768 	return panel_replay ?
769 		connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
770 		DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
771 		connector->dp.psr_caps.dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED;
772 }
773 
774 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
775 				      const struct intel_crtc_state *crtc_state)
776 {
777 	u8 val = DP_PANEL_REPLAY_ENABLE |
778 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
779 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
780 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
781 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
782 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
783 
784 	if (crtc_state->has_sel_update)
785 		val |= DP_PANEL_REPLAY_SU_ENABLE;
786 
787 	if (crtc_state->enable_psr2_su_region_et)
788 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
789 
790 	if (crtc_state->req_psr2_sdp_prior_scanline)
791 		panel_replay_config2 |=
792 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
793 
794 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
795 
796 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
797 			   panel_replay_config2);
798 }
799 
800 static void _psr_enable_sink(struct intel_dp *intel_dp,
801 			     const struct intel_crtc_state *crtc_state)
802 {
803 	struct intel_display *display = to_intel_display(intel_dp);
804 	u8 val = 0;
805 
806 	if (crtc_state->has_sel_update) {
807 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
808 	} else {
809 		if (intel_dp->psr.link_standby)
810 			val |= DP_PSR_MAIN_LINK_ACTIVE;
811 
812 		if (DISPLAY_VER(display) >= 8)
813 			val |= DP_PSR_CRC_VERIFICATION;
814 	}
815 
816 	if (crtc_state->req_psr2_sdp_prior_scanline)
817 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
818 
819 	if (crtc_state->enable_psr2_su_region_et)
820 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
821 
822 	if (intel_dp->psr.entry_setup_frames > 0)
823 		val |= DP_PSR_FRAME_CAPTURE;
824 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
825 
826 	val |= DP_PSR_ENABLE;
827 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
828 }
829 
830 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
831 				  const struct intel_crtc_state *crtc_state)
832 {
833 	intel_alpm_enable_sink(intel_dp, crtc_state);
834 
835 	crtc_state->has_panel_replay ?
836 		_panel_replay_enable_sink(intel_dp, crtc_state) :
837 		_psr_enable_sink(intel_dp, crtc_state);
838 
839 	if (intel_dp_is_edp(intel_dp))
840 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
841 }
842 
843 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
844 {
845 	/*
846 	 * NOTE: We might want to trigger mode set when
847 	 * disabling/enabling Panel Replay via debugfs interface to
848 	 * ensure this bit is cleared/set accordingly.
849 	 */
850 	if (CAN_PANEL_REPLAY(intel_dp) && panel_replay_global_enabled(intel_dp))
851 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
852 				   DP_PANEL_REPLAY_ENABLE);
853 }
854 
855 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
856 {
857 	struct intel_display *display = to_intel_display(intel_dp);
858 	struct intel_connector *connector = intel_dp->attached_connector;
859 	u32 val = 0;
860 
861 	if (DISPLAY_VER(display) >= 11)
862 		val |= EDP_PSR_TP4_TIME_0us;
863 
864 	if (display->params.psr_safest_params) {
865 		val |= EDP_PSR_TP1_TIME_2500us;
866 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
867 		goto check_tp3_sel;
868 	}
869 
870 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
871 		val |= EDP_PSR_TP1_TIME_0us;
872 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
873 		val |= EDP_PSR_TP1_TIME_100us;
874 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
875 		val |= EDP_PSR_TP1_TIME_500us;
876 	else
877 		val |= EDP_PSR_TP1_TIME_2500us;
878 
879 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
880 		val |= EDP_PSR_TP2_TP3_TIME_0us;
881 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
882 		val |= EDP_PSR_TP2_TP3_TIME_100us;
883 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
884 		val |= EDP_PSR_TP2_TP3_TIME_500us;
885 	else
886 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
887 
888 	/*
889 	 * WA 0479: hsw,bdw
890 	 * "Do not skip both TP1 and TP2/TP3"
891 	 */
892 	if (DISPLAY_VER(display) < 9 &&
893 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
894 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
895 		val |= EDP_PSR_TP2_TP3_TIME_100us;
896 
897 check_tp3_sel:
898 	if (intel_dp_source_supports_tps3(display) &&
899 	    drm_dp_tps3_supported(intel_dp->dpcd))
900 		val |= EDP_PSR_TP_TP1_TP3;
901 	else
902 		val |= EDP_PSR_TP_TP1_TP2;
903 
904 	return val;
905 }
906 
907 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
908 {
909 	struct intel_display *display = to_intel_display(intel_dp);
910 	struct intel_connector *connector = intel_dp->attached_connector;
911 	int idle_frames;
912 
913 	/* Let's use 6 as the minimum to cover all known cases including the
914 	 * off-by-one issue that HW has in some cases.
915 	 */
916 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
917 	idle_frames = max(idle_frames, connector->dp.psr_caps.sync_latency + 1);
918 
919 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
920 		idle_frames = 0xf;
921 
922 	return idle_frames;
923 }
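
/*
 * Editor's worked example (illustrative values, not part of the driver):
 * a VBT idle_frames of 2 and a sink sync latency of 8 (the assumed-worst
 * default above) give max(6, 2) == 6, then max(6, 8 + 1) == 9 idle frames;
 * anything that would overflow the 4-bit field is clamped to 0xf with a WARN.
 */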
924 
925 static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
926 {
927 	struct intel_display *display = to_intel_display(intel_dp);
928 	u32 current_dc_state = intel_display_power_get_current_dc_state(display);
929 	struct intel_crtc *crtc = intel_crtc_for_pipe(display, intel_dp->psr.pipe);
930 	struct drm_vblank_crtc *vblank = drm_crtc_vblank_crtc(&crtc->base);
931 
932 	return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
933 		current_dc_state != DC_STATE_EN_UPTO_DC6) ||
934 		intel_dp->psr.active_non_psr_pipes ||
935 		READ_ONCE(vblank->enabled);
936 }
937 
938 static void hsw_activate_psr1(struct intel_dp *intel_dp)
939 {
940 	struct intel_display *display = to_intel_display(intel_dp);
941 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
942 	u32 max_sleep_time = 0x1f;
943 	u32 val = EDP_PSR_ENABLE;
944 
945 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
946 
947 	if (DISPLAY_VER(display) < 20)
948 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
949 
950 	if (display->platform.haswell)
951 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
952 
953 	if (intel_dp->psr.link_standby)
954 		val |= EDP_PSR_LINK_STANDBY;
955 
956 	val |= intel_psr1_get_tp_time(intel_dp);
957 
958 	if (DISPLAY_VER(display) >= 8)
959 		val |= EDP_PSR_CRC_ENABLE;
960 
961 	if (DISPLAY_VER(display) >= 20)
962 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
963 
964 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
965 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
966 
967 	/* Wa_16025596647 */
968 	if ((DISPLAY_VER(display) == 20 ||
969 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
970 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
971 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
972 								       intel_dp->psr.pipe,
973 								       true);
974 }
975 
976 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
977 {
978 	struct intel_display *display = to_intel_display(intel_dp);
979 	struct intel_connector *connector = intel_dp->attached_connector;
980 	u32 val = 0;
981 
982 	if (display->params.psr_safest_params)
983 		return EDP_PSR2_TP2_TIME_2500us;
984 
985 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
986 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
987 		val |= EDP_PSR2_TP2_TIME_50us;
988 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
989 		val |= EDP_PSR2_TP2_TIME_100us;
990 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
991 		val |= EDP_PSR2_TP2_TIME_500us;
992 	else
993 		val |= EDP_PSR2_TP2_TIME_2500us;
994 
995 	return val;
996 }
997 
998 static int
999 psr2_block_count_lines(u8 io_wake_lines, u8 fast_wake_lines)
1000 {
1001 	return io_wake_lines < 9 && fast_wake_lines < 9 ? 8 : 12;
1002 }
1003 
1004 static int psr2_block_count(struct intel_dp *intel_dp)
1005 {
1006 	return psr2_block_count_lines(intel_dp->psr.io_wake_lines,
1007 				      intel_dp->psr.fast_wake_lines) / 4;
1008 }
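
/*
 * Editor's worked example (illustrative wake line values, not part of the
 * driver): io_wake_lines == 7 and fast_wake_lines == 7 are both below 9, so
 * psr2_block_count_lines() returns 8 and psr2_block_count() returns 2, which
 * hsw_activate_psr2() turns into TGL_EDP_PSR2_BLOCK_COUNT_NUM_2; once either
 * value reaches 9 the result becomes 12 lines / 3 blocks and
 * TGL_EDP_PSR2_BLOCK_COUNT_NUM_3 is used instead.
 */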
1009 
1010 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
1011 {
1012 	struct intel_connector *connector = intel_dp->attached_connector;
1013 	u8 frames_before_su_entry;
1014 
1015 	frames_before_su_entry = max_t(u8,
1016 				       connector->dp.psr_caps.sync_latency + 1,
1017 				       2);
1018 
1019 	/* Entry setup frames must be at least 1 less than frames before SU entry */
1020 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
1021 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
1022 
1023 	return frames_before_su_entry;
1024 }
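
/*
 * Editor's worked example (illustrative values, not part of the driver):
 * a sink sync latency of 3 gives max(3 + 1, 2) == 4 frames before SU entry;
 * the single entry_setup_frames == 1 used on display ver 20+ leaves that
 * unchanged (1 < 4), while an entry_setup_frames of 4 or more would bump the
 * result to entry_setup_frames + 1, keeping the "at least 1 less" rule above.
 */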
1025 
1026 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
1027 {
1028 	struct intel_display *display = to_intel_display(intel_dp);
1029 	struct intel_psr *psr = &intel_dp->psr;
1030 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1031 
1032 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
1033 		u32 val = psr->su_region_et_enabled ?
1034 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
1035 
1036 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1037 			val |= EDP_PSR2_SU_SDP_SCANLINE;
1038 
1039 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
1040 			       val);
1041 	}
1042 
1043 	intel_de_rmw(display,
1044 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1045 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1046 
1047 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1048 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1049 }
1050 
1051 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1052 {
1053 	struct intel_display *display = to_intel_display(intel_dp);
1054 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1055 	u32 val = EDP_PSR2_ENABLE;
1056 	u32 psr_val = 0;
1057 	u8 idle_frames;
1058 
1059 	/* Wa_16025596647 */
1060 	if ((DISPLAY_VER(display) == 20 ||
1061 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1062 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
1063 		idle_frames = 0;
1064 	else
1065 		idle_frames = psr_compute_idle_frames(intel_dp);
1066 	val |= EDP_PSR2_IDLE_FRAMES(idle_frames);
1067 
1068 	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
1069 		val |= EDP_SU_TRACK_ENABLE;
1070 
1071 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1072 		val |= EDP_Y_COORDINATE_ENABLE;
1073 
1074 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1075 
1076 	val |= intel_psr2_get_tp_time(intel_dp);
1077 
1078 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1079 		if (psr2_block_count(intel_dp) > 2)
1080 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1081 		else
1082 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1083 	}
1084 
1085 	/* Wa_22012278275:adl-p */
1086 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1087 		static const u8 map[] = {
1088 			2, /* 5 lines */
1089 			1, /* 6 lines */
1090 			0, /* 7 lines */
1091 			3, /* 8 lines */
1092 			6, /* 9 lines */
1093 			5, /* 10 lines */
1094 			4, /* 11 lines */
1095 			7, /* 12 lines */
1096 		};
1097 		/*
1098 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1099 		 * comments below for more information
1100 		 */
1101 		int tmp;
1102 
1103 		tmp = map[intel_dp->psr.io_wake_lines -
1104 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1105 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1106 
1107 		tmp = map[intel_dp->psr.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1108 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1109 	} else if (DISPLAY_VER(display) >= 20) {
1110 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
1111 	} else if (DISPLAY_VER(display) >= 12) {
1112 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
1113 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->psr.fast_wake_lines);
1114 	} else if (DISPLAY_VER(display) >= 9) {
1115 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
1116 		val |= EDP_PSR2_FAST_WAKE(intel_dp->psr.fast_wake_lines);
1117 	}
1118 
1119 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1120 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1121 
1122 	if (DISPLAY_VER(display) >= 20)
1123 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1124 
1125 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1126 		u32 tmp;
1127 
1128 		tmp = intel_de_read(display,
1129 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1130 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1131 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1132 		intel_de_write(display,
1133 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1134 	}
1135 
1136 	if (intel_dp->psr.su_region_et_enabled)
1137 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1138 
1139 	/*
1140 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
1141 	 * recommends keeping this bit unset while PSR2 is enabled.
1142 	 */
1143 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1144 
1145 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1146 }
1147 
1148 static bool
1149 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1150 {
1151 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1152 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1153 	else if (DISPLAY_VER(display) >= 12)
1154 		return cpu_transcoder == TRANSCODER_A;
1155 	else if (DISPLAY_VER(display) >= 9)
1156 		return cpu_transcoder == TRANSCODER_EDP;
1157 	else
1158 		return false;
1159 }
1160 
1161 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1162 {
1163 	if (!crtc_state->hw.active)
1164 		return 0;
1165 
1166 	return DIV_ROUND_UP(1000 * 1000,
1167 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1168 }
1169 
1170 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1171 				     u32 idle_frames)
1172 {
1173 	struct intel_display *display = to_intel_display(intel_dp);
1174 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1175 
1176 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1177 		     EDP_PSR2_IDLE_FRAMES_MASK,
1178 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1179 }
1180 
1181 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1182 {
1183 	struct intel_display *display = to_intel_display(intel_dp);
1184 
1185 	psr2_program_idle_frames(intel_dp, 0);
1186 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1187 }
1188 
1189 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1190 {
1191 	struct intel_display *display = to_intel_display(intel_dp);
1192 
1193 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1194 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1195 }
1196 
1197 static void tgl_dc3co_disable_work(struct work_struct *work)
1198 {
1199 	struct intel_dp *intel_dp =
1200 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1201 
1202 	mutex_lock(&intel_dp->psr.lock);
1203 	/* If delayed work is pending, it is not idle */
1204 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1205 		goto unlock;
1206 
1207 	tgl_psr2_disable_dc3co(intel_dp);
1208 unlock:
1209 	mutex_unlock(&intel_dp->psr.lock);
1210 }
1211 
1212 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1213 {
1214 	if (!intel_dp->psr.dc3co_exitline)
1215 		return;
1216 
1217 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1218 	/* Before PSR2 exit, disallow DC3CO */
1219 	tgl_psr2_disable_dc3co(intel_dp);
1220 }
1221 
1222 static bool
1223 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1224 			      struct intel_crtc_state *crtc_state)
1225 {
1226 	struct intel_display *display = to_intel_display(intel_dp);
1227 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1228 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1229 	enum port port = dig_port->base.port;
1230 
1231 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1232 		return pipe <= PIPE_B && port <= PORT_B;
1233 	else
1234 		return pipe == PIPE_A && port == PORT_A;
1235 }
1236 
1237 static void
1238 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1239 				  struct intel_crtc_state *crtc_state)
1240 {
1241 	struct intel_display *display = to_intel_display(intel_dp);
1242 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1243 	struct i915_power_domains *power_domains = &display->power.domains;
1244 	u32 exit_scanlines;
1245 
1246 	/*
1247 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1248 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1249 	 * is applied. B.Specs:49196
1250 	 */
1251 	return;
1252 
1253 	/*
1254 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch.
1255 	 * TODO: when the issue is addressed, this restriction should be removed.
1256 	 */
1257 	if (crtc_state->enable_psr2_sel_fetch)
1258 		return;
1259 
1260 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1261 		return;
1262 
1263 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1264 		return;
1265 
1266 	/* Wa_16011303918:adl-p */
1267 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1268 		return;
1269 
1270 	/*
1271 	 * DC3CO Exit time 200us B.Spec 49196
1272 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1273 	 */
1274 	exit_scanlines =
1275 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1276 
1277 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1278 		return;
1279 
1280 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1281 }
1282 
1283 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1284 					      struct intel_crtc_state *crtc_state)
1285 {
1286 	struct intel_display *display = to_intel_display(intel_dp);
1287 
1288 	if (!display->params.enable_psr2_sel_fetch &&
1289 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1290 		drm_dbg_kms(display->drm,
1291 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1292 		return false;
1293 	}
1294 
1295 	return crtc_state->enable_psr2_sel_fetch = true;
1296 }
1297 
1298 static bool psr2_granularity_check(struct intel_crtc_state *crtc_state,
1299 				   struct intel_connector *connector)
1300 {
1301 	struct intel_dp *intel_dp = intel_attached_dp(connector);
1302 	struct intel_display *display = to_intel_display(intel_dp);
1303 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1304 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1305 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1306 	u16 y_granularity = 0;
1307 	u16 sink_y_granularity = crtc_state->has_panel_replay ?
1308 		connector->dp.panel_replay_caps.su_y_granularity :
1309 		connector->dp.psr_caps.su_y_granularity;
1310 	u16 sink_w_granularity =  crtc_state->has_panel_replay ?
1311 		connector->dp.panel_replay_caps.su_w_granularity :
1312 		connector->dp.psr_caps.su_w_granularity;
1313 
1314 	/* PSR2 HW only sends full lines, so we only need to validate the width */
1315 	if (crtc_hdisplay % sink_w_granularity)
1316 		return false;
1317 
1318 	if (crtc_vdisplay % sink_y_granularity)
1319 		return false;
1320 
1321 	/* HW tracking is only aligned to 4 lines */
1322 	if (!crtc_state->enable_psr2_sel_fetch)
1323 		return sink_y_granularity == 4;
1324 
1325 	/*
1326 	 * adl_p and mtl platforms have 1 line granularity.
1327 	 * For other platforms with SW tracking we can adjust the y coordinates
1328 	 * to match the sink requirement if it is a multiple of 4.
1329 	 */
1330 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1331 		y_granularity = sink_y_granularity;
1332 	else if (sink_y_granularity <= 2)
1333 		y_granularity = 4;
1334 	else if ((sink_y_granularity % 4) == 0)
1335 		y_granularity = sink_y_granularity;
1336 
1337 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1338 		return false;
1339 
1340 	if (crtc_state->dsc.compression_enable &&
1341 	    vdsc_cfg->slice_height % y_granularity)
1342 		return false;
1343 
1344 	crtc_state->su_y_granularity = y_granularity;
1345 	return true;
1346 }
1347 
1348 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1349 							struct intel_crtc_state *crtc_state)
1350 {
1351 	struct intel_display *display = to_intel_display(intel_dp);
1352 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1353 	u32 hblank_total, hblank_ns, req_ns;
1354 
1355 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1356 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1357 
1358 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1359 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1360 
1361 	if ((hblank_ns - req_ns) > 100)
1362 		return true;
1363 
1364 	/* Not supported <13 / Wa_22012279113:adl-p */
1365 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1366 		return false;
1367 
1368 	crtc_state->req_psr2_sdp_prior_scanline = true;
1369 	return true;
1370 }
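
/*
 * Editor's worked example (illustrative mode/link values, not part of the
 * driver): with 4 lanes and port_clock == 270000 (HBR2),
 * req_ns = ((60 / 4) + 11) * 1000 / 270 ~= 96 ns; a 160 pixel hblank at
 * crtc_clock == 533250 gives hblank_ns = 1000000 * 160 / 533250 ~= 300 ns,
 * so hblank_ns - req_ns ~= 204 ns clears the 100 ns margin and the function
 * returns true without needing req_psr2_sdp_prior_scanline.
 */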
1371 
1372 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1373 					struct drm_connector_state *conn_state,
1374 					const struct drm_display_mode *adjusted_mode)
1375 {
1376 	struct intel_display *display = to_intel_display(intel_dp);
1377 	struct intel_connector *connector = to_intel_connector(conn_state->connector);
1378 	int psr_setup_time = drm_dp_psr_setup_time(connector->dp.psr_caps.dpcd);
1379 	int entry_setup_frames = 0;
1380 
1381 	if (psr_setup_time < 0) {
1382 		drm_dbg_kms(display->drm,
1383 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1384 			    connector->dp.psr_caps.dpcd[1]);
1385 		return -ETIME;
1386 	}
1387 
1388 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1389 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1390 		if (DISPLAY_VER(display) >= 20) {
1391 			/* setup entry frames can be up to 3 frames */
1392 			entry_setup_frames = 1;
1393 			drm_dbg_kms(display->drm,
1394 				    "PSR setup entry frames %d\n",
1395 				    entry_setup_frames);
1396 		} else {
1397 			drm_dbg_kms(display->drm,
1398 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1399 				    psr_setup_time);
1400 			return -ETIME;
1401 		}
1402 	}
1403 
1404 	return entry_setup_frames;
1405 }
1406 
1407 static
1408 int _intel_psr_min_set_context_latency(const struct intel_crtc_state *crtc_state,
1409 				       bool needs_panel_replay,
1410 				       bool needs_sel_update)
1411 {
1412 	struct intel_display *display = to_intel_display(crtc_state);
1413 
1414 	if (!crtc_state->has_psr)
1415 		return 0;
1416 
1417 	/* Wa_14015401596 */
1418 	if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14))
1419 		return 1;
1420 
1421 	/* Rest is for SRD_STATUS needed on LunarLake and onwards */
1422 	if (DISPLAY_VER(display) < 20)
1423 		return 0;
1424 
1425 	/*
1426 	 * Comment on SRD_STATUS register in Bspec for LunarLake and onwards:
1427 	 *
1428 	 * To deterministically capture the transition of the state machine
1429 	 * going from SRDOFFACK to IDLE, the delayed V. Blank should be at least
1430 	 * one line after the non-delayed V. Blank.
1431 	 *
1432 	 * Legacy TG: TRANS_SET_CONTEXT_LATENCY > 0
1433 	 * VRR TG: TRANS_VRR_CTL[ VRR Guardband ] < (TRANS_VRR_VMAX[ VRR Vmax ]
1434 	 * - TRANS_VTOTAL[ Vertical Active ])
1435 	 *
1436 	 * SRD_STATUS is used only by PSR1 on PantherLake.
1437 	 * SRD_STATUS is used by PSR1 and Panel Replay DP on LunarLake.
1438 	 */
1439 
1440 	if (DISPLAY_VER(display) >= 30 && (needs_panel_replay ||
1441 					   needs_sel_update))
1442 		return 0;
1443 	else if (DISPLAY_VER(display) < 30 && (needs_sel_update ||
1444 					       intel_crtc_has_type(crtc_state,
1445 								   INTEL_OUTPUT_EDP)))
1446 		return 0;
1447 	else
1448 		return 1;
1449 }
1450 
1451 static bool _wake_lines_fit_into_vblank(const struct intel_crtc_state *crtc_state,
1452 					int vblank,
1453 					int wake_lines)
1454 {
1455 	if (crtc_state->req_psr2_sdp_prior_scanline)
1456 		vblank -= 1;
1457 
1458 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1459 	if (vblank < wake_lines)
1460 		return false;
1461 
1462 	return true;
1463 }
1464 
1465 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1466 				       const struct intel_crtc_state *crtc_state,
1467 				       bool aux_less,
1468 				       bool needs_panel_replay,
1469 				       bool needs_sel_update)
1470 {
1471 	struct intel_display *display = to_intel_display(intel_dp);
1472 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1473 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1474 	int wake_lines;
1475 	int scl = _intel_psr_min_set_context_latency(crtc_state,
1476 						     needs_panel_replay,
1477 						     needs_sel_update);
1478 	vblank -= scl;
1479 
1480 	if (aux_less)
1481 		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
1482 	else
1483 		wake_lines = DISPLAY_VER(display) < 20 ?
1484 			psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
1485 					       crtc_state->alpm_state.fast_wake_lines) :
1486 			crtc_state->alpm_state.io_wake_lines;
1487 
1488 	/*
1489 	 * Guardband has not been computed yet, so we conservatively check if the
1490 	 * full vblank duration is sufficient to accommodate wake line requirements
1491 	 * for PSR features like Panel Replay and Selective Update.
1492 	 *
1493 	 * Once the actual guardband is available, a more accurate validation is
1494 	 * performed in intel_psr_compute_config_late(), and PSR features are
1495 	 * disabled if wake lines exceed the available guardband.
1496 	 */
1497 	return _wake_lines_fit_into_vblank(crtc_state, vblank, wake_lines);
1498 }
1499 
1500 static bool alpm_config_valid(struct intel_dp *intel_dp,
1501 			      struct intel_crtc_state *crtc_state,
1502 			      bool aux_less,
1503 			      bool needs_panel_replay,
1504 			      bool needs_sel_update)
1505 {
1506 	struct intel_display *display = to_intel_display(intel_dp);
1507 
1508 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1509 		drm_dbg_kms(display->drm,
1510 			    "PSR2/Panel Replay not enabled, unable to use long enough wake times\n");
1511 		return false;
1512 	}
1513 
1514 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less,
1515 					needs_panel_replay, needs_sel_update)) {
1516 		drm_dbg_kms(display->drm,
1517 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1518 		return false;
1519 	}
1520 
1521 	return true;
1522 }
1523 
1524 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1525 				    struct intel_crtc_state *crtc_state,
1526 				    struct drm_connector_state *conn_state)
1527 {
1528 	struct intel_display *display = to_intel_display(intel_dp);
1529 	struct intel_connector *connector = to_intel_connector(conn_state->connector);
1530 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1531 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1532 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1533 
1534 	if (!connector->dp.psr_caps.su_support || display->params.enable_psr == 1)
1535 		return false;
1536 
1537 	/* JSL and EHL only support eDP 1.3 */
1538 	if (display->platform.jasperlake || display->platform.elkhartlake) {
1539 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1540 		return false;
1541 	}
1542 
1543 	/* Wa_16011181250 */
1544 	if (display->platform.rocketlake || display->platform.alderlake_s ||
1545 	    display->platform.dg2) {
1546 		drm_dbg_kms(display->drm,
1547 			    "PSR2 is defeatured for this platform\n");
1548 		return false;
1549 	}
1550 
1551 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1552 		drm_dbg_kms(display->drm,
1553 			    "PSR2 not completely functional in this stepping\n");
1554 		return false;
1555 	}
1556 
1557 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1558 		drm_dbg_kms(display->drm,
1559 			    "PSR2 not supported in transcoder %s\n",
1560 			    transcoder_name(crtc_state->cpu_transcoder));
1561 		return false;
1562 	}
1563 
1564 	/*
1565 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1566 	 * resolution requires DSC to be enabled, priority is given to DSC
1567 	 * over PSR2.
1568 	 */
1569 	if (crtc_state->dsc.compression_enable &&
1570 	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
1571 		drm_dbg_kms(display->drm,
1572 			    "PSR2 cannot be enabled since DSC is enabled\n");
1573 		return false;
1574 	}
1575 
1576 	if (DISPLAY_VER(display) >= 20) {
1577 		psr_max_h = crtc_hdisplay;
1578 		psr_max_v = crtc_vdisplay;
1579 		max_bpp = crtc_state->pipe_bpp;
1580 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1581 		psr_max_h = 5120;
1582 		psr_max_v = 3200;
1583 		max_bpp = 30;
1584 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1585 		psr_max_h = 4096;
1586 		psr_max_v = 2304;
1587 		max_bpp = 24;
1588 	} else if (DISPLAY_VER(display) == 9) {
1589 		psr_max_h = 3640;
1590 		psr_max_v = 2304;
1591 		max_bpp = 24;
1592 	}
1593 
1594 	if (crtc_state->pipe_bpp > max_bpp) {
1595 		drm_dbg_kms(display->drm,
1596 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1597 			    crtc_state->pipe_bpp, max_bpp);
1598 		return false;
1599 	}
1600 
1601 	/* Wa_16011303918:adl-p */
1602 	if (crtc_state->vrr.enable &&
1603 	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1604 		drm_dbg_kms(display->drm,
1605 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1606 		return false;
1607 	}
1608 
1609 	if (!alpm_config_valid(intel_dp, crtc_state, false, false, true))
1610 		return false;
1611 
1612 	if (!crtc_state->enable_psr2_sel_fetch &&
1613 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1614 		drm_dbg_kms(display->drm,
1615 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1616 			    crtc_hdisplay, crtc_vdisplay,
1617 			    psr_max_h, psr_max_v);
1618 		return false;
1619 	}
1620 
1621 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1622 
1623 	return true;
1624 }
1625 
1626 static bool intel_sel_update_config_valid(struct intel_crtc_state *crtc_state,
1627 					  struct drm_connector_state *conn_state)
1628 {
1629 	struct intel_connector *connector = to_intel_connector(conn_state->connector);
1630 	struct intel_dp *intel_dp = intel_attached_dp(connector);
1631 	struct intel_display *display = to_intel_display(intel_dp);
1632 
1633 	if (HAS_PSR2_SEL_FETCH(display) &&
1634 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1635 	    !HAS_PSR_HW_TRACKING(display)) {
1636 		drm_dbg_kms(display->drm,
1637 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1638 		goto unsupported;
1639 	}
1640 
1641 	if (!sel_update_global_enabled(intel_dp)) {
1642 		drm_dbg_kms(display->drm,
1643 			    "Selective update disabled by flag\n");
1644 		goto unsupported;
1645 	}
1646 
1647 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state,
1648 								      conn_state))
1649 		goto unsupported;
1650 
1651 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1652 		drm_dbg_kms(display->drm,
1653 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1654 		goto unsupported;
1655 	}
1656 
1657 	if (crtc_state->has_panel_replay) {
1658 		if (DISPLAY_VER(display) < 14)
1659 			goto unsupported;
1660 
1661 		if (!connector->dp.panel_replay_caps.su_support)
1662 			goto unsupported;
1663 
1664 		if (intel_dsc_enabled_on_link(crtc_state) &&
1665 		    connector->dp.panel_replay_caps.dsc_support !=
1666 		    INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE) {
1667 			drm_dbg_kms(display->drm,
1668 				    "Selective update with Panel Replay not enabled because it's not supported with DSC\n");
1669 			goto unsupported;
1670 		}
1671 	}
1672 
1673 	if (crtc_state->crc_enabled) {
1674 		drm_dbg_kms(display->drm,
1675 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1676 		goto unsupported;
1677 	}
1678 
1679 	if (!psr2_granularity_check(crtc_state, connector)) {
1680 		drm_dbg_kms(display->drm,
1681 			    "Selective update not enabled, SU granularity not compatible\n");
1682 		goto unsupported;
1683 	}
1684 
1685 	crtc_state->enable_psr2_su_region_et = psr2_su_region_et_valid(connector,
1686 								       crtc_state->has_panel_replay);
1687 
1688 	return true;
1689 
1690 unsupported:
1691 	crtc_state->enable_psr2_sel_fetch = false;
1692 	return false;
1693 }
1694 
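/*
 * Check the base PSR constraints: PSR support and the enable_psr knob, no
 * VRR, and the sink's PSR entry setup frame requirement. Selective Update
 * and Panel Replay are handled separately.
 */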
1695 static bool _psr_compute_config(struct intel_dp *intel_dp,
1696 				struct intel_crtc_state *crtc_state,
1697 				struct drm_connector_state *conn_state)
1698 {
1699 	struct intel_display *display = to_intel_display(intel_dp);
1700 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1701 	int entry_setup_frames;
1702 
1703 	if (!CAN_PSR(intel_dp) || !display->params.enable_psr)
1704 		return false;
1705 
1706 	/*
1707 	 * Currently PSR doesn't work reliably with VRR enabled.
1708 	 */
1709 	if (crtc_state->vrr.enable)
1710 		return false;
1711 
1712 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, conn_state, adjusted_mode);
1713 
1714 	if (entry_setup_frames >= 0) {
1715 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1716 	} else {
1717 		crtc_state->no_psr_reason = "PSR setup timing not met";
1718 		drm_dbg_kms(display->drm,
1719 			    "PSR condition failed: PSR setup timing not met\n");
1720 		return false;
1721 	}
1722 
1723 	return true;
1724 }
1725 
1726 static inline bool compute_link_off_after_as_sdp_when_pr_active(struct intel_connector *connector)
1727 {
1728 	return (connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_CAPABILITY)] &
1729 		DP_PANEL_REPLAY_LINK_OFF_SUPPORTED_IN_PR_AFTER_ADAPTIVE_SYNC_SDP);
1730 }
1731 
1732 static inline bool compute_disable_as_sdp_when_pr_active(struct intel_connector *connector)
1733 {
1734 	return !(connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_CAPABILITY)] &
1735 		 DP_PANEL_REPLAY_ASYNC_VIDEO_TIMING_NOT_SUPPORTED_IN_PR);
1736 }
1737 
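/*
 * Check the Panel Replay specific constraints (sink support, global enable,
 * CRC, DSC, and for eDP also pipe, link rate, HDCP and ALPM). Returns true
 * if Panel Replay can be used for this state.
 */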
1738 static bool _panel_replay_compute_config(struct intel_crtc_state *crtc_state,
1739 					 const struct drm_connector_state *conn_state)
1740 {
1741 	struct intel_connector *connector =
1742 		to_intel_connector(conn_state->connector);
1743 	struct intel_dp *intel_dp = intel_attached_dp(connector);
1744 	struct intel_display *display = to_intel_display(intel_dp);
1745 	struct intel_hdcp *hdcp = &connector->hdcp;
1746 
1747 	if (!CAN_PANEL_REPLAY(intel_dp))
1748 		return false;
1749 
1750 	if (!connector->dp.panel_replay_caps.support)
1751 		return false;
1752 
1753 	if (!panel_replay_global_enabled(intel_dp)) {
1754 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1755 		return false;
1756 	}
1757 
1758 	if (crtc_state->crc_enabled) {
1759 		drm_dbg_kms(display->drm,
1760 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1761 		return false;
1762 	}
1763 
1764 	if (intel_dsc_enabled_on_link(crtc_state) &&
1765 	    connector->dp.panel_replay_caps.dsc_support ==
1766 	    INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED) {
1767 		drm_dbg_kms(display->drm,
1768 			    "Panel Replay not enabled because it's not supported with DSC\n");
1769 		return false;
1770 	}
1771 
1772 	crtc_state->link_off_after_as_sdp_when_pr_active = compute_link_off_after_as_sdp_when_pr_active(connector);
1773 	crtc_state->disable_as_sdp_when_pr_active = compute_disable_as_sdp_when_pr_active(connector);
1774 
1775 	if (!intel_dp_is_edp(intel_dp))
1776 		return true;
1777 
1778 	/* Remaining checks are for eDP only */
1779 
1780 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1781 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1782 		return false;
1783 
1784 	/* 128b/132b Panel Replay is not supported on eDP */
1785 	if (intel_dp_is_uhbr(crtc_state)) {
1786 		drm_dbg_kms(display->drm,
1787 			    "Panel Replay is not supported with 128b/132b\n");
1788 		return false;
1789 	}
1790 
1791 	/* HW will not allow Panel Replay on eDP when HDCP is enabled */
1792 	if (conn_state->content_protection ==
1793 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1794 	    (conn_state->content_protection ==
1795 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1796 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1797 		drm_dbg_kms(display->drm,
1798 			    "Panel Replay is not supported with HDCP\n");
1799 		return false;
1800 	}
1801 
1802 	if (!alpm_config_valid(intel_dp, crtc_state, true, true, false))
1803 		return false;
1804 
1805 	return true;
1806 }
1807 
1808 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1809 					   struct intel_crtc_state *crtc_state)
1810 {
1811 	struct intel_display *display = to_intel_display(intel_dp);
1812 
1813 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1814 		!crtc_state->has_sel_update);
1815 }
1816 
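/*
 * Record which other pipes are active alongside the PSR pipe. This is only
 * needed by Wa_16025596647 and is skipped for Panel Replay.
 */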
1817 static
1818 void intel_psr_set_non_psr_pipes(struct intel_dp *intel_dp,
1819 				 struct intel_crtc_state *crtc_state)
1820 {
1821 	struct intel_display *display = to_intel_display(intel_dp);
1822 	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
1823 	struct intel_crtc *crtc;
1824 	u8 active_pipes = 0;
1825 
1826 	/* Wa_16025596647 */
1827 	if (DISPLAY_VER(display) != 20 &&
1828 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
1829 		return;
1830 
1831 	/* Not needed by Panel Replay */
1832 	if (crtc_state->has_panel_replay)
1833 		return;
1834 
1835 	/* We ignore possible secondary PSR/Panel Replay capable eDP */
1836 	for_each_intel_crtc(display->drm, crtc)
1837 		active_pipes |= crtc->active ? BIT(crtc->pipe) : 0;
1838 
1839 	active_pipes = intel_calc_active_pipes(state, active_pipes);
1840 
1841 	crtc_state->active_non_psr_pipes = active_pipes &
1842 		~BIT(to_intel_crtc(crtc_state->uapi.crtc)->pipe);
1843 }
1844 
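/**
 * intel_psr_compute_config - Compute PSR/Panel Replay state for a CRTC
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state
 * @conn_state: new connector state
 *
 * Decide whether Panel Replay, PSR and Selective Update can be used for the
 * given state and record the result in @crtc_state.
 */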
1845 void intel_psr_compute_config(struct intel_dp *intel_dp,
1846 			      struct intel_crtc_state *crtc_state,
1847 			      struct drm_connector_state *conn_state)
1848 {
1849 	struct intel_display *display = to_intel_display(intel_dp);
1850 	struct intel_connector *connector = to_intel_connector(conn_state->connector);
1851 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1852 
1853 	if (!psr_global_enabled(intel_dp)) {
1854 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1855 		return;
1856 	}
1857 
1858 	if (intel_dp->psr.sink_not_reliable) {
1859 		drm_dbg_kms(display->drm,
1860 			    "PSR sink implementation is not reliable\n");
1861 		return;
1862 	}
1863 
1864 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1865 		drm_dbg_kms(display->drm,
1866 			    "PSR condition failed: Interlaced mode enabled\n");
1867 		return;
1868 	}
1869 
1870 	/*
1871 	 * FIXME figure out what is wrong with PSR+joiner and
1872 	 * fix it. Presumably something related to the fact that
1873 	 * PSR is a transcoder level feature.
1874 	 */
1875 	if (crtc_state->joiner_pipes) {
1876 		drm_dbg_kms(display->drm,
1877 			    "PSR disabled due to joiner\n");
1878 		return;
1879 	}
1880 
1881 	/* Only used for state verification. */
1882 	crtc_state->panel_replay_dsc_support = connector->dp.panel_replay_caps.dsc_support;
1883 	crtc_state->has_panel_replay = _panel_replay_compute_config(crtc_state, conn_state);
1884 
1885 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1886 		_psr_compute_config(intel_dp, crtc_state, conn_state);
1887 
1888 	if (!crtc_state->has_psr)
1889 		return;
1890 
1891 	crtc_state->has_sel_update = intel_sel_update_config_valid(crtc_state, conn_state);
1892 }
1893 
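/**
 * intel_psr_get_config - Read out the current PSR/Panel Replay state
 * @encoder: Intel encoder
 * @pipe_config: CRTC state to fill in
 *
 * Read the PSR/Panel Replay related fields of @pipe_config back from the
 * hardware and software state for state verification.
 */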
1894 void intel_psr_get_config(struct intel_encoder *encoder,
1895 			  struct intel_crtc_state *pipe_config)
1896 {
1897 	struct intel_display *display = to_intel_display(encoder);
1898 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1899 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1900 	struct intel_dp *intel_dp;
1901 	u32 val;
1902 
1903 	if (!dig_port)
1904 		return;
1905 
1906 	intel_dp = &dig_port->dp;
1907 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1908 		return;
1909 
1910 	mutex_lock(&intel_dp->psr.lock);
1911 	if (!intel_dp->psr.enabled)
1912 		goto unlock;
1913 
1914 	if (intel_dp->psr.panel_replay_enabled) {
1915 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1916 	} else {
1917 		/*
1918 		 * Not possible to read EDP_PSR/PSR2_CTL registers as PSR is
1919 		 * enabled/disabled at runtime by frontbuffer tracking and others.
1920 		 */
1921 		pipe_config->has_psr = true;
1922 	}
1923 
1924 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1925 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1926 
1927 	if (!intel_dp->psr.sel_update_enabled)
1928 		goto unlock;
1929 
1930 	if (HAS_PSR2_SEL_FETCH(display)) {
1931 		val = intel_de_read(display,
1932 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1933 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1934 			pipe_config->enable_psr2_sel_fetch = true;
1935 	}
1936 
1937 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1938 
1939 	if (DISPLAY_VER(display) >= 12) {
1940 		val = intel_de_read(display,
1941 				    TRANS_EXITLINE(display, cpu_transcoder));
1942 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1943 	}
1944 unlock:
1945 	mutex_unlock(&intel_dp->psr.lock);
1946 }
1947 
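/* Arm exactly one of Panel Replay, PSR2 or PSR1 on an already enabled PSR. */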
1948 static void intel_psr_activate(struct intel_dp *intel_dp)
1949 {
1950 	struct intel_display *display = to_intel_display(intel_dp);
1951 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1952 
1953 	drm_WARN_ON(display->drm,
1954 		    transcoder_has_psr2(display, cpu_transcoder) &&
1955 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1956 
1957 	drm_WARN_ON(display->drm,
1958 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1959 
1960 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1961 
1962 	drm_WARN_ON(display->drm, !intel_dp->psr.enabled);
1963 
1964 	lockdep_assert_held(&intel_dp->psr.lock);
1965 
1966 	/* psr1, psr2 and panel-replay are mutually exclusive. */
1967 	if (intel_dp->psr.panel_replay_enabled)
1968 		dg2_activate_panel_replay(intel_dp);
1969 	else if (intel_dp->psr.sel_update_enabled)
1970 		hsw_activate_psr2(intel_dp);
1971 	else
1972 		hsw_activate_psr1(intel_dp);
1973 
1974 	intel_dp->psr.active = true;
1975 	intel_dp->psr.no_psr_reason = NULL;
1976 }
1977 
1978 /*
1979  * Wa_16013835468
1980  * Wa_14015648006
1981  */
1982 static void wm_optimization_wa(struct intel_dp *intel_dp,
1983 			       const struct intel_crtc_state *crtc_state)
1984 {
1985 	struct intel_display *display = to_intel_display(intel_dp);
1986 	enum pipe pipe = intel_dp->psr.pipe;
1987 	bool activate = false;
1988 
1989 	/* Wa_14015648006 */
1990 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1991 		activate = true;
1992 
1993 	/* Wa_16013835468 */
1994 	if (DISPLAY_VER(display) == 12 &&
1995 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1996 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1997 		activate = true;
1998 
1999 	if (activate)
2000 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2001 			     0, LATENCY_REPORTING_REMOVED(pipe));
2002 	else
2003 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2004 			     LATENCY_REPORTING_REMOVED(pipe), 0);
2005 }
2006 
2007 static void intel_psr_enable_source(struct intel_dp *intel_dp,
2008 				    const struct intel_crtc_state *crtc_state)
2009 {
2010 	struct intel_display *display = to_intel_display(intel_dp);
2011 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2012 	u32 mask = 0;
2013 
2014 	/*
2015 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
2016 	 * SKL+ use hardcoded values for PSR AUX transactions.
2017 	 */
2018 	if (DISPLAY_VER(display) < 9)
2019 		hsw_psr_setup_aux(intel_dp);
2020 
2021 	/*
2022 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
2023 	 * mask LPSP to avoid dependency on other drivers that might block
2024 	 * runtime_pm, besides preventing other hw tracking issues, now that
2025 	 * we can rely on frontbuffer tracking.
2026 	 *
2027 	 * From bspec prior LunarLake:
2028 	 * From bspec prior to LunarLake:
2029 	 * panel replay mode.
2030 	 *
2031 	 * From bspec beyond LunarLake:
2032 	 * Panel Replay on DP: No bits are applicable
2033 	 * Panel Replay on eDP: All bits are applicable
2034 	 */
2035 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
2036 		mask = EDP_PSR_DEBUG_MASK_HPD;
2037 
2038 	if (intel_dp_is_edp(intel_dp)) {
2039 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
2040 
2041 		/*
2042 		 * For some unknown reason on HSW non-ULT (or at least on
2043 		 * Dell Latitude E6540) external displays start to flicker
2044 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
2045 		 * higher than should be possible with an external display.
2046 		 * As a workaround leave LPSP unmasked to prevent PSR entry
2047 		 * when external displays are active.
2048 		 */
2049 		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
2050 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
2051 
2052 		if (DISPLAY_VER(display) < 20)
2053 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
2054 
2055 		/*
2056 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
2057 		 * registers in order to keep the CURSURFLIVE tricks working :(
2058 		 */
2059 		if (IS_DISPLAY_VER(display, 9, 10))
2060 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
2061 
2062 		/* allow PSR with sprite enabled */
2063 		if (display->platform.haswell)
2064 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
2065 	}
2066 
2067 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
2068 
2069 	psr_irq_control(intel_dp);
2070 
2071 	/*
2072 	 * TODO: if future platforms support DC3CO in more than one
2073 	 * transcoder, EXITLINE will need to be unset when disabling PSR
2074 	 */
2075 	if (intel_dp->psr.dc3co_exitline)
2076 		intel_de_rmw(display,
2077 			     TRANS_EXITLINE(display, cpu_transcoder),
2078 			     EXITLINE_MASK,
2079 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
2080 
2081 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
2082 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
2083 			     intel_dp->psr.psr2_sel_fetch_enabled ?
2084 			     IGNORE_PSR2_HW_TRACKING : 0);
2085 
2086 	/*
2087 	 * Wa_16013835468
2088 	 * Wa_14015648006
2089 	 */
2090 	wm_optimization_wa(intel_dp, crtc_state);
2091 
2092 	if (intel_dp->psr.sel_update_enabled) {
2093 		if (DISPLAY_VER(display) == 9)
2094 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
2095 				     PSR2_VSC_ENABLE_PROG_HEADER |
2096 				     PSR2_ADD_VERTICAL_LINE_COUNT);
2097 
2098 		/*
2099 		 * Wa_16014451276:adlp,mtl[a0,b0]
2100 		 * All supported adlp panels have 1-based X granularity; this may
2101 		 * cause issues if non-supported panels are used.
2102 		 */
2103 		if (!intel_dp->psr.panel_replay_enabled &&
2104 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2105 		     display->platform.alderlake_p))
2106 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
2107 				     0, ADLP_1_BASED_X_GRANULARITY);
2108 
2109 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2110 		if (!intel_dp->psr.panel_replay_enabled &&
2111 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2112 			intel_de_rmw(display,
2113 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2114 				     0,
2115 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
2116 		else if (display->platform.alderlake_p)
2117 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
2118 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
2119 	}
2120 
2121 	/* Wa_16025596647 */
2122 	if ((DISPLAY_VER(display) == 20 ||
2123 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2124 	    !intel_dp->psr.panel_replay_enabled)
2125 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true);
2126 
2127 	intel_alpm_configure(intel_dp, crtc_state);
2128 }
2129 
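/*
 * Check for a pending PSR error interrupt left over from before a driver
 * reload; if one is set, mark the sink as not reliable and refuse to enable
 * PSR. Panel Replay skips this check.
 */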
2130 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
2131 {
2132 	struct intel_display *display = to_intel_display(intel_dp);
2133 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2134 	u32 val;
2135 
2136 	if (intel_dp->psr.panel_replay_enabled)
2137 		goto no_err;
2138 
2139 	/*
2140 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
2141 	 * will still keep the error set even after the reset done in the
2142 	 * irq_preinstall and irq_uninstall hooks.
2143 	 * Enabling PSR in this situation causes the screen to freeze the
2144 	 * first time that PSR HW tries to activate, so let's keep PSR disabled
2145 	 * to avoid any rendering problems.
2146 	 */
2147 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
2148 	val &= psr_irq_psr_error_bit_get(intel_dp);
2149 	if (val) {
2150 		intel_dp->psr.sink_not_reliable = true;
2151 		drm_dbg_kms(display->drm,
2152 			    "PSR interruption error set, not enabling PSR\n");
2153 		return false;
2154 	}
2155 
2156 no_err:
2157 	return true;
2158 }
2159 
2160 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
2161 				    const struct intel_crtc_state *crtc_state)
2162 {
2163 	struct intel_display *display = to_intel_display(intel_dp);
2164 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2165 	u32 val;
2166 
2167 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
2168 
2169 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
2170 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
2171 	intel_dp->psr.busy_frontbuffer_bits = 0;
2172 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
2173 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
2174 	/* DC5/DC6 requires at least 6 idle frames */
2175 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
2176 	intel_dp->psr.dc3co_exit_delay = val;
2177 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
2178 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
2179 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
2180 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2181 	intel_dp->psr.req_psr2_sdp_prior_scanline =
2182 		crtc_state->req_psr2_sdp_prior_scanline;
2183 	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
2184 	intel_dp->psr.pkg_c_latency_used = crtc_state->pkg_c_latency_used;
2185 	intel_dp->psr.io_wake_lines = crtc_state->alpm_state.io_wake_lines;
2186 	intel_dp->psr.fast_wake_lines = crtc_state->alpm_state.fast_wake_lines;
2187 
2188 	if (!psr_interrupt_error_check(intel_dp))
2189 		return;
2190 
2191 	if (intel_dp->psr.panel_replay_enabled)
2192 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
2193 	else
2194 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
2195 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2196 
2197 	/*
2198 	 * Enabling sink PSR/Panel Replay here only for PSR. Panel Replay enable
2199 	 * bit is already written at this point. Sink ALPM is enabled here for
2200 	 * PSR and Panel Replay. See
2201 	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2202 	 *  - Selective Update
2203 	 *  - Region Early Transport
2204 	 *  - Selective Update Region Scanline Capture
2205 	 *  - VSC_SDP_CRC
2206 	 *  - HPD on different Errors
2207 	 *  - CRC verification
2208 	 * are written for PSR and Panel Replay here.
2209 	 */
2210 	intel_psr_enable_sink(intel_dp, crtc_state);
2211 
2212 	if (intel_dp_is_edp(intel_dp))
2213 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2214 
2215 	intel_psr_enable_source(intel_dp, crtc_state);
2216 	intel_dp->psr.enabled = true;
2217 	intel_dp->psr.pause_counter = 0;
2218 
2219 	/*
2220 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2221 	 * training is complete as we never continue to PSR enable with an
2222 	 * untrained link. Link_ok is kept set until the first short pulse
2223 	 * interrupt. This is targeted to work around panels reporting a bad
2224 	 * link after PSR is enabled.
2225 	 */
2226 	intel_dp->psr.link_ok = true;
2227 
2228 	intel_psr_activate(intel_dp);
2229 }
2230 
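/*
 * Deactivate PSR/PSR2/Panel Replay in the hardware. The feature stays
 * enabled in software so it can be re-activated later via intel_psr_activate().
 */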
2231 static void intel_psr_exit(struct intel_dp *intel_dp)
2232 {
2233 	struct intel_display *display = to_intel_display(intel_dp);
2234 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2235 	u32 val;
2236 
2237 	if (!intel_dp->psr.active) {
2238 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2239 			val = intel_de_read(display,
2240 					    EDP_PSR2_CTL(display, cpu_transcoder));
2241 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2242 		}
2243 
2244 		val = intel_de_read(display,
2245 				    psr_ctl_reg(display, cpu_transcoder));
2246 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2247 
2248 		return;
2249 	}
2250 
2251 	if (intel_dp->psr.panel_replay_enabled) {
2252 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2253 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2254 	} else if (intel_dp->psr.sel_update_enabled) {
2255 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2256 
2257 		val = intel_de_rmw(display,
2258 				   EDP_PSR2_CTL(display, cpu_transcoder),
2259 				   EDP_PSR2_ENABLE, 0);
2260 
2261 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2262 	} else {
2263 		if ((DISPLAY_VER(display) == 20 ||
2264 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2265 			intel_dp->psr.pkg_c_latency_used)
2266 			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
2267 								       intel_dp->psr.pipe,
2268 								       false);
2269 
2270 		val = intel_de_rmw(display,
2271 				   psr_ctl_reg(display, cpu_transcoder),
2272 				   EDP_PSR_ENABLE, 0);
2273 
2274 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2275 	}
2276 	intel_dp->psr.active = false;
2277 }
2278 
2279 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2280 {
2281 	struct intel_display *display = to_intel_display(intel_dp);
2282 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2283 	i915_reg_t psr_status;
2284 	u32 psr_status_mask;
2285 
2286 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2287 					  intel_dp->psr.panel_replay_enabled)) {
2288 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2289 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2290 	} else {
2291 		psr_status = psr_status_reg(display, cpu_transcoder);
2292 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2293 	}
2294 
2295 	/* Wait till PSR is idle */
2296 	if (intel_de_wait_for_clear_ms(display, psr_status,
2297 				       psr_status_mask, 2000))
2298 		drm_err(display->drm, "Timed out waiting for PSR idle state\n");
2299 }
2300 
2301 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2302 {
2303 	struct intel_display *display = to_intel_display(intel_dp);
2304 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2305 
2306 	lockdep_assert_held(&intel_dp->psr.lock);
2307 
2308 	if (!intel_dp->psr.enabled)
2309 		return;
2310 
2311 	if (intel_dp->psr.panel_replay_enabled)
2312 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2313 	else
2314 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2315 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2316 
2317 	intel_psr_exit(intel_dp);
2318 	intel_psr_wait_exit_locked(intel_dp);
2319 
2320 	/*
2321 	 * Wa_16013835468
2322 	 * Wa_14015648006
2323 	 */
2324 	if (DISPLAY_VER(display) >= 11)
2325 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2326 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2327 
2328 	if (intel_dp->psr.sel_update_enabled) {
2329 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2330 		if (!intel_dp->psr.panel_replay_enabled &&
2331 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2332 			intel_de_rmw(display,
2333 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2334 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2335 		else if (display->platform.alderlake_p)
2336 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2337 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2338 	}
2339 
2340 	if (intel_dp_is_edp(intel_dp))
2341 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2342 
2343 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp))
2344 		intel_alpm_disable(intel_dp);
2345 
2346 	/* Disable PSR on Sink */
2347 	if (!intel_dp->psr.panel_replay_enabled) {
2348 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2349 
2350 		if (intel_dp->psr.sel_update_enabled)
2351 			drm_dp_dpcd_writeb(&intel_dp->aux,
2352 					   DP_RECEIVER_ALPM_CONFIG, 0);
2353 	}
2354 
2355 	/* Wa_16025596647 */
2356 	if ((DISPLAY_VER(display) == 20 ||
2357 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2358 	    !intel_dp->psr.panel_replay_enabled)
2359 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false);
2360 
2361 	intel_dp->psr.enabled = false;
2362 	intel_dp->psr.panel_replay_enabled = false;
2363 	intel_dp->psr.sel_update_enabled = false;
2364 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2365 	intel_dp->psr.su_region_et_enabled = false;
2366 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2367 	intel_dp->psr.active_non_psr_pipes = 0;
2368 	intel_dp->psr.pkg_c_latency_used = 0;
2369 }
2370 
2371 /**
2372  * intel_psr_disable - Disable PSR
2373  * @intel_dp: Intel DP
2374  * @old_crtc_state: old CRTC state
2375  *
2376  * This function needs to be called before disabling pipe.
2377  */
2378 void intel_psr_disable(struct intel_dp *intel_dp,
2379 		       const struct intel_crtc_state *old_crtc_state)
2380 {
2381 	struct intel_display *display = to_intel_display(intel_dp);
2382 
2383 	if (!old_crtc_state->has_psr)
2384 		return;
2385 
2386 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2387 			!CAN_PANEL_REPLAY(intel_dp)))
2388 		return;
2389 
2390 	mutex_lock(&intel_dp->psr.lock);
2391 
2392 	intel_psr_disable_locked(intel_dp);
2393 
2394 	intel_dp->psr.link_ok = false;
2395 
2396 	mutex_unlock(&intel_dp->psr.lock);
2397 	cancel_work_sync(&intel_dp->psr.work);
2398 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2399 }
2400 
2401 /**
2402  * intel_psr_pause - Pause PSR
2403  * @intel_dp: Intel DP
2404  *
2405  * This function needs to be called after enabling PSR.
2406  */
2407 void intel_psr_pause(struct intel_dp *intel_dp)
2408 {
2409 	struct intel_psr *psr = &intel_dp->psr;
2410 
2411 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2412 		return;
2413 
2414 	mutex_lock(&psr->lock);
2415 
2416 	if (!psr->enabled) {
2417 		mutex_unlock(&psr->lock);
2418 		return;
2419 	}
2420 
2421 	if (intel_dp->psr.pause_counter++ == 0) {
2422 		intel_psr_exit(intel_dp);
2423 		intel_psr_wait_exit_locked(intel_dp);
2424 	}
2425 
2426 	mutex_unlock(&psr->lock);
2427 
2428 	cancel_work_sync(&psr->work);
2429 	cancel_delayed_work_sync(&psr->dc3co_work);
2430 }
2431 
2432 /**
2433  * intel_psr_resume - Resume PSR
2434  * @intel_dp: Intel DP
2435  *
2436  * This function needs to be called after pausing PSR.
2437  */
2438 void intel_psr_resume(struct intel_dp *intel_dp)
2439 {
2440 	struct intel_display *display = to_intel_display(intel_dp);
2441 	struct intel_psr *psr = &intel_dp->psr;
2442 
2443 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2444 		return;
2445 
2446 	mutex_lock(&psr->lock);
2447 
2448 	if (!psr->enabled)
2449 		goto out;
2450 
2451 	if (!psr->pause_counter) {
2452 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2453 		goto out;
2454 	}
2455 
2456 	if (--intel_dp->psr.pause_counter == 0)
2457 		intel_psr_activate(intel_dp);
2458 
2459 out:
2460 	mutex_unlock(&psr->lock);
2461 }
2462 
2463 /**
2464  * intel_psr_needs_vblank_notification - Check if PSR needs vblank enable/disable
2465  * notification.
2466  * @crtc_state: CRTC state
2467  *
2468  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2469  * prevent it. Panel Replay switches the main link off on DC entry, which means
2470  * vblank interrupts are not fired and is a problem if user-space is polling
2471  * for vblank events. Also Wa_16025596647 needs to know when vblank is
2472  * enabled/disabled.
2473  */
2474 bool intel_psr_needs_vblank_notification(const struct intel_crtc_state *crtc_state)
2475 {
2476 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2477 	struct intel_display *display = to_intel_display(crtc_state);
2478 	struct intel_encoder *encoder;
2479 
2480 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2481 		struct intel_dp *intel_dp;
2482 
2483 		if (!intel_encoder_is_dp(encoder))
2484 			continue;
2485 
2486 		intel_dp = enc_to_intel_dp(encoder);
2487 
2488 		if (!intel_dp_is_edp(intel_dp))
2489 			continue;
2490 
2491 		if (CAN_PANEL_REPLAY(intel_dp))
2492 			return true;
2493 
2494 		if ((DISPLAY_VER(display) == 20 ||
2495 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2496 		    CAN_PSR(intel_dp))
2497 			return true;
2498 	}
2499 
2500 	return false;
2501 }
2502 
2503 /**
2504  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2505  * @dsb: DSB context
2506  * @state: the atomic state
2507  * @crtc: the CRTC
2508  *
2509  * Generate PSR "Frame Change" event.
2510  */
2511 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2512 					  struct intel_atomic_state *state,
2513 					  struct intel_crtc *crtc)
2514 {
2515 	const struct intel_crtc_state *crtc_state =
2516 		intel_pre_commit_crtc_state(state, crtc);
2517 	struct intel_display *display = to_intel_display(crtc);
2518 
2519 	if (crtc_state->has_psr)
2520 		intel_de_write_dsb(display, dsb,
2521 				   CURSURFLIVE(display, crtc->pipe), 0);
2522 }
2523 
2524 /**
2525  * intel_psr_min_set_context_latency - Minimum 'set context latency' lines needed by PSR
2526  * @crtc_state: the crtc state
2527  *
2528  * Return minimum SCL lines/delay needed by PSR.
2529  */
2530 int intel_psr_min_set_context_latency(const struct intel_crtc_state *crtc_state)
2531 {
2532 
2533 	return _intel_psr_min_set_context_latency(crtc_state,
2534 						  crtc_state->has_panel_replay,
2535 						  crtc_state->has_sel_update);
2536 }
2537 
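/*
 * The PSR2_MAN_TRK_CTL bit layout differs between pre-ADLP hardware and
 * ADLP/display 14+, hence the per-platform helpers below.
 */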
2538 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2539 {
2540 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2541 		PSR2_MAN_TRK_CTL_ENABLE;
2542 }
2543 
2544 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2545 {
2546 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2547 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2548 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2549 }
2550 
2551 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2552 {
2553 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2554 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2555 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2556 }
2557 
2558 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2559 {
2560 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2561 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2562 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2563 }
2564 
2565 static void intel_psr_force_update(struct intel_dp *intel_dp)
2566 {
2567 	struct intel_display *display = to_intel_display(intel_dp);
2568 
2569 	/*
2570 	 * Display WA #0884: skl+
2571 	 * This documented WA for bxt can be safely applied
2572 	 * broadly so we can force HW tracking to exit PSR
2573 	 * instead of disabling and re-enabling.
2574 	 * The workaround tells us to write 0 to CUR_SURFLIVE_A,
2575 	 * but it makes more sense to write to the currently active
2576 	 * pipe.
2577 	 *
2578 	 * This workaround is not documented for platforms with display 10 or
2579 	 * newer, but testing proved that it works up to display 13; for newer
2580 	 * than that, testing will be needed.
2581 	 */
2582 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2583 }
2584 
2585 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2586 					  const struct intel_crtc_state *crtc_state)
2587 {
2588 	struct intel_display *display = to_intel_display(crtc_state);
2589 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2590 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2591 	struct intel_encoder *encoder;
2592 
2593 	if (!crtc_state->enable_psr2_sel_fetch)
2594 		return;
2595 
2596 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2597 					     crtc_state->uapi.encoder_mask) {
2598 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2599 
2600 		if (!dsb)
2601 			lockdep_assert_held(&intel_dp->psr.lock);
2602 
2603 		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2604 			return;
2605 		break;
2606 	}
2607 
2608 	intel_de_write_dsb(display, dsb,
2609 			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2610 			   crtc_state->psr2_man_track_ctl);
2611 
2612 	if (!crtc_state->enable_psr2_su_region_et)
2613 		return;
2614 
2615 	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2616 			   crtc_state->pipe_srcsz_early_tpt);
2617 }
2618 
2619 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2620 				  bool full_update)
2621 {
2622 	struct intel_display *display = to_intel_display(crtc_state);
2623 	u32 val = man_trk_ctl_enable_bit_get(display);
2624 
2625 	/* SF partial frame enable has to be set even on full update */
2626 	val |= man_trk_ctl_partial_frame_bit_get(display);
2627 
2628 	if (full_update) {
2629 		val |= man_trk_ctl_continuos_full_frame(display);
2630 		goto exit;
2631 	}
2632 
2633 	if (crtc_state->psr2_su_area.y1 == -1)
2634 		goto exit;
2635 
2636 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
2637 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2638 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2639 	} else {
2640 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2641 			    crtc_state->psr2_su_area.y1 % 4 ||
2642 			    crtc_state->psr2_su_area.y2 % 4);
2643 
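		/*
		 * Pre-ADLP hardware takes the SU region boundaries in 4-line
		 * units with a +1 offset: e.g. y1 = 0, y2 = 8 programs a
		 * start address of 1 and an end address of 3.
		 */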
2644 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2645 			crtc_state->psr2_su_area.y1 / 4 + 1);
2646 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2647 			crtc_state->psr2_su_area.y2 / 4 + 1);
2648 	}
2649 exit:
2650 	crtc_state->psr2_man_track_ctl = val;
2651 }
2652 
2653 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2654 					  bool full_update)
2655 {
2656 	int width, height;
2657 
2658 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2659 		return 0;
2660 
2661 	width = drm_rect_width(&crtc_state->psr2_su_area);
2662 	height = drm_rect_height(&crtc_state->psr2_su_area);
2663 
2664 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2665 }
2666 
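/*
 * Grow overlap_damage_area vertically to also cover damage_area, after
 * clipping the latter against pipe_src. y1 == -1 means the overlap area is
 * still empty.
 */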
2667 static void clip_area_update(struct drm_rect *overlap_damage_area,
2668 			     struct drm_rect *damage_area,
2669 			     struct drm_rect *pipe_src)
2670 {
2671 	if (!drm_rect_intersect(damage_area, pipe_src))
2672 		return;
2673 
2674 	if (overlap_damage_area->y1 == -1) {
2675 		overlap_damage_area->y1 = damage_area->y1;
2676 		overlap_damage_area->y2 = damage_area->y2;
2677 		return;
2678 	}
2679 
2680 	if (damage_area->y1 < overlap_damage_area->y1)
2681 		overlap_damage_area->y1 = damage_area->y1;
2682 
2683 	if (damage_area->y2 > overlap_damage_area->y2)
2684 		overlap_damage_area->y2 = damage_area->y2;
2685 }
2686 
2687 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2688 {
2689 	struct intel_display *display = to_intel_display(crtc_state);
2690 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2691 	u16 y_alignment;
2692 
2693 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2694 	if (crtc_state->dsc.compression_enable &&
2695 	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
2696 		y_alignment = vdsc_cfg->slice_height;
2697 	else
2698 		y_alignment = crtc_state->su_y_granularity;
2699 
2700 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2701 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2702 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2703 						y_alignment) + 1) * y_alignment;
2704 }
2705 
2706 /*
2707  * When early transport is in use we need to extend the SU area to cover
2708  * the cursor fully when the cursor is in the SU area.
2709  */
2710 static void
2711 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2712 				  struct intel_crtc *crtc,
2713 				  bool *cursor_in_su_area)
2714 {
2715 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2716 	struct intel_plane_state *new_plane_state;
2717 	struct intel_plane *plane;
2718 	int i;
2719 
2720 	if (!crtc_state->enable_psr2_su_region_et)
2721 		return;
2722 
2723 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2724 		struct drm_rect inter;
2725 
2726 		if (new_plane_state->hw.crtc != crtc_state->uapi.crtc)
2727 			continue;
2728 
2729 		if (plane->id != PLANE_CURSOR)
2730 			continue;
2731 
2732 		if (!new_plane_state->uapi.visible)
2733 			continue;
2734 
2735 		inter = crtc_state->psr2_su_area;
2736 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2737 			continue;
2738 
2739 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2740 				 &crtc_state->pipe_src);
2741 		*cursor_in_su_area = true;
2742 	}
2743 }
2744 
2745 /*
2746  * TODO: Not clear how to handle planes with negative position,
2747  * also planes are not updated if they have a negative X
2748  * position, so for now do a full update in these cases.
2749  *
2750  * Plane scaling and rotation are not supported by selective fetch and both
2751  * properties can change without a modeset, so they need to be checked at every
2752  * atomic commit.
2753  */
2754 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2755 {
2756 	if (plane_state->uapi.dst.y1 < 0 ||
2757 	    plane_state->uapi.dst.x1 < 0 ||
2758 	    plane_state->scaler_id >= 0 ||
2759 	    plane_state->hw.rotation != DRM_MODE_ROTATE_0)
2760 		return false;
2761 
2762 	return true;
2763 }
2764 
2765 /*
2766  * Check for pipe properties that are not supported by selective fetch.
2767  *
2768  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2769  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2770  * enabled and going to the full update path.
2771  */
2772 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2773 {
2774 	if (crtc_state->scaler_state.scaler_id >= 0 ||
2775 	    crtc_state->async_flip_planes)
2776 		return false;
2777 
2778 	return true;
2779 }
2780 
2781 /* Wa 14019834836 */
2782 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2783 {
2784 	struct intel_display *display = to_intel_display(crtc_state);
2785 	struct intel_encoder *encoder;
2786 	int hactive_limit;
2787 
2788 	if (crtc_state->psr2_su_area.y1 != 0 ||
2789 	    crtc_state->psr2_su_area.y2 != 0)
2790 		return;
2791 
2792 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2793 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2794 	else
2795 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2796 
2797 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2798 		return;
2799 
2800 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2801 					     crtc_state->uapi.encoder_mask) {
2802 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2803 
2804 		if (!intel_dp_is_edp(intel_dp) &&
2805 		    intel_dp->psr.panel_replay_enabled &&
2806 		    intel_dp->psr.sel_update_enabled) {
2807 			crtc_state->psr2_su_area.y2++;
2808 			return;
2809 		}
2810 	}
2811 }
2812 
2813 static void
2814 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2815 {
2816 	struct intel_display *display = to_intel_display(crtc_state);
2817 
2818 	/* Wa_14014971492 */
2819 	if (!crtc_state->has_panel_replay &&
2820 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2821 	      display->platform.alderlake_p || display->platform.tigerlake)) &&
2822 	    crtc_state->splitter.enable)
2823 		crtc_state->psr2_su_area.y1 = 0;
2824 
2825 	/* Wa 14019834836 */
2826 	if (DISPLAY_VER(display) == 30)
2827 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2828 }
2829 
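/*
 * Compute the PSR2 selective fetch state for a CRTC: accumulate the damaged
 * area of all planes into the SU region, apply alignment and workarounds,
 * then derive the per-plane selective fetch areas. Falls back to a full
 * update when the area cannot be computed or a plane/pipe property is not
 * supported by selective fetch.
 */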
2830 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2831 				struct intel_crtc *crtc)
2832 {
2833 	struct intel_display *display = to_intel_display(state);
2834 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2835 	struct intel_plane_state *new_plane_state, *old_plane_state;
2836 	struct intel_plane *plane;
2837 	bool full_update = false, cursor_in_su_area = false;
2838 	int i, ret;
2839 
2840 	if (!crtc_state->enable_psr2_sel_fetch)
2841 		return 0;
2842 
2843 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2844 		full_update = true;
2845 		goto skip_sel_fetch_set_loop;
2846 	}
2847 
2848 	crtc_state->psr2_su_area.x1 = 0;
2849 	crtc_state->psr2_su_area.y1 = -1;
2850 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2851 	crtc_state->psr2_su_area.y2 = -1;
2852 
2853 	/*
2854 	 * Calculate minimal selective fetch area of each plane and calculate
2855 	 * the pipe damaged area.
2856 	 * In the next loop the plane selective fetch area will actually be set
2857 	 * using the whole pipe damaged area.
2858 	 */
2859 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2860 					     new_plane_state, i) {
2861 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2862 						      .x2 = INT_MAX };
2863 
2864 		if (new_plane_state->hw.crtc != crtc_state->uapi.crtc)
2865 			continue;
2866 
2867 		if (!new_plane_state->uapi.visible &&
2868 		    !old_plane_state->uapi.visible)
2869 			continue;
2870 
2871 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2872 			full_update = true;
2873 			break;
2874 		}
2875 
2876 		/*
2877 		 * If visibility changed or the plane moved, mark the whole
2878 		 * plane area as damaged as it needs a complete redraw in both
2879 		 * the new and old positions.
2880 		 */
2881 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2882 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2883 				     &old_plane_state->uapi.dst)) {
2884 			if (old_plane_state->uapi.visible) {
2885 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2886 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2887 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2888 						 &crtc_state->pipe_src);
2889 			}
2890 
2891 			if (new_plane_state->uapi.visible) {
2892 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2893 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2894 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2895 						 &crtc_state->pipe_src);
2896 			}
2897 			continue;
2898 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2899 			/* If alpha changed mark the whole plane area as damaged */
2900 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2901 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2902 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2903 					 &crtc_state->pipe_src);
2904 			continue;
2905 		}
2906 
2907 		src = drm_plane_state_src(&new_plane_state->uapi);
2908 		drm_rect_fp_to_int(&src, &src);
2909 
2910 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2911 						     &new_plane_state->uapi, &damaged_area))
2912 			continue;
2913 
2914 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2915 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2916 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2917 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2918 
2919 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2920 	}
2921 
2922 	/*
2923 	 * TODO: For now we are just using full update in case
2924 	 * selective fetch area calculation fails. To optimize this we
2925 	 * should identify cases where this happens and fix the area
2926 	 * calculation for those.
2927 	 */
2928 	if (crtc_state->psr2_su_area.y1 == -1) {
2929 		drm_info_once(display->drm,
2930 			      "Selective fetch area calculation failed in pipe %c\n",
2931 			      pipe_name(crtc->pipe));
2932 		full_update = true;
2933 	}
2934 
2935 	if (full_update)
2936 		goto skip_sel_fetch_set_loop;
2937 
2938 	intel_psr_apply_su_area_workarounds(crtc_state);
2939 
2940 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2941 	if (ret)
2942 		return ret;
2943 
2944 	/*
2945 	 * Adjust su area to cover cursor fully as necessary (early
2946 	 * transport). This needs to be done after
2947 	 * drm_atomic_add_affected_planes to ensure a visible cursor is added to
2948 	 * the affected planes even when the cursor itself is not updated.
2949 	 */
2950 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2951 
2952 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2953 
2954 	/*
2955 	 * Now that we have the pipe damaged area, check if it intersects with
2956 	 * each plane; if it does, set the plane selective fetch area.
2957 	 */
2958 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2959 					     new_plane_state, i) {
2960 		struct drm_rect *sel_fetch_area, inter;
2961 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2962 
2963 		if (new_plane_state->hw.crtc != crtc_state->uapi.crtc ||
2964 		    !new_plane_state->uapi.visible)
2965 			continue;
2966 
2967 		inter = crtc_state->psr2_su_area;
2968 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2969 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2970 			sel_fetch_area->y1 = -1;
2971 			sel_fetch_area->y2 = -1;
2972 			/*
2973 			 * if plane sel fetch was previously enabled ->
2974 			 * disable it
2975 			 */
2976 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2977 				crtc_state->update_planes |= BIT(plane->id);
2978 
2979 			continue;
2980 		}
2981 
2982 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2983 			full_update = true;
2984 			break;
2985 		}
2986 
2987 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2988 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2989 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2990 		crtc_state->update_planes |= BIT(plane->id);
2991 
2992 		/*
2993 		 * Sel_fetch_area is calculated for UV plane. Use
2994 		 * same area for Y plane as well.
2995 		 */
2996 		if (linked) {
2997 			struct intel_plane_state *linked_new_plane_state;
2998 			struct drm_rect *linked_sel_fetch_area;
2999 
3000 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
3001 			if (IS_ERR(linked_new_plane_state))
3002 				return PTR_ERR(linked_new_plane_state);
3003 
3004 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
3005 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
3006 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
3007 			crtc_state->update_planes |= BIT(linked->id);
3008 		}
3009 	}
3010 
3011 skip_sel_fetch_set_loop:
3012 	psr2_man_trk_ctl_calc(crtc_state, full_update);
3013 	crtc_state->pipe_srcsz_early_tpt =
3014 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
3015 	return 0;
3016 }
3017 
3018 void intel_psr2_panic_force_full_update(const struct intel_crtc_state *crtc_state)
3019 {
3020 	struct intel_display *display = to_intel_display(crtc_state);
3021 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
3022 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
3023 	u32 val = man_trk_ctl_enable_bit_get(display);
3024 
3025 	/* SF partial frame enable has to be set even on full update */
3026 	val |= man_trk_ctl_partial_frame_bit_get(display);
3027 	val |= man_trk_ctl_continuos_full_frame(display);
3028 
3029 	/* Directly write the register */
3030 	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);
3031 
3032 	if (!crtc_state->enable_psr2_su_region_et)
3033 		return;
3034 
3035 	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
3036 }
3037 
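/*
 * Disable PSR/Panel Replay before the plane update if the new state no
 * longer supports it (or switches between PSR versions), and apply the
 * watermark workarounds where needed.
 */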
3038 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
3039 				struct intel_crtc *crtc)
3040 {
3041 	struct intel_display *display = to_intel_display(state);
3042 	const struct intel_crtc_state *old_crtc_state =
3043 		intel_atomic_get_old_crtc_state(state, crtc);
3044 	const struct intel_crtc_state *new_crtc_state =
3045 		intel_atomic_get_new_crtc_state(state, crtc);
3046 	struct intel_encoder *encoder;
3047 
3048 	if (!HAS_PSR(display))
3049 		return;
3050 
3051 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
3052 					     old_crtc_state->uapi.encoder_mask) {
3053 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3054 		struct intel_psr *psr = &intel_dp->psr;
3055 
3056 		mutex_lock(&psr->lock);
3057 
3058 		if (!new_crtc_state->has_psr)
3059 			psr->no_psr_reason = new_crtc_state->no_psr_reason;
3060 
3061 		if (psr->enabled) {
3062 			/*
3063 			 * Reasons to disable:
3064 			 * - PSR disabled in new state
3065 			 * - All planes will go inactive
3066 			 * - Changing between PSR versions
3067 			 * - Region Early Transport changing
3068 			 * - Display WA #1136: skl, bxt
3069 			 */
3070 			if (intel_crtc_needs_modeset(new_crtc_state) ||
3071 			    !new_crtc_state->has_psr ||
3072 			    !new_crtc_state->active_planes ||
3073 			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
3074 			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
3075 			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
3076 			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
3077 				intel_psr_disable_locked(intel_dp);
3078 			else if (new_crtc_state->wm_level_disabled)
3079 				/* Wa_14015648006 */
3080 				wm_optimization_wa(intel_dp, new_crtc_state);
3081 		}
3082 
3083 		mutex_unlock(&psr->lock);
3084 	}
3085 }
3086 
3087 static void
3088 verify_panel_replay_dsc_state(const struct intel_crtc_state *crtc_state)
3089 {
3090 	struct intel_display *display = to_intel_display(crtc_state);
3091 
3092 	if (!crtc_state->has_panel_replay)
3093 		return;
3094 
3095 	drm_WARN_ON(display->drm,
3096 		    intel_dsc_enabled_on_link(crtc_state) &&
3097 		    crtc_state->panel_replay_dsc_support ==
3098 		    INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED);
3099 }
3100 
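/*
 * Re-enable PSR/Panel Replay after the plane update, unless the sink is not
 * reliable, no planes are active, or a workaround requires it to stay off.
 */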
3101 void intel_psr_post_plane_update(struct intel_atomic_state *state,
3102 				 struct intel_crtc *crtc)
3103 {
3104 	struct intel_display *display = to_intel_display(state);
3105 	const struct intel_crtc_state *crtc_state =
3106 		intel_atomic_get_new_crtc_state(state, crtc);
3107 	struct intel_encoder *encoder;
3108 
3109 	if (!crtc_state->has_psr)
3110 		return;
3111 
3112 	verify_panel_replay_dsc_state(crtc_state);
3113 
3114 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
3115 					     crtc_state->uapi.encoder_mask) {
3116 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3117 		struct intel_psr *psr = &intel_dp->psr;
3118 		bool keep_disabled = false;
3119 
3120 		mutex_lock(&psr->lock);
3121 
3122 		drm_WARN_ON(display->drm,
3123 			    psr->enabled && !crtc_state->active_planes);
3124 
3125 		if (psr->sink_not_reliable)
3126 			keep_disabled = true;
3127 
3128 		if (!crtc_state->active_planes) {
3129 			psr->no_psr_reason = "All planes inactive";
3130 			keep_disabled = true;
3131 		}
3132 
3133 		/* Display WA #1136: skl, bxt */
3134 		if (DISPLAY_VER(display) < 11 && crtc_state->wm_level_disabled) {
3135 			psr->no_psr_reason = "Workaround #1136 for skl, bxt";
3136 			keep_disabled = true;
3137 		}
3138 
3139 		if (!psr->enabled && !keep_disabled)
3140 			intel_psr_enable_locked(intel_dp, crtc_state);
3141 		else if (psr->enabled && !crtc_state->wm_level_disabled)
3142 			/* Wa_14015648006 */
3143 			wm_optimization_wa(intel_dp, crtc_state);
3144 
3145 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
3146 		if (crtc_state->crc_enabled && psr->enabled)
3147 			intel_psr_force_update(intel_dp);
3148 
3149 		/*
3150 		 * Clear possible busy bits in case we have
3151 		 * invalidate -> flip -> flush sequence.
3152 		 */
3153 		intel_dp->psr.busy_frontbuffer_bits = 0;
3154 
3155 		mutex_unlock(&psr->lock);
3156 	}
3157 }
3158 
3159 /*
3160  * From bspec: Panel Self Refresh (BDW+)
3161  * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
3162  * exit training time + 1.5 ms of aux channel handshake. 50 ms is
3163  * defensive enough to cover everything.
3164  */
3165 #define PSR_IDLE_TIMEOUT_MS 50
3166 
3167 static int
3168 _psr2_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3169 				   struct intel_dsb *dsb)
3170 {
3171 	struct intel_display *display = to_intel_display(new_crtc_state);
3172 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3173 
3174 	/*
3175 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
 * As all higher states have bit 4 of the PSR2 state set, we can just wait for
3177 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
3178 	 */
3179 	if (dsb) {
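		/*
		 * Queue the wait into the DSB instead of blocking here: poll
		 * the DEEP_SLEEP bit every 200 us until it clears, for at most
		 * PSR_IDLE_TIMEOUT_MS in total.
		 */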
3180 		intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder),
3181 			       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 0, 200,
3182 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3183 		return true;
3184 	}
3185 
3186 	return intel_de_wait_for_clear_ms(display,
3187 				       EDP_PSR2_STATUS(display, cpu_transcoder),
3188 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP,
3189 				       PSR_IDLE_TIMEOUT_MS);
3190 }
3191 
3192 static int
3193 _psr1_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3194 				   struct intel_dsb *dsb)
3195 {
3196 	struct intel_display *display = to_intel_display(new_crtc_state);
3197 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3198 
3199 	if (dsb) {
3200 		intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder),
3201 			       EDP_PSR_STATUS_STATE_MASK, 0, 200,
3202 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3203 		return true;
3204 	}
3205 
3206 	return intel_de_wait_for_clear_ms(display,
3207 				       psr_status_reg(display, cpu_transcoder),
3208 				       EDP_PSR_STATUS_STATE_MASK,
3209 				       PSR_IDLE_TIMEOUT_MS);
3210 }
3211 
3212 /**
 * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
3214  * @new_crtc_state: new CRTC state
3215  *
3216  * This function is expected to be called from pipe_update_start() where it is
3217  * not expected to race with PSR enable or disable.
3218  */
3219 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
3220 {
3221 	struct intel_display *display = to_intel_display(new_crtc_state);
3222 	struct intel_encoder *encoder;
3223 
3224 	if (!new_crtc_state->has_psr)
3225 		return;
3226 
3227 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3228 					     new_crtc_state->uapi.encoder_mask) {
3229 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3230 		int ret;
3231 
3232 		lockdep_assert_held(&intel_dp->psr.lock);
3233 
3234 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3235 			continue;
3236 
3237 		if (intel_dp->psr.sel_update_enabled)
3238 			ret = _psr2_ready_for_pipe_update_locked(new_crtc_state,
3239 								 NULL);
3240 		else
3241 			ret = _psr1_ready_for_pipe_update_locked(new_crtc_state,
3242 								 NULL);
3243 
3244 		if (ret)
3245 			drm_err(display->drm,
3246 				"PSR wait timed out, atomic update may fail\n");
3247 	}
3248 }
3249 
3250 void intel_psr_wait_for_idle_dsb(struct intel_dsb *dsb,
3251 				 const struct intel_crtc_state *new_crtc_state)
3252 {
3253 	if (!new_crtc_state->has_psr || new_crtc_state->has_panel_replay)
3254 		return;
3255 
3256 	if (new_crtc_state->has_sel_update)
3257 		_psr2_ready_for_pipe_update_locked(new_crtc_state, dsb);
3258 	else
3259 		_psr1_ready_for_pipe_update_locked(new_crtc_state, dsb);
3260 }
3261 
3262 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
3263 {
3264 	struct intel_display *display = to_intel_display(intel_dp);
3265 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3266 	i915_reg_t reg;
3267 	u32 mask;
3268 	int err;
3269 
3270 	if (!intel_dp->psr.enabled)
3271 		return false;
3272 
3273 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3274 					  intel_dp->psr.panel_replay_enabled)) {
3275 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
3276 		mask = EDP_PSR2_STATUS_STATE_MASK;
3277 	} else {
3278 		reg = psr_status_reg(display, cpu_transcoder);
3279 		mask = EDP_PSR_STATUS_STATE_MASK;
3280 	}
3281 
3282 	mutex_unlock(&intel_dp->psr.lock);
3283 
3284 	err = intel_de_wait_for_clear_ms(display, reg, mask, 50);
3285 	if (err)
3286 		drm_err(display->drm,
3287 			"Timed out waiting for PSR Idle for re-enable\n");
3288 
3289 	/* After the unlocked wait, verify that PSR is still wanted! */
3290 	mutex_lock(&intel_dp->psr.lock);
3291 	return err == 0 && intel_dp->psr.enabled && !intel_dp->psr.pause_counter;
3292 }
3293 
3294 static int intel_psr_fastset_force(struct intel_display *display)
3295 {
3296 	struct drm_connector_list_iter conn_iter;
3297 	struct drm_modeset_acquire_ctx ctx;
3298 	struct drm_atomic_state *state;
3299 	struct drm_connector *conn;
3300 	int err = 0;
3301 
3302 	state = drm_atomic_state_alloc(display->drm);
3303 	if (!state)
3304 		return -ENOMEM;
3305 
3306 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
3307 
3308 	state->acquire_ctx = &ctx;
3309 	to_intel_atomic_state(state)->internal = true;
3310 
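	/*
	 * Standard modeset backoff dance: if lock acquisition or the commit
	 * hits -EDEADLK, drop the state and restart the connector walk.
	 */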
3311 retry:
3312 	drm_connector_list_iter_begin(display->drm, &conn_iter);
3313 	drm_for_each_connector_iter(conn, &conn_iter) {
3314 		struct drm_connector_state *conn_state;
3315 		struct drm_crtc_state *crtc_state;
3316 
3317 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3318 			continue;
3319 
3320 		conn_state = drm_atomic_get_connector_state(state, conn);
3321 		if (IS_ERR(conn_state)) {
3322 			err = PTR_ERR(conn_state);
3323 			break;
3324 		}
3325 
3326 		if (!conn_state->crtc)
3327 			continue;
3328 
3329 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3330 		if (IS_ERR(crtc_state)) {
3331 			err = PTR_ERR(crtc_state);
3332 			break;
3333 		}
3334 
3335 		/* Mark mode as changed to trigger a pipe->update() */
3336 		crtc_state->mode_changed = true;
3337 	}
3338 	drm_connector_list_iter_end(&conn_iter);
3339 
3340 	if (err == 0)
3341 		err = drm_atomic_commit(state);
3342 
3343 	if (err == -EDEADLK) {
3344 		drm_atomic_state_clear(state);
3345 		err = drm_modeset_backoff(&ctx);
3346 		if (!err)
3347 			goto retry;
3348 	}
3349 
3350 	drm_modeset_drop_locks(&ctx);
3351 	drm_modeset_acquire_fini(&ctx);
3352 	drm_atomic_state_put(state);
3353 
3354 	return err;
3355 }
3356 
3357 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3358 {
3359 	struct intel_display *display = to_intel_display(intel_dp);
3360 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3361 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3362 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3363 	u32 old_mode, old_disable_bits;
3364 	int ret;
3365 
3366 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3367 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3368 		    I915_PSR_DEBUG_MODE_MASK) ||
3369 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3370 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3371 		return -EINVAL;
3372 	}
3373 
3374 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3375 	if (ret)
3376 		return ret;
3377 
3378 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3379 	old_disable_bits = intel_dp->psr.debug &
3380 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3381 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3382 
3383 	intel_dp->psr.debug = val;
3384 
3385 	/*
3386 	 * Do it right away if it's already enabled, otherwise it will be done
3387 	 * when enabling the source.
3388 	 */
3389 	if (intel_dp->psr.enabled)
3390 		psr_irq_control(intel_dp);
3391 
3392 	mutex_unlock(&intel_dp->psr.lock);
3393 
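	/*
	 * A change of the PSR mode or of the disable bits affects how the PSR
	 * state is computed, so force a modeset for the new value to take
	 * effect.
	 */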
3394 	if (old_mode != mode || old_disable_bits != disable_bits)
3395 		ret = intel_psr_fastset_force(display);
3396 
3397 	return ret;
3398 }
3399 
3400 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3401 {
3402 	struct intel_psr *psr = &intel_dp->psr;
3403 
3404 	intel_psr_disable_locked(intel_dp);
3405 	psr->sink_not_reliable = true;
	/* let's make sure that the sink is awake */
3407 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3408 }
3409 
3410 static void intel_psr_work(struct work_struct *work)
3411 {
3412 	struct intel_dp *intel_dp =
3413 		container_of(work, typeof(*intel_dp), psr.work);
3414 
3415 	mutex_lock(&intel_dp->psr.lock);
3416 
3417 	if (!intel_dp->psr.enabled)
3418 		goto unlock;
3419 
3420 	if (READ_ONCE(intel_dp->psr.irq_aux_error)) {
3421 		intel_psr_handle_irq(intel_dp);
3422 		goto unlock;
3423 	}
3424 
3425 	if (intel_dp->psr.pause_counter)
3426 		goto unlock;
3427 
3428 	/*
	 * We have to make sure PSR is ready for re-enable,
	 * otherwise it stays disabled until the next full enable/disable
	 * cycle. PSR might take some time to get fully disabled and be
	 * ready for re-enable.
3433 	 */
3434 	if (!__psr_wait_for_idle_locked(intel_dp))
3435 		goto unlock;
3436 
3437 	/*
3438 	 * The delayed work can race with an invalidate hence we need to
3439 	 * recheck. Since psr_flush first clears this and then reschedules we
3440 	 * won't ever miss a flush when bailing out here.
3441 	 */
3442 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3443 		goto unlock;
3444 
3445 	intel_psr_activate(intel_dp);
3446 unlock:
3447 	mutex_unlock(&intel_dp->psr.lock);
3448 }
3449 
3450 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3451 {
3452 	struct intel_display *display = to_intel_display(intel_dp);
3453 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3454 
3455 	if (!intel_dp->psr.psr2_sel_fetch_enabled)
3456 		return;
3457 
3458 	if (DISPLAY_VER(display) >= 20)
3459 		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3460 			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3461 	else
3462 		intel_de_write(display,
3463 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3464 			       man_trk_ctl_enable_bit_get(display) |
3465 			       man_trk_ctl_partial_frame_bit_get(display) |
3466 			       man_trk_ctl_single_full_frame_bit_get(display) |
3467 			       man_trk_ctl_continuos_full_frame(display));
3468 }
3469 
3470 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3471 {
3472 	struct intel_display *display = to_intel_display(intel_dp);
3473 
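	/*
	 * With selective fetch prior to LNL, switch to continuous full frame
	 * updates while the frontbuffer is dirty and force an update;
	 * otherwise just exit PSR.
	 */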
3474 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3475 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3476 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3477 			intel_psr_configure_full_frame_update(intel_dp);
3478 		}
3479 
3480 		intel_psr_force_update(intel_dp);
3481 	} else {
3482 		intel_psr_exit(intel_dp);
3483 	}
3484 }
3485 
3486 /**
3487  * intel_psr_invalidate - Invalidate PSR
3488  * @display: display device
3489  * @frontbuffer_bits: frontbuffer plane tracking bits
3490  * @origin: which operation caused the invalidate
3491  *
3492  * Since the hardware frontbuffer tracking has gaps we need to integrate
3493  * with the software frontbuffer tracking. This function gets called every
3494  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3495  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3496  *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3498  */
3499 void intel_psr_invalidate(struct intel_display *display,
3500 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3501 {
3502 	struct intel_encoder *encoder;
3503 
3504 	if (origin == ORIGIN_FLIP)
3505 		return;
3506 
3507 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3508 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3509 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3510 
3511 		mutex_lock(&intel_dp->psr.lock);
3512 		if (!intel_dp->psr.enabled) {
3513 			mutex_unlock(&intel_dp->psr.lock);
3514 			continue;
3515 		}
3516 
3517 		pipe_frontbuffer_bits &=
3518 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3519 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3520 
3521 		if (pipe_frontbuffer_bits)
3522 			_psr_invalidate_handle(intel_dp);
3523 
3524 		mutex_unlock(&intel_dp->psr.lock);
3525 	}
3526 }
3527 /*
 * Once we completely rely on PSR2 S/W tracking, intel_psr_flush() will
 * invalidate and flush the PSR for ORIGIN_FLIP events as well, and
 * tgl_dc3co_flush_locked() will need to be changed accordingly.
3532  */
3533 static void
3534 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3535 		       enum fb_op_origin origin)
3536 {
3537 	struct intel_display *display = to_intel_display(intel_dp);
3538 
3539 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3540 	    !intel_dp->psr.active)
3541 		return;
3542 
3543 	/*
	 * Every frontbuffer flush/flip event pushes back the delayed work;
	 * when the delayed work finally runs, the display has been idle.
3546 	 */
3547 	if (!(frontbuffer_bits &
3548 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3549 		return;
3550 
3551 	tgl_psr2_enable_dc3co(intel_dp);
3552 	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
3553 			 intel_dp->psr.dc3co_exit_delay);
3554 }
3555 
3556 static void _psr_flush_handle(struct intel_dp *intel_dp)
3557 {
3558 	struct intel_display *display = to_intel_display(intel_dp);
3559 
3560 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
		/* Selective fetch prior to LNL */
3562 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3563 			/* can we turn CFF off? */
3564 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
3565 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3566 		}
3567 
3568 		/*
		 * Still keep the CFF bit enabled as we don't have a proper SU
		 * configuration in case an update is sent for any reason after
		 * the SFF bit gets cleared by the HW on the next vblank.
		 *
		 * NOTE: Setting the CFF bit is not needed from LunarLake
		 * onwards as we have our own register for the SFF bit and we
		 * are not overwriting the existing SU configuration.
3576 		 */
3577 		intel_psr_configure_full_frame_update(intel_dp);
3578 
3579 		intel_psr_force_update(intel_dp);
3580 	} else if (!intel_dp->psr.psr2_sel_fetch_enabled) {
3581 		/*
3582 		 * PSR1 on all platforms
3583 		 * PSR2 HW tracking
3584 		 * Panel Replay Full frame update
3585 		 */
3586 		intel_psr_force_update(intel_dp);
3587 	} else {
3588 		/* Selective update LNL onwards */
3589 		intel_psr_exit(intel_dp);
3590 	}
3591 
3592 	if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3593 		queue_work(display->wq.unordered, &intel_dp->psr.work);
3594 }
3595 
3596 /**
3597  * intel_psr_flush - Flush PSR
3598  * @display: display device
3599  * @frontbuffer_bits: frontbuffer plane tracking bits
3600  * @origin: which operation caused the flush
3601  *
3602  * Since the hardware frontbuffer tracking has gaps we need to integrate
3603  * with the software frontbuffer tracking. This function gets called every
3604  * time frontbuffer rendering has completed and flushed out to memory. PSR
3605  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3606  *
3607  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3608  */
3609 void intel_psr_flush(struct intel_display *display,
3610 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3611 {
3612 	struct intel_encoder *encoder;
3613 
3614 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3615 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3616 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3617 
3618 		mutex_lock(&intel_dp->psr.lock);
3619 		if (!intel_dp->psr.enabled) {
3620 			mutex_unlock(&intel_dp->psr.lock);
3621 			continue;
3622 		}
3623 
3624 		pipe_frontbuffer_bits &=
3625 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3626 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3627 
3628 		/*
3629 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3630 		 * we have to ensure that the PSR is not activated until
3631 		 * intel_psr_resume() is called.
3632 		 */
3633 		if (intel_dp->psr.pause_counter)
3634 			goto unlock;
3635 
3636 		if (origin == ORIGIN_FLIP ||
3637 		    (origin == ORIGIN_CURSOR_UPDATE &&
3638 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3639 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3640 			goto unlock;
3641 		}
3642 
3643 		if (pipe_frontbuffer_bits == 0)
3644 			goto unlock;
3645 
3646 		/* By definition flush = invalidate + flush */
3647 		_psr_flush_handle(intel_dp);
3648 unlock:
3649 		mutex_unlock(&intel_dp->psr.lock);
3650 	}
3651 }
3652 
3653 /**
3654  * intel_psr_init - Init basic PSR work and mutex.
3655  * @intel_dp: Intel DP
3656  *
 * This function is called after the connector has been initialized
 * (connector initialization handles the connector capabilities) and
 * initializes the basic PSR state for each DP encoder.
3660  */
3661 void intel_psr_init(struct intel_dp *intel_dp)
3662 {
3663 	struct intel_display *display = to_intel_display(intel_dp);
3664 	struct intel_connector *connector = intel_dp->attached_connector;
3665 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3666 
3667 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3668 		return;
3669 
3670 	/*
	 * HSW spec explicitly says PSR is tied to port A.
	 * BDW+ platforms have an instance of PSR registers per transcoder,
	 * but BDW, GEN9 and GEN11 were not validated by the HW team on
	 * transcoders other than the eDP one.
	 * For now only one instance of PSR is supported on BDW, GEN9 and
	 * GEN11, so let's keep it hardcoded to PORT_A for those platforms.
	 * GEN12 supports an instance of PSR registers per transcoder.
3678 	 */
3679 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3680 		drm_dbg_kms(display->drm,
3681 			    "PSR condition failed: Port not supported\n");
3682 		return;
3683 	}
3684 
3685 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3686 	    DISPLAY_VER(display) >= 20)
3687 		intel_dp->psr.source_panel_replay_support = true;
3688 
3689 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3690 		intel_dp->psr.source_support = true;
3691 
	/* Set link_standby vs. link_off defaults */
3693 	if (DISPLAY_VER(display) < 12)
		/* For platforms up to TGL, respect the VBT setting again */
3695 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3696 
3697 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3698 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3699 	mutex_init(&intel_dp->psr.lock);
3700 }
3701 
3702 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3703 					   u8 *status, u8 *error_status)
3704 {
3705 	struct drm_dp_aux *aux = &intel_dp->aux;
3706 	int ret;
3707 	unsigned int offset;
3708 
3709 	offset = intel_dp->psr.panel_replay_enabled ?
3710 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3711 
3712 	ret = drm_dp_dpcd_readb(aux, offset, status);
3713 	if (ret != 1)
3714 		return ret;
3715 
3716 	offset = intel_dp->psr.panel_replay_enabled ?
3717 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3718 
3719 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3720 	if (ret != 1)
3721 		return ret;
3722 
3723 	*status = *status & DP_PSR_SINK_STATE_MASK;
3724 
3725 	return 0;
3726 }
3727 
3728 static void psr_alpm_check(struct intel_dp *intel_dp)
3729 {
3730 	struct intel_psr *psr = &intel_dp->psr;
3731 
3732 	if (!psr->sel_update_enabled)
3733 		return;
3734 
3735 	if (intel_alpm_get_error(intel_dp)) {
3736 		intel_psr_disable_locked(intel_dp);
3737 		psr->sink_not_reliable = true;
3738 	}
3739 }
3740 
3741 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3742 {
3743 	struct intel_display *display = to_intel_display(intel_dp);
3744 	struct intel_psr *psr = &intel_dp->psr;
3745 	u8 val;
3746 	int r;
3747 
3748 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3749 	if (r != 1) {
3750 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3751 		return;
3752 	}
3753 
3754 	if (val & DP_PSR_CAPS_CHANGE) {
3755 		intel_psr_disable_locked(intel_dp);
3756 		psr->sink_not_reliable = true;
3757 		drm_dbg_kms(display->drm,
3758 			    "Sink PSR capability changed, disabling PSR\n");
3759 
3760 		/* Clearing it */
3761 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3762 	}
3763 }
3764 
3765 /*
 * For the following common bits:
3767  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3768  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3769  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
 * this function relies on the PSR definitions.
3771  */
3772 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3773 {
3774 	struct intel_display *display = to_intel_display(intel_dp);
3775 	struct intel_psr *psr = &intel_dp->psr;
3776 	u8 status, error_status;
3777 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3778 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3779 			  DP_PSR_LINK_CRC_ERROR;
3780 
3781 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3782 		return;
3783 
3784 	mutex_lock(&psr->lock);
3785 
3786 	psr->link_ok = false;
3787 
3788 	if (!psr->enabled)
3789 		goto exit;
3790 
3791 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3792 		drm_err(display->drm,
3793 			"Error reading PSR status or error status\n");
3794 		goto exit;
3795 	}
3796 
3797 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3798 	    (error_status & errors)) {
3799 		intel_psr_disable_locked(intel_dp);
3800 		psr->sink_not_reliable = true;
3801 	}
3802 
3803 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3804 	    !error_status)
3805 		drm_dbg_kms(display->drm,
3806 			    "PSR sink internal error, disabling PSR\n");
3807 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3808 		drm_dbg_kms(display->drm,
3809 			    "PSR RFB storage error, disabling PSR\n");
3810 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3811 		drm_dbg_kms(display->drm,
3812 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3813 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3814 		drm_dbg_kms(display->drm,
3815 			    "PSR Link CRC error, disabling PSR\n");
3816 
3817 	if (error_status & ~errors)
3818 		drm_err(display->drm,
3819 			"PSR_ERROR_STATUS unhandled errors %x\n",
3820 			error_status & ~errors);
3821 	/* clear status register */
3822 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3823 
3824 	if (!psr->panel_replay_enabled) {
3825 		psr_alpm_check(intel_dp);
3826 		psr_capability_changed_check(intel_dp);
3827 	}
3828 
3829 exit:
3830 	mutex_unlock(&psr->lock);
3831 }
3832 
3833 bool intel_psr_enabled(struct intel_dp *intel_dp)
3834 {
3835 	bool ret;
3836 
3837 	if (!CAN_PSR(intel_dp))
3838 		return false;
3839 
3840 	mutex_lock(&intel_dp->psr.lock);
3841 	ret = intel_dp->psr.enabled;
3842 	mutex_unlock(&intel_dp->psr.lock);
3843 
3844 	return ret;
3845 }
3846 
3847 /**
3848  * intel_psr_link_ok - return psr->link_ok
3849  * @intel_dp: struct intel_dp
3850  *
 * We are seeing unexpected link re-trainings with some panels. This is caused
 * by the panel reporting a bad link status after PSR is enabled. Code checking
 * the link status can call this to decide whether a bad link status reported
 * by the panel can be ignored, i.e. if the panel reports a bad link but
 * intel_psr_link_ok() says the link is ok, the caller should rely on the
 * latter.
 *
 * Returns the value of link_ok.
3858  */
3859 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3860 {
3861 	bool ret;
3862 
3863 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3864 	    !intel_dp_is_edp(intel_dp))
3865 		return false;
3866 
3867 	mutex_lock(&intel_dp->psr.lock);
3868 	ret = intel_dp->psr.link_ok;
3869 	mutex_unlock(&intel_dp->psr.lock);
3870 
3871 	return ret;
3872 }
3873 
3874 /**
3875  * intel_psr_lock - grab PSR lock
3876  * @crtc_state: the crtc state
3877  *
 * This is initially meant to be used around the CRTC update, when
 * vblank-sensitive registers are updated and we need to grab the lock
 * before that to avoid vblank evasion.
3881  */
3882 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3883 {
3884 	struct intel_display *display = to_intel_display(crtc_state);
3885 	struct intel_encoder *encoder;
3886 
3887 	if (!crtc_state->has_psr)
3888 		return;
3889 
3890 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3891 					     crtc_state->uapi.encoder_mask) {
3892 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3893 
3894 		mutex_lock(&intel_dp->psr.lock);
3895 		break;
3896 	}
3897 }
3898 
3899 /**
3900  * intel_psr_unlock - release PSR lock
3901  * @crtc_state: the crtc state
3902  *
3903  * Release the PSR lock that was held during pipe update.
3904  */
3905 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3906 {
3907 	struct intel_display *display = to_intel_display(crtc_state);
3908 	struct intel_encoder *encoder;
3909 
3910 	if (!crtc_state->has_psr)
3911 		return;
3912 
3913 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3914 					     crtc_state->uapi.encoder_mask) {
3915 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3916 
3917 		mutex_unlock(&intel_dp->psr.lock);
3918 		break;
3919 	}
3920 }
3921 
3922 /* Wa_16025596647 */
3923 static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
3924 {
3925 	struct intel_display *display = to_intel_display(intel_dp);
3926 	bool dc5_dc6_blocked;
3927 
3928 	if (!intel_dp->psr.active || !intel_dp->psr.pkg_c_latency_used)
3929 		return;
3930 
3931 	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
3932 
3933 	if (intel_dp->psr.sel_update_enabled)
3934 		psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
3935 					 psr_compute_idle_frames(intel_dp));
3936 	else
3937 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
3938 								       intel_dp->psr.pipe,
3939 								       dc5_dc6_blocked);
3940 }
3941 
3942 static void psr_dc5_dc6_wa_work(struct work_struct *work)
3943 {
3944 	struct intel_display *display = container_of(work, typeof(*display),
3945 						     psr_dc5_dc6_wa_work);
3946 	struct intel_encoder *encoder;
3947 
3948 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3949 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3950 
3951 		mutex_lock(&intel_dp->psr.lock);
3952 
3953 		if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled &&
3954 		    !intel_dp->psr.pkg_c_latency_used)
3955 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3956 
3957 		mutex_unlock(&intel_dp->psr.lock);
3958 	}
3959 }
3960 
3961 /**
3962  * intel_psr_notify_dc5_dc6 - Notify PSR about enable/disable dc5/dc6
 * @display: intel display
3964  *
 * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
 * schedule psr_dc5_dc6_wa_work, which applies/removes the workaround.
3967  */
3968 void intel_psr_notify_dc5_dc6(struct intel_display *display)
3969 {
3970 	if (DISPLAY_VER(display) != 20 &&
3971 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3972 		return;
3973 
3974 	schedule_work(&display->psr_dc5_dc6_wa_work);
3975 }
3976 
3977 /**
3978  * intel_psr_dc5_dc6_wa_init - Init work for underrun on idle PSR HW bug wa
 * @display: intel display
3980  *
 * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
 * init psr_dc5_dc6_wa_work, which applies the workaround.
3983  */
3984 void intel_psr_dc5_dc6_wa_init(struct intel_display *display)
3985 {
3986 	if (DISPLAY_VER(display) != 20 &&
3987 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3988 		return;
3989 
3990 	INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work);
3991 }
3992 
3993 /**
3994  * intel_psr_notify_pipe_change - Notify PSR about enable/disable of a pipe
3995  * @state: intel atomic state
3996  * @crtc: intel crtc
3997  * @enable: enable/disable
3998  *
 * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
 * apply/remove the workaround when a pipe is getting enabled/disabled.
4001  */
4002 void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
4003 				  struct intel_crtc *crtc, bool enable)
4004 {
4005 	struct intel_display *display = to_intel_display(state);
4006 	struct intel_encoder *encoder;
4007 
4008 	if (DISPLAY_VER(display) != 20 &&
4009 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
4010 		return;
4011 
4012 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4013 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4014 		u8 active_non_psr_pipes;
4015 
4016 		mutex_lock(&intel_dp->psr.lock);
4017 
4018 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
4019 			goto unlock;
4020 
4021 		active_non_psr_pipes = intel_dp->psr.active_non_psr_pipes;
4022 
4023 		if (enable)
4024 			active_non_psr_pipes |= BIT(crtc->pipe);
4025 		else
4026 			active_non_psr_pipes &= ~BIT(crtc->pipe);
4027 
4028 		if (active_non_psr_pipes == intel_dp->psr.active_non_psr_pipes)
4029 			goto unlock;
4030 
4031 		if ((enable && intel_dp->psr.active_non_psr_pipes) ||
4032 		    (!enable && !intel_dp->psr.active_non_psr_pipes) ||
4033 		    !intel_dp->psr.pkg_c_latency_used) {
4034 			intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
4035 			goto unlock;
4036 		}
4037 
4038 		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
4039 
4040 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
4041 unlock:
4042 		mutex_unlock(&intel_dp->psr.lock);
4043 	}
4044 }
4045 
4046 /**
4047  * intel_psr_notify_vblank_enable_disable - Notify PSR about enable/disable of vblank
4048  * @display: intel display struct
4049  * @enable: enable/disable
4050  *
 * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
 * apply/remove the workaround when vblank is getting enabled/disabled.
4053  */
4054 void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
4055 					    bool enable)
4056 {
4057 	struct intel_encoder *encoder;
4058 
4059 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4060 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4061 
4062 		mutex_lock(&intel_dp->psr.lock);
4063 		if (intel_dp->psr.panel_replay_enabled) {
4064 			mutex_unlock(&intel_dp->psr.lock);
4065 			break;
4066 		}
4067 
4068 		if (intel_dp->psr.enabled && intel_dp->psr.pkg_c_latency_used)
4069 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
4070 
4071 		mutex_unlock(&intel_dp->psr.lock);
4072 		return;
4073 	}
4074 
4075 	/*
	 * NOTE: intel_display_power_set_target_dc_state is used only by PSR
	 * code for DC3CO handling. The DC3CO target state is currently
	 * disabled in the PSR code. If DC3CO is taken into use we need to
	 * take that into account here as well.
4081 	 */
4082 	intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE :
4083 						DC_STATE_EN_UPTO_DC6);
4084 }
4085 
4086 static void
4087 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
4088 {
4089 	struct intel_display *display = to_intel_display(intel_dp);
4090 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
4091 	const char *status = "unknown";
4092 	u32 val, status_val;
4093 
4094 	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
4095 	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
4096 		static const char * const live_status[] = {
4097 			"IDLE",
4098 			"CAPTURE",
4099 			"CAPTURE_FS",
4100 			"SLEEP",
4101 			"BUFON_FW",
4102 			"ML_UP",
4103 			"SU_STANDBY",
4104 			"FAST_SLEEP",
4105 			"DEEP_SLEEP",
4106 			"BUF_ON",
4107 			"TG_ON"
4108 		};
4109 		val = intel_de_read(display,
4110 				    EDP_PSR2_STATUS(display, cpu_transcoder));
4111 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
4112 		if (status_val < ARRAY_SIZE(live_status))
4113 			status = live_status[status_val];
4114 	} else {
4115 		static const char * const live_status[] = {
4116 			"IDLE",
4117 			"SRDONACK",
4118 			"SRDENT",
4119 			"BUFOFF",
4120 			"BUFON",
4121 			"AUXACK",
4122 			"SRDOFFACK",
4123 			"SRDENT_ON",
4124 		};
4125 		val = intel_de_read(display,
4126 				    psr_status_reg(display, cpu_transcoder));
4127 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
4128 		if (status_val < ARRAY_SIZE(live_status))
4129 			status = live_status[status_val];
4130 	}
4131 
4132 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
4133 }
4134 
4135 static void intel_psr_sink_capability(struct intel_connector *connector,
4136 				      struct seq_file *m)
4137 {
4138 	seq_printf(m, "Sink support: PSR = %s",
4139 		   str_yes_no(connector->dp.psr_caps.support));
4140 
4141 	if (connector->dp.psr_caps.support)
4142 		seq_printf(m, " [0x%02x]", connector->dp.psr_caps.dpcd[0]);
4143 	if (connector->dp.psr_caps.dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
4144 		seq_printf(m, " (Early Transport)");
4145 	seq_printf(m, ", Panel Replay = %s", str_yes_no(connector->dp.panel_replay_caps.support));
4146 	seq_printf(m, ", Panel Replay Selective Update = %s",
4147 		   str_yes_no(connector->dp.panel_replay_caps.su_support));
4148 	seq_printf(m, ", Panel Replay DSC support = %s",
4149 		   panel_replay_dsc_support_str(connector->dp.panel_replay_caps.dsc_support));
4150 	if (connector->dp.panel_replay_caps.dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
4151 	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
4152 		seq_printf(m, " (Early Transport)");
4153 	seq_printf(m, "\n");
4154 }
4155 
4156 static void intel_psr_print_mode(struct intel_dp *intel_dp,
4157 				 struct seq_file *m)
4158 {
4159 	struct intel_psr *psr = &intel_dp->psr;
4160 	const char *status, *mode, *region_et;
4161 
4162 	if (psr->enabled)
4163 		status = " enabled";
4164 	else
4165 		status = "disabled";
4166 
4167 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
4168 		mode = "Panel Replay Selective Update";
4169 	else if (psr->panel_replay_enabled)
4170 		mode = "Panel Replay";
4171 	else if (psr->sel_update_enabled)
4172 		mode = "PSR2";
4173 	else if (psr->enabled)
4174 		mode = "PSR1";
4175 	else
4176 		mode = "";
4177 
4178 	if (psr->su_region_et_enabled)
4179 		region_et = " (Early Transport)";
4180 	else
4181 		region_et = "";
4182 
4183 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
4184 	if (psr->no_psr_reason)
4185 		seq_printf(m, "  %s\n", psr->no_psr_reason);
4186 }
4187 
4188 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp,
4189 			    struct intel_connector *connector)
4190 {
4191 	struct intel_display *display = to_intel_display(intel_dp);
4192 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
4193 	struct intel_psr *psr = &intel_dp->psr;
4194 	struct ref_tracker *wakeref;
4195 	bool enabled;
4196 	u32 val, psr2_ctl;
4197 
4198 	intel_psr_sink_capability(connector, m);
4199 
4200 	if (!(connector->dp.psr_caps.support || connector->dp.panel_replay_caps.support))
4201 		return 0;
4202 
4203 	wakeref = intel_display_rpm_get(display);
4204 	mutex_lock(&psr->lock);
4205 
4206 	intel_psr_print_mode(intel_dp, m);
4207 
4208 	if (!psr->enabled) {
4209 		seq_printf(m, "PSR sink not reliable: %s\n",
4210 			   str_yes_no(psr->sink_not_reliable));
4211 
4212 		goto unlock;
4213 	}
4214 
4215 	if (psr->panel_replay_enabled) {
4216 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
4217 
4218 		if (intel_dp_is_edp(intel_dp))
4219 			psr2_ctl = intel_de_read(display,
4220 						 EDP_PSR2_CTL(display,
4221 							      cpu_transcoder));
4222 
4223 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
4224 	} else if (psr->sel_update_enabled) {
4225 		val = intel_de_read(display,
4226 				    EDP_PSR2_CTL(display, cpu_transcoder));
4227 		enabled = val & EDP_PSR2_ENABLE;
4228 	} else {
4229 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
4230 		enabled = val & EDP_PSR_ENABLE;
4231 	}
4232 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
4233 		   str_enabled_disabled(enabled), val);
4234 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
4235 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
4236 			   psr2_ctl);
4237 	psr_source_status(intel_dp, m);
4238 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
4239 		   psr->busy_frontbuffer_bits);
4240 
4241 	/*
	 * SKL+ Perf counter is reset to 0 every time a DC state is entered
4243 	 */
4244 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
4245 	seq_printf(m, "Performance counter: %u\n",
4246 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
4247 
4248 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
4249 		seq_printf(m, "Last attempted entry at: %lld\n",
4250 			   psr->last_entry_attempt);
4251 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
4252 	}
4253 
4254 	if (psr->sel_update_enabled) {
4255 		u32 su_frames_val[3];
4256 		int frame;
4257 
4258 		/*
4259 		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
4260 		 * (it returns zeros only) and it has been removed on Xe2_LPD.
4261 		 */
4262 		if (DISPLAY_VER(display) < 13) {
4263 			/*
			 * Reading all 3 registers beforehand to minimize the
			 * chance of crossing a frame boundary between register
			 * reads; each register packs the SU block count for
			 * three frames.
4266 			 */
4267 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
4268 				val = intel_de_read(display,
4269 						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
4270 				su_frames_val[frame / 3] = val;
4271 			}
4272 
4273 			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
4274 
4275 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
4276 				u32 su_blocks;
4277 
4278 				su_blocks = su_frames_val[frame / 3] &
4279 					PSR2_SU_STATUS_MASK(frame);
4280 				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
4281 				seq_printf(m, "%d\t%d\n", frame, su_blocks);
4282 			}
4283 		}
4284 
4285 		seq_printf(m, "PSR2 selective fetch: %s\n",
4286 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
4287 	}
4288 
4289 unlock:
4290 	mutex_unlock(&psr->lock);
4291 	intel_display_rpm_put(display, wakeref);
4292 
4293 	return 0;
4294 }
4295 
4296 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
4297 {
4298 	struct intel_display *display = m->private;
4299 	struct intel_dp *intel_dp = NULL;
4300 	struct intel_encoder *encoder;
4301 
4302 	if (!HAS_PSR(display))
4303 		return -ENODEV;
4304 
	/* Find the first eDP encoder that supports PSR */
4306 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4307 		intel_dp = enc_to_intel_dp(encoder);
4308 		break;
4309 	}
4310 
4311 	if (!intel_dp)
4312 		return -ENODEV;
4313 
4314 	return intel_psr_status(m, intel_dp, intel_dp->attached_connector);
4315 }
4316 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
4317 
4318 static int
4319 i915_edp_psr_debug_set(void *data, u64 val)
4320 {
4321 	struct intel_display *display = data;
4322 	struct intel_encoder *encoder;
4323 	int ret = -ENODEV;
4324 
4325 	if (!HAS_PSR(display))
4326 		return ret;
4327 
4328 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4329 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4330 
4331 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
4332 
4333 		// TODO: split to each transcoder's PSR debug state
4334 		with_intel_display_rpm(display)
4335 			ret = intel_psr_debug_set(intel_dp, val);
4336 	}
4337 
4338 	return ret;
4339 }
4340 
4341 static int
4342 i915_edp_psr_debug_get(void *data, u64 *val)
4343 {
4344 	struct intel_display *display = data;
4345 	struct intel_encoder *encoder;
4346 
4347 	if (!HAS_PSR(display))
4348 		return -ENODEV;
4349 
4350 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4351 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4352 
4353 		// TODO: split to each transcoder's PSR debug state
4354 		*val = READ_ONCE(intel_dp->psr.debug);
4355 		return 0;
4356 	}
4357 
4358 	return -ENODEV;
4359 }
4360 
4361 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
4362 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
4363 			"%llu\n");
4364 
4365 void intel_psr_debugfs_register(struct intel_display *display)
4366 {
4367 	struct dentry *debugfs_root = display->drm->debugfs_root;
4368 
4369 	debugfs_create_file("i915_edp_psr_debug", 0644, debugfs_root,
4370 			    display, &i915_edp_psr_debug_fops);
4371 
4372 	debugfs_create_file("i915_edp_psr_status", 0444, debugfs_root,
4373 			    display, &i915_edp_psr_status_fops);
4374 }
4375 
4376 static const char *psr_mode_str(struct intel_dp *intel_dp)
4377 {
4378 	if (intel_dp->psr.panel_replay_enabled)
4379 		return "PANEL-REPLAY";
4380 	else if (intel_dp->psr.enabled)
4381 		return "PSR";
4382 
4383 	return "unknown";
4384 }
4385 
4386 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
4387 {
4388 	struct intel_connector *connector = m->private;
4389 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4390 	static const char * const sink_status[] = {
4391 		"inactive",
4392 		"transition to active, capture and display",
4393 		"active, display from RFB",
4394 		"active, capture and display on sink device timings",
4395 		"transition to inactive, capture and display, timing re-sync",
4396 		"reserved",
4397 		"reserved",
4398 		"sink internal error",
4399 	};
4400 	const char *str;
4401 	int ret;
4402 	u8 status, error_status;
4403 
4404 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
4405 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
4406 		return -ENODEV;
4407 	}
4408 
4409 	if (connector->base.status != connector_status_connected)
4410 		return -ENODEV;
4411 
4412 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
4413 	if (ret)
4414 		return ret;
4415 
4416 	status &= DP_PSR_SINK_STATE_MASK;
4417 	if (status < ARRAY_SIZE(sink_status))
4418 		str = sink_status[status];
4419 	else
4420 		str = "unknown";
4421 
4422 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
4423 
4424 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
4425 
4426 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
4427 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
4428 			    DP_PSR_LINK_CRC_ERROR))
4429 		seq_puts(m, ":\n");
4430 	else
4431 		seq_puts(m, "\n");
4432 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
4433 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
4434 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
4435 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
4436 	if (error_status & DP_PSR_LINK_CRC_ERROR)
4437 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
4438 
4439 	return ret;
4440 }
4441 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
4442 
4443 static int i915_psr_status_show(struct seq_file *m, void *data)
4444 {
4445 	struct intel_connector *connector = m->private;
4446 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4447 
4448 	return intel_psr_status(m, intel_dp, connector);
4449 }
4450 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
4451 
4452 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
4453 {
4454 	struct intel_display *display = to_intel_display(connector);
4455 	struct dentry *root = connector->base.debugfs_entry;
4456 
4457 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
4458 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
4459 		return;
4460 
4461 	debugfs_create_file("i915_psr_sink_status", 0444, root,
4462 			    connector, &i915_psr_sink_status_fops);
4463 
4464 	if (HAS_PSR(display) || HAS_DP20(display))
4465 		debugfs_create_file("i915_psr_status", 0444, root,
4466 				    connector, &i915_psr_status_fops);
4467 }
4468 
4469 bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_state *crtc_state)
4470 {
4471 	/*
	 * eDP Panel Replay always uses ALPM.
	 * PSR2 uses ALPM but PSR1 doesn't.
4474 	 */
4475 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
4476 					     crtc_state->has_panel_replay);
4477 }
4478 
4479 bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
4480 				   const struct intel_crtc_state *crtc_state)
4481 {
4482 	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
4483 }
4484 
4485 void intel_psr_compute_config_late(struct intel_dp *intel_dp,
4486 				   struct intel_crtc_state *crtc_state)
4487 {
4488 	struct intel_display *display = to_intel_display(intel_dp);
4489 	int vblank = intel_crtc_vblank_length(crtc_state);
4490 	int wake_lines;
4491 
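	/*
	 * Number of lines needed to wake up: AUX-less ALPM (Panel Replay) has
	 * its own wake line count, selective update uses the PSR2 block count
	 * before LNL and the IO wake lines from LNL onwards.
	 */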
4492 	if (intel_psr_needs_alpm_aux_less(intel_dp, crtc_state))
4493 		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
4494 	else if (intel_psr_needs_alpm(intel_dp, crtc_state))
4495 		wake_lines = DISPLAY_VER(display) < 20 ?
4496 			     psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
4497 						    crtc_state->alpm_state.fast_wake_lines) :
4498 			     crtc_state->alpm_state.io_wake_lines;
4499 	else
4500 		wake_lines = 0;
4501 
4502 	/*
4503 	 * Disable the PSR features if wake lines exceed the available vblank.
	 * Though the SCL (set context latency) is computed based on these PSR
	 * features, it is not reset even if the PSR features are disabled, to
	 * avoid changing the vblank start at this stage.
4507 	 */
4508 	if (wake_lines && !_wake_lines_fit_into_vblank(crtc_state, vblank, wake_lines)) {
4509 		drm_dbg_kms(display->drm,
4510 			    "Adjusting PSR/PR mode: vblank too short for wake lines = %d\n",
4511 			    wake_lines);
4512 
4513 		if (crtc_state->has_panel_replay) {
4514 			crtc_state->has_panel_replay = false;
4515 			/*
			 * TODO: Add fallback to PSR/PSR2.
			 * Since Panel Replay cannot be supported, we could fall back to PSR/PSR2.
			 * This will require calling compute_config for PSR and PSR2 with a check
			 * for the actual guardband instead of vblank_length.
4520 			 */
4521 			crtc_state->has_psr = false;
4522 		}
4523 
4524 		crtc_state->has_sel_update = false;
4525 		crtc_state->enable_psr2_su_region_et = false;
4526 		crtc_state->enable_psr2_sel_fetch = false;
4527 	}
4528 
4529 	/* Wa_18037818876 */
4530 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
4531 		crtc_state->has_psr = false;
4532 		drm_dbg_kms(display->drm,
4533 			    "PSR disabled to workaround PSR FSM hang issue\n");
4534 	}
4535 
4536 	intel_psr_set_non_psr_pipes(intel_dp, crtc_state);
4537 }
4538 
4539 int intel_psr_min_guardband(struct intel_crtc_state *crtc_state)
4540 {
4541 	struct intel_display *display = to_intel_display(crtc_state);
4542 	int psr_min_guardband;
4543 	int wake_lines;
4544 
4545 	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
4546 		return 0;
4547 
4548 	if (crtc_state->has_panel_replay)
4549 		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
4550 	else if (crtc_state->has_sel_update)
4551 		wake_lines = DISPLAY_VER(display) < 20 ?
4552 			     psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
4553 						    crtc_state->alpm_state.fast_wake_lines) :
4554 			     crtc_state->alpm_state.io_wake_lines;
4555 	else
4556 		return 0;
4557 
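	/*
	 * The guardband has to cover the wake lines plus the set context
	 * latency, with one extra line when the PSR2 SDP has to be sent
	 * prior to the configured scanline.
	 */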
4558 	psr_min_guardband = wake_lines + crtc_state->set_context_latency;
4559 
4560 	if (crtc_state->req_psr2_sdp_prior_scanline)
4561 		psr_min_guardband++;
4562 
4563 	return psr_min_guardband;
4564 }
4565