xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision 00e08fb2e7ce88e2ae366cbc79997d71d014b0ac)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 #include <drm/drm_print.h>
30 #include <drm/drm_vblank.h>
31 
32 #include "i915_reg.h"
33 #include "intel_alpm.h"
34 #include "intel_atomic.h"
35 #include "intel_crtc.h"
36 #include "intel_cursor_regs.h"
37 #include "intel_ddi.h"
38 #include "intel_de.h"
39 #include "intel_display_irq.h"
40 #include "intel_display_regs.h"
41 #include "intel_display_rpm.h"
42 #include "intel_display_types.h"
43 #include "intel_display_utils.h"
44 #include "intel_dmc.h"
45 #include "intel_dp.h"
46 #include "intel_dp_aux.h"
47 #include "intel_dsb.h"
48 #include "intel_frontbuffer.h"
49 #include "intel_hdmi.h"
50 #include "intel_psr.h"
51 #include "intel_psr_regs.h"
52 #include "intel_snps_phy.h"
53 #include "intel_step.h"
54 #include "intel_vblank.h"
55 #include "intel_vdsc.h"
56 #include "intel_vrr.h"
57 #include "skl_universal_plane.h"
58 
59 /**
60  * DOC: Panel Self Refresh (PSR/SRD)
61  *
62  * Since Haswell Display controller supports Panel Self-Refresh on display
63  * panels which have a remote frame buffer (RFB) implemented according to PSR
64  * spec in eDP1.3. PSR feature allows the display to go to lower standby states
65  * when system is idle but display is on as it eliminates display refresh
66  * request to DDR memory completely as long as the frame buffer for that
67  * display is unchanged.
68  *
69  * Panel Self Refresh must be supported by both Hardware (source) and
70  * Panel (sink).
71  *
72  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
73  * to power down the link and memory controller. For DSI panels the same idea
74  * is called "manual mode".
75  *
76  * The implementation uses the hardware-based PSR support which automatically
77  * enters/exits self-refresh mode. The hardware takes care of sending the
78  * required DP aux message and could even retrain the link (that part isn't
79  * enabled yet though). The hardware also keeps track of any frontbuffer
80  * changes to know when to exit self-refresh mode again. Unfortunately that
81  * part doesn't work too well, hence why the i915 PSR support uses the
82  * software frontbuffer tracking to make sure it doesn't miss a screen
83  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
84  * get called by the frontbuffer tracking code. Note that because of locking
85  * issues the self-refresh re-enable code is done from a work queue, which
86  * must be correctly synchronized/cancelled when shutting down the pipe.
87  *
88  * DC3CO (DC3 clock off)
89  *
90  * On top of PSR2, GEN12 adds an intermediate power savings state that turns
91  * clock off automatically during PSR2 idle state.
92  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
93  * entry/exit allows the HW to enter a low-power state even when page flipping
94  * periodically (for instance a 30fps video playback scenario).
95  *
96  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was),
97  * so DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run after 6
98  * frames, if no other flip occurs and the function above is executed, DC3CO is
99  * disabled and PSR2 is configured to enter deep sleep, resetting again in case
100  * of another flip.
101  * Front buffer modifications do not trigger DC3CO activation on purpose as it
102  * would bring a lot of complexity and most of the modern systems will only
103  * use page flips.
104  */
105 
106 /*
107  * Description of PSR mask bits:
108  *
109  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
110  *
111  *  When unmasked (nearly) all display register writes (eg. even
112  *  SWF) trigger a PSR exit. Some registers are excluded from this
113  *  and they have a more specific mask (described below). On icl+
114  *  this bit no longer exists and is effectively always set.
115  *
116  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
117  *
118  *  When unmasked (nearly) all pipe/plane register writes
119  *  trigger a PSR exit. Some plane registers are excluded from this
120  *  and they have a more specific mask (described below).
121  *
122  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
123  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
124  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
125  *
126  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
127  *  SPR_SURF/CURBASE are not included in this and instead are
128  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
129  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
130  *
131  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
132  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
133  *
134  *  When unmasked PSR is blocked as long as the sprite
135  *  plane is enabled. skl+ with their universal planes no
136  *  longer have a mask bit like this, and no plane being
137  *  enabled blocks PSR.
138  *
139  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
140  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
141  *
142  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
143  *  this doesn't exist but CURPOS is included in the
144  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
145  *
146  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
147  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
148  *
149  *  When unmasked PSR is blocked as long as vblank and/or vsync
150  *  interrupt is unmasked in IMR *and* enabled in IER.
151  *
152  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
153  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
154  *
155  *  Selects whether PSR exit generates an extra vblank before
156  *  the first frame is transmitted. Also note the opposite polarity
157  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
158  *  unmasked==do not generate the extra vblank).
159  *
160  *  With DC states enabled the extra vblank happens after link training,
161  *  with DC states disabled it happens immediately upon PSR exit trigger.
162  *  No idea as of now why there is a difference. HSW/BDW (which don't
163  *  even have DMC) always generate it after link training. Go figure.
164  *
165  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
166  *  and thus won't latch until the first vblank. So with DC states
167  *  enabled the register effectively uses the reset value during DC5
168  *  exit+PSR exit sequence, and thus the bit does nothing until
169  *  latched by the vblank that it was trying to prevent from being
170  *  generated in the first place. So we should probably call this
171  *  one a chicken/egg bit instead on skl+.
172  *
173  *  In standby mode (as opposed to link-off) this makes no difference
174  *  as the timing generator keeps running the whole time generating
175  *  normal periodic vblanks.
176  *
177  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
178  *  and doing so makes the behaviour match the skl+ reset value.
179  *
180  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
181  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
182  *
183  *  On BDW without this bit no vblanks whatsoever are
184  *  generated after PSR exit. On HSW this has no apparent effect.
185  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
186  *
187  * The rest of the bits are more self-explanatory and/or
188  * irrelevant for normal operation.
189  *
190  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
191  * has_sel_update:
192  *
193  *  has_psr (alone):					PSR1
194  *  has_psr + has_sel_update:				PSR2
195  *  has_psr + has_panel_replay:				Panel Replay
196  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
197  *
198  * Description of some intel_psr variables. enabled, panel_replay_enabled,
199  * sel_update_enabled
200  *
201  *  enabled (alone):						PSR1
202  *  enabled + sel_update_enabled:				PSR2
203  *  enabled + panel_replay_enabled:				Panel Replay
204  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
205  */
206 
207 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
208 			   (intel_dp)->psr.source_support)
209 
210 bool intel_encoder_can_psr(struct intel_encoder *encoder)
211 {
212 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
213 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
214 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
215 	else
216 		return false;
217 }
218 
219 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
220 				  const struct intel_crtc_state *crtc_state)
221 {
222 	/*
223 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
224 	 * the output is enabled. For non-eDP outputs the main link is always
225 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
226 	 * for eDP.
227 	 *
228 	 * TODO:
229 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
230 	 *   the ALPM with main-link off mode is not enabled.
231 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
232 	 *   main-link off mode is added for it and this mode gets enabled.
233 	 */
234 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
235 	       intel_encoder_can_psr(encoder);
236 }
237 
238 static bool psr_global_enabled(struct intel_dp *intel_dp)
239 {
240 	struct intel_connector *connector = intel_dp->attached_connector;
241 
242 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
243 	case I915_PSR_DEBUG_DEFAULT:
244 		return intel_dp_is_edp(intel_dp) ?
245 			connector->panel.vbt.psr.enable : true;
246 	case I915_PSR_DEBUG_DISABLE:
247 		return false;
248 	default:
249 		return true;
250 	}
251 }
252 
253 static bool sel_update_global_enabled(struct intel_dp *intel_dp)
254 {
255 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
256 	case I915_PSR_DEBUG_DISABLE:
257 	case I915_PSR_DEBUG_FORCE_PSR1:
258 		return false;
259 	default:
260 		return true;
261 	}
262 }
263 
264 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
265 {
266 	struct intel_display *display = to_intel_display(intel_dp);
267 
268 	return !(intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE) &&
269 		display->params.enable_panel_replay;
270 }
271 
272 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
273 {
274 	struct intel_display *display = to_intel_display(intel_dp);
275 
276 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
277 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
278 }
279 
280 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
281 {
282 	struct intel_display *display = to_intel_display(intel_dp);
283 
284 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
285 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
286 }
287 
288 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
289 {
290 	struct intel_display *display = to_intel_display(intel_dp);
291 
292 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
293 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
294 }
295 
296 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
297 {
298 	struct intel_display *display = to_intel_display(intel_dp);
299 
300 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
301 		EDP_PSR_MASK(intel_dp->psr.transcoder);
302 }
303 
304 static i915_reg_t psr_ctl_reg(struct intel_display *display,
305 			      enum transcoder cpu_transcoder)
306 {
307 	if (DISPLAY_VER(display) >= 8)
308 		return EDP_PSR_CTL(display, cpu_transcoder);
309 	else
310 		return HSW_SRD_CTL;
311 }
312 
313 static i915_reg_t psr_debug_reg(struct intel_display *display,
314 				enum transcoder cpu_transcoder)
315 {
316 	if (DISPLAY_VER(display) >= 8)
317 		return EDP_PSR_DEBUG(display, cpu_transcoder);
318 	else
319 		return HSW_SRD_DEBUG;
320 }
321 
322 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
323 				   enum transcoder cpu_transcoder)
324 {
325 	if (DISPLAY_VER(display) >= 8)
326 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
327 	else
328 		return HSW_SRD_PERF_CNT;
329 }
330 
331 static i915_reg_t psr_status_reg(struct intel_display *display,
332 				 enum transcoder cpu_transcoder)
333 {
334 	if (DISPLAY_VER(display) >= 8)
335 		return EDP_PSR_STATUS(display, cpu_transcoder);
336 	else
337 		return HSW_SRD_STATUS;
338 }
339 
340 static i915_reg_t psr_imr_reg(struct intel_display *display,
341 			      enum transcoder cpu_transcoder)
342 {
343 	if (DISPLAY_VER(display) >= 12)
344 		return TRANS_PSR_IMR(display, cpu_transcoder);
345 	else
346 		return EDP_PSR_IMR;
347 }
348 
349 static i915_reg_t psr_iir_reg(struct intel_display *display,
350 			      enum transcoder cpu_transcoder)
351 {
352 	if (DISPLAY_VER(display) >= 12)
353 		return TRANS_PSR_IIR(display, cpu_transcoder);
354 	else
355 		return EDP_PSR_IIR;
356 }
357 
358 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
359 				  enum transcoder cpu_transcoder)
360 {
361 	if (DISPLAY_VER(display) >= 8)
362 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
363 	else
364 		return HSW_SRD_AUX_CTL;
365 }
366 
367 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
368 				   enum transcoder cpu_transcoder, int i)
369 {
370 	if (DISPLAY_VER(display) >= 8)
371 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
372 	else
373 		return HSW_SRD_AUX_DATA(i);
374 }
375 
376 static void psr_irq_control(struct intel_dp *intel_dp)
377 {
378 	struct intel_display *display = to_intel_display(intel_dp);
379 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
380 	u32 mask;
381 
382 	if (intel_dp->psr.panel_replay_enabled)
383 		return;
384 
385 	mask = psr_irq_psr_error_bit_get(intel_dp);
386 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
387 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
388 			psr_irq_pre_entry_bit_get(intel_dp);
389 
390 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
391 		     psr_irq_mask_get(intel_dp), ~mask);
392 }
393 
394 static void psr_event_print(struct intel_display *display,
395 			    u32 val, bool sel_update_enabled)
396 {
397 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
398 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
399 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
400 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
401 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
402 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
403 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
404 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
405 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
406 	if (val & PSR_EVENT_GRAPHICS_RESET)
407 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
408 	if (val & PSR_EVENT_PCH_INTERRUPT)
409 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
410 	if (val & PSR_EVENT_MEMORY_UP)
411 		drm_dbg_kms(display->drm, "\tMemory up\n");
412 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
413 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
414 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
415 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
416 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
417 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
418 	if (val & PSR_EVENT_REGISTER_UPDATE)
419 		drm_dbg_kms(display->drm, "\tRegister updated\n");
420 	if (val & PSR_EVENT_HDCP_ENABLE)
421 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
422 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
423 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
424 	if (val & PSR_EVENT_VBI_ENABLE)
425 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
426 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
427 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
428 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
429 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
430 }
431 
432 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
433 {
434 	struct intel_display *display = to_intel_display(intel_dp);
435 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
436 	ktime_t time_ns =  ktime_get();
437 
438 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
439 		intel_dp->psr.last_entry_attempt = time_ns;
440 		drm_dbg_kms(display->drm,
441 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
442 			    transcoder_name(cpu_transcoder));
443 	}
444 
445 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
446 		intel_dp->psr.last_exit = time_ns;
447 		drm_dbg_kms(display->drm,
448 			    "[transcoder %s] PSR exit completed\n",
449 			    transcoder_name(cpu_transcoder));
450 
451 		if (DISPLAY_VER(display) >= 9) {
452 			u32 val;
453 
454 			val = intel_de_rmw(display,
455 					   PSR_EVENT(display, cpu_transcoder),
456 					   0, 0);
457 
458 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
459 		}
460 	}
461 
462 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
463 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
464 			 transcoder_name(cpu_transcoder));
465 
466 		intel_dp->psr.irq_aux_error = true;
467 
468 		/*
469 		 * If this interruption is not masked it will keep
470 		 * interrupting so fast that it prevents the scheduled
471 		 * work to run.
472 		 * Also after a PSR error, we don't want to arm PSR
473 		 * again so we don't care about unmask the interruption
474 		 * or unset irq_aux_error.
475 		 */
476 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
477 			     0, psr_irq_psr_error_bit_get(intel_dp));
478 
479 		queue_work(display->wq.unordered, &intel_dp->psr.work);
480 	}
481 }
482 
483 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
484 {
485 	struct intel_display *display = to_intel_display(intel_dp);
486 	u8 val = 8; /* assume the worst if we can't read the value */
487 
488 	if (drm_dp_dpcd_readb(&intel_dp->aux,
489 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
490 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
491 	else
492 		drm_dbg_kms(display->drm,
493 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
494 	return val;
495 }
496 
497 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
498 {
499 	u8 su_capability = 0;
500 
501 	if (intel_dp->psr.sink_panel_replay_su_support) {
502 		if (drm_dp_dpcd_read_byte(&intel_dp->aux,
503 					  DP_PANEL_REPLAY_CAP_CAPABILITY,
504 					  &su_capability) < 0)
505 			return 0;
506 	} else {
507 		su_capability = intel_dp->psr_dpcd[1];
508 	}
509 
510 	return su_capability;
511 }
512 
513 static unsigned int
514 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
515 {
516 	return intel_dp->psr.sink_panel_replay_su_support ?
517 		DP_PANEL_REPLAY_CAP_X_GRANULARITY :
518 		DP_PSR2_SU_X_GRANULARITY;
519 }
520 
521 static unsigned int
522 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
523 {
524 	return intel_dp->psr.sink_panel_replay_su_support ?
525 		DP_PANEL_REPLAY_CAP_Y_GRANULARITY :
526 		DP_PSR2_SU_Y_GRANULARITY;
527 }
528 
529 /*
530  * Note: Bits related to granularity are same in panel replay and psr
531  * registers. Rely on PSR definitions on these "common" bits.
532  */
533 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
534 {
535 	struct intel_display *display = to_intel_display(intel_dp);
536 	ssize_t r;
537 	u16 w;
538 	u8 y;
539 
540 	/*
541 	 * TODO: Do we need to take into account panel supporting both PSR and
542 	 * Panel replay?
543 	 */
544 
545 	/*
546 	 * If sink don't have specific granularity requirements set legacy
547 	 * ones.
548 	 */
549 	if (!(intel_dp_get_su_capability(intel_dp) &
550 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
551 		/* As PSR2 HW sends full lines, we do not care about x granularity */
552 		w = 4;
553 		y = 4;
554 		goto exit;
555 	}
556 
557 	r = drm_dp_dpcd_read(&intel_dp->aux,
558 			     intel_dp_get_su_x_granularity_offset(intel_dp),
559 			     &w, 2);
560 	if (r != 2)
561 		drm_dbg_kms(display->drm,
562 			    "Unable to read selective update x granularity\n");
563 	/*
564 	 * Spec says that if the value read is 0 the default granularity should
565 	 * be used instead.
566 	 */
567 	if (r != 2 || w == 0)
568 		w = 4;
569 
570 	r = drm_dp_dpcd_read(&intel_dp->aux,
571 			     intel_dp_get_su_y_granularity_offset(intel_dp),
572 			     &y, 1);
573 	if (r != 1) {
574 		drm_dbg_kms(display->drm,
575 			    "Unable to read selective update y granularity\n");
576 		y = 4;
577 	}
578 	if (y == 0)
579 		y = 1;
580 
581 exit:
582 	intel_dp->psr.su_w_granularity = w;
583 	intel_dp->psr.su_y_granularity = y;
584 }
585 
586 static enum intel_panel_replay_dsc_support
587 compute_pr_dsc_support(struct intel_dp *intel_dp)
588 {
589 	u8 pr_dsc_mode;
590 	u8 val;
591 
592 	val = intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_CAPABILITY)];
593 	pr_dsc_mode = REG_FIELD_GET8(DP_PANEL_REPLAY_DSC_DECODE_CAPABILITY_IN_PR_MASK, val);
594 
595 	switch (pr_dsc_mode) {
596 	case DP_DSC_DECODE_CAPABILITY_IN_PR_FULL_FRAME_ONLY:
597 		return INTEL_DP_PANEL_REPLAY_DSC_FULL_FRAME_ONLY;
598 	case DP_DSC_DECODE_CAPABILITY_IN_PR_SUPPORTED:
599 		return INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE;
600 	default:
601 		MISSING_CASE(pr_dsc_mode);
602 		fallthrough;
603 	case DP_DSC_DECODE_CAPABILITY_IN_PR_NOT_SUPPORTED:
604 	case DP_DSC_DECODE_CAPABILITY_IN_PR_RESERVED:
605 		return INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED;
606 	}
607 }
608 
609 static const char *panel_replay_dsc_support_str(enum intel_panel_replay_dsc_support dsc_support)
610 {
611 	switch (dsc_support) {
612 	case INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED:
613 		return "not supported";
614 	case INTEL_DP_PANEL_REPLAY_DSC_FULL_FRAME_ONLY:
615 		return "full frame only";
616 	case INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE:
617 		return "selective update";
618 	default:
619 		MISSING_CASE(dsc_support);
620 		return "n/a";
621 	};
622 }
623 
624 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
625 {
626 	struct intel_display *display = to_intel_display(intel_dp);
627 	int ret;
628 
629 	/* TODO: Enable Panel Replay on MST once it's properly implemented. */
630 	if (intel_dp->mst_detect == DRM_DP_MST)
631 		return;
632 
633 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
634 				    &intel_dp->pr_dpcd, sizeof(intel_dp->pr_dpcd));
635 	if (ret < 0)
636 		return;
637 
638 	if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
639 	      DP_PANEL_REPLAY_SUPPORT))
640 		return;
641 
642 	if (intel_dp_is_edp(intel_dp)) {
643 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
644 			drm_dbg_kms(display->drm,
645 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
646 			return;
647 		}
648 
649 		if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
650 		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
651 			drm_dbg_kms(display->drm,
652 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
653 			return;
654 		}
655 	}
656 
657 	intel_dp->psr.sink_panel_replay_support = true;
658 
659 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
660 	    DP_PANEL_REPLAY_SU_SUPPORT)
661 		intel_dp->psr.sink_panel_replay_su_support = true;
662 
663 	intel_dp->psr.sink_panel_replay_dsc_support = compute_pr_dsc_support(intel_dp);
664 
665 	drm_dbg_kms(display->drm,
666 		    "Panel replay %sis supported by panel (in DSC mode: %s)\n",
667 		    intel_dp->psr.sink_panel_replay_su_support ?
668 		    "selective_update " : "",
669 		    panel_replay_dsc_support_str(intel_dp->psr.sink_panel_replay_dsc_support));
670 }
671 
672 static void _psr_init_dpcd(struct intel_dp *intel_dp)
673 {
674 	struct intel_display *display = to_intel_display(intel_dp);
675 	int ret;
676 
677 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
678 				    sizeof(intel_dp->psr_dpcd));
679 	if (ret < 0)
680 		return;
681 
682 	if (!intel_dp->psr_dpcd[0])
683 		return;
684 
685 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
686 		    intel_dp->psr_dpcd[0]);
687 
688 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
689 		drm_dbg_kms(display->drm,
690 			    "PSR support not currently available for this panel\n");
691 		return;
692 	}
693 
694 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
695 		drm_dbg_kms(display->drm,
696 			    "Panel lacks power state control, PSR cannot be enabled\n");
697 		return;
698 	}
699 
700 	intel_dp->psr.sink_support = true;
701 	intel_dp->psr.sink_sync_latency =
702 		intel_dp_get_sink_sync_latency(intel_dp);
703 
704 	if (DISPLAY_VER(display) >= 9 &&
705 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
706 		bool y_req = intel_dp->psr_dpcd[1] &
707 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
708 
709 		/*
710 		 * All panels that supports PSR version 03h (PSR2 +
711 		 * Y-coordinate) can handle Y-coordinates in VSC but we are
712 		 * only sure that it is going to be used when required by the
713 		 * panel. This way panel is capable to do selective update
714 		 * without a aux frame sync.
715 		 *
716 		 * To support PSR version 02h and PSR version 03h without
717 		 * Y-coordinate requirement panels we would need to enable
718 		 * GTC first.
719 		 */
720 		intel_dp->psr.sink_psr2_support = y_req &&
721 			intel_alpm_aux_wake_supported(intel_dp);
722 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
723 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
724 	}
725 }
726 
727 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
728 {
729 	_psr_init_dpcd(intel_dp);
730 
731 	_panel_replay_init_dpcd(intel_dp);
732 
733 	if (intel_dp->psr.sink_psr2_support ||
734 	    intel_dp->psr.sink_panel_replay_su_support)
735 		intel_dp_get_su_granularity(intel_dp);
736 }
737 
738 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
739 {
740 	struct intel_display *display = to_intel_display(intel_dp);
741 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
742 	u32 aux_clock_divider, aux_ctl;
743 	/* write DP_SET_POWER=D0 */
744 	static const u8 aux_msg[] = {
745 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
746 		[1] = (DP_SET_POWER >> 8) & 0xff,
747 		[2] = DP_SET_POWER & 0xff,
748 		[3] = 1 - 1,
749 		[4] = DP_SET_POWER_D0,
750 	};
751 	int i;
752 
753 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
754 	for (i = 0; i < sizeof(aux_msg); i += 4)
755 		intel_de_write(display,
756 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
757 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
758 
759 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
760 
761 	/* Start with bits set for DDI_AUX_CTL register */
762 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
763 					     aux_clock_divider);
764 
765 	/* Select only valid bits for SRD_AUX_CTL */
766 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
767 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
768 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
769 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
770 
771 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
772 		       aux_ctl);
773 }
774 
775 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
776 {
777 	struct intel_display *display = to_intel_display(intel_dp);
778 
779 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
780 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
781 		return false;
782 
783 	return panel_replay ?
784 		intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
785 		DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
786 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED;
787 }
788 
789 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
790 				      const struct intel_crtc_state *crtc_state)
791 {
792 	u8 val = DP_PANEL_REPLAY_ENABLE |
793 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
794 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
795 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
796 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
797 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
798 
799 	if (crtc_state->has_sel_update)
800 		val |= DP_PANEL_REPLAY_SU_ENABLE;
801 
802 	if (crtc_state->enable_psr2_su_region_et)
803 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
804 
805 	if (crtc_state->req_psr2_sdp_prior_scanline)
806 		panel_replay_config2 |=
807 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
808 
809 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
810 
811 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
812 			   panel_replay_config2);
813 }
814 
815 static void _psr_enable_sink(struct intel_dp *intel_dp,
816 			     const struct intel_crtc_state *crtc_state)
817 {
818 	struct intel_display *display = to_intel_display(intel_dp);
819 	u8 val = 0;
820 
821 	if (crtc_state->has_sel_update) {
822 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
823 	} else {
824 		if (intel_dp->psr.link_standby)
825 			val |= DP_PSR_MAIN_LINK_ACTIVE;
826 
827 		if (DISPLAY_VER(display) >= 8)
828 			val |= DP_PSR_CRC_VERIFICATION;
829 	}
830 
831 	if (crtc_state->req_psr2_sdp_prior_scanline)
832 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
833 
834 	if (crtc_state->enable_psr2_su_region_et)
835 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
836 
837 	if (intel_dp->psr.entry_setup_frames > 0)
838 		val |= DP_PSR_FRAME_CAPTURE;
839 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
840 
841 	val |= DP_PSR_ENABLE;
842 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
843 }
844 
845 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
846 				  const struct intel_crtc_state *crtc_state)
847 {
848 	intel_alpm_enable_sink(intel_dp, crtc_state);
849 
850 	crtc_state->has_panel_replay ?
851 		_panel_replay_enable_sink(intel_dp, crtc_state) :
852 		_psr_enable_sink(intel_dp, crtc_state);
853 
854 	if (intel_dp_is_edp(intel_dp))
855 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
856 }
857 
858 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
859 {
860 	if (CAN_PANEL_REPLAY(intel_dp))
861 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
862 				   DP_PANEL_REPLAY_ENABLE);
863 }
864 
/*
 * Build the TP (training pattern) wake-up time fields for the PSR1 control
 * register, based on the VBT-provided wake-up times (or the safest values
 * when psr_safest_params is set), plus the TP1-vs-TP2/TP3 selection.
 */
static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	u32 val = 0;

	if (DISPLAY_VER(display) >= 11)
		val |= EDP_PSR_TP4_TIME_0us;

	/* Safest params: maximum wake-up times, skip the VBT lookup */
	if (display->params.psr_safest_params) {
		val |= EDP_PSR_TP1_TIME_2500us;
		val |= EDP_PSR_TP2_TP3_TIME_2500us;
		goto check_tp3_sel;
	}

	/* Round the VBT TP1 wake-up time up to the next supported value */
	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
		val |= EDP_PSR_TP1_TIME_0us;
	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
		val |= EDP_PSR_TP1_TIME_100us;
	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
		val |= EDP_PSR_TP1_TIME_500us;
	else
		val |= EDP_PSR_TP1_TIME_2500us;

	/* Same rounding for the TP2/TP3 wake-up time */
	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
		val |= EDP_PSR_TP2_TP3_TIME_0us;
	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR_TP2_TP3_TIME_100us;
	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR_TP2_TP3_TIME_500us;
	else
		val |= EDP_PSR_TP2_TP3_TIME_2500us;

	/*
	 * WA 0479: hsw,bdw
	 * "Do not skip both TP1 and TP2/TP3"
	 */
	if (DISPLAY_VER(display) < 9 &&
	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
		val |= EDP_PSR_TP2_TP3_TIME_100us;

check_tp3_sel:
	/* Prefer TP3 when both source and sink support it */
	if (intel_dp_source_supports_tps3(display) &&
	    drm_dp_tps3_supported(intel_dp->dpcd))
		val |= EDP_PSR_TP_TP1_TP3;
	else
		val |= EDP_PSR_TP_TP1_TP2;

	return val;
}
916 
/*
 * Number of idle frames to wait before entering PSR: the larger of the VBT
 * value and sink_sync_latency + 1, with a floor of 6 and clamped to the
 * 4-bit hardware field.
 */
static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	int idle_frames;

	/* Let's use 6 as the minimum to cover all known cases including the
	 * off-by-one issue that HW has in some cases.
	 */
	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);

	/* The register field is only 4 bits wide */
	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
		idle_frames = 0xf;

	return idle_frames;
}
934 
/*
 * Check whether DC5/DC6 entry is currently blocked: either the target DC
 * state is not DC5/DC6, other non-PSR pipes are active, or vblank
 * interrupts are enabled on the PSR pipe. Used by Wa_16025596647.
 */
static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	u32 current_dc_state = intel_display_power_get_current_dc_state(display);
	struct intel_crtc *crtc = intel_crtc_for_pipe(display, intel_dp->psr.pipe);
	struct drm_vblank_crtc *vblank = drm_crtc_vblank_crtc(&crtc->base);

	/* READ_ONCE: vblank->enabled may change concurrently */
	return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
		current_dc_state != DC_STATE_EN_UPTO_DC6) ||
		intel_dp->psr.active_non_psr_pipes ||
		READ_ONCE(vblank->enabled);
}
947 
/*
 * Program and activate PSR1 in the source hardware (EDP_PSR_CTL), based on
 * the parameters previously stored in intel_dp->psr.
 */
static void hsw_activate_psr1(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	u32 max_sleep_time = 0x1f;
	u32 val = EDP_PSR_ENABLE;

	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));

	if (DISPLAY_VER(display) < 20)
		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);

	if (display->platform.haswell)
		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;

	if (intel_dp->psr.link_standby)
		val |= EDP_PSR_LINK_STANDBY;

	val |= intel_psr1_get_tp_time(intel_dp);

	if (DISPLAY_VER(display) >= 8)
		val |= EDP_PSR_CRC_ENABLE;

	if (DISPLAY_VER(display) >= 20)
		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);

	/* Overwrite everything except the restore-PSR-active-context bits */
	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);

	/* Wa_16025596647 */
	if ((DISPLAY_VER(display) == 20 ||
	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
								       intel_dp->psr.pipe,
								       true);
}
985 
/*
 * Build the TP2 wake-up time field of the PSR2 control register from the
 * VBT PSR2 TP2/TP3 wake-up time, rounded up to the next supported value
 * (or forced to the safest 2500us value).
 */
static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	u32 val = 0;

	if (display->params.psr_safest_params)
		return EDP_PSR2_TP2_TIME_2500us;

	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
		val |= EDP_PSR2_TP2_TIME_50us;
	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR2_TP2_TIME_100us;
	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR2_TP2_TIME_500us;
	else
		val |= EDP_PSR2_TP2_TIME_2500us;

	return val;
}
1007 
1008 static int
1009 psr2_block_count_lines(u8 io_wake_lines, u8 fast_wake_lines)
1010 {
1011 	return io_wake_lines < 9 && fast_wake_lines < 9 ? 8 : 12;
1012 }
1013 
1014 static int psr2_block_count(struct intel_dp *intel_dp)
1015 {
1016 	return psr2_block_count_lines(intel_dp->psr.io_wake_lines,
1017 				      intel_dp->psr.fast_wake_lines) / 4;
1018 }
1019 
1020 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
1021 {
1022 	u8 frames_before_su_entry;
1023 
1024 	frames_before_su_entry = max_t(u8,
1025 				       intel_dp->psr.sink_sync_latency + 1,
1026 				       2);
1027 
1028 	/* Entry setup frames must be at least 1 less than frames before SU entry */
1029 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
1030 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
1031 
1032 	return frames_before_su_entry;
1033 }
1034 
/*
 * Activate Panel Replay in the source hardware. For eDP with selective
 * update, the PSR2 control register is also set up first; Panel Replay
 * itself is enabled via TRANS_DP2_CTL.
 */
static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_psr *psr = &intel_dp->psr;
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;

	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
		u32 val = psr->su_region_et_enabled ?
			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;

		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
			val |= EDP_PSR2_SU_SDP_SCANLINE;

		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
			       val);
	}

	/* Start with continuous full frame selective updates */
	intel_de_rmw(display,
		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);

	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
		     TRANS_DP2_PANEL_REPLAY_ENABLE);
}
1059 
/*
 * Program and activate PSR2 in the source hardware (EDP_PSR2_CTL), based
 * on the parameters previously stored in intel_dp->psr. The PSR1 control
 * register is written as well since some fields (e.g. entry setup frames
 * on LNL+) live there.
 */
static void hsw_activate_psr2(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	u32 val = EDP_PSR2_ENABLE;
	u32 psr_val = 0;
	u8 idle_frames;

	/* Wa_16025596647 */
	if ((DISPLAY_VER(display) == 20 ||
	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
		idle_frames = 0;
	else
		idle_frames = psr_compute_idle_frames(intel_dp);
	val |= EDP_PSR2_IDLE_FRAMES(idle_frames);

	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
		val |= EDP_SU_TRACK_ENABLE;

	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
		val |= EDP_Y_COORDINATE_ENABLE;

	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));

	val |= intel_psr2_get_tp_time(intel_dp);

	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
		if (psr2_block_count(intel_dp) > 2)
			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
		else
			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
	}

	/* Wa_22012278275:adl-p */
	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
		/* Remaps wake line counts to the encoding these steppings expect */
		static const u8 map[] = {
			2, /* 5 lines */
			1, /* 6 lines */
			0, /* 7 lines */
			3, /* 8 lines */
			6, /* 9 lines */
			5, /* 10 lines */
			4, /* 11 lines */
			7, /* 12 lines */
		};
		/*
		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
		 * comments below for more information
		 */
		int tmp;

		tmp = map[intel_dp->psr.io_wake_lines -
			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);

		tmp = map[intel_dp->psr.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
	} else if (DISPLAY_VER(display) >= 20) {
		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
	} else if (DISPLAY_VER(display) >= 12) {
		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->psr.fast_wake_lines);
	} else if (DISPLAY_VER(display) >= 9) {
		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->psr.io_wake_lines);
		val |= EDP_PSR2_FAST_WAKE(intel_dp->psr.fast_wake_lines);
	}

	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
		val |= EDP_PSR2_SU_SDP_SCANLINE;

	if (DISPLAY_VER(display) >= 20)
		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);

	if (intel_dp->psr.psr2_sel_fetch_enabled) {
		u32 tmp;

		/* Selective fetch must already have manual tracking enabled */
		tmp = intel_de_read(display,
				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
	} else if (HAS_PSR2_SEL_FETCH(display)) {
		intel_de_write(display,
			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
	}

	if (intel_dp->psr.su_region_et_enabled)
		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;

	/*
	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec is
	 * recommending keep this bit unset while PSR2 is enabled.
	 */
	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);

	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
}
1156 
1157 static bool
1158 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1159 {
1160 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1161 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1162 	else if (DISPLAY_VER(display) >= 12)
1163 		return cpu_transcoder == TRANSCODER_A;
1164 	else if (DISPLAY_VER(display) >= 9)
1165 		return cpu_transcoder == TRANSCODER_EDP;
1166 	else
1167 		return false;
1168 }
1169 
1170 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1171 {
1172 	if (!crtc_state->hw.active)
1173 		return 0;
1174 
1175 	return DIV_ROUND_UP(1000 * 1000,
1176 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1177 }
1178 
/* Rewrite the idle frames field of the PSR2 control register. */
static void psr2_program_idle_frames(struct intel_dp *intel_dp,
				     u32 idle_frames)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;

	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
		     EDP_PSR2_IDLE_FRAMES_MASK,
		     EDP_PSR2_IDLE_FRAMES(idle_frames));
}
1189 
/*
 * Allow DC3CO: zero the idle frame count first so PSR2 enters deep sleep
 * immediately, then switch the target DC state to DC3CO.
 */
static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	psr2_program_idle_frames(intel_dp, 0);
	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
}
1197 
/*
 * Disallow DC3CO: switch the target DC state back to DC6 first, then
 * restore the computed idle frame count.
 */
static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
}
1205 
/*
 * Delayed work handler that disables DC3CO after the DC3CO exit delay has
 * passed without it being rescheduled. Runs under psr.lock.
 */
static void tgl_dc3co_disable_work(struct work_struct *work)
{
	struct intel_dp *intel_dp =
		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);

	mutex_lock(&intel_dp->psr.lock);
	/* If delayed work is pending, it is not idle */
	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
		goto unlock;

	tgl_psr2_disable_dc3co(intel_dp);
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}
1220 
/*
 * Synchronously disallow DC3CO before PSR2 exit; cancels any pending
 * delayed-disable work. No-op when DC3CO exitline is not in use.
 */
static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
{
	if (!intel_dp->psr.dc3co_exitline)
		return;

	cancel_delayed_work(&intel_dp->psr.dc3co_work);
	/* Before PSR2 exit disallow dc3co */
	tgl_psr2_disable_dc3co(intel_dp);
}
1230 
1231 static bool
1232 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1233 			      struct intel_crtc_state *crtc_state)
1234 {
1235 	struct intel_display *display = to_intel_display(intel_dp);
1236 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1237 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1238 	enum port port = dig_port->base.port;
1239 
1240 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1241 		return pipe <= PIPE_B && port <= PORT_B;
1242 	else
1243 		return pipe == PIPE_A && port == PORT_A;
1244 }
1245 
/*
 * Compute crtc_state->dc3co_exitline, the scanline at which DC3CO exit
 * must start so the display wakes in time.
 *
 * NOTE: the unconditional return below deliberately disables this whole
 * function (see the FIXME); the code after it is currently dead and kept
 * for when the new DC3CO sequence lands.
 */
static void
tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
				  struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
	struct i915_power_domains *power_domains = &display->power.domains;
	u32 exit_scanlines;

	/*
	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
	 * disable DC3CO until the changed dc3co activating/deactivating sequence
	 * is applied. B.Specs:49196
	 */
	return;

	/*
	 * DMC's DC3CO exit mechanism has an issue with Selective Fecth
	 * TODO: when the issue is addressed, this restriction should be removed.
	 */
	if (crtc_state->enable_psr2_sel_fetch)
		return;

	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
		return;

	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
		return;

	/* Wa_16011303918:adl-p */
	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
		return;

	/*
	 * DC3CO Exit time 200us B.Spec 49196
	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
	 */
	exit_scanlines =
		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;

	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
		return;

	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
}
1291 
1292 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1293 					      struct intel_crtc_state *crtc_state)
1294 {
1295 	struct intel_display *display = to_intel_display(intel_dp);
1296 
1297 	if (!display->params.enable_psr2_sel_fetch &&
1298 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1299 		drm_dbg_kms(display->drm,
1300 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1301 		return false;
1302 	}
1303 
1304 	if (crtc_state->uapi.async_flip) {
1305 		drm_dbg_kms(display->drm,
1306 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1307 		return false;
1308 	}
1309 
1310 	return crtc_state->enable_psr2_sel_fetch = true;
1311 }
1312 
/*
 * Check that the sink's selective update granularity is compatible with
 * the mode and tracking method. On success stores the chosen Y granularity
 * in crtc_state->su_y_granularity.
 */
static bool psr2_granularity_check(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
	u16 y_granularity = 0;

	/* PSR2 HW only send full lines so we only need to validate the width */
	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
		return false;

	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
		return false;

	/* HW tracking is only aligned to 4 lines */
	if (!crtc_state->enable_psr2_sel_fetch)
		return intel_dp->psr.su_y_granularity == 4;

	/*
	 * adl_p and mtl platforms have 1 line granularity.
	 * For other platforms with SW tracking we can adjust the y coordinates
	 * to match sink requirement if multiple of 4.
	 */
	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
		y_granularity = intel_dp->psr.su_y_granularity;
	else if (intel_dp->psr.su_y_granularity <= 2)
		y_granularity = 4;
	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
		y_granularity = intel_dp->psr.su_y_granularity;

	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
		return false;

	/* With DSC, SU regions must also align to slice boundaries */
	if (crtc_state->dsc.compression_enable &&
	    vdsc_cfg->slice_height % y_granularity)
		return false;

	crtc_state->su_y_granularity = y_granularity;
	return true;
}
1355 
/*
 * Check whether the PSR2 SU SDP fits into hblank; if not, fall back to
 * sending the SDP one scanline earlier (req_psr2_sdp_prior_scanline),
 * which requires display 14+ and an eDP 1.4b+ sink.
 */
static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
							struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
	u32 hblank_total, hblank_ns, req_ns;

	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);

	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);

	/* SDP fits in hblank with 100ns margin: no early indication needed */
	if ((hblank_ns - req_ns) > 100)
		return true;

	/* Not supported <13 / Wa_22012279113:adl-p */
	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
		return false;

	crtc_state->req_psr2_sdp_prior_scanline = true;
	return true;
}
1379 
/*
 * Number of full frames needed for PSR entry setup, derived from the
 * sink's PSR setup time versus the mode's vblank length.
 *
 * Returns the entry setup frame count (>= 0) or -ETIME when the sink's
 * setup time cannot be accommodated.
 */
static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
					const struct drm_display_mode *adjusted_mode)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
	int entry_setup_frames = 0;

	if (psr_setup_time < 0) {
		drm_dbg_kms(display->drm,
			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
			    intel_dp->psr_dpcd[1]);
		return -ETIME;
	}

	/* Setup time must fit in vblank minus one line; otherwise use full frames */
	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
		if (DISPLAY_VER(display) >= 20) {
			/* setup entry frames can be up to 3 frames */
			entry_setup_frames = 1;
			drm_dbg_kms(display->drm,
				    "PSR setup entry frames %d\n",
				    entry_setup_frames);
		} else {
			drm_dbg_kms(display->drm,
				    "PSR condition failed: PSR setup time (%d us) too long\n",
				    psr_setup_time);
			return -ETIME;
		}
	}

	return entry_setup_frames;
}
1412 
/*
 * Minimum TRANS_SET_CONTEXT_LATENCY (delayed vblank lines) required by
 * PSR: 0 when no extra latency is needed, 1 when a workaround or the
 * SRD_STATUS state machine requires the delayed vblank to trail the
 * non-delayed one.
 */
static
int _intel_psr_min_set_context_latency(const struct intel_crtc_state *crtc_state,
				       bool needs_panel_replay,
				       bool needs_sel_update)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (!crtc_state->has_psr)
		return 0;

	/* Wa_14015401596 */
	if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14))
		return 1;

	/* Rest is for SRD_STATUS needed on LunarLake and onwards */
	if (DISPLAY_VER(display) < 20)
		return 0;

	/*
	 * Comment on SRD_STATUS register in Bspec for LunarLake and onwards:
	 *
	 * To deterministically capture the transition of the state machine
	 * going from SRDOFFACK to IDLE, the delayed V. Blank should be at least
	 * one line after the non-delayed V. Blank.
	 *
	 * Legacy TG: TRANS_SET_CONTEXT_LATENCY > 0
	 * VRR TG: TRANS_VRR_CTL[ VRR Guardband ] < (TRANS_VRR_VMAX[ VRR Vmax ]
	 * - TRANS_VTOTAL[ Vertical Active ])
	 *
	 * SRD_STATUS is used only by PSR1 on PantherLake.
	 * SRD_STATUS is used by PSR1 and Panel Replay DP on LunarLake.
	 */

	if (DISPLAY_VER(display) >= 30 && (needs_panel_replay ||
					   needs_sel_update))
		return 0;
	else if (DISPLAY_VER(display) < 30 && (needs_sel_update ||
					       intel_crtc_has_type(crtc_state,
								   INTEL_OUTPUT_EDP)))
		return 0;
	else
		return 1;
}
1456 
1457 static bool _wake_lines_fit_into_vblank(const struct intel_crtc_state *crtc_state,
1458 					int vblank,
1459 					int wake_lines)
1460 {
1461 	if (crtc_state->req_psr2_sdp_prior_scanline)
1462 		vblank -= 1;
1463 
1464 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1465 	if (vblank < wake_lines)
1466 		return false;
1467 
1468 	return true;
1469 }
1470 
/*
 * Check whether the required wake lines (AUX-less ALPM wake lines for
 * Panel Replay on eDP, otherwise IO/fast wake lines) fit into the mode's
 * vblank, after reserving the minimum set-context-latency lines.
 */
static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state,
				       bool aux_less,
				       bool needs_panel_replay,
				       bool needs_sel_update)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
		crtc_state->hw.adjusted_mode.crtc_vblank_start;
	int wake_lines;
	int scl = _intel_psr_min_set_context_latency(crtc_state,
						     needs_panel_replay,
						     needs_sel_update);
	vblank -= scl;

	if (aux_less)
		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
	else
		/* Pre-LNL HW uses the block count rather than the raw wake lines */
		wake_lines = DISPLAY_VER(display) < 20 ?
			psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
					       crtc_state->alpm_state.fast_wake_lines) :
			crtc_state->alpm_state.io_wake_lines;

	/*
	 * Guardband has not been computed yet, so we conservatively check if the
	 * full vblank duration is sufficient to accommodate wake line requirements
	 * for PSR features like Panel Replay and Selective Update.
	 *
	 * Once the actual guardband is available, a more accurate validation is
	 * performed in intel_psr_compute_config_late(), and PSR features are
	 * disabled if wake lines exceed the available guardband.
	 */
	return _wake_lines_fit_into_vblank(crtc_state, vblank, wake_lines);
}
1505 
/*
 * Validate the ALPM configuration for PSR2/Panel Replay: the wake time
 * parameters must be computable and the resulting wake lines must fit
 * into the mode's vblank.
 */
static bool alpm_config_valid(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state,
			      bool aux_less,
			      bool needs_panel_replay,
			      bool needs_sel_update)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
		drm_dbg_kms(display->drm,
			    "PSR2/Panel Replay  not enabled, Unable to use long enough wake times\n");
		return false;
	}

	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less,
					needs_panel_replay, needs_sel_update)) {
		drm_dbg_kms(display->drm,
			    "PSR2/Panel Replay not enabled, too short vblank time\n");
		return false;
	}

	return true;
}
1529 
/*
 * Validate whether PSR2 can be enabled for this crtc state: sink support,
 * platform/stepping restrictions, transcoder support, DSC interaction,
 * per-generation resolution and bpp limits, VRR workarounds and the ALPM
 * configuration. Also computes the DC3CO exitline on success.
 */
static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;

	/* enable_psr == 1 limits the driver to PSR1 */
	if (!intel_dp->psr.sink_psr2_support || display->params.enable_psr == 1)
		return false;

	/* JSL and EHL only supports eDP 1.3 */
	if (display->platform.jasperlake || display->platform.elkhartlake) {
		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
		return false;
	}

	/* Wa_16011181250 */
	if (display->platform.rocketlake || display->platform.alderlake_s ||
	    display->platform.dg2) {
		drm_dbg_kms(display->drm,
			    "PSR2 is defeatured for this platform\n");
		return false;
	}

	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
		drm_dbg_kms(display->drm,
			    "PSR2 not completely functional in this stepping\n");
		return false;
	}

	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
		drm_dbg_kms(display->drm,
			    "PSR2 not supported in transcoder %s\n",
			    transcoder_name(crtc_state->cpu_transcoder));
		return false;
	}

	/*
	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
	 * resolution requires DSC to be enabled, priority is given to DSC
	 * over PSR2.
	 */
	if (crtc_state->dsc.compression_enable &&
	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
		drm_dbg_kms(display->drm,
			    "PSR2 cannot be enabled since DSC is enabled\n");
		return false;
	}

	/* Per-generation resolution and bpp limits */
	if (DISPLAY_VER(display) >= 20) {
		psr_max_h = crtc_hdisplay;
		psr_max_v = crtc_vdisplay;
		max_bpp = crtc_state->pipe_bpp;
	} else if (IS_DISPLAY_VER(display, 12, 14)) {
		psr_max_h = 5120;
		psr_max_v = 3200;
		max_bpp = 30;
	} else if (IS_DISPLAY_VER(display, 10, 11)) {
		psr_max_h = 4096;
		psr_max_v = 2304;
		max_bpp = 24;
	} else if (DISPLAY_VER(display) == 9) {
		psr_max_h = 3640;
		psr_max_v = 2304;
		max_bpp = 24;
	}

	if (crtc_state->pipe_bpp > max_bpp) {
		drm_dbg_kms(display->drm,
			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
			    crtc_state->pipe_bpp, max_bpp);
		return false;
	}

	/* Wa_16011303918:adl-p */
	if (crtc_state->vrr.enable &&
	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
		drm_dbg_kms(display->drm,
			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
		return false;
	}

	if (!alpm_config_valid(intel_dp, crtc_state, false, false, true))
		return false;

	/* HW tracking (no selective fetch) enforces the resolution limits */
	if (!crtc_state->enable_psr2_sel_fetch &&
	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
		drm_dbg_kms(display->drm,
			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
			    crtc_hdisplay, crtc_vdisplay,
			    psr_max_h, psr_max_v);
		return false;
	}

	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);

	return true;
}
1629 
/*
 * Validate whether selective update (PSR2 SU, or Panel Replay SU) can be
 * enabled for this crtc state. On failure, clears enable_psr2_sel_fetch
 * which may have been set along the way.
 */
static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
					  struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (HAS_PSR2_SEL_FETCH(display) &&
	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
	    !HAS_PSR_HW_TRACKING(display)) {
		drm_dbg_kms(display->drm,
			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
		goto unsupported;
	}

	if (!sel_update_global_enabled(intel_dp)) {
		drm_dbg_kms(display->drm,
			    "Selective update disabled by flag\n");
		goto unsupported;
	}

	/* Panel Replay has its own validity checks below */
	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
		goto unsupported;

	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
		drm_dbg_kms(display->drm,
			    "Selective update not enabled, SDP indication do not fit in hblank\n");
		goto unsupported;
	}

	if (crtc_state->has_panel_replay) {
		if (DISPLAY_VER(display) < 14)
			goto unsupported;

		if (!intel_dp->psr.sink_panel_replay_su_support)
			goto unsupported;

		if (intel_dsc_enabled_on_link(crtc_state) &&
		    intel_dp->psr.sink_panel_replay_dsc_support !=
		    INTEL_DP_PANEL_REPLAY_DSC_SELECTIVE_UPDATE) {
			drm_dbg_kms(display->drm,
				    "Selective update with Panel Replay not enabled because it's not supported with DSC\n");
			goto unsupported;
		}
	}

	if (crtc_state->crc_enabled) {
		drm_dbg_kms(display->drm,
			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
		goto unsupported;
	}

	if (!psr2_granularity_check(intel_dp, crtc_state)) {
		drm_dbg_kms(display->drm,
			    "Selective update not enabled, SU granularity not compatible\n");
		goto unsupported;
	}

	crtc_state->enable_psr2_su_region_et =
		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);

	return true;

unsupported:
	crtc_state->enable_psr2_sel_fetch = false;
	return false;
}
1695 
/*
 * Validate whether (basic) PSR can be enabled for this crtc state and, on
 * success, store the computed entry setup frame count in intel_dp->psr.
 */
static bool _psr_compute_config(struct intel_dp *intel_dp,
				struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int entry_setup_frames;

	if (!CAN_PSR(intel_dp) || !display->params.enable_psr)
		return false;

	/*
	 * Currently PSR doesn't work reliably with VRR enabled.
	 */
	if (crtc_state->vrr.enable)
		return false;

	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);

	if (entry_setup_frames >= 0) {
		intel_dp->psr.entry_setup_frames = entry_setup_frames;
	} else {
		crtc_state->no_psr_reason = "PSR setup timing not met";
		drm_dbg_kms(display->drm,
			    "PSR condition failed: PSR setup timing not met\n");
		return false;
	}

	return true;
}
1725 
/*
 * Validate whether Panel Replay can be enabled for this crtc state:
 * sink/global enablement, CRC and DSC interactions, and for eDP the
 * additional pipe, link-rate, HDCP and ALPM restrictions.
 */
static bool
_panel_replay_compute_config(struct intel_dp *intel_dp,
			     struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_hdcp *hdcp = &connector->hdcp;

	if (!CAN_PANEL_REPLAY(intel_dp))
		return false;

	if (!panel_replay_global_enabled(intel_dp)) {
		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
		return false;
	}

	if (crtc_state->crc_enabled) {
		drm_dbg_kms(display->drm,
			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
		return false;
	}

	if (intel_dsc_enabled_on_link(crtc_state) &&
	    intel_dp->psr.sink_panel_replay_dsc_support ==
	    INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED) {
		drm_dbg_kms(display->drm,
			    "Panel Replay not enabled because it's not supported with DSC\n");
		return false;
	}

	if (!intel_dp_is_edp(intel_dp))
		return true;

	/* Remaining checks are for eDP only */

	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
		return false;

	/* 128b/132b Panel Replay is not supported on eDP */
	if (intel_dp_is_uhbr(crtc_state)) {
		drm_dbg_kms(display->drm,
			    "Panel Replay is not supported with 128b/132b\n");
		return false;
	}

	/* HW will not allow Panel Replay on eDP when HDCP enabled */
	if (conn_state->content_protection ==
	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
	    (conn_state->content_protection ==
	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
		drm_dbg_kms(display->drm,
			    "Panel Replay is not supported with HDCP\n");
		return false;
	}

	/* eDP Panel Replay uses AUX-less ALPM */
	if (!alpm_config_valid(intel_dp, crtc_state, true, true, false))
		return false;

	return true;
}
1790 
1791 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1792 					   struct intel_crtc_state *crtc_state)
1793 {
1794 	struct intel_display *display = to_intel_display(intel_dp);
1795 
1796 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1797 		!crtc_state->has_sel_update);
1798 }
1799 
/*
 * Record which other pipes are active alongside the PSR pipe into
 * crtc_state->active_non_psr_pipes; needed for Wa_16025596647.
 */
static
void intel_psr_set_non_psr_pipes(struct intel_dp *intel_dp,
				 struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
	struct intel_crtc *crtc;
	u8 active_pipes = 0;

	/* Wa_16025596647: only display ver 20 and 30.00 A-step are affected */
	if (DISPLAY_VER(display) != 20 &&
	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
		return;

	/* Not needed by Panel Replay */
	if (crtc_state->has_panel_replay)
		return;

	/* We ignore possible secondary PSR/Panel Replay capable eDP */
	for_each_intel_crtc(display->drm, crtc)
		active_pipes |= crtc->active ? BIT(crtc->pipe) : 0;

	/* Fold in pipes being enabled/disabled by this atomic state */
	active_pipes = intel_calc_active_pipes(state, active_pipes);

	/* All active pipes except the one driving PSR */
	crtc_state->active_non_psr_pipes = active_pipes &
		~BIT(to_intel_crtc(crtc_state->uapi.crtc)->pipe);
}
1827 
/**
 * intel_psr_compute_config - Compute PSR/Panel Replay state for a CRTC
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state being computed
 * @conn_state: connector state
 *
 * Checks global enable flags, sink reliability and mode restrictions,
 * then fills crtc_state->has_panel_replay, ->has_psr and
 * ->has_sel_update. Leaves all of them unset when PSR cannot be used.
 */
void intel_psr_compute_config(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state,
			      struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!psr_global_enabled(intel_dp)) {
		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
		return;
	}

	if (intel_dp->psr.sink_not_reliable) {
		drm_dbg_kms(display->drm,
			    "PSR sink implementation is not reliable\n");
		return;
	}

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		drm_dbg_kms(display->drm,
			    "PSR condition failed: Interlaced mode enabled\n");
		return;
	}

	/*
	 * FIXME figure out what is wrong with PSR+joiner and
	 * fix it. Presumably something related to the fact that
	 * PSR is a transcoder level feature.
	 */
	if (crtc_state->joiner_pipes) {
		drm_dbg_kms(display->drm,
			    "PSR disabled due to joiner\n");
		return;
	}

	/* Only used for state verification. */
	crtc_state->panel_replay_dsc_support = intel_dp->psr.sink_panel_replay_dsc_support;
	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
								    crtc_state,
								    conn_state);

	/* Panel Replay implies has_psr; otherwise try plain PSR */
	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
		_psr_compute_config(intel_dp, crtc_state);

	if (!crtc_state->has_psr)
		return;

	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
}
1877 
/**
 * intel_psr_get_config - Read out current PSR state into a CRTC state
 * @encoder: the encoder to read from
 * @pipe_config: CRTC state to fill for state verification
 *
 * Fills has_psr/has_panel_replay/has_sel_update and related fields
 * from the driver's software tracking and (where readable) HW
 * registers, under the PSR lock.
 */
void intel_psr_get_config(struct intel_encoder *encoder,
			  struct intel_crtc_state *pipe_config)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	struct intel_dp *intel_dp;
	u32 val;

	if (!dig_port)
		return;

	intel_dp = &dig_port->dp;
	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
		return;

	mutex_lock(&intel_dp->psr.lock);
	if (!intel_dp->psr.enabled)
		goto unlock;

	if (intel_dp->psr.panel_replay_enabled) {
		pipe_config->has_psr = pipe_config->has_panel_replay = true;
	} else {
		/*
		 * Not possible to read EDP_PSR/PSR2_CTL registers as it is
		 * enabled/disabled because of frontbuffer tracking and others.
		 */
		pipe_config->has_psr = true;
	}

	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);

	if (!intel_dp->psr.sel_update_enabled)
		goto unlock;

	/* Selective fetch state is readable from the manual tracking register */
	if (HAS_PSR2_SEL_FETCH(display)) {
		val = intel_de_read(display,
				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
		if (val & PSR2_MAN_TRK_CTL_ENABLE)
			pipe_config->enable_psr2_sel_fetch = true;
	}

	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;

	/* DC3CO exitline is HW-readable on display 12+ */
	if (DISPLAY_VER(display) >= 12) {
		val = intel_de_read(display,
				    TRANS_EXITLINE(display, cpu_transcoder));
		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
	}
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}
1931 
/*
 * Activate the already-configured PSR1/PSR2/Panel Replay HW for the
 * current transcoder. Caller must hold psr.lock and have psr.enabled set.
 */
static void intel_psr_activate(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;

	/* Sanity: HW enable bits must be clear before activation */
	drm_WARN_ON(display->drm,
		    transcoder_has_psr2(display, cpu_transcoder) &&
		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);

	drm_WARN_ON(display->drm,
		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);

	drm_WARN_ON(display->drm, intel_dp->psr.active);

	drm_WARN_ON(display->drm, !intel_dp->psr.enabled);

	lockdep_assert_held(&intel_dp->psr.lock);

	/* psr1, psr2 and panel-replay are mutually exclusive.*/
	if (intel_dp->psr.panel_replay_enabled)
		dg2_activate_panel_replay(intel_dp);
	else if (intel_dp->psr.sel_update_enabled)
		hsw_activate_psr2(intel_dp);
	else
		hsw_activate_psr1(intel_dp);

	intel_dp->psr.active = true;
	intel_dp->psr.no_psr_reason = NULL;
}
1961 
1962 /*
1963  * Wa_16013835468
1964  * Wa_14015648006
1965  */
1966 static void wm_optimization_wa(struct intel_dp *intel_dp,
1967 			       const struct intel_crtc_state *crtc_state)
1968 {
1969 	struct intel_display *display = to_intel_display(intel_dp);
1970 	enum pipe pipe = intel_dp->psr.pipe;
1971 	bool activate = false;
1972 
1973 	/* Wa_14015648006 */
1974 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1975 		activate = true;
1976 
1977 	/* Wa_16013835468 */
1978 	if (DISPLAY_VER(display) == 12 &&
1979 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1980 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1981 		activate = true;
1982 
1983 	if (activate)
1984 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1985 			     0, LATENCY_REPORTING_REMOVED(pipe));
1986 	else
1987 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1988 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1989 }
1990 
/*
 * Program the source-side (display HW) state needed before activating
 * PSR/Panel Replay: AUX setup (HSW/BDW), debug masks, IRQs, DC3CO
 * exitline, selective-fetch chicken bits and assorted workarounds.
 */
static void intel_psr_enable_source(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	u32 mask = 0;

	/*
	 * Only HSW and BDW have PSR AUX registers that need to be setup.
	 * SKL+ use hardcoded values PSR AUX transactions
	 */
	if (DISPLAY_VER(display) < 9)
		hsw_psr_setup_aux(intel_dp);

	/*
	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD also
	 * mask LPSP to avoid dependency on other drivers that might block
	 * runtime_pm besides preventing other hw tracking issues now we
	 * can rely on frontbuffer tracking.
	 *
	 * From bspec prior LunarLake:
	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
	 * panel replay mode.
	 *
	 * From bspec beyond LunarLake:
	 * Panel Replay on DP: No bits are applicable
	 * Panel Replay on eDP: All bits are applicable
	 */
	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
		mask = EDP_PSR_DEBUG_MASK_HPD;

	if (intel_dp_is_edp(intel_dp)) {
		mask |= EDP_PSR_DEBUG_MASK_MEMUP;

		/*
		 * For some unknown reason on HSW non-ULT (or at least on
		 * Dell Latitude E6540) external displays start to flicker
		 * when PSR is enabled on the eDP. SR/PC6 residency is much
		 * higher than should be possible with an external display.
		 * As a workaround leave LPSP unmasked to prevent PSR entry
		 * when external displays are active.
		 */
		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
			mask |= EDP_PSR_DEBUG_MASK_LPSP;

		if (DISPLAY_VER(display) < 20)
			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;

		/*
		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
		 * registers in order to keep the CURSURFLIVE tricks working :(
		 */
		if (IS_DISPLAY_VER(display, 9, 10))
			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;

		/* allow PSR with sprite enabled */
		if (display->platform.haswell)
			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
	}

	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);

	psr_irq_control(intel_dp);

	/*
	 * TODO: if future platforms supports DC3CO in more than one
	 * transcoder, EXITLINE will need to be unset when disabling PSR
	 */
	if (intel_dp->psr.dc3co_exitline)
		intel_de_rmw(display,
			     TRANS_EXITLINE(display, cpu_transcoder),
			     EXITLINE_MASK,
			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);

	/* Select between HW tracking and selective fetch (SW tracking) */
	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
			     intel_dp->psr.psr2_sel_fetch_enabled ?
			     IGNORE_PSR2_HW_TRACKING : 0);

	/*
	 * Wa_16013835468
	 * Wa_14015648006
	 */
	wm_optimization_wa(intel_dp, crtc_state);

	if (intel_dp->psr.sel_update_enabled) {
		if (DISPLAY_VER(display) == 9)
			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
				     PSR2_VSC_ENABLE_PROG_HEADER |
				     PSR2_ADD_VERTICAL_LINE_COUNT);

		/*
		 * Wa_16014451276:adlp,mtl[a0,b0]
		 * All supported adlp panels have 1-based X granularity, this may
		 * cause issues if non-supported panels are used.
		 */
		if (!intel_dp->psr.panel_replay_enabled &&
		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
		     display->platform.alderlake_p))
			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
				     0, ADLP_1_BASED_X_GRANULARITY);

		/* Wa_16012604467:adlp,mtl[a0,b0] */
		if (!intel_dp->psr.panel_replay_enabled &&
		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
			intel_de_rmw(display,
				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
				     0,
				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
		else if (display->platform.alderlake_p)
			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
	}

	/* Wa_16025596647 */
	if ((DISPLAY_VER(display) == 20 ||
	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
	    !intel_dp->psr.panel_replay_enabled)
		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true);

	intel_alpm_configure(intel_dp, crtc_state);
}
2113 
2114 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
2115 {
2116 	struct intel_display *display = to_intel_display(intel_dp);
2117 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2118 	u32 val;
2119 
2120 	if (intel_dp->psr.panel_replay_enabled)
2121 		goto no_err;
2122 
2123 	/*
2124 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
2125 	 * will still keep the error set even after the reset done in the
2126 	 * irq_preinstall and irq_uninstall hooks.
2127 	 * And enabling in this situation cause the screen to freeze in the
2128 	 * first time that PSR HW tries to activate so lets keep PSR disabled
2129 	 * to avoid any rendering problems.
2130 	 */
2131 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
2132 	val &= psr_irq_psr_error_bit_get(intel_dp);
2133 	if (val) {
2134 		intel_dp->psr.sink_not_reliable = true;
2135 		drm_dbg_kms(display->drm,
2136 			    "PSR interruption error set, not enabling PSR\n");
2137 		return false;
2138 	}
2139 
2140 no_err:
2141 	return true;
2142 }
2143 
/*
 * Copy the computed crtc_state into the driver's PSR tracking, enable
 * the sink side, then the source side, and finally activate PSR.
 * Caller must hold psr.lock.
 */
static void intel_psr_enable_locked(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u32 val;

	drm_WARN_ON(display->drm, intel_dp->psr.enabled);

	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
	intel_dp->psr.busy_frontbuffer_bits = 0;
	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
	/* DC5/DC6 requires at least 6 idle frames */
	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
	intel_dp->psr.dc3co_exit_delay = val;
	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
	intel_dp->psr.req_psr2_sdp_prior_scanline =
		crtc_state->req_psr2_sdp_prior_scanline;
	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
	intel_dp->psr.pkg_c_latency_used = crtc_state->pkg_c_latency_used;
	intel_dp->psr.io_wake_lines = crtc_state->alpm_state.io_wake_lines;
	intel_dp->psr.fast_wake_lines = crtc_state->alpm_state.fast_wake_lines;

	/* Bail out if a sticky PSR error interrupt is pending */
	if (!psr_interrupt_error_check(intel_dp))
		return;

	if (intel_dp->psr.panel_replay_enabled)
		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
	else
		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
			    intel_dp->psr.sel_update_enabled ? "2" : "1");

	/*
	 * Enabling sink PSR/Panel Replay here only for PSR. Panel Replay enable
	 * bit is already written at this point. Sink ALPM is enabled here for
	 * PSR and Panel Replay. See
	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
	 *  - Selective Update
	 *  - Region Early Transport
	 *  - Selective Update Region Scanline Capture
	 *  - VSC_SDP_CRC
	 *  - HPD on different Errors
	 *  - CRC verification
	 * are written for PSR and Panel Replay here.
	 */
	intel_psr_enable_sink(intel_dp, crtc_state);

	if (intel_dp_is_edp(intel_dp))
		intel_snps_phy_update_psr_power_state(&dig_port->base, true);

	intel_psr_enable_source(intel_dp, crtc_state);
	intel_dp->psr.enabled = true;
	intel_dp->psr.pause_counter = 0;

	/*
	 * Link_ok is sticky and set here on PSR enable. We can assume link
	 * training is complete as we never continue to PSR enable with
	 * untrained link. Link_ok is kept as set until first short pulse
	 * interrupt. This is targeted to workaround panels stating bad link
	 * after PSR is enabled.
	 */
	intel_dp->psr.link_ok = true;

	intel_psr_activate(intel_dp);
}
2214 
/*
 * Deactivate the currently running PSR1/PSR2/Panel Replay HW. When PSR
 * is not active, only sanity-check that the HW enable bits are clear.
 * Does not touch the sink or the rest of the SW state.
 */
static void intel_psr_exit(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	u32 val;

	if (!intel_dp->psr.active) {
		/* Warn if HW disagrees with the SW 'inactive' state */
		if (transcoder_has_psr2(display, cpu_transcoder)) {
			val = intel_de_read(display,
					    EDP_PSR2_CTL(display, cpu_transcoder));
			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
		}

		val = intel_de_read(display,
				    psr_ctl_reg(display, cpu_transcoder));
		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);

		return;
	}

	if (intel_dp->psr.panel_replay_enabled) {
		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
	} else if (intel_dp->psr.sel_update_enabled) {
		tgl_disallow_dc3co_on_psr2_exit(intel_dp);

		val = intel_de_rmw(display,
				   EDP_PSR2_CTL(display, cpu_transcoder),
				   EDP_PSR2_ENABLE, 0);

		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
	} else {
		/* Stop DMC PkgC-exit-at-vblank handling before PSR1 disable */
		if ((DISPLAY_VER(display) == 20 ||
		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
			intel_dp->psr.pkg_c_latency_used)
			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
								       intel_dp->psr.pipe,
								       false);

		val = intel_de_rmw(display,
				   psr_ctl_reg(display, cpu_transcoder),
				   EDP_PSR_ENABLE, 0);

		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
	}
	intel_dp->psr.active = false;
}
2262 
2263 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2264 {
2265 	struct intel_display *display = to_intel_display(intel_dp);
2266 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2267 	i915_reg_t psr_status;
2268 	u32 psr_status_mask;
2269 
2270 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2271 					  intel_dp->psr.panel_replay_enabled)) {
2272 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2273 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2274 	} else {
2275 		psr_status = psr_status_reg(display, cpu_transcoder);
2276 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2277 	}
2278 
2279 	/* Wait till PSR is idle */
2280 	if (intel_de_wait_for_clear_ms(display, psr_status,
2281 				       psr_status_mask, 2000))
2282 		drm_err(display->drm, "Timed out waiting PSR idle state\n");
2283 }
2284 
/*
 * Fully tear down PSR/Panel Replay: deactivate HW, undo workarounds,
 * disable the sink side and reset the driver's tracking state.
 * Caller must hold psr.lock.
 */
static void intel_psr_disable_locked(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;

	lockdep_assert_held(&intel_dp->psr.lock);

	if (!intel_dp->psr.enabled)
		return;

	if (intel_dp->psr.panel_replay_enabled)
		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
	else
		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
			    intel_dp->psr.sel_update_enabled ? "2" : "1");

	intel_psr_exit(intel_dp);
	intel_psr_wait_exit_locked(intel_dp);

	/*
	 * Wa_16013835468
	 * Wa_14015648006
	 */
	if (DISPLAY_VER(display) >= 11)
		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);

	if (intel_dp->psr.sel_update_enabled) {
		/* Wa_16012604467:adlp,mtl[a0,b0] */
		if (!intel_dp->psr.panel_replay_enabled &&
		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
			intel_de_rmw(display,
				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
		else if (display->platform.alderlake_p)
			intel_de_rmw(display, CLKGATE_DIS_MISC,
				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
	}

	if (intel_dp_is_edp(intel_dp))
		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);

	/* ALPM was enabled for eDP Panel Replay; disable it again */
	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp))
		intel_alpm_disable(intel_dp);

	/* Disable PSR on Sink */
	if (!intel_dp->psr.panel_replay_enabled) {
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);

		if (intel_dp->psr.sel_update_enabled)
			drm_dp_dpcd_writeb(&intel_dp->aux,
					   DP_RECEIVER_ALPM_CONFIG, 0);
	}

	/* Wa_16025596647 */
	if ((DISPLAY_VER(display) == 20 ||
	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
	    !intel_dp->psr.panel_replay_enabled)
		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false);

	/* Reset SW tracking */
	intel_dp->psr.enabled = false;
	intel_dp->psr.panel_replay_enabled = false;
	intel_dp->psr.sel_update_enabled = false;
	intel_dp->psr.psr2_sel_fetch_enabled = false;
	intel_dp->psr.su_region_et_enabled = false;
	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
	intel_dp->psr.active_non_psr_pipes = 0;
	intel_dp->psr.pkg_c_latency_used = 0;
}
2354 
2355 /**
2356  * intel_psr_disable - Disable PSR
2357  * @intel_dp: Intel DP
2358  * @old_crtc_state: old CRTC state
2359  *
2360  * This function needs to be called before disabling pipe.
2361  */
2362 void intel_psr_disable(struct intel_dp *intel_dp,
2363 		       const struct intel_crtc_state *old_crtc_state)
2364 {
2365 	struct intel_display *display = to_intel_display(intel_dp);
2366 
2367 	if (!old_crtc_state->has_psr)
2368 		return;
2369 
2370 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2371 			!CAN_PANEL_REPLAY(intel_dp)))
2372 		return;
2373 
2374 	mutex_lock(&intel_dp->psr.lock);
2375 
2376 	intel_psr_disable_locked(intel_dp);
2377 
2378 	intel_dp->psr.link_ok = false;
2379 
2380 	mutex_unlock(&intel_dp->psr.lock);
2381 	cancel_work_sync(&intel_dp->psr.work);
2382 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2383 }
2384 
2385 /**
2386  * intel_psr_pause - Pause PSR
2387  * @intel_dp: Intel DP
2388  *
2389  * This function need to be called after enabling psr.
2390  */
2391 void intel_psr_pause(struct intel_dp *intel_dp)
2392 {
2393 	struct intel_psr *psr = &intel_dp->psr;
2394 
2395 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2396 		return;
2397 
2398 	mutex_lock(&psr->lock);
2399 
2400 	if (!psr->enabled) {
2401 		mutex_unlock(&psr->lock);
2402 		return;
2403 	}
2404 
2405 	if (intel_dp->psr.pause_counter++ == 0) {
2406 		intel_psr_exit(intel_dp);
2407 		intel_psr_wait_exit_locked(intel_dp);
2408 	}
2409 
2410 	mutex_unlock(&psr->lock);
2411 
2412 	cancel_work_sync(&psr->work);
2413 	cancel_delayed_work_sync(&psr->dc3co_work);
2414 }
2415 
2416 /**
2417  * intel_psr_resume - Resume PSR
2418  * @intel_dp: Intel DP
2419  *
2420  * This function need to be called after pausing psr.
2421  */
2422 void intel_psr_resume(struct intel_dp *intel_dp)
2423 {
2424 	struct intel_display *display = to_intel_display(intel_dp);
2425 	struct intel_psr *psr = &intel_dp->psr;
2426 
2427 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2428 		return;
2429 
2430 	mutex_lock(&psr->lock);
2431 
2432 	if (!psr->enabled)
2433 		goto out;
2434 
2435 	if (!psr->pause_counter) {
2436 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2437 		goto out;
2438 	}
2439 
2440 	if (--intel_dp->psr.pause_counter == 0)
2441 		intel_psr_activate(intel_dp);
2442 
2443 out:
2444 	mutex_unlock(&psr->lock);
2445 }
2446 
2447 /**
2448  * intel_psr_needs_vblank_notification - Check if PSR need vblank enable/disable
2449  * notification.
2450  * @crtc_state: CRTC status
2451  *
2452  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2453  * prevent it in case of Panel Replay. Panel Replay switches main link off on
2454  * DC entry. This means vblank interrupts are not fired and is a problem if
2455  * user-space is polling for vblank events. Also Wa_16025596647 needs
2456  * information when vblank is enabled/disabled.
2457  */
2458 bool intel_psr_needs_vblank_notification(const struct intel_crtc_state *crtc_state)
2459 {
2460 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2461 	struct intel_display *display = to_intel_display(crtc_state);
2462 	struct intel_encoder *encoder;
2463 
2464 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2465 		struct intel_dp *intel_dp;
2466 
2467 		if (!intel_encoder_is_dp(encoder))
2468 			continue;
2469 
2470 		intel_dp = enc_to_intel_dp(encoder);
2471 
2472 		if (!intel_dp_is_edp(intel_dp))
2473 			continue;
2474 
2475 		if (CAN_PANEL_REPLAY(intel_dp))
2476 			return true;
2477 
2478 		if ((DISPLAY_VER(display) == 20 ||
2479 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2480 		    CAN_PSR(intel_dp))
2481 			return true;
2482 	}
2483 
2484 	return false;
2485 }
2486 
2487 /**
2488  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2489  * @dsb: DSB context
2490  * @state: the atomic state
2491  * @crtc: the CRTC
2492  *
2493  * Generate PSR "Frame Change" event.
2494  */
2495 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2496 					  struct intel_atomic_state *state,
2497 					  struct intel_crtc *crtc)
2498 {
2499 	const struct intel_crtc_state *crtc_state =
2500 		intel_pre_commit_crtc_state(state, crtc);
2501 	struct intel_display *display = to_intel_display(crtc);
2502 
2503 	if (crtc_state->has_psr)
2504 		intel_de_write_dsb(display, dsb,
2505 				   CURSURFLIVE(display, crtc->pipe), 0);
2506 }
2507 
2508 /**
2509  * intel_psr_min_set_context_latency - Minimum 'set context latency' lines needed by PSR
2510  * @crtc_state: the crtc state
2511  *
2512  * Return minimum SCL lines/delay needed by PSR.
2513  */
2514 int intel_psr_min_set_context_latency(const struct intel_crtc_state *crtc_state)
2515 {
2516 
2517 	return _intel_psr_min_set_context_latency(crtc_state,
2518 						  crtc_state->has_panel_replay,
2519 						  crtc_state->has_sel_update);
2520 }
2521 
2522 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2523 {
2524 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2525 		PSR2_MAN_TRK_CTL_ENABLE;
2526 }
2527 
2528 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2529 {
2530 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2531 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2532 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2533 }
2534 
2535 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2536 {
2537 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2538 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2539 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2540 }
2541 
2542 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2543 {
2544 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2545 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2546 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2547 }
2548 
static void intel_psr_force_update(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Display WA #0884: skl+
	 * This documented WA for bxt can be safely applied
	 * broadly so we can force HW tracking to exit PSR
	 * instead of disabling and re-enabling.
	 * Workaround tells us to write 0 to CUR_SURFLIVE_A,
	 * but it makes more sense to write to the current active
	 * pipe.
	 *
	 * This workaround does not exist for platforms with display 10 or
	 * newer but testing proved that it works up to display 13; for newer
	 * than that testing will be needed.
	 */
	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
}
2568 
/*
 * Write the precomputed PSR2 manual tracking control (and, with early
 * transport, the SU region size) to HW, optionally via a DSB. Skipped
 * pre-display-20 while continuous-full-frame mode is in effect.
 */
void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
					  const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	struct intel_encoder *encoder;

	if (!crtc_state->enable_psr2_sel_fetch)
		return;

	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/* Without a DSB, the caller must already hold psr.lock */
		if (!dsb)
			lockdep_assert_held(&intel_dp->psr.lock);

		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
			return;
		break;
	}

	intel_de_write_dsb(display, dsb,
			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
			   crtc_state->psr2_man_track_ctl);

	if (!crtc_state->enable_psr2_su_region_et)
		return;

	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
			   crtc_state->pipe_srcsz_early_tpt);
}
2602 
/*
 * Compute the PSR2_MAN_TRK_CTL value for this frame from the selective
 * update area (or continuous full frame when full_update is set) and
 * store it in crtc_state->psr2_man_track_ctl.
 */
static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
				  bool full_update)
{
	struct intel_display *display = to_intel_display(crtc_state);
	u32 val = man_trk_ctl_enable_bit_get(display);

	/* SF partial frame enable has to be set even on full update */
	val |= man_trk_ctl_partial_frame_bit_get(display);

	if (full_update) {
		val |= man_trk_ctl_continuos_full_frame(display);
		goto exit;
	}

	/* y1 == -1 means no damage: program no SU region */
	if (crtc_state->psr2_su_area.y1 == -1)
		goto exit;

	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
		/* ADL-P and display 14+ take line-granular start/end addresses */
		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
	} else {
		/* Older HW takes 4-line block addresses; area must be aligned */
		drm_WARN_ON(crtc_state->uapi.crtc->dev,
			    crtc_state->psr2_su_area.y1 % 4 ||
			    crtc_state->psr2_su_area.y2 % 4);

		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
			crtc_state->psr2_su_area.y1 / 4 + 1);
		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
			crtc_state->psr2_su_area.y2 / 4 + 1);
	}
exit:
	crtc_state->psr2_man_track_ctl = val;
}
2636 
2637 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2638 					  bool full_update)
2639 {
2640 	int width, height;
2641 
2642 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2643 		return 0;
2644 
2645 	width = drm_rect_width(&crtc_state->psr2_su_area);
2646 	height = drm_rect_height(&crtc_state->psr2_su_area);
2647 
2648 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2649 }
2650 
2651 static void clip_area_update(struct drm_rect *overlap_damage_area,
2652 			     struct drm_rect *damage_area,
2653 			     struct drm_rect *pipe_src)
2654 {
2655 	if (!drm_rect_intersect(damage_area, pipe_src))
2656 		return;
2657 
2658 	if (overlap_damage_area->y1 == -1) {
2659 		overlap_damage_area->y1 = damage_area->y1;
2660 		overlap_damage_area->y2 = damage_area->y2;
2661 		return;
2662 	}
2663 
2664 	if (damage_area->y1 < overlap_damage_area->y1)
2665 		overlap_damage_area->y1 = damage_area->y1;
2666 
2667 	if (damage_area->y2 > overlap_damage_area->y2)
2668 		overlap_damage_area->y2 = damage_area->y2;
2669 }
2670 
/*
 * Expand the SU area vertically so that both edges land on the
 * required y granularity (VDSC slice height when DSC is enabled on
 * ADL-P/display 14+, panel-reported granularity otherwise).
 */
static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	u16 y_alignment;

	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
	if (crtc_state->dsc.compression_enable &&
	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
		y_alignment = vdsc_cfg->slice_height;
	else
		y_alignment = crtc_state->su_y_granularity;

	/* Round y1 down and y2 up to the alignment boundary */
	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
	if (crtc_state->psr2_su_area.y2 % y_alignment)
		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
						y_alignment) + 1) * y_alignment;
}
2689 
2690 /*
2691  * When early transport is in use we need to extend SU area to cover
2692  * cursor fully when cursor is in SU area.
2693  */
2694 static void
2695 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2696 				  struct intel_crtc *crtc,
2697 				  bool *cursor_in_su_area)
2698 {
2699 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2700 	struct intel_plane_state *new_plane_state;
2701 	struct intel_plane *plane;
2702 	int i;
2703 
2704 	if (!crtc_state->enable_psr2_su_region_et)
2705 		return;
2706 
2707 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2708 		struct drm_rect inter;
2709 
2710 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2711 			continue;
2712 
2713 		if (plane->id != PLANE_CURSOR)
2714 			continue;
2715 
2716 		if (!new_plane_state->uapi.visible)
2717 			continue;
2718 
2719 		inter = crtc_state->psr2_su_area;
2720 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2721 			continue;
2722 
2723 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2724 				 &crtc_state->pipe_src);
2725 		*cursor_in_su_area = true;
2726 	}
2727 }
2728 
2729 /*
2730  * TODO: Not clear how to handle planes with negative position,
2731  * also planes are not updated if they have a negative X
2732  * position so for now doing a full update in this cases
2733  *
2734  * Plane scaling and rotation is not supported by selective fetch and both
2735  * properties can change without a modeset, so need to be check at every
2736  * atomic commit.
2737  */
2738 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2739 {
2740 	if (plane_state->uapi.dst.y1 < 0 ||
2741 	    plane_state->uapi.dst.x1 < 0 ||
2742 	    plane_state->scaler_id >= 0 ||
2743 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2744 		return false;
2745 
2746 	return true;
2747 }
2748 
2749 /*
2750  * Check for pipe properties that is not supported by selective fetch.
2751  *
2752  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2753  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2754  * enabled and going to the full update path.
2755  */
2756 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2757 {
2758 	if (crtc_state->scaler_state.scaler_id >= 0)
2759 		return false;
2760 
2761 	return true;
2762 }
2763 
/* Wa 14019834836 */
static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_encoder *encoder;
	int hactive_limit;

	/* The workaround only applies when the SU region is empty (y1 == y2 == 0) */
	if (crtc_state->psr2_su_area.y1 != 0 ||
	    crtc_state->psr2_su_area.y2 != 0)
		return;

	/* hactive threshold depends on output format and link rate (UHBR or not) */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
	else
		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;

	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
		return;

	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/*
		 * Targets external (non-eDP) Panel Replay sinks with selective
		 * update enabled: make the SU region one line tall instead of
		 * empty.
		 */
		if (!intel_dp_is_edp(intel_dp) &&
		    intel_dp->psr.panel_replay_enabled &&
		    intel_dp->psr.sel_update_enabled) {
			crtc_state->psr2_su_area.y2++;
			return;
		}
	}
}
2795 
/* Apply hardware workarounds that constrain the computed SU area. */
static void
intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	/*
	 * Wa_14014971492: with the PSR2 splitter enabled on TGL, ADL-P or
	 * display 14.00 A0 stepping, force the SU region to start at line 0
	 * (not applicable to panel replay).
	 */
	if (!crtc_state->has_panel_replay &&
	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
	      display->platform.alderlake_p || display->platform.tigerlake)) &&
	    crtc_state->splitter.enable)
		crtc_state->psr2_su_area.y1 = 0;

	/* Wa 14019834836: display version 30 only */
	if (DISPLAY_VER(display) == 30)
		intel_psr_apply_pr_link_on_su_wa(crtc_state);
}
2812 
/**
 * intel_psr2_sel_fetch_update - compute the PSR2 selective fetch state
 * @state: atomic state
 * @crtc: CRTC to compute the selective fetch configuration for
 *
 * Accumulates the damaged areas of all planes on @crtc into one pipe level
 * selective update (SU) region, applies workarounds and alignment
 * constraints to it, and then assigns each visible plane the slice of the
 * SU region it has to fetch. Falls back to a full frame update whenever the
 * pipe or a plane configuration is not supported by selective fetch, or the
 * SU area calculation fails.
 *
 * Returns 0 on success or a negative error code.
 */
int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
				struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_plane_state *new_plane_state, *old_plane_state;
	struct intel_plane *plane;
	bool full_update = false, cursor_in_su_area = false;
	int i, ret;

	if (!crtc_state->enable_psr2_sel_fetch)
		return 0;

	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
		full_update = true;
		goto skip_sel_fetch_set_loop;
	}

	/* Start with an empty SU area; y1/y2 == -1 means "no damage yet" */
	crtc_state->psr2_su_area.x1 = 0;
	crtc_state->psr2_su_area.y1 = -1;
	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
	crtc_state->psr2_su_area.y2 = -1;

	/*
	 * Calculate minimal selective fetch area of each plane and calculate
	 * the pipe damaged area.
	 * In the next loop the plane selective fetch area will actually be set
	 * using whole pipe damaged area.
	 */
	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
					     new_plane_state, i) {
		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
						      .x2 = INT_MAX };

		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
			continue;

		if (!new_plane_state->uapi.visible &&
		    !old_plane_state->uapi.visible)
			continue;

		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
			full_update = true;
			break;
		}

		/*
		 * If visibility or plane moved, mark the whole plane area as
		 * damaged as it needs to be complete redraw in the new and old
		 * position.
		 */
		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
		    !drm_rect_equals(&new_plane_state->uapi.dst,
				     &old_plane_state->uapi.dst)) {
			if (old_plane_state->uapi.visible) {
				damaged_area.y1 = old_plane_state->uapi.dst.y1;
				damaged_area.y2 = old_plane_state->uapi.dst.y2;
				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
						 &crtc_state->pipe_src);
			}

			if (new_plane_state->uapi.visible) {
				damaged_area.y1 = new_plane_state->uapi.dst.y1;
				damaged_area.y2 = new_plane_state->uapi.dst.y2;
				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
						 &crtc_state->pipe_src);
			}
			continue;
		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
			/* If alpha changed mark the whole plane area as damaged */
			damaged_area.y1 = new_plane_state->uapi.dst.y1;
			damaged_area.y2 = new_plane_state->uapi.dst.y2;
			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
					 &crtc_state->pipe_src);
			continue;
		}

		src = drm_plane_state_src(&new_plane_state->uapi);
		drm_rect_fp_to_int(&src, &src);

		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
						     &new_plane_state->uapi, &damaged_area))
			continue;

		/* Translate the merged damage from plane to pipe coordinates */
		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;

		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
	}

	/*
	 * TODO: For now we are just using full update in case
	 * selective fetch area calculation fails. To optimize this we
	 * should identify cases where this happens and fix the area
	 * calculation for those.
	 */
	if (crtc_state->psr2_su_area.y1 == -1) {
		drm_info_once(display->drm,
			      "Selective fetch area calculation failed in pipe %c\n",
			      pipe_name(crtc->pipe));
		full_update = true;
	}

	if (full_update)
		goto skip_sel_fetch_set_loop;

	intel_psr_apply_su_area_workarounds(crtc_state);

	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
	if (ret)
		return ret;

	/*
	 * Adjust su area to cover cursor fully as necessary (early
	 * transport). This needs to be done after
	 * drm_atomic_add_affected_planes to ensure visible cursor is added into
	 * affected planes even when cursor is not updated by itself.
	 */
	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);

	intel_psr2_sel_fetch_pipe_alignment(crtc_state);

	/*
	 * Now that we have the pipe damaged area check if it intersect with
	 * every plane, if it does set the plane selective fetch area.
	 */
	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
					     new_plane_state, i) {
		struct drm_rect *sel_fetch_area, inter;
		struct intel_plane *linked = new_plane_state->planar_linked_plane;

		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
		    !new_plane_state->uapi.visible)
			continue;

		inter = crtc_state->psr2_su_area;
		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
			sel_fetch_area->y1 = -1;
			sel_fetch_area->y2 = -1;
			/*
			 * if plane sel fetch was previously enabled ->
			 * disable it
			 */
			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
				crtc_state->update_planes |= BIT(plane->id);

			continue;
		}

		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
			full_update = true;
			break;
		}

		/* The plane fetch area is relative to the plane's own dst rect */
		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
		crtc_state->update_planes |= BIT(plane->id);

		/*
		 * Sel_fetch_area is calculated for UV plane. Use
		 * same area for Y plane as well.
		 */
		if (linked) {
			struct intel_plane_state *linked_new_plane_state;
			struct drm_rect *linked_sel_fetch_area;

			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
			if (IS_ERR(linked_new_plane_state))
				return PTR_ERR(linked_new_plane_state);

			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
			crtc_state->update_planes |= BIT(linked->id);
		}
	}

skip_sel_fetch_set_loop:
	psr2_man_trk_ctl_calc(crtc_state, full_update);
	crtc_state->pipe_srcsz_early_tpt =
		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
	return 0;
}
3000 
/*
 * Force continuous full frame updates with raw (unlocked) register writes.
 * NOTE(review): intel_de_write_fw() suggests this is meant for contexts
 * where normal register locking is unavailable (e.g. panic) — confirm with
 * callers.
 */
void intel_psr2_panic_force_full_update(struct intel_display *display,
					struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 val = man_trk_ctl_enable_bit_get(display);

	/* SF partial frame enable has to be set even on full update */
	val |= man_trk_ctl_partial_frame_bit_get(display);
	val |= man_trk_ctl_continuos_full_frame(display);

	/* Directly write the register */
	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);

	if (!crtc_state->enable_psr2_su_region_et)
		return;

	/* Also clear the early transport SU region size */
	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
}
3020 
/**
 * intel_psr_pre_plane_update - disable PSR before a pipe/plane update
 * @state: atomic state
 * @crtc: CRTC being updated
 *
 * Disables PSR, under psr.lock, on every PSR encoder of @crtc's old state
 * whenever the new state is incompatible with the currently enabled PSR
 * configuration, so the plane update happens with PSR off.
 */
void intel_psr_pre_plane_update(struct intel_atomic_state *state,
				struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_encoder *encoder;

	if (!HAS_PSR(display))
		return;

	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
					     old_crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		struct intel_psr *psr = &intel_dp->psr;

		mutex_lock(&psr->lock);

		/* Record why PSR is off in the new state, for debugfs/logging */
		if (!new_crtc_state->has_psr)
			psr->no_psr_reason = new_crtc_state->no_psr_reason;

		if (psr->enabled) {
			/*
			 * Reasons to disable:
			 * - PSR disabled in new state
			 * - All planes will go inactive
			 * - Changing between PSR versions
			 * - Region Early Transport changing
			 * - Display WA #1136: skl, bxt
			 */
			if (intel_crtc_needs_modeset(new_crtc_state) ||
			    !new_crtc_state->has_psr ||
			    !new_crtc_state->active_planes ||
			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
				intel_psr_disable_locked(intel_dp);
			else if (new_crtc_state->wm_level_disabled)
				/* Wa_14015648006 */
				wm_optimization_wa(intel_dp, new_crtc_state);
		}

		mutex_unlock(&psr->lock);
	}
}
3069 
3070 static void
3071 verify_panel_replay_dsc_state(const struct intel_crtc_state *crtc_state)
3072 {
3073 	struct intel_display *display = to_intel_display(crtc_state);
3074 
3075 	if (!crtc_state->has_panel_replay)
3076 		return;
3077 
3078 	drm_WARN_ON(display->drm,
3079 		    intel_dsc_enabled_on_link(crtc_state) &&
3080 		    crtc_state->panel_replay_dsc_support ==
3081 		    INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED);
3082 }
3083 
/**
 * intel_psr_post_plane_update - (re)enable PSR after a pipe/plane update
 * @state: atomic state
 * @crtc: CRTC that was updated
 *
 * Re-enables PSR, under psr.lock, on every PSR encoder of @crtc's new state
 * unless a keep-disabled condition applies (unreliable sink, no active
 * planes, or Display WA #1136 on pre-display-11).
 */
void intel_psr_post_plane_update(struct intel_atomic_state *state,
				 struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	verify_panel_replay_dsc_state(crtc_state);

	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		struct intel_psr *psr = &intel_dp->psr;
		bool keep_disabled = false;

		mutex_lock(&psr->lock);

		/* pre_plane_update should have disabled PSR in this case */
		drm_WARN_ON(display->drm,
			    psr->enabled && !crtc_state->active_planes);

		if (psr->sink_not_reliable)
			keep_disabled = true;

		if (!crtc_state->active_planes) {
			psr->no_psr_reason = "All planes inactive";
			keep_disabled = true;
		}

		/* Display WA #1136: skl, bxt */
		if (DISPLAY_VER(display) < 11 && crtc_state->wm_level_disabled) {
			psr->no_psr_reason = "Workaround #1136 for skl, bxt";
			keep_disabled = true;
		}

		if (!psr->enabled && !keep_disabled)
			intel_psr_enable_locked(intel_dp, crtc_state);
		else if (psr->enabled && !crtc_state->wm_level_disabled)
			/* Wa_14015648006 */
			wm_optimization_wa(intel_dp, crtc_state);

		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
		if (crtc_state->crc_enabled && psr->enabled)
			intel_psr_force_update(intel_dp);

		/*
		 * Clear possible busy bits in case we have
		 * invalidate -> flip -> flush sequence.
		 */
		intel_dp->psr.busy_frontbuffer_bits = 0;

		mutex_unlock(&psr->lock);
	}
}
3141 
3142 /*
3143  * From bspec: Panel Self Refresh (BDW+)
3144  * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
3145  * exit training time + 1.5 ms of aux channel handshake. 50 ms is
3146  * defensive enough to cover everything.
3147  */
3148 #define PSR_IDLE_TIMEOUT_MS 50
3149 
/*
 * Wait for the PSR2 state machine to leave deep sleep, i.e. be ready for a
 * pipe update. With @dsb the poll is emitted into the DSB command buffer
 * instead, and the return value carries no result (callers ignore it).
 * Without @dsb, returns non-zero on timeout.
 */
static int
_psr2_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
				   struct intel_dsb *dsb)
{
	struct intel_display *display = to_intel_display(new_crtc_state);
	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;

	/*
	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
	 * As all higher states has bit 4 of PSR2 state set we can just wait for
	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
	 */
	if (dsb) {
		/* Poll every 200 us, for up to PSR_IDLE_TIMEOUT_MS total */
		intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder),
			       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 0, 200,
			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
		return true;
	}

	return intel_de_wait_for_clear_ms(display,
				       EDP_PSR2_STATUS(display, cpu_transcoder),
				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP,
				       PSR_IDLE_TIMEOUT_MS);
}
3174 
/*
 * PSR1 counterpart of _psr2_ready_for_pipe_update_locked(): wait for the
 * PSR1 status state field to clear. With @dsb the poll is queued into the
 * DSB instead and the return value carries no result; without @dsb,
 * returns non-zero on timeout.
 */
static int
_psr1_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
				   struct intel_dsb *dsb)
{
	struct intel_display *display = to_intel_display(new_crtc_state);
	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;

	if (dsb) {
		/* Poll every 200 us, for up to PSR_IDLE_TIMEOUT_MS total */
		intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder),
			       EDP_PSR_STATUS_STATE_MASK, 0, 200,
			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
		return true;
	}

	return intel_de_wait_for_clear_ms(display,
				       psr_status_reg(display, cpu_transcoder),
				       EDP_PSR_STATUS_STATE_MASK,
				       PSR_IDLE_TIMEOUT_MS);
}
3194 
3195 /**
3196  * intel_psr_wait_for_idle_locked - wait for PSR be ready for a pipe update
3197  * @new_crtc_state: new CRTC state
3198  *
3199  * This function is expected to be called from pipe_update_start() where it is
3200  * not expected to race with PSR enable or disable.
3201  */
3202 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
3203 {
3204 	struct intel_display *display = to_intel_display(new_crtc_state);
3205 	struct intel_encoder *encoder;
3206 
3207 	if (!new_crtc_state->has_psr)
3208 		return;
3209 
3210 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3211 					     new_crtc_state->uapi.encoder_mask) {
3212 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3213 		int ret;
3214 
3215 		lockdep_assert_held(&intel_dp->psr.lock);
3216 
3217 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3218 			continue;
3219 
3220 		if (intel_dp->psr.sel_update_enabled)
3221 			ret = _psr2_ready_for_pipe_update_locked(new_crtc_state,
3222 								 NULL);
3223 		else
3224 			ret = _psr1_ready_for_pipe_update_locked(new_crtc_state,
3225 								 NULL);
3226 
3227 		if (ret)
3228 			drm_err(display->drm,
3229 				"PSR wait timed out, atomic update may fail\n");
3230 	}
3231 }
3232 
3233 void intel_psr_wait_for_idle_dsb(struct intel_dsb *dsb,
3234 				 const struct intel_crtc_state *new_crtc_state)
3235 {
3236 	if (!new_crtc_state->has_psr || new_crtc_state->has_panel_replay)
3237 		return;
3238 
3239 	if (new_crtc_state->has_sel_update)
3240 		_psr2_ready_for_pipe_update_locked(new_crtc_state, dsb);
3241 	else
3242 		_psr1_ready_for_pipe_update_locked(new_crtc_state, dsb);
3243 }
3244 
/*
 * Wait for the PSR status register to report idle so PSR can be
 * re-activated. Drops psr.lock around the register wait and re-takes it
 * afterwards; returns true only when the wait succeeded AND PSR is still
 * enabled and not paused after the lock is re-acquired.
 */
static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	i915_reg_t reg;
	u32 mask;
	int err;

	if (!intel_dp->psr.enabled)
		return false;

	/* Pick the status register matching the enabled PSR flavor */
	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
					  intel_dp->psr.panel_replay_enabled)) {
		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
		mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		reg = psr_status_reg(display, cpu_transcoder);
		mask = EDP_PSR_STATUS_STATE_MASK;
	}

	mutex_unlock(&intel_dp->psr.lock);

	err = intel_de_wait_for_clear_ms(display, reg, mask, 50);
	if (err)
		drm_err(display->drm,
			"Timed out waiting for PSR Idle for re-enable\n");

	/* After the unlocked wait, verify that PSR is still wanted! */
	mutex_lock(&intel_dp->psr.lock);
	return err == 0 && intel_dp->psr.enabled && !intel_dp->psr.pause_counter;
}
3276 
/*
 * Force a modeset on every eDP connector by marking its CRTC mode as
 * changed and committing, so changed PSR debug settings take effect.
 * Follows the standard drm_modeset backoff/retry dance on -EDEADLK.
 * Returns 0 on success or a negative error code.
 */
static int intel_psr_fastset_force(struct intel_display *display)
{
	struct drm_connector_list_iter conn_iter;
	struct drm_modeset_acquire_ctx ctx;
	struct drm_atomic_state *state;
	struct drm_connector *conn;
	int err = 0;

	state = drm_atomic_state_alloc(display->drm);
	if (!state)
		return -ENOMEM;

	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);

	state->acquire_ctx = &ctx;
	to_intel_atomic_state(state)->internal = true;

retry:
	drm_connector_list_iter_begin(display->drm, &conn_iter);
	drm_for_each_connector_iter(conn, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct drm_crtc_state *crtc_state;

		/* PSR is only relevant for eDP connectors */
		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
			continue;

		conn_state = drm_atomic_get_connector_state(state, conn);
		if (IS_ERR(conn_state)) {
			err = PTR_ERR(conn_state);
			break;
		}

		if (!conn_state->crtc)
			continue;

		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
		if (IS_ERR(crtc_state)) {
			err = PTR_ERR(crtc_state);
			break;
		}

		/* Mark mode as changed to trigger a pipe->update() */
		crtc_state->mode_changed = true;
	}
	drm_connector_list_iter_end(&conn_iter);

	if (err == 0)
		err = drm_atomic_commit(state);

	if (err == -EDEADLK) {
		drm_atomic_state_clear(state);
		err = drm_modeset_backoff(&ctx);
		if (!err)
			goto retry;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_atomic_state_put(state);

	return err;
}
3339 
/*
 * Set the PSR debug mask (debugfs interface). Validates @val, stores it in
 * psr.debug and, when the mode or one of the disable bits changed, forces a
 * fastset so the new settings take effect. Returns 0 on success, -EINVAL
 * for an invalid mask, or an error from locking/commit.
 */
int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
	u32 old_mode, old_disable_bits;
	int ret;

	/* Reject unknown bits and out-of-range modes */
	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
		    I915_PSR_DEBUG_MODE_MASK) ||
	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
		return -EINVAL;
	}

	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
	if (ret)
		return ret;

	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
	old_disable_bits = intel_dp->psr.debug &
		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);

	intel_dp->psr.debug = val;

	/*
	 * Do it right away if it's already enabled, otherwise it will be done
	 * when enabling the source.
	 */
	if (intel_dp->psr.enabled)
		psr_irq_control(intel_dp);

	mutex_unlock(&intel_dp->psr.lock);

	if (old_mode != mode || old_disable_bits != disable_bits)
		ret = intel_psr_fastset_force(display);

	return ret;
}
3382 
/*
 * Handle a PSR AUX error noticed by the interrupt handler: disable PSR,
 * mark the sink as unreliable so it won't be re-enabled, and wake the sink
 * via DPCD.
 */
static void intel_psr_handle_irq(struct intel_dp *intel_dp)
{
	struct intel_psr *psr = &intel_dp->psr;

	intel_psr_disable_locked(intel_dp);
	psr->sink_not_reliable = true;
	/* let's make sure that sink is awaken */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}
3392 
/*
 * Deferred work that re-activates PSR once the hardware has gone idle and
 * no frontbuffer activity or pause is pending.
 */
static void intel_psr_work(struct work_struct *work)
{
	struct intel_dp *intel_dp =
		container_of(work, typeof(*intel_dp), psr.work);

	mutex_lock(&intel_dp->psr.lock);

	if (!intel_dp->psr.enabled)
		goto unlock;

	/* An AUX error takes priority: tear PSR down instead of re-arming */
	if (READ_ONCE(intel_dp->psr.irq_aux_error)) {
		intel_psr_handle_irq(intel_dp);
		goto unlock;
	}

	/* Paused via intel_psr_pause(); resume will re-activate */
	if (intel_dp->psr.pause_counter)
		goto unlock;

	/*
	 * We have to make sure PSR is ready for re-enable
	 * otherwise it keeps disabled until next full enable/disable cycle.
	 * PSR might take some time to get fully disabled
	 * and be ready for re-enable.
	 */
	if (!__psr_wait_for_idle_locked(intel_dp))
		goto unlock;

	/*
	 * The delayed work can race with an invalidate hence we need to
	 * recheck. Since psr_flush first clears this and then reschedules we
	 * won't ever miss a flush when bailing out here.
	 */
	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
		goto unlock;

	intel_psr_activate(intel_dp);
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}
3432 
/*
 * Program a single full frame update for selective fetch: LNL+ (display 20)
 * has a dedicated SFF register, earlier hardware uses the SFF/CFF bits in
 * PSR2_MAN_TRK_CTL. No-op when selective fetch is not enabled.
 */
static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;

	if (!intel_dp->psr.psr2_sel_fetch_enabled)
		return;

	if (DISPLAY_VER(display) >= 20)
		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
	else
		intel_de_write(display,
			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
			       man_trk_ctl_enable_bit_get(display) |
			       man_trk_ctl_partial_frame_bit_get(display) |
			       man_trk_ctl_single_full_frame_bit_get(display) |
			       man_trk_ctl_continuos_full_frame(display));
}
3452 
/*
 * Per-encoder invalidate handling: on pre-LNL selective fetch, switch to
 * continuous full frame mode and force an update; otherwise simply exit
 * PSR until the matching flush.
 */
static void _psr_invalidate_handle(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
			intel_psr_configure_full_frame_update(intel_dp);
		}

		intel_psr_force_update(intel_dp);
	} else {
		intel_psr_exit(intel_dp);
	}
}
3468 
3469 /**
3470  * intel_psr_invalidate - Invalidate PSR
3471  * @display: display device
3472  * @frontbuffer_bits: frontbuffer plane tracking bits
3473  * @origin: which operation caused the invalidate
3474  *
3475  * Since the hardware frontbuffer tracking has gaps we need to integrate
3476  * with the software frontbuffer tracking. This function gets called every
3477  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3478  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3479  *
3480  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits."
3481  */
3482 void intel_psr_invalidate(struct intel_display *display,
3483 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3484 {
3485 	struct intel_encoder *encoder;
3486 
3487 	if (origin == ORIGIN_FLIP)
3488 		return;
3489 
3490 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3491 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3492 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3493 
3494 		mutex_lock(&intel_dp->psr.lock);
3495 		if (!intel_dp->psr.enabled) {
3496 			mutex_unlock(&intel_dp->psr.lock);
3497 			continue;
3498 		}
3499 
3500 		pipe_frontbuffer_bits &=
3501 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3502 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3503 
3504 		if (pipe_frontbuffer_bits)
3505 			_psr_invalidate_handle(intel_dp);
3506 
3507 		mutex_unlock(&intel_dp->psr.lock);
3508 	}
3509 }
3510 /*
3511  * When we will be completely rely on PSR2 S/W tracking in future,
3512  * intel_psr_flush() will invalidate and flush the PSR for ORIGIN_FLIP
3513  * event also therefore tgl_dc3co_flush_locked() require to be changed
3514  * accordingly in future.
3515  */
3516 static void
3517 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3518 		       enum fb_op_origin origin)
3519 {
3520 	struct intel_display *display = to_intel_display(intel_dp);
3521 
3522 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3523 	    !intel_dp->psr.active)
3524 		return;
3525 
3526 	/*
3527 	 * At every frontbuffer flush flip event modified delay of delayed work,
3528 	 * when delayed work schedules that means display has been idle.
3529 	 */
3530 	if (!(frontbuffer_bits &
3531 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3532 		return;
3533 
3534 	tgl_psr2_enable_dc3co(intel_dp);
3535 	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
3536 			 intel_dp->psr.dc3co_exit_delay);
3537 }
3538 
/*
 * Per-encoder flush handling: force an update (keeping CFF configured on
 * pre-LNL selective fetch), or exit PSR on LNL+ selective update; then
 * schedule the re-activation work when PSR is inactive and nothing is busy.
 */
static void _psr_flush_handle(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
		/* Selective fetch prior LNL */
		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
			/* can we turn CFF off? */
			if (intel_dp->psr.busy_frontbuffer_bits == 0)
				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
		}

		/*
		 * Still keep cff bit enabled as we don't have proper SU
		 * configuration in case update is sent for any reason after
		 * sff bit gets cleared by the HW on next vblank.
		 *
		 * NOTE: Setting cff bit is not needed for LunarLake onwards as
		 * we have own register for SFF bit and we are not overwriting
		 * existing SU configuration
		 */
		intel_psr_configure_full_frame_update(intel_dp);

		intel_psr_force_update(intel_dp);
	} else if (!intel_dp->psr.psr2_sel_fetch_enabled) {
		/*
		 * PSR1 on all platforms
		 * PSR2 HW tracking
		 * Panel Replay Full frame update
		 */
		intel_psr_force_update(intel_dp);
	} else {
		/* Selective update LNL onwards */
		intel_psr_exit(intel_dp);
	}

	/* Re-activate PSR asynchronously once the hardware is idle again */
	if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
		queue_work(display->wq.unordered, &intel_dp->psr.work);
}
3578 
3579 /**
3580  * intel_psr_flush - Flush PSR
3581  * @display: display device
3582  * @frontbuffer_bits: frontbuffer plane tracking bits
3583  * @origin: which operation caused the flush
3584  *
3585  * Since the hardware frontbuffer tracking has gaps we need to integrate
3586  * with the software frontbuffer tracking. This function gets called every
3587  * time frontbuffer rendering has completed and flushed out to memory. PSR
3588  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3589  *
3590  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3591  */
3592 void intel_psr_flush(struct intel_display *display,
3593 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3594 {
3595 	struct intel_encoder *encoder;
3596 
3597 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3598 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3599 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3600 
3601 		mutex_lock(&intel_dp->psr.lock);
3602 		if (!intel_dp->psr.enabled) {
3603 			mutex_unlock(&intel_dp->psr.lock);
3604 			continue;
3605 		}
3606 
3607 		pipe_frontbuffer_bits &=
3608 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3609 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3610 
3611 		/*
3612 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3613 		 * we have to ensure that the PSR is not activated until
3614 		 * intel_psr_resume() is called.
3615 		 */
3616 		if (intel_dp->psr.pause_counter)
3617 			goto unlock;
3618 
3619 		if (origin == ORIGIN_FLIP ||
3620 		    (origin == ORIGIN_CURSOR_UPDATE &&
3621 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3622 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3623 			goto unlock;
3624 		}
3625 
3626 		if (pipe_frontbuffer_bits == 0)
3627 			goto unlock;
3628 
3629 		/* By definition flush = invalidate + flush */
3630 		_psr_flush_handle(intel_dp);
3631 unlock:
3632 		mutex_unlock(&intel_dp->psr.lock);
3633 	}
3634 }
3635 
/**
 * intel_psr_init - Init basic PSR work and mutex.
 * @intel_dp: Intel DP
 *
 * This function is called after the initializing connector.
 * (the initializing of connector treats the handling of connector capabilities)
 * And it initializes basic PSR stuff for each DP Encoder.
 */
void intel_psr_init(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	/* Neither PSR nor DP 2.0 (Panel Replay) support: nothing to set up */
	if (!(HAS_PSR(display) || HAS_DP20(display)))
		return;

	/*
	 * HSW spec explicitly says PSR is tied to port A.
	 * BDW+ platforms have a instance of PSR registers per transcoder but
	 * BDW, GEN9 and GEN11 are not validated by HW team in other transcoder
	 * than eDP one.
	 * For now it only supports one instance of PSR for BDW, GEN9 and GEN11.
	 * So lets keep it hardcoded to PORT_A for BDW, GEN9 and GEN11.
	 * But GEN12 supports a instance of PSR registers per transcoder.
	 */
	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
		drm_dbg_kms(display->drm,
			    "PSR condition failed: Port not supported\n");
		return;
	}

	/*
	 * Panel Replay: non-eDP ports with DP 2.0 support, and all ports from
	 * display version 20 onwards.
	 */
	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
	    DISPLAY_VER(display) >= 20)
		intel_dp->psr.source_panel_replay_support = true;

	/* Classic PSR is only marked supported on eDP */
	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
		intel_dp->psr.source_support = true;

	/* Set link_standby x link_off defaults */
	if (DISPLAY_VER(display) < 12)
		/* For new platforms up to TGL let's respect VBT back again */
		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;

	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
	mutex_init(&intel_dp->psr.lock);
}
3684 
3685 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3686 					   u8 *status, u8 *error_status)
3687 {
3688 	struct drm_dp_aux *aux = &intel_dp->aux;
3689 	int ret;
3690 	unsigned int offset;
3691 
3692 	offset = intel_dp->psr.panel_replay_enabled ?
3693 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3694 
3695 	ret = drm_dp_dpcd_readb(aux, offset, status);
3696 	if (ret != 1)
3697 		return ret;
3698 
3699 	offset = intel_dp->psr.panel_replay_enabled ?
3700 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3701 
3702 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3703 	if (ret != 1)
3704 		return ret;
3705 
3706 	*status = *status & DP_PSR_SINK_STATE_MASK;
3707 
3708 	return 0;
3709 }
3710 
3711 static void psr_alpm_check(struct intel_dp *intel_dp)
3712 {
3713 	struct intel_psr *psr = &intel_dp->psr;
3714 
3715 	if (!psr->sel_update_enabled)
3716 		return;
3717 
3718 	if (intel_alpm_get_error(intel_dp)) {
3719 		intel_psr_disable_locked(intel_dp);
3720 		psr->sink_not_reliable = true;
3721 	}
3722 }
3723 
3724 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3725 {
3726 	struct intel_display *display = to_intel_display(intel_dp);
3727 	struct intel_psr *psr = &intel_dp->psr;
3728 	u8 val;
3729 	int r;
3730 
3731 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3732 	if (r != 1) {
3733 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3734 		return;
3735 	}
3736 
3737 	if (val & DP_PSR_CAPS_CHANGE) {
3738 		intel_psr_disable_locked(intel_dp);
3739 		psr->sink_not_reliable = true;
3740 		drm_dbg_kms(display->drm,
3741 			    "Sink PSR capability changed, disabling PSR\n");
3742 
3743 		/* Clearing it */
3744 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3745 	}
3746 }
3747 
/*
 * Handle a sink-initiated HPD short pulse: read the sink's PSR/Panel Replay
 * status and error registers and disable PSR if the sink reports a problem.
 *
 * On common bits:
 * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
 * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
 * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
 * this function is relying on PSR definitions
 */
void intel_psr_short_pulse(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_psr *psr = &intel_dp->psr;
	u8 status, error_status;
	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
			  DP_PSR_LINK_CRC_ERROR;

	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
		return;

	mutex_lock(&psr->lock);

	/* A short pulse invalidates the cached link-ok state */
	psr->link_ok = false;

	if (!psr->enabled)
		goto exit;

	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
		drm_err(display->drm,
			"Error reading PSR status or error status\n");
		goto exit;
	}

	/* A sink internal error or any known error bit: stop using PSR */
	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
	    (error_status & errors)) {
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
	}

	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
	    !error_status)
		drm_dbg_kms(display->drm,
			    "PSR sink internal error, disabling PSR\n");
	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
		drm_dbg_kms(display->drm,
			    "PSR RFB storage error, disabling PSR\n");
	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
		drm_dbg_kms(display->drm,
			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
	if (error_status & DP_PSR_LINK_CRC_ERROR)
		drm_dbg_kms(display->drm,
			    "PSR Link CRC error, disabling PSR\n");

	if (error_status & ~errors)
		drm_err(display->drm,
			"PSR_ERROR_STATUS unhandled errors %x\n",
			error_status & ~errors);
	/* clear status register */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);

	/* These checks apply to PSR only, not to Panel Replay */
	if (!psr->panel_replay_enabled) {
		psr_alpm_check(intel_dp);
		psr_capability_changed_check(intel_dp);
	}

exit:
	mutex_unlock(&psr->lock);
}
3815 
3816 bool intel_psr_enabled(struct intel_dp *intel_dp)
3817 {
3818 	bool ret;
3819 
3820 	if (!CAN_PSR(intel_dp))
3821 		return false;
3822 
3823 	mutex_lock(&intel_dp->psr.lock);
3824 	ret = intel_dp->psr.enabled;
3825 	mutex_unlock(&intel_dp->psr.lock);
3826 
3827 	return ret;
3828 }
3829 
3830 /**
3831  * intel_psr_link_ok - return psr->link_ok
3832  * @intel_dp: struct intel_dp
3833  *
3834  * We are seeing unexpected link re-trainings with some panels. This is caused
3835  * by panel stating bad link status after PSR is enabled. Code checking link
3836  * status can call this to ensure it can ignore bad link status stated by the
3837  * panel I.e. if panel is stating bad link and intel_psr_link_ok is stating link
3838  * is ok caller should rely on latter.
3839  *
3840  * Return value of link_ok
3841  */
3842 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3843 {
3844 	bool ret;
3845 
3846 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3847 	    !intel_dp_is_edp(intel_dp))
3848 		return false;
3849 
3850 	mutex_lock(&intel_dp->psr.lock);
3851 	ret = intel_dp->psr.link_ok;
3852 	mutex_unlock(&intel_dp->psr.lock);
3853 
3854 	return ret;
3855 }
3856 
/**
 * intel_psr_lock - grab PSR lock
 * @crtc_state: the crtc state
 *
 * This is initially meant to be used by around CRTC update, when
 * vblank sensitive registers are updated and we need grab the lock
 * before it to avoid vblank evasion.
 */
void intel_psr_lock(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/* Lock only the first matching encoder; intel_psr_unlock() mirrors this */
		mutex_lock(&intel_dp->psr.lock);
		break;
	}
}
3881 
/**
 * intel_psr_unlock - release PSR lock
 * @crtc_state: the crtc state
 *
 * Release the PSR lock that was held during pipe update.
 */
void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		/* Unlock only the first matching encoder, mirroring intel_psr_lock() */
		mutex_unlock(&intel_dp->psr.lock);
		break;
	}
}
3904 
/* Wa_16025596647 */
static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	bool dc5_dc6_blocked;

	/* The workaround only matters while PSR is active with PKG C latency used */
	if (!intel_dp->psr.active || !intel_dp->psr.pkg_c_latency_used)
		return;

	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);

	/*
	 * Selective update: program 0 idle frames while DC5/DC6 is blocked,
	 * the normal computed value otherwise. Without selective update, let
	 * DMC handle the PKG C exit at the start of the undelayed vblank.
	 */
	if (intel_dp->psr.sel_update_enabled)
		psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
					 psr_compute_idle_frames(intel_dp));
	else
		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
								       intel_dp->psr.pipe,
								       dc5_dc6_blocked);
}
3924 
3925 static void psr_dc5_dc6_wa_work(struct work_struct *work)
3926 {
3927 	struct intel_display *display = container_of(work, typeof(*display),
3928 						     psr_dc5_dc6_wa_work);
3929 	struct intel_encoder *encoder;
3930 
3931 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3932 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3933 
3934 		mutex_lock(&intel_dp->psr.lock);
3935 
3936 		if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled &&
3937 		    !intel_dp->psr.pkg_c_latency_used)
3938 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3939 
3940 		mutex_unlock(&intel_dp->psr.lock);
3941 	}
3942 }
3943 
3944 /**
3945  * intel_psr_notify_dc5_dc6 - Notify PSR about enable/disable dc5/dc6
3946  * @display: intel atomic state
3947  *
3948  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to schedule
3949  * psr_dc5_dc6_wa_work used for applying/removing the workaround.
3950  */
3951 void intel_psr_notify_dc5_dc6(struct intel_display *display)
3952 {
3953 	if (DISPLAY_VER(display) != 20 &&
3954 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3955 		return;
3956 
3957 	schedule_work(&display->psr_dc5_dc6_wa_work);
3958 }
3959 
3960 /**
3961  * intel_psr_dc5_dc6_wa_init - Init work for underrun on idle PSR HW bug wa
3962  * @display: intel atomic state
3963  *
3964  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to init
3965  * psr_dc5_dc6_wa_work used for applying the workaround.
3966  */
3967 void intel_psr_dc5_dc6_wa_init(struct intel_display *display)
3968 {
3969 	if (DISPLAY_VER(display) != 20 &&
3970 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3971 		return;
3972 
3973 	INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work);
3974 }
3975 
/**
 * intel_psr_notify_pipe_change - Notify PSR about enable/disable of a pipe
 * @state: intel atomic state
 * @crtc: intel crtc
 * @enable: enable/disable
 *
 * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to apply
 * remove the workaround when pipe is getting enabled/disabled
 */
void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
				  struct intel_crtc *crtc, bool enable)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_encoder *encoder;

	/* Wa_16025596647 applies to display version 20 and 30.00 A0 step only */
	if (DISPLAY_VER(display) != 20 &&
	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
		return;

	for_each_intel_encoder_with_psr(display->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		u8 active_non_psr_pipes;

		mutex_lock(&intel_dp->psr.lock);

		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
			goto unlock;

		/* Compute the new mask of active pipes other than the PSR one */
		active_non_psr_pipes = intel_dp->psr.active_non_psr_pipes;

		if (enable)
			active_non_psr_pipes |= BIT(crtc->pipe);
		else
			active_non_psr_pipes &= ~BIT(crtc->pipe);

		/* No change for this encoder */
		if (active_non_psr_pipes == intel_dp->psr.active_non_psr_pipes)
			goto unlock;

		/*
		 * Skip re-applying the workaround when enabling a pipe while
		 * other non-PSR pipes were already active, when disabling with
		 * none recorded, or when no PKG C latency is used; in those
		 * cases just record the new mask.
		 */
		if ((enable && intel_dp->psr.active_non_psr_pipes) ||
		    (!enable && !intel_dp->psr.active_non_psr_pipes) ||
		    !intel_dp->psr.pkg_c_latency_used) {
			intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
			goto unlock;
		}

		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;

		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
unlock:
		mutex_unlock(&intel_dp->psr.lock);
	}
}
4028 
/**
 * intel_psr_notify_vblank_enable_disable - Notify PSR about enable/disable of vblank
 * @display: intel display struct
 * @enable: enable/disable
 *
 * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to apply
 * remove the workaround when vblank is getting enabled/disabled
 */
void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
					    bool enable)
{
	struct intel_encoder *encoder;

	for_each_intel_encoder_with_psr(display->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		mutex_lock(&intel_dp->psr.lock);
		/* With Panel Replay fall through to the DC state handling below */
		if (intel_dp->psr.panel_replay_enabled) {
			mutex_unlock(&intel_dp->psr.lock);
			break;
		}

		if (intel_dp->psr.enabled && intel_dp->psr.pkg_c_latency_used)
			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);

		mutex_unlock(&intel_dp->psr.lock);
		/* PSR handled for the first encoder; DC state left untouched */
		return;
	}

	/*
	 * NOTE: intel_display_power_set_target_dc_state is used only by PSR
	 * code for DC3CO handling. The DC3CO target state is currently
	 * disabled in PSR code. If DC3CO is taken into use we need to take
	 * that into account here as well.
	 */
	intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE :
						DC_STATE_EN_UPTO_DC6);
}
4068 
/* Print the source (HW) PSR/Panel Replay live state to the debugfs file */
static void
psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	const char *status = "unknown";
	u32 val, status_val;

	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
		/* Names for the EDP_PSR2_STATUS state field values */
		static const char * const live_status[] = {
			"IDLE",
			"CAPTURE",
			"CAPTURE_FS",
			"SLEEP",
			"BUFON_FW",
			"ML_UP",
			"SU_STANDBY",
			"FAST_SLEEP",
			"DEEP_SLEEP",
			"BUF_ON",
			"TG_ON"
		};
		val = intel_de_read(display,
				    EDP_PSR2_STATUS(display, cpu_transcoder));
		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
		if (status_val < ARRAY_SIZE(live_status))
			status = live_status[status_val];
	} else {
		/* Names for the PSR1 EDP_PSR_STATUS state field values */
		static const char * const live_status[] = {
			"IDLE",
			"SRDONACK",
			"SRDENT",
			"BUFOFF",
			"BUFON",
			"AUXACK",
			"SRDOFFACK",
			"SRDENT_ON",
		};
		val = intel_de_read(display,
				    psr_status_reg(display, cpu_transcoder));
		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
		if (status_val < ARRAY_SIZE(live_status))
			status = live_status[status_val];
	}

	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
}
4117 
/* Print the sink's PSR / Panel Replay DPCD capabilities to the debugfs file */
static void intel_psr_sink_capability(struct intel_dp *intel_dp,
				      struct seq_file *m)
{
	struct intel_psr *psr = &intel_dp->psr;

	seq_printf(m, "Sink support: PSR = %s",
		   str_yes_no(psr->sink_support));

	if (psr->sink_support)
		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
	/* PSR2 with Y-coordinate and early transport, per the PSR DPCD version */
	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
		seq_printf(m, " (Early Transport)");
	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
	seq_printf(m, ", Panel Replay Selective Update = %s",
		   str_yes_no(psr->sink_panel_replay_su_support));
	seq_printf(m, ", Panel Replay DSC support = %s",
		   panel_replay_dsc_support_str(psr->sink_panel_replay_dsc_support));
	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
		seq_printf(m, " (Early Transport)");
	seq_printf(m, "\n");
}
4140 
4141 static void intel_psr_print_mode(struct intel_dp *intel_dp,
4142 				 struct seq_file *m)
4143 {
4144 	struct intel_psr *psr = &intel_dp->psr;
4145 	const char *status, *mode, *region_et;
4146 
4147 	if (psr->enabled)
4148 		status = " enabled";
4149 	else
4150 		status = "disabled";
4151 
4152 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
4153 		mode = "Panel Replay Selective Update";
4154 	else if (psr->panel_replay_enabled)
4155 		mode = "Panel Replay";
4156 	else if (psr->sel_update_enabled)
4157 		mode = "PSR2";
4158 	else if (psr->enabled)
4159 		mode = "PSR1";
4160 	else
4161 		mode = "";
4162 
4163 	if (psr->su_region_et_enabled)
4164 		region_et = " (Early Transport)";
4165 	else
4166 		region_et = "";
4167 
4168 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
4169 	if (psr->no_psr_reason)
4170 		seq_printf(m, "  %s\n", psr->no_psr_reason);
4171 }
4172 
/* Dump the full source + sink PSR/Panel Replay state to a debugfs seq file */
static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	struct intel_psr *psr = &intel_dp->psr;
	struct ref_tracker *wakeref;
	bool enabled;
	u32 val, psr2_ctl;

	intel_psr_sink_capability(intel_dp, m);

	if (!(psr->sink_support || psr->sink_panel_replay_support))
		return 0;

	/* Keep the HW awake while reading the status registers */
	wakeref = intel_display_rpm_get(display);
	mutex_lock(&psr->lock);

	intel_psr_print_mode(intel_dp, m);

	if (!psr->enabled) {
		seq_printf(m, "PSR sink not reliable: %s\n",
			   str_yes_no(psr->sink_not_reliable));

		goto unlock;
	}

	if (psr->panel_replay_enabled) {
		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));

		/* psr2_ctl is read below only under this same condition */
		if (intel_dp_is_edp(intel_dp))
			psr2_ctl = intel_de_read(display,
						 EDP_PSR2_CTL(display,
							      cpu_transcoder));

		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
	} else if (psr->sel_update_enabled) {
		val = intel_de_read(display,
				    EDP_PSR2_CTL(display, cpu_transcoder));
		enabled = val & EDP_PSR2_ENABLE;
	} else {
		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
		enabled = val & EDP_PSR_ENABLE;
	}
	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
		   str_enabled_disabled(enabled), val);
	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
		seq_printf(m, "PSR2_CTL: 0x%08x\n",
			   psr2_ctl);
	psr_source_status(intel_dp, m);
	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
		   psr->busy_frontbuffer_bits);

	/*
	 * SKL+ Perf counter is reset to 0 everytime DC state is entered
	 */
	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
	seq_printf(m, "Performance counter: %u\n",
		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));

	if (psr->debug & I915_PSR_DEBUG_IRQ) {
		seq_printf(m, "Last attempted entry at: %lld\n",
			   psr->last_entry_attempt);
		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
	}

	if (psr->sel_update_enabled) {
		u32 su_frames_val[3];
		int frame;

		/*
		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
		 * (it returns zeros only) and it has been removed on Xe2_LPD.
		 */
		if (DISPLAY_VER(display) < 13) {
			/*
			 * Reading all 3 registers before hand to minimize crossing a
			 * frame boundary between register reads
			 */
			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
				val = intel_de_read(display,
						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
				su_frames_val[frame / 3] = val;
			}

			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");

			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
				u32 su_blocks;

				/* Each register packs the SU block count of 3 frames */
				su_blocks = su_frames_val[frame / 3] &
					PSR2_SU_STATUS_MASK(frame);
				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
				seq_printf(m, "%d\t%d\n", frame, su_blocks);
			}
		}

		seq_printf(m, "PSR2 selective fetch: %s\n",
			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
	}

unlock:
	mutex_unlock(&psr->lock);
	intel_display_rpm_put(display, wakeref);

	return 0;
}
4279 
4280 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
4281 {
4282 	struct intel_display *display = m->private;
4283 	struct intel_dp *intel_dp = NULL;
4284 	struct intel_encoder *encoder;
4285 
4286 	if (!HAS_PSR(display))
4287 		return -ENODEV;
4288 
4289 	/* Find the first EDP which supports PSR */
4290 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4291 		intel_dp = enc_to_intel_dp(encoder);
4292 		break;
4293 	}
4294 
4295 	if (!intel_dp)
4296 		return -ENODEV;
4297 
4298 	return intel_psr_status(m, intel_dp);
4299 }
4300 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
4301 
/* debugfs write handler: set the PSR debug mask on all PSR capable encoders */
static int
i915_edp_psr_debug_set(void *data, u64 val)
{
	struct intel_display *display = data;
	struct intel_encoder *encoder;
	int ret = -ENODEV;

	if (!HAS_PSR(display))
		return ret;

	for_each_intel_encoder_with_psr(display->drm, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);

		// TODO: split to each transcoder's PSR debug state
		with_intel_display_rpm(display)
			ret = intel_psr_debug_set(intel_dp, val);
	}

	/* Returns the result of the last encoder, or -ENODEV when none found */
	return ret;
}
4324 
4325 static int
4326 i915_edp_psr_debug_get(void *data, u64 *val)
4327 {
4328 	struct intel_display *display = data;
4329 	struct intel_encoder *encoder;
4330 
4331 	if (!HAS_PSR(display))
4332 		return -ENODEV;
4333 
4334 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4335 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4336 
4337 		// TODO: split to each transcoder's PSR debug state
4338 		*val = READ_ONCE(intel_dp->psr.debug);
4339 		return 0;
4340 	}
4341 
4342 	return -ENODEV;
4343 }
4344 
4345 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
4346 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
4347 			"%llu\n");
4348 
/* Register the device-level PSR debugfs files under the DRM debugfs root */
void intel_psr_debugfs_register(struct intel_display *display)
{
	struct dentry *debugfs_root = display->drm->debugfs_root;

	debugfs_create_file("i915_edp_psr_debug", 0644, debugfs_root,
			    display, &i915_edp_psr_debug_fops);

	debugfs_create_file("i915_edp_psr_status", 0444, debugfs_root,
			    display, &i915_edp_psr_status_fops);
}
4359 
4360 static const char *psr_mode_str(struct intel_dp *intel_dp)
4361 {
4362 	if (intel_dp->psr.panel_replay_enabled)
4363 		return "PANEL-REPLAY";
4364 	else if (intel_dp->psr.enabled)
4365 		return "PSR";
4366 
4367 	return "unknown";
4368 }
4369 
/* Per-connector debugfs: dump the sink's PSR/Panel Replay status and errors */
static int i915_psr_sink_status_show(struct seq_file *m, void *data)
{
	struct intel_connector *connector = m->private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	/* Names for the DP_PSR_SINK_STATE_MASK values read from the sink */
	static const char * const sink_status[] = {
		"inactive",
		"transition to active, capture and display",
		"active, display from RFB",
		"active, capture and display on sink device timings",
		"transition to inactive, capture and display, timing re-sync",
		"reserved",
		"reserved",
		"sink internal error",
	};
	const char *str;
	int ret;
	u8 status, error_status;

	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
		return -ENODEV;
	}

	if (connector->base.status != connector_status_connected)
		return -ENODEV;

	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
	if (ret)
		return ret;

	status &= DP_PSR_SINK_STATE_MASK;
	if (status < ARRAY_SIZE(sink_status))
		str = sink_status[status];
	else
		str = "unknown";

	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);

	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);

	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
			    DP_PSR_LINK_CRC_ERROR))
		seq_puts(m, ":\n");
	else
		seq_puts(m, "\n");
	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
	if (error_status & DP_PSR_LINK_CRC_ERROR)
		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));

	return ret;
}
4425 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
4426 
/* Per-connector debugfs: same report as the device-level i915_edp_psr_status */
static int i915_psr_status_show(struct seq_file *m, void *data)
{
	struct intel_connector *connector = m->private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	return intel_psr_status(m, intel_dp);
}
4434 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
4435 
/* Register the per-connector PSR debugfs files for eDP/DP connectors */
void intel_psr_connector_debugfs_add(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	struct dentry *root = connector->base.debugfs_entry;

	/* PSR/Panel Replay only applies to eDP and DisplayPort connectors */
	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
		return;

	debugfs_create_file("i915_psr_sink_status", 0444, root,
			    connector, &i915_psr_sink_status_fops);

	/* Source status requires PSR or DP 2.0 (Panel Replay) support */
	if (HAS_PSR(display) || HAS_DP20(display))
		debugfs_create_file("i915_psr_status", 0444, root,
				    connector, &i915_psr_status_fops);
}
4452 
4453 bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_state *crtc_state)
4454 {
4455 	/*
4456 	 * eDP Panel Replay uses always ALPM
4457 	 * PSR2 uses ALPM but PSR1 doesn't
4458 	 */
4459 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
4460 					     crtc_state->has_panel_replay);
4461 }
4462 
4463 bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
4464 				   const struct intel_crtc_state *crtc_state)
4465 {
4466 	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
4467 }
4468 
/*
 * Late PSR/Panel Replay config validation: check that the configured wake
 * lines fit into the available vblank and drop the features that do not.
 */
void intel_psr_compute_config_late(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int vblank = intel_crtc_vblank_length(crtc_state);
	int wake_lines;

	/* Pick the wake line count matching the ALPM mode in use */
	if (intel_psr_needs_alpm_aux_less(intel_dp, crtc_state))
		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
	else if (intel_psr_needs_alpm(intel_dp, crtc_state))
		wake_lines = DISPLAY_VER(display) < 20 ?
			     psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
						    crtc_state->alpm_state.fast_wake_lines) :
			     crtc_state->alpm_state.io_wake_lines;
	else
		wake_lines = 0;

	/*
	 * Disable the PSR features if wake lines exceed the available vblank.
	 * Though SCL is computed based on these PSR features, it is not reset
	 * even if the PSR features are disabled to avoid changing vblank start
	 * at this stage.
	 */
	if (wake_lines && !_wake_lines_fit_into_vblank(crtc_state, vblank, wake_lines)) {
		drm_dbg_kms(display->drm,
			    "Adjusting PSR/PR mode: vblank too short for wake lines = %d\n",
			    wake_lines);

		if (crtc_state->has_panel_replay) {
			crtc_state->has_panel_replay = false;
			/*
			 * #TODO : Add fall back to PSR/PSR2
			 * Since panel replay cannot be supported, we can fall back to PSR/PSR2.
			 * This will require calling compute_config for psr and psr2 with check for
			 * actual guardband instead of vblank_length.
			 */
			crtc_state->has_psr = false;
		}

		crtc_state->has_sel_update = false;
		crtc_state->enable_psr2_su_region_et = false;
		crtc_state->enable_psr2_sel_fetch = false;
	}

	/* Wa_18037818876 */
	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
		crtc_state->has_psr = false;
		drm_dbg_kms(display->drm,
			    "PSR disabled to workaround PSR FSM hang issue\n");
	}

	intel_psr_set_non_psr_pipes(intel_dp, crtc_state);
}
4522 
/*
 * Return the minimum guardband (in lines) required by the PSR/Panel Replay
 * wake lines for this crtc, or 0 when no PSR wake time constraint applies.
 */
int intel_psr_min_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int psr_min_guardband;
	int wake_lines;

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return 0;

	/* Pick the wake line count matching the mode in use */
	if (crtc_state->has_panel_replay)
		wake_lines = crtc_state->alpm_state.aux_less_wake_lines;
	else if (crtc_state->has_sel_update)
		wake_lines = DISPLAY_VER(display) < 20 ?
			     psr2_block_count_lines(crtc_state->alpm_state.io_wake_lines,
						    crtc_state->alpm_state.fast_wake_lines) :
			     crtc_state->alpm_state.io_wake_lines;
	else
		return 0;

	psr_min_guardband = wake_lines + crtc_state->set_context_latency;

	/* One extra line when the SDP must be sent prior to a given scanline */
	if (crtc_state->req_psr2_sdp_prior_scanline)
		psr_min_guardband++;

	return psr_min_guardband;
}
4549