xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision a4871e6201c46c8e1d04308265b4b4c5753c8209)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 
30 #include "i915_drv.h"
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_rpm.h"
40 #include "intel_display_types.h"
41 #include "intel_dp.h"
42 #include "intel_dp_aux.h"
43 #include "intel_frontbuffer.h"
44 #include "intel_hdmi.h"
45 #include "intel_psr.h"
46 #include "intel_psr_regs.h"
47 #include "intel_snps_phy.h"
48 #include "intel_vblank.h"
49 #include "skl_universal_plane.h"
50 
51 /**
52  * DOC: Panel Self Refresh (PSR/SRD)
53  *
54  * Since Haswell the Display controller supports Panel Self-Refresh on display
55  * panels which have a remote frame buffer (RFB) implemented according to the
56  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
57  * standby states when the system is idle but the display is on, as it
58  * eliminates display refresh requests to DDR memory completely as long as the
59  * frame buffer for that display is unchanged.
60  *
61  * Panel Self Refresh must be supported by both Hardware (source) and
62  * Panel (sink).
63  *
64  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
65  * to power down the link and memory controller. For DSI panels the same idea
66  * is called "manual mode".
67  *
68  * The implementation uses the hardware-based PSR support which automatically
69  * enters/exits self-refresh mode. The hardware takes care of sending the
70  * required DP aux message and could even retrain the link (that part isn't
71  * enabled yet though). The hardware also keeps track of any frontbuffer
72  * changes to know when to exit self-refresh mode again. Unfortunately that
73  * part doesn't work too well, hence why the i915 PSR support uses the
74  * software frontbuffer tracking to make sure it doesn't miss a screen
75  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
76  * get called by the frontbuffer tracking code. Note that because of locking
77  * issues the self-refresh re-enable code is done from a work queue, which
78  * must be correctly synchronized/cancelled when shutting down the pipe.
79  *
80  * DC3CO (DC3 clock off)
81  *
82  * On top of PSR2, GEN12 adds an intermediate power saving state that turns
83  * the clock off automatically during the PSR2 idle state.
84  * The smaller overhead of DC3CO entry/exit vs. the overhead of PSR2 deep sleep
85  * entry/exit allows the HW to enter a low-power state even when page flipping
86  * periodically (for instance in a 30fps video playback scenario).
87  *
88  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was
89  * in it), DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run
90  * after 6 frames. If no other flip occurs and that work function executes,
91  * DC3CO is disabled and PSR2 is configured to enter deep sleep again, and the
92  * cycle restarts on the next flip.
93  * Front buffer modifications do not trigger DC3CO activation on purpose, as
94  * that would bring a lot of complexity while most modern systems will only
95  * use page flips.
96  */
97 
98 /*
99  * Description of PSR mask bits:
100  *
101  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
102  *
103  *  When unmasked (nearly) all display register writes (e.g. even
104  *  SWF) trigger a PSR exit. Some registers are excluded from this
105  *  and they have a more specific mask (described below). On icl+
106  *  this bit no longer exists and is effectively always set.
107  *
108  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
109  *
110  *  When unmasked (nearly) all pipe/plane register writes
111  *  trigger a PSR exit. Some plane registers are excluded from this
112  *  and they have a more specific mask (described below).
113  *
114  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
115  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
116  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
117  *
118  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
119  *  SPR_SURF/CURBASE are not included in this and instead are
120  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
121  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
122  *
123  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
124  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
125  *
126  *  When unmasked PSR is blocked as long as the sprite
127  *  plane is enabled. skl+ with their universal planes no
128  *  longer have a mask bit like this, and no plane being
129  *  enabled blocks PSR.
130  *
131  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
132  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
133  *
134  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
135  *  this bit doesn't exist, but CURPOS is included in the
136  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
137  *
138  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
139  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
140  *
141  *  When unmasked PSR is blocked as long as vblank and/or vsync
142  *  interrupt is unmasked in IMR *and* enabled in IER.
143  *
144  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
145  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
146  *
147  *  Selects whether PSR exit generates an extra vblank before
148  *  the first frame is transmitted. Also note the opposite polarity
149  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
150  *  unmasked==do not generate the extra vblank).
151  *
152  *  With DC states enabled the extra vblank happens after link training,
153  *  with DC states disabled it happens immediately upon the PSR exit trigger.
154  *  No idea as of now why there is a difference. HSW/BDW (which don't
155  *  even have DMC) always generate it after link training. Go figure.
156  *
157  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
158  *  and thus won't latch until the first vblank. So with DC states
159  *  enabled the register effectively uses the reset value during DC5
160  *  exit+PSR exit sequence, and thus the bit does nothing until
161  *  latched by the vblank that it was trying to prevent from being
162  *  generated in the first place. So we should probably call this
163  *  one a chicken/egg bit instead on skl+.
164  *
165  *  In standby mode (as opposed to link-off) this makes no difference
166  *  as the timing generator keeps running the whole time generating
167  *  normal periodic vblanks.
168  *
169  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
170  *  and doing so makes the behaviour match the skl+ reset value.
171  *
172  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
173  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
174  *
175  *  On BDW without this bit no vblanks whatsoever are
176  *  generated after PSR exit. On HSW this has no apparent effect.
177  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
178  *
179  * The rest of the bits are more self-explanatory and/or
180  * irrelevant for normal operation.
181  *
182  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
183  * has_sel_update:
184  *
185  *  has_psr (alone):					PSR1
186  *  has_psr + has_sel_update:				PSR2
187  *  has_psr + has_panel_replay:				Panel Replay
188  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
189  *
190  * Description of some intel_psr variables. enabled, panel_replay_enabled,
191  * sel_update_enabled
192  *
193  *  enabled (alone):						PSR1
194  *  enabled + sel_update_enabled:				PSR2
195  *  enabled + panel_replay_enabled:				Panel Replay
196  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
197  */
198 
199 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
200 			   (intel_dp)->psr.source_support)
201 
202 bool intel_encoder_can_psr(struct intel_encoder *encoder)
203 {
204 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
205 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
206 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
207 	else
208 		return false;
209 }
210 
211 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
212 				  const struct intel_crtc_state *crtc_state)
213 {
214 	/*
215 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
216 	 * the output is enabled. For non-eDP outputs the main link is always
217 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
218 	 * for eDP.
219 	 *
220 	 * TODO:
221 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
222 	 *   the ALPM with main-link off mode is not enabled.
223 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
224 	 *   main-link off mode is added for it and this mode gets enabled.
225 	 */
226 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
227 	       intel_encoder_can_psr(encoder);
228 }
229 
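/*
 * A short summary of how the enable_psr module parameter is interpreted by
 * the helpers below (derived from this code, not from the parameter docs):
 * -1 leaves the decision to the VBT/panel defaults, 0 disables PSR
 * completely, 1 restricts us to PSR1, and any explicit value other than -1
 * also disables Panel Replay and PSR2 SU region early transport.
 */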
230 static bool psr_global_enabled(struct intel_dp *intel_dp)
231 {
232 	struct intel_display *display = to_intel_display(intel_dp);
233 	struct intel_connector *connector = intel_dp->attached_connector;
234 
235 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
236 	case I915_PSR_DEBUG_DEFAULT:
237 		if (display->params.enable_psr == -1)
238 			return intel_dp_is_edp(intel_dp) ?
239 				connector->panel.vbt.psr.enable :
240 				true;
241 		return display->params.enable_psr;
242 	case I915_PSR_DEBUG_DISABLE:
243 		return false;
244 	default:
245 		return true;
246 	}
247 }
248 
249 static bool psr2_global_enabled(struct intel_dp *intel_dp)
250 {
251 	struct intel_display *display = to_intel_display(intel_dp);
252 
253 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
254 	case I915_PSR_DEBUG_DISABLE:
255 	case I915_PSR_DEBUG_FORCE_PSR1:
256 		return false;
257 	default:
258 		if (display->params.enable_psr == 1)
259 			return false;
260 		return true;
261 	}
262 }
263 
264 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
265 {
266 	struct intel_display *display = to_intel_display(intel_dp);
267 
268 	if (display->params.enable_psr != -1)
269 		return false;
270 
271 	return true;
272 }
273 
274 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
275 {
276 	struct intel_display *display = to_intel_display(intel_dp);
277 
278 	if ((display->params.enable_psr != -1) ||
279 	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
280 		return false;
281 	return true;
282 }
283 
284 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
285 {
286 	struct intel_display *display = to_intel_display(intel_dp);
287 
288 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
289 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
290 }
291 
292 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
293 {
294 	struct intel_display *display = to_intel_display(intel_dp);
295 
296 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
297 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
298 }
299 
300 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
301 {
302 	struct intel_display *display = to_intel_display(intel_dp);
303 
304 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
305 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
306 }
307 
308 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
309 {
310 	struct intel_display *display = to_intel_display(intel_dp);
311 
312 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
313 		EDP_PSR_MASK(intel_dp->psr.transcoder);
314 }
315 
316 static i915_reg_t psr_ctl_reg(struct intel_display *display,
317 			      enum transcoder cpu_transcoder)
318 {
319 	if (DISPLAY_VER(display) >= 8)
320 		return EDP_PSR_CTL(display, cpu_transcoder);
321 	else
322 		return HSW_SRD_CTL;
323 }
324 
325 static i915_reg_t psr_debug_reg(struct intel_display *display,
326 				enum transcoder cpu_transcoder)
327 {
328 	if (DISPLAY_VER(display) >= 8)
329 		return EDP_PSR_DEBUG(display, cpu_transcoder);
330 	else
331 		return HSW_SRD_DEBUG;
332 }
333 
334 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
335 				   enum transcoder cpu_transcoder)
336 {
337 	if (DISPLAY_VER(display) >= 8)
338 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
339 	else
340 		return HSW_SRD_PERF_CNT;
341 }
342 
343 static i915_reg_t psr_status_reg(struct intel_display *display,
344 				 enum transcoder cpu_transcoder)
345 {
346 	if (DISPLAY_VER(display) >= 8)
347 		return EDP_PSR_STATUS(display, cpu_transcoder);
348 	else
349 		return HSW_SRD_STATUS;
350 }
351 
352 static i915_reg_t psr_imr_reg(struct intel_display *display,
353 			      enum transcoder cpu_transcoder)
354 {
355 	if (DISPLAY_VER(display) >= 12)
356 		return TRANS_PSR_IMR(display, cpu_transcoder);
357 	else
358 		return EDP_PSR_IMR;
359 }
360 
361 static i915_reg_t psr_iir_reg(struct intel_display *display,
362 			      enum transcoder cpu_transcoder)
363 {
364 	if (DISPLAY_VER(display) >= 12)
365 		return TRANS_PSR_IIR(display, cpu_transcoder);
366 	else
367 		return EDP_PSR_IIR;
368 }
369 
370 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
371 				  enum transcoder cpu_transcoder)
372 {
373 	if (DISPLAY_VER(display) >= 8)
374 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
375 	else
376 		return HSW_SRD_AUX_CTL;
377 }
378 
379 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
380 				   enum transcoder cpu_transcoder, int i)
381 {
382 	if (DISPLAY_VER(display) >= 8)
383 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
384 	else
385 		return HSW_SRD_AUX_DATA(i);
386 }
387 
388 static void psr_irq_control(struct intel_dp *intel_dp)
389 {
390 	struct intel_display *display = to_intel_display(intel_dp);
391 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
392 	u32 mask;
393 
394 	if (intel_dp->psr.panel_replay_enabled)
395 		return;
396 
397 	mask = psr_irq_psr_error_bit_get(intel_dp);
398 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
399 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
400 			psr_irq_pre_entry_bit_get(intel_dp);
401 
402 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
403 		     psr_irq_mask_get(intel_dp), ~mask);
404 }
405 
406 static void psr_event_print(struct intel_display *display,
407 			    u32 val, bool sel_update_enabled)
408 {
409 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
410 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
411 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
412 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
413 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
414 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
415 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
416 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
417 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
418 	if (val & PSR_EVENT_GRAPHICS_RESET)
419 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
420 	if (val & PSR_EVENT_PCH_INTERRUPT)
421 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
422 	if (val & PSR_EVENT_MEMORY_UP)
423 		drm_dbg_kms(display->drm, "\tMemory up\n");
424 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
425 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
426 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
427 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
428 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
429 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
430 	if (val & PSR_EVENT_REGISTER_UPDATE)
431 		drm_dbg_kms(display->drm, "\tRegister updated\n");
432 	if (val & PSR_EVENT_HDCP_ENABLE)
433 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
434 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
435 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
436 	if (val & PSR_EVENT_VBI_ENABLE)
437 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
438 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
439 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
440 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
441 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
442 }
443 
444 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
445 {
446 	struct intel_display *display = to_intel_display(intel_dp);
447 	struct drm_i915_private *dev_priv = to_i915(display->drm);
448 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
449 	ktime_t time_ns =  ktime_get();
450 
451 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
452 		intel_dp->psr.last_entry_attempt = time_ns;
453 		drm_dbg_kms(display->drm,
454 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
455 			    transcoder_name(cpu_transcoder));
456 	}
457 
458 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
459 		intel_dp->psr.last_exit = time_ns;
460 		drm_dbg_kms(display->drm,
461 			    "[transcoder %s] PSR exit completed\n",
462 			    transcoder_name(cpu_transcoder));
463 
464 		if (DISPLAY_VER(display) >= 9) {
465 			u32 val;
466 
467 			val = intel_de_rmw(display,
468 					   PSR_EVENT(display, cpu_transcoder),
469 					   0, 0);
470 
471 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
472 		}
473 	}
474 
475 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
476 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
477 			 transcoder_name(cpu_transcoder));
478 
479 		intel_dp->psr.irq_aux_error = true;
480 
481 		/*
482 		 * If this interrupt is not masked it will keep firing
483 		 * so fast that it prevents the scheduled work from
484 		 * running.
485 		 * Also, after a PSR error we don't want to arm PSR
486 		 * again, so we don't care about unmasking the interrupt
487 		 * or unsetting irq_aux_error.
488 		 */
489 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
490 			     0, psr_irq_psr_error_bit_get(intel_dp));
491 
492 		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
493 	}
494 }
495 
496 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
497 {
498 	struct intel_display *display = to_intel_display(intel_dp);
499 	u8 val = 8; /* assume the worst if we can't read the value */
500 
501 	if (drm_dp_dpcd_readb(&intel_dp->aux,
502 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
503 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
504 	else
505 		drm_dbg_kms(display->drm,
506 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
507 	return val;
508 }
509 
510 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
511 {
512 	u8 su_capability = 0;
513 
514 	if (intel_dp->psr.sink_panel_replay_su_support)
515 		drm_dp_dpcd_readb(&intel_dp->aux,
516 				  DP_PANEL_PANEL_REPLAY_CAPABILITY,
517 				  &su_capability);
518 	else
519 		su_capability = intel_dp->psr_dpcd[1];
520 
521 	return su_capability;
522 }
523 
524 static unsigned int
525 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
526 {
527 	return intel_dp->psr.sink_panel_replay_su_support ?
528 		DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
529 		DP_PSR2_SU_X_GRANULARITY;
530 }
531 
532 static unsigned int
533 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
534 {
535 	return intel_dp->psr.sink_panel_replay_su_support ?
536 		DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
537 		DP_PSR2_SU_Y_GRANULARITY;
538 }
539 
540 /*
541  * Note: Bits related to granularity are the same in the panel replay and PSR
542  * registers. Rely on the PSR definitions for these "common" bits.
543  */
544 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
545 {
546 	struct intel_display *display = to_intel_display(intel_dp);
547 	ssize_t r;
548 	u16 w;
549 	u8 y;
550 
551 	/*
552 	 * TODO: Do we need to take into account a panel supporting both PSR and
553 	 * Panel Replay?
554 	 */
555 
556 	/*
557 	 * If the sink doesn't have specific granularity requirements, set the
558 	 * legacy ones.
559 	 */
560 	if (!(intel_dp_get_su_capability(intel_dp) &
561 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
562 		/* As PSR2 HW sends full lines, we do not care about x granularity */
563 		w = 4;
564 		y = 4;
565 		goto exit;
566 	}
567 
568 	r = drm_dp_dpcd_read(&intel_dp->aux,
569 			     intel_dp_get_su_x_granularity_offset(intel_dp),
570 			     &w, 2);
571 	if (r != 2)
572 		drm_dbg_kms(display->drm,
573 			    "Unable to read selective update x granularity\n");
574 	/*
575 	 * Spec says that if the value read is 0 the default granularity should
576 	 * be used instead.
577 	 */
578 	if (r != 2 || w == 0)
579 		w = 4;
580 
581 	r = drm_dp_dpcd_read(&intel_dp->aux,
582 			     intel_dp_get_su_y_granularity_offset(intel_dp),
583 			     &y, 1);
584 	if (r != 1) {
585 		drm_dbg_kms(display->drm,
586 			    "Unable to read selective update y granularity\n");
587 		y = 4;
588 	}
589 	if (y == 0)
590 		y = 1;
591 
592 exit:
593 	intel_dp->psr.su_w_granularity = w;
594 	intel_dp->psr.su_y_granularity = y;
595 }
596 
597 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
598 {
599 	struct intel_display *display = to_intel_display(intel_dp);
600 
601 	if (intel_dp_is_edp(intel_dp)) {
602 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
603 			drm_dbg_kms(display->drm,
604 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
605 			return;
606 		}
607 
608 		if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
609 			drm_dbg_kms(display->drm,
610 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
611 			return;
612 		}
613 	}
614 
615 	intel_dp->psr.sink_panel_replay_support = true;
616 
617 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
618 		intel_dp->psr.sink_panel_replay_su_support = true;
619 
620 	drm_dbg_kms(display->drm,
621 		    "Panel replay %sis supported by panel\n",
622 		    intel_dp->psr.sink_panel_replay_su_support ?
623 		    "selective_update " : "");
624 }
625 
626 static void _psr_init_dpcd(struct intel_dp *intel_dp)
627 {
628 	struct intel_display *display = to_intel_display(intel_dp);
629 
630 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
631 		    intel_dp->psr_dpcd[0]);
632 
633 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
634 		drm_dbg_kms(display->drm,
635 			    "PSR support not currently available for this panel\n");
636 		return;
637 	}
638 
639 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
640 		drm_dbg_kms(display->drm,
641 			    "Panel lacks power state control, PSR cannot be enabled\n");
642 		return;
643 	}
644 
645 	intel_dp->psr.sink_support = true;
646 	intel_dp->psr.sink_sync_latency =
647 		intel_dp_get_sink_sync_latency(intel_dp);
648 
649 	if (DISPLAY_VER(display) >= 9 &&
650 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
651 		bool y_req = intel_dp->psr_dpcd[1] &
652 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
653 
654 		/*
655 		 * All panels that support PSR version 03h (PSR2 +
656 		 * Y-coordinate) can handle Y-coordinates in the VSC, but we
657 		 * are only sure that it is going to be used when required by
658 		 * the panel. This way the panel is capable of doing selective
659 		 * updates without an aux frame sync.
660 		 *
661 		 * To support panels with PSR version 02h, or version 03h
662 		 * without the Y-coordinate requirement, we would need to
663 		 * enable GTC first.
664 		 */
665 		intel_dp->psr.sink_psr2_support = y_req &&
666 			intel_alpm_aux_wake_supported(intel_dp);
667 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
668 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
669 	}
670 }
671 
672 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
673 {
674 	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
675 			 sizeof(intel_dp->psr_dpcd));
676 	drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
677 			  &intel_dp->pr_dpcd);
678 
679 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
680 		_panel_replay_init_dpcd(intel_dp);
681 
682 	if (intel_dp->psr_dpcd[0])
683 		_psr_init_dpcd(intel_dp);
684 
685 	if (intel_dp->psr.sink_psr2_support ||
686 	    intel_dp->psr.sink_panel_replay_su_support)
687 		intel_dp_get_su_granularity(intel_dp);
688 }
689 
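/*
 * Pre-program the AUX message that the PSR hardware transmits on its own when
 * waking the sink on PSR exit: a native AUX write of DP_SET_POWER = D0. The
 * message bytes and a sanitized AUX control value are stashed in the SRD/PSR
 * AUX registers for the hardware to use.
 */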
690 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
691 {
692 	struct intel_display *display = to_intel_display(intel_dp);
693 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
694 	u32 aux_clock_divider, aux_ctl;
695 	/* write DP_SET_POWER=D0 */
696 	static const u8 aux_msg[] = {
697 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
698 		[1] = (DP_SET_POWER >> 8) & 0xff,
699 		[2] = DP_SET_POWER & 0xff,
700 		[3] = 1 - 1,
701 		[4] = DP_SET_POWER_D0,
702 	};
703 	int i;
704 
705 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
706 	for (i = 0; i < sizeof(aux_msg); i += 4)
707 		intel_de_write(display,
708 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
709 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
710 
711 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
712 
713 	/* Start with bits set for DDI_AUX_CTL register */
714 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
715 					     aux_clock_divider);
716 
717 	/* Select only valid bits for SRD_AUX_CTL */
718 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
719 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
720 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
721 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
722 
723 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
724 		       aux_ctl);
725 }
726 
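/*
 * Selective update region early transport is only considered on display
 * version 20+ eDP, can be turned off via the debug interface, and requires
 * the sink to advertise it (via the Panel Replay capability or the PSR
 * capability, depending on which mode is being configured).
 */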
727 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
728 {
729 	struct intel_display *display = to_intel_display(intel_dp);
730 
731 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
732 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
733 		return false;
734 
735 	return panel_replay ?
736 		intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
737 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
738 		psr2_su_region_et_global_enabled(intel_dp);
739 }
740 
741 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
742 				      const struct intel_crtc_state *crtc_state)
743 {
744 	u8 val = DP_PANEL_REPLAY_ENABLE |
745 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
746 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
747 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
748 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
749 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
750 
751 	if (crtc_state->has_sel_update)
752 		val |= DP_PANEL_REPLAY_SU_ENABLE;
753 
754 	if (crtc_state->enable_psr2_su_region_et)
755 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
756 
757 	if (crtc_state->req_psr2_sdp_prior_scanline)
758 		panel_replay_config2 |=
759 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
760 
761 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
762 
763 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
764 			   panel_replay_config2);
765 }
766 
767 static void _psr_enable_sink(struct intel_dp *intel_dp,
768 			     const struct intel_crtc_state *crtc_state)
769 {
770 	struct intel_display *display = to_intel_display(intel_dp);
771 	u8 val = 0;
772 
773 	if (crtc_state->has_sel_update) {
774 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
775 	} else {
776 		if (intel_dp->psr.link_standby)
777 			val |= DP_PSR_MAIN_LINK_ACTIVE;
778 
779 		if (DISPLAY_VER(display) >= 8)
780 			val |= DP_PSR_CRC_VERIFICATION;
781 	}
782 
783 	if (crtc_state->req_psr2_sdp_prior_scanline)
784 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
785 
786 	if (crtc_state->enable_psr2_su_region_et)
787 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
788 
789 	if (intel_dp->psr.entry_setup_frames > 0)
790 		val |= DP_PSR_FRAME_CAPTURE;
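	/*
	 * Write the configuration bits first and only then set DP_PSR_ENABLE
	 * with a second write, presumably so the sink sees the complete
	 * configuration before PSR is actually enabled.
	 */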
791 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
792 
793 	val |= DP_PSR_ENABLE;
794 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
795 }
796 
797 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
798 				       const struct intel_crtc_state *crtc_state)
799 {
800 	u8 val;
801 
802 	/*
803 	 * eDP Panel Replay always uses ALPM.
804 	 * PSR2 uses ALPM but PSR1 doesn't.
805 	 */
806 	if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
807 					   !crtc_state->has_sel_update))
808 		return;
809 
810 	val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
811 
812 	if (crtc_state->has_panel_replay)
813 		val |= DP_ALPM_MODE_AUX_LESS;
814 
815 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
816 }
817 
818 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
819 				  const struct intel_crtc_state *crtc_state)
820 {
821 	intel_psr_enable_sink_alpm(intel_dp, crtc_state);
822 
823 	crtc_state->has_panel_replay ?
824 		_panel_replay_enable_sink(intel_dp, crtc_state) :
825 		_psr_enable_sink(intel_dp, crtc_state);
826 
827 	if (intel_dp_is_edp(intel_dp))
828 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
829 }
830 
831 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
832 {
833 	if (CAN_PANEL_REPLAY(intel_dp))
834 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
835 				   DP_PANEL_REPLAY_ENABLE);
836 }
837 
838 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
839 {
840 	struct intel_display *display = to_intel_display(intel_dp);
841 	struct intel_connector *connector = intel_dp->attached_connector;
842 	u32 val = 0;
843 
844 	if (DISPLAY_VER(display) >= 11)
845 		val |= EDP_PSR_TP4_TIME_0us;
846 
847 	if (display->params.psr_safest_params) {
848 		val |= EDP_PSR_TP1_TIME_2500us;
849 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
850 		goto check_tp3_sel;
851 	}
852 
853 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
854 		val |= EDP_PSR_TP1_TIME_0us;
855 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
856 		val |= EDP_PSR_TP1_TIME_100us;
857 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
858 		val |= EDP_PSR_TP1_TIME_500us;
859 	else
860 		val |= EDP_PSR_TP1_TIME_2500us;
861 
862 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
863 		val |= EDP_PSR_TP2_TP3_TIME_0us;
864 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
865 		val |= EDP_PSR_TP2_TP3_TIME_100us;
866 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
867 		val |= EDP_PSR_TP2_TP3_TIME_500us;
868 	else
869 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
870 
871 	/*
872 	 * WA 0479: hsw,bdw
873 	 * "Do not skip both TP1 and TP2/TP3"
874 	 */
875 	if (DISPLAY_VER(display) < 9 &&
876 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
877 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
878 		val |= EDP_PSR_TP2_TP3_TIME_100us;
879 
880 check_tp3_sel:
881 	if (intel_dp_source_supports_tps3(display) &&
882 	    drm_dp_tps3_supported(intel_dp->dpcd))
883 		val |= EDP_PSR_TP_TP1_TP3;
884 	else
885 		val |= EDP_PSR_TP_TP1_TP2;
886 
887 	return val;
888 }
889 
890 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
891 {
892 	struct intel_display *display = to_intel_display(intel_dp);
893 	struct intel_connector *connector = intel_dp->attached_connector;
894 	int idle_frames;
895 
896 	/* Let's use 6 as the minimum to cover all known cases including the
897 	 * off-by-one issue that HW has in some cases.
898 	 */
899 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
900 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
901 
902 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
903 		idle_frames = 0xf;
904 
905 	return idle_frames;
906 }
907 
908 static void hsw_activate_psr1(struct intel_dp *intel_dp)
909 {
910 	struct intel_display *display = to_intel_display(intel_dp);
911 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
912 	u32 max_sleep_time = 0x1f;
913 	u32 val = EDP_PSR_ENABLE;
914 
915 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
916 
917 	if (DISPLAY_VER(display) < 20)
918 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
919 
920 	if (display->platform.haswell)
921 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
922 
923 	if (intel_dp->psr.link_standby)
924 		val |= EDP_PSR_LINK_STANDBY;
925 
926 	val |= intel_psr1_get_tp_time(intel_dp);
927 
928 	if (DISPLAY_VER(display) >= 8)
929 		val |= EDP_PSR_CRC_ENABLE;
930 
931 	if (DISPLAY_VER(display) >= 20)
932 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
933 
934 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
935 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
936 }
937 
938 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
939 {
940 	struct intel_display *display = to_intel_display(intel_dp);
941 	struct intel_connector *connector = intel_dp->attached_connector;
942 	u32 val = 0;
943 
944 	if (display->params.psr_safest_params)
945 		return EDP_PSR2_TP2_TIME_2500us;
946 
947 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
948 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
949 		val |= EDP_PSR2_TP2_TIME_50us;
950 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
951 		val |= EDP_PSR2_TP2_TIME_100us;
952 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
953 		val |= EDP_PSR2_TP2_TIME_500us;
954 	else
955 		val |= EDP_PSR2_TP2_TIME_2500us;
956 
957 	return val;
958 }
959 
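/*
 * Number of lines the PSR2 block count has to cover: 8 lines when both the
 * IO and fast wake line counts fit below 9 lines, otherwise 12 lines. This
 * maps to TGL_EDP_PSR2_BLOCK_COUNT_NUM_2/3 in hsw_activate_psr2().
 */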
960 static int psr2_block_count_lines(struct intel_dp *intel_dp)
961 {
962 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
963 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
964 }
965 
966 static int psr2_block_count(struct intel_dp *intel_dp)
967 {
968 	return psr2_block_count_lines(intel_dp) / 4;
969 }
970 
971 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
972 {
973 	u8 frames_before_su_entry;
974 
975 	frames_before_su_entry = max_t(u8,
976 				       intel_dp->psr.sink_sync_latency + 1,
977 				       2);
978 
979 	/* Entry setup frames must be at least 1 less than frames before SU entry */
980 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
981 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
982 
983 	return frames_before_su_entry;
984 }
985 
986 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
987 {
988 	struct intel_display *display = to_intel_display(intel_dp);
989 	struct intel_psr *psr = &intel_dp->psr;
990 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
991 
992 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
993 		u32 val = psr->su_region_et_enabled ?
994 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
995 
996 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
997 			val |= EDP_PSR2_SU_SDP_SCANLINE;
998 
999 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
1000 			       val);
1001 	}
1002 
1003 	intel_de_rmw(display,
1004 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1005 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1006 
1007 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1008 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1009 }
1010 
1011 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1012 {
1013 	struct intel_display *display = to_intel_display(intel_dp);
1014 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1015 	u32 val = EDP_PSR2_ENABLE;
1016 	u32 psr_val = 0;
1017 
1018 	val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
1019 
1020 	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
1021 		val |= EDP_SU_TRACK_ENABLE;
1022 
1023 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1024 		val |= EDP_Y_COORDINATE_ENABLE;
1025 
1026 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1027 
1028 	val |= intel_psr2_get_tp_time(intel_dp);
1029 
1030 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1031 		if (psr2_block_count(intel_dp) > 2)
1032 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1033 		else
1034 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1035 	}
1036 
1037 	/* Wa_22012278275:adl-p */
1038 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1039 		static const u8 map[] = {
1040 			2, /* 5 lines */
1041 			1, /* 6 lines */
1042 			0, /* 7 lines */
1043 			3, /* 8 lines */
1044 			6, /* 9 lines */
1045 			5, /* 10 lines */
1046 			4, /* 11 lines */
1047 			7, /* 12 lines */
1048 		};
1049 		/*
1050 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1051 		 * comments below for more information
1052 		 */
1053 		int tmp;
1054 
1055 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1056 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1057 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1058 
1059 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1060 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1061 	} else if (DISPLAY_VER(display) >= 20) {
1062 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1063 	} else if (DISPLAY_VER(display) >= 12) {
1064 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1065 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1066 	} else if (DISPLAY_VER(display) >= 9) {
1067 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1068 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1069 	}
1070 
1071 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1072 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1073 
1074 	if (DISPLAY_VER(display) >= 20)
1075 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1076 
1077 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1078 		u32 tmp;
1079 
1080 		tmp = intel_de_read(display,
1081 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1082 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1083 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1084 		intel_de_write(display,
1085 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1086 	}
1087 
1088 	if (intel_dp->psr.su_region_et_enabled)
1089 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1090 
1091 	/*
1092 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
1093 	 * recommends keeping this bit unset while PSR2 is enabled.
1094 	 */
1095 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1096 
1097 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1098 }
1099 
1100 static bool
1101 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1102 {
1103 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1104 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1105 	else if (DISPLAY_VER(display) >= 12)
1106 		return cpu_transcoder == TRANSCODER_A;
1107 	else if (DISPLAY_VER(display) >= 9)
1108 		return cpu_transcoder == TRANSCODER_EDP;
1109 	else
1110 		return false;
1111 }
1112 
1113 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1114 {
1115 	if (!crtc_state->hw.active)
1116 		return 0;
1117 
1118 	return DIV_ROUND_UP(1000 * 1000,
1119 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1120 }
1121 
1122 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1123 				     u32 idle_frames)
1124 {
1125 	struct intel_display *display = to_intel_display(intel_dp);
1126 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1127 
1128 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1129 		     EDP_PSR2_IDLE_FRAMES_MASK,
1130 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1131 }
1132 
1133 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1134 {
1135 	struct intel_display *display = to_intel_display(intel_dp);
1136 
1137 	psr2_program_idle_frames(intel_dp, 0);
1138 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1139 }
1140 
1141 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1142 {
1143 	struct intel_display *display = to_intel_display(intel_dp);
1144 
1145 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1146 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1147 }
1148 
1149 static void tgl_dc3co_disable_work(struct work_struct *work)
1150 {
1151 	struct intel_dp *intel_dp =
1152 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1153 
1154 	mutex_lock(&intel_dp->psr.lock);
1155 	/* If delayed work is pending, it is not idle */
1156 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1157 		goto unlock;
1158 
1159 	tgl_psr2_disable_dc3co(intel_dp);
1160 unlock:
1161 	mutex_unlock(&intel_dp->psr.lock);
1162 }
1163 
1164 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1165 {
1166 	if (!intel_dp->psr.dc3co_exitline)
1167 		return;
1168 
1169 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1170 	/* Before PSR2 exit disallow dc3co */
1171 	tgl_psr2_disable_dc3co(intel_dp);
1172 }
1173 
1174 static bool
1175 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1176 			      struct intel_crtc_state *crtc_state)
1177 {
1178 	struct intel_display *display = to_intel_display(intel_dp);
1179 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1180 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1181 	enum port port = dig_port->base.port;
1182 
1183 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1184 		return pipe <= PIPE_B && port <= PORT_B;
1185 	else
1186 		return pipe == PIPE_A && port == PORT_A;
1187 }
1188 
1189 static void
1190 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1191 				  struct intel_crtc_state *crtc_state)
1192 {
1193 	struct intel_display *display = to_intel_display(intel_dp);
1194 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1195 	struct i915_power_domains *power_domains = &display->power.domains;
1196 	u32 exit_scanlines;
1197 
1198 	/*
1199 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1200 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1201 	 * is applied. B.Specs:49196
1202 	 */
1203 	return;
1204 
1205 	/*
1206 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch.
1207 	 * TODO: when the issue is addressed, this restriction should be removed.
1208 	 */
1209 	if (crtc_state->enable_psr2_sel_fetch)
1210 		return;
1211 
1212 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1213 		return;
1214 
1215 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1216 		return;
1217 
1218 	/* Wa_16011303918:adl-p */
1219 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1220 		return;
1221 
1222 	/*
1223 	 * DC3CO Exit time 200us B.Spec 49196
1224 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1225 	 */
1226 	exit_scanlines =
1227 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1228 
1229 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1230 		return;
1231 
1232 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1233 }
1234 
1235 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1236 					      struct intel_crtc_state *crtc_state)
1237 {
1238 	struct intel_display *display = to_intel_display(intel_dp);
1239 
1240 	if (!display->params.enable_psr2_sel_fetch &&
1241 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1242 		drm_dbg_kms(display->drm,
1243 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1244 		return false;
1245 	}
1246 
1247 	if (crtc_state->uapi.async_flip) {
1248 		drm_dbg_kms(display->drm,
1249 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1250 		return false;
1251 	}
1252 
1253 	return crtc_state->enable_psr2_sel_fetch = true;
1254 }
1255 
1256 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1257 				   struct intel_crtc_state *crtc_state)
1258 {
1259 	struct intel_display *display = to_intel_display(intel_dp);
1260 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1261 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1262 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1263 	u16 y_granularity = 0;
1264 
1265 	/* PSR2 HW only sends full lines so we only need to validate the width */
1266 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1267 		return false;
1268 
1269 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1270 		return false;
1271 
1272 	/* HW tracking is only aligned to 4 lines */
1273 	if (!crtc_state->enable_psr2_sel_fetch)
1274 		return intel_dp->psr.su_y_granularity == 4;
1275 
1276 	/*
1277 	 * adl_p and mtl platforms have 1 line granularity.
1278 	 * For other platforms with SW tracking we can adjust the y coordinates
1279 	 * to match the sink requirement if it is a multiple of 4.
1280 	 */
1281 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1282 		y_granularity = intel_dp->psr.su_y_granularity;
1283 	else if (intel_dp->psr.su_y_granularity <= 2)
1284 		y_granularity = 4;
1285 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1286 		y_granularity = intel_dp->psr.su_y_granularity;
1287 
1288 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1289 		return false;
1290 
1291 	if (crtc_state->dsc.compression_enable &&
1292 	    vdsc_cfg->slice_height % y_granularity)
1293 		return false;
1294 
1295 	crtc_state->su_y_granularity = y_granularity;
1296 	return true;
1297 }
1298 
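/*
 * Check whether the "SDP prior to scanline" indication is needed: if the
 * hblank leaves at least ~100 ns of slack beyond what transmitting the SU
 * region SDP requires, nothing special is needed; otherwise the indication
 * is required, which the code below only allows on display version 14+ with
 * eDP 1.4b or newer sinks.
 */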
1299 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1300 							struct intel_crtc_state *crtc_state)
1301 {
1302 	struct intel_display *display = to_intel_display(intel_dp);
1303 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1304 	u32 hblank_total, hblank_ns, req_ns;
1305 
1306 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1307 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1308 
1309 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1310 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1311 
1312 	if ((hblank_ns - req_ns) > 100)
1313 		return true;
1314 
1315 	/* Not supported <13 / Wa_22012279113:adl-p */
1316 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1317 		return false;
1318 
1319 	crtc_state->req_psr2_sdp_prior_scanline = true;
1320 	return true;
1321 }
1322 
1323 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1324 					const struct drm_display_mode *adjusted_mode)
1325 {
1326 	struct intel_display *display = to_intel_display(intel_dp);
1327 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1328 	int entry_setup_frames = 0;
1329 
1330 	if (psr_setup_time < 0) {
1331 		drm_dbg_kms(display->drm,
1332 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1333 			    intel_dp->psr_dpcd[1]);
1334 		return -ETIME;
1335 	}
1336 
1337 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1338 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1339 		if (DISPLAY_VER(display) >= 20) {
1340 			/* setup entry frames can be up to 3 frames */
1341 			entry_setup_frames = 1;
1342 			drm_dbg_kms(display->drm,
1343 				    "PSR setup entry frames %d\n",
1344 				    entry_setup_frames);
1345 		} else {
1346 			drm_dbg_kms(display->drm,
1347 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1348 				    psr_setup_time);
1349 			return -ETIME;
1350 		}
1351 	}
1352 
1353 	return entry_setup_frames;
1354 }
1355 
1356 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1357 				       const struct intel_crtc_state *crtc_state,
1358 				       bool aux_less)
1359 {
1360 	struct intel_display *display = to_intel_display(intel_dp);
1361 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1362 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1363 	int wake_lines;
1364 
1365 	if (aux_less)
1366 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1367 	else
1368 		wake_lines = DISPLAY_VER(display) < 20 ?
1369 			psr2_block_count_lines(intel_dp) :
1370 			intel_dp->alpm_parameters.io_wake_lines;
1371 
1372 	if (crtc_state->req_psr2_sdp_prior_scanline)
1373 		vblank -= 1;
1374 
1375 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1376 	if (vblank < wake_lines)
1377 		return false;
1378 
1379 	return true;
1380 }
1381 
1382 static bool alpm_config_valid(struct intel_dp *intel_dp,
1383 			      const struct intel_crtc_state *crtc_state,
1384 			      bool aux_less)
1385 {
1386 	struct intel_display *display = to_intel_display(intel_dp);
1387 
1388 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1389 		drm_dbg_kms(display->drm,
1390 			    "PSR2/Panel Replay not enabled, unable to use long enough wake times\n");
1391 		return false;
1392 	}
1393 
1394 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1395 		drm_dbg_kms(display->drm,
1396 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1397 		return false;
1398 	}
1399 
1400 	return true;
1401 }
1402 
1403 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1404 				    struct intel_crtc_state *crtc_state)
1405 {
1406 	struct intel_display *display = to_intel_display(intel_dp);
1407 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1408 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1409 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1410 
1411 	if (!intel_dp->psr.sink_psr2_support)
1412 		return false;
1413 
1414 	/* JSL and EHL only support eDP 1.3 */
1415 	if (display->platform.jasperlake || display->platform.elkhartlake) {
1416 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1417 		return false;
1418 	}
1419 
1420 	/* Wa_16011181250 */
1421 	if (display->platform.rocketlake || display->platform.alderlake_s ||
1422 	    display->platform.dg2) {
1423 		drm_dbg_kms(display->drm,
1424 			    "PSR2 is defeatured for this platform\n");
1425 		return false;
1426 	}
1427 
1428 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1429 		drm_dbg_kms(display->drm,
1430 			    "PSR2 not completely functional in this stepping\n");
1431 		return false;
1432 	}
1433 
1434 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1435 		drm_dbg_kms(display->drm,
1436 			    "PSR2 not supported in transcoder %s\n",
1437 			    transcoder_name(crtc_state->cpu_transcoder));
1438 		return false;
1439 	}
1440 
1441 	/*
1442 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1443 	 * resolution requires DSC to be enabled, priority is given to DSC
1444 	 * over PSR2.
1445 	 */
1446 	if (crtc_state->dsc.compression_enable &&
1447 	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
1448 		drm_dbg_kms(display->drm,
1449 			    "PSR2 cannot be enabled since DSC is enabled\n");
1450 		return false;
1451 	}
1452 
1453 	if (DISPLAY_VER(display) >= 20) {
1454 		psr_max_h = crtc_hdisplay;
1455 		psr_max_v = crtc_vdisplay;
1456 		max_bpp = crtc_state->pipe_bpp;
1457 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1458 		psr_max_h = 5120;
1459 		psr_max_v = 3200;
1460 		max_bpp = 30;
1461 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1462 		psr_max_h = 4096;
1463 		psr_max_v = 2304;
1464 		max_bpp = 24;
1465 	} else if (DISPLAY_VER(display) == 9) {
1466 		psr_max_h = 3640;
1467 		psr_max_v = 2304;
1468 		max_bpp = 24;
1469 	}
1470 
1471 	if (crtc_state->pipe_bpp > max_bpp) {
1472 		drm_dbg_kms(display->drm,
1473 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1474 			    crtc_state->pipe_bpp, max_bpp);
1475 		return false;
1476 	}
1477 
1478 	/* Wa_16011303918:adl-p */
1479 	if (crtc_state->vrr.enable &&
1480 	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1481 		drm_dbg_kms(display->drm,
1482 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1483 		return false;
1484 	}
1485 
1486 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1487 		return false;
1488 
1489 	if (!crtc_state->enable_psr2_sel_fetch &&
1490 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1491 		drm_dbg_kms(display->drm,
1492 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1493 			    crtc_hdisplay, crtc_vdisplay,
1494 			    psr_max_h, psr_max_v);
1495 		return false;
1496 	}
1497 
1498 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1499 
1500 	return true;
1501 }
1502 
1503 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1504 					  struct intel_crtc_state *crtc_state)
1505 {
1506 	struct intel_display *display = to_intel_display(intel_dp);
1507 
1508 	if (HAS_PSR2_SEL_FETCH(display) &&
1509 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1510 	    !HAS_PSR_HW_TRACKING(display)) {
1511 		drm_dbg_kms(display->drm,
1512 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1513 		goto unsupported;
1514 	}
1515 
1516 	if (!psr2_global_enabled(intel_dp)) {
1517 		drm_dbg_kms(display->drm,
1518 			    "Selective update disabled by flag\n");
1519 		goto unsupported;
1520 	}
1521 
1522 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1523 		goto unsupported;
1524 
1525 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1526 		drm_dbg_kms(display->drm,
1527 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1528 		goto unsupported;
1529 	}
1530 
1531 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1532 					     !intel_dp->psr.sink_panel_replay_su_support))
1533 		goto unsupported;
1534 
1535 	if (crtc_state->crc_enabled) {
1536 		drm_dbg_kms(display->drm,
1537 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1538 		goto unsupported;
1539 	}
1540 
1541 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1542 		drm_dbg_kms(display->drm,
1543 			    "Selective update not enabled, SU granularity not compatible\n");
1544 		goto unsupported;
1545 	}
1546 
1547 	crtc_state->enable_psr2_su_region_et =
1548 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1549 
1550 	return true;
1551 
1552 unsupported:
1553 	crtc_state->enable_psr2_sel_fetch = false;
1554 	return false;
1555 }
1556 
1557 static bool _psr_compute_config(struct intel_dp *intel_dp,
1558 				struct intel_crtc_state *crtc_state)
1559 {
1560 	struct intel_display *display = to_intel_display(intel_dp);
1561 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1562 	int entry_setup_frames;
1563 
1564 	if (!CAN_PSR(intel_dp))
1565 		return false;
1566 
1567 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1568 
1569 	if (entry_setup_frames >= 0) {
1570 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1571 	} else {
1572 		drm_dbg_kms(display->drm,
1573 			    "PSR condition failed: PSR setup timing not met\n");
1574 		return false;
1575 	}
1576 
1577 	return true;
1578 }
1579 
1580 static bool
1581 _panel_replay_compute_config(struct intel_dp *intel_dp,
1582 			     const struct intel_crtc_state *crtc_state,
1583 			     const struct drm_connector_state *conn_state)
1584 {
1585 	struct intel_display *display = to_intel_display(intel_dp);
1586 	struct intel_connector *connector =
1587 		to_intel_connector(conn_state->connector);
1588 	struct intel_hdcp *hdcp = &connector->hdcp;
1589 
1590 	if (!CAN_PANEL_REPLAY(intel_dp))
1591 		return false;
1592 
1593 	if (!panel_replay_global_enabled(intel_dp)) {
1594 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1595 		return false;
1596 	}
1597 
1598 	if (crtc_state->crc_enabled) {
1599 		drm_dbg_kms(display->drm,
1600 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1601 		return false;
1602 	}
1603 
1604 	if (!intel_dp_is_edp(intel_dp))
1605 		return true;
1606 
1607 	/* Remaining checks are for eDP only */
1608 
1609 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1610 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1611 		return false;
1612 
1613 	/* 128b/132b Panel Replay is not supported on eDP */
1614 	if (intel_dp_is_uhbr(crtc_state)) {
1615 		drm_dbg_kms(display->drm,
1616 			    "Panel Replay is not supported with 128b/132b\n");
1617 		return false;
1618 	}
1619 
1620 	/* HW will not allow Panel Replay on eDP when HDCP enabled */
1621 	if (conn_state->content_protection ==
1622 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1623 	    (conn_state->content_protection ==
1624 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1625 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1626 		drm_dbg_kms(display->drm,
1627 			    "Panel Replay is not supported with HDCP\n");
1628 		return false;
1629 	}
1630 
1631 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1632 		return false;
1633 
1634 	return true;
1635 }
1636 
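/*
 * Wa_18037818876: on display version 20, PSR1 (i.e. no selective update)
 * combined with a non-zero entry setup frame count is affected by a PSR FSM
 * hang issue, so intel_psr_compute_config() disables PSR entirely in that
 * case.
 */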
1637 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1638 					   struct intel_crtc_state *crtc_state)
1639 {
1640 	struct intel_display *display = to_intel_display(intel_dp);
1641 
1642 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1643 		!crtc_state->has_sel_update);
1644 }
1645 
1646 void intel_psr_compute_config(struct intel_dp *intel_dp,
1647 			      struct intel_crtc_state *crtc_state,
1648 			      struct drm_connector_state *conn_state)
1649 {
1650 	struct intel_display *display = to_intel_display(intel_dp);
1651 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1652 
1653 	if (!psr_global_enabled(intel_dp)) {
1654 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1655 		return;
1656 	}
1657 
1658 	if (intel_dp->psr.sink_not_reliable) {
1659 		drm_dbg_kms(display->drm,
1660 			    "PSR sink implementation is not reliable\n");
1661 		return;
1662 	}
1663 
1664 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1665 		drm_dbg_kms(display->drm,
1666 			    "PSR condition failed: Interlaced mode enabled\n");
1667 		return;
1668 	}
1669 
1670 	/*
1671 	 * FIXME figure out what is wrong with PSR+joiner and
1672 	 * fix it. Presumably something related to the fact that
1673 	 * PSR is a transcoder level feature.
1674 	 */
1675 	if (crtc_state->joiner_pipes) {
1676 		drm_dbg_kms(display->drm,
1677 			    "PSR disabled due to joiner\n");
1678 		return;
1679 	}
1680 
1681 	/*
1682 	 * Currently PSR/PR doesn't work reliably with VRR enabled.
1683 	 */
1684 	if (crtc_state->vrr.enable)
1685 		return;
1686 
1687 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1688 								    crtc_state,
1689 								    conn_state);
1690 
1691 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1692 		_psr_compute_config(intel_dp, crtc_state);
1693 
1694 	if (!crtc_state->has_psr)
1695 		return;
1696 
1697 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1698 
1699 	/* Wa_18037818876 */
1700 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1701 		crtc_state->has_psr = false;
1702 		drm_dbg_kms(display->drm,
1703 			    "PSR disabled to workaround PSR FSM hang issue\n");
1704 	}
1705 }
1706 
1707 void intel_psr_get_config(struct intel_encoder *encoder,
1708 			  struct intel_crtc_state *pipe_config)
1709 {
1710 	struct intel_display *display = to_intel_display(encoder);
1711 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1712 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1713 	struct intel_dp *intel_dp;
1714 	u32 val;
1715 
1716 	if (!dig_port)
1717 		return;
1718 
1719 	intel_dp = &dig_port->dp;
1720 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1721 		return;
1722 
1723 	mutex_lock(&intel_dp->psr.lock);
1724 	if (!intel_dp->psr.enabled)
1725 		goto unlock;
1726 
1727 	if (intel_dp->psr.panel_replay_enabled) {
1728 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1729 	} else {
1730 		/*
1731 		 * Not possible to read the EDP_PSR/PSR2_CTL registers as they are
1732 		 * enabled/disabled because of frontbuffer tracking and others.
1733 		 */
1734 		pipe_config->has_psr = true;
1735 	}
1736 
1737 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1738 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1739 
1740 	if (!intel_dp->psr.sel_update_enabled)
1741 		goto unlock;
1742 
1743 	if (HAS_PSR2_SEL_FETCH(display)) {
1744 		val = intel_de_read(display,
1745 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1746 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1747 			pipe_config->enable_psr2_sel_fetch = true;
1748 	}
1749 
1750 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1751 
1752 	if (DISPLAY_VER(display) >= 12) {
1753 		val = intel_de_read(display,
1754 				    TRANS_EXITLINE(display, cpu_transcoder));
1755 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1756 	}
1757 unlock:
1758 	mutex_unlock(&intel_dp->psr.lock);
1759 }
1760 
1761 static void intel_psr_activate(struct intel_dp *intel_dp)
1762 {
1763 	struct intel_display *display = to_intel_display(intel_dp);
1764 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1765 
1766 	drm_WARN_ON(display->drm,
1767 		    transcoder_has_psr2(display, cpu_transcoder) &&
1768 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1769 
1770 	drm_WARN_ON(display->drm,
1771 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1772 
1773 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1774 
1775 	lockdep_assert_held(&intel_dp->psr.lock);
1776 
1777 	/* psr1, psr2 and panel-replay are mutually exclusive. */
1778 	if (intel_dp->psr.panel_replay_enabled)
1779 		dg2_activate_panel_replay(intel_dp);
1780 	else if (intel_dp->psr.sel_update_enabled)
1781 		hsw_activate_psr2(intel_dp);
1782 	else
1783 		hsw_activate_psr1(intel_dp);
1784 
1785 	intel_dp->psr.active = true;
1786 }
1787 
1788 /*
1789  * Wa_16013835468
1790  * Wa_14015648006
1791  */
1792 static void wm_optimization_wa(struct intel_dp *intel_dp,
1793 			       const struct intel_crtc_state *crtc_state)
1794 {
1795 	struct intel_display *display = to_intel_display(intel_dp);
1796 	enum pipe pipe = intel_dp->psr.pipe;
1797 	bool activate = false;
1798 
1799 	/* Wa_14015648006 */
1800 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1801 		activate = true;
1802 
1803 	/* Wa_16013835468 */
1804 	if (DISPLAY_VER(display) == 12 &&
1805 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1806 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1807 		activate = true;
1808 
1809 	if (activate)
1810 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1811 			     0, LATENCY_REPORTING_REMOVED(pipe));
1812 	else
1813 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1814 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1815 }
1816 
1817 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1818 				    const struct intel_crtc_state *crtc_state)
1819 {
1820 	struct intel_display *display = to_intel_display(intel_dp);
1821 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1822 	u32 mask = 0;
1823 
1824 	/*
1825 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1826 	 * SKL+ use hardcoded values for PSR AUX transactions.
1827 	 */
1828 	if (DISPLAY_VER(display) < 9)
1829 		hsw_psr_setup_aux(intel_dp);
1830 
1831 	/*
1832 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1833 	 * mask LPSP to avoid dependency on other drivers that might block
1834 	 * runtime_pm. Besides preventing other hw tracking issues, now we
1835 	 * can rely on frontbuffer tracking.
1836 	 *
1837 	 * From bspec prior to LunarLake:
1838 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1839 	 * panel replay mode.
1840 	 *
1841 	 * From bspec beyond LunarLake:
1842 	 * Panel Replay on DP: No bits are applicable
1843 	 * Panel Replay on eDP: All bits are applicable
1844 	 */
1845 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1846 		mask = EDP_PSR_DEBUG_MASK_HPD;
1847 
1848 	if (intel_dp_is_edp(intel_dp)) {
1849 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1850 
1851 		/*
1852 		 * For some unknown reason on HSW non-ULT (or at least on
1853 		 * Dell Latitude E6540) external displays start to flicker
1854 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1855 		 * higher than should be possible with an external display.
1856 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1857 		 * when external displays are active.
1858 		 */
1859 		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
1860 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1861 
1862 		if (DISPLAY_VER(display) < 20)
1863 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1864 
1865 		/*
1866 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1867 		 * registers in order to keep the CURSURFLIVE tricks working :(
1868 		 */
1869 		if (IS_DISPLAY_VER(display, 9, 10))
1870 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1871 
1872 		/* allow PSR with sprite enabled */
1873 		if (display->platform.haswell)
1874 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1875 	}
1876 
1877 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1878 
1879 	psr_irq_control(intel_dp);
1880 
1881 	/*
1882 	 * TODO: if future platforms support DC3CO in more than one
1883 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1884 	 */
1885 	if (intel_dp->psr.dc3co_exitline)
1886 		intel_de_rmw(display,
1887 			     TRANS_EXITLINE(display, cpu_transcoder),
1888 			     EXITLINE_MASK,
1889 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1890 
1891 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1892 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1893 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1894 			     IGNORE_PSR2_HW_TRACKING : 0);
1895 
1896 	if (intel_dp_is_edp(intel_dp))
1897 		intel_alpm_configure(intel_dp, crtc_state);
1898 
1899 	/*
1900 	 * Wa_16013835468
1901 	 * Wa_14015648006
1902 	 */
1903 	wm_optimization_wa(intel_dp, crtc_state);
1904 
1905 	if (intel_dp->psr.sel_update_enabled) {
1906 		if (DISPLAY_VER(display) == 9)
1907 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1908 				     PSR2_VSC_ENABLE_PROG_HEADER |
1909 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1910 
1911 		/*
1912 		 * Wa_16014451276:adlp,mtl[a0,b0]
1913 		 * All supported adlp panels have 1-based X granularity; this may
1914 		 * cause issues if non-supported panels are used.
1915 		 */
1916 		if (!intel_dp->psr.panel_replay_enabled &&
1917 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1918 		     display->platform.alderlake_p))
1919 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1920 				     0, ADLP_1_BASED_X_GRANULARITY);
1921 
1922 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1923 		if (!intel_dp->psr.panel_replay_enabled &&
1924 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1925 			intel_de_rmw(display,
1926 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1927 				     0,
1928 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1929 		else if (display->platform.alderlake_p)
1930 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1931 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1932 	}
1933 }
1934 
1935 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1936 {
1937 	struct intel_display *display = to_intel_display(intel_dp);
1938 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1939 	u32 val;
1940 
1941 	if (intel_dp->psr.panel_replay_enabled)
1942 		goto no_err;
1943 
1944 	/*
1945 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1946 	 * will still keep the error set even after the reset done in the
1947 	 * irq_preinstall and irq_uninstall hooks.
1948 	 * Enabling PSR in this situation causes the screen to freeze the
1949 	 * first time the PSR HW tries to activate, so let's keep PSR disabled
1950 	 * to avoid any rendering problems.
1951 	 */
1952 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1953 	val &= psr_irq_psr_error_bit_get(intel_dp);
1954 	if (val) {
1955 		intel_dp->psr.sink_not_reliable = true;
1956 		drm_dbg_kms(display->drm,
1957 			    "PSR interruption error set, not enabling PSR\n");
1958 		return false;
1959 	}
1960 
1961 no_err:
1962 	return true;
1963 }
1964 
1965 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1966 				    const struct intel_crtc_state *crtc_state)
1967 {
1968 	struct intel_display *display = to_intel_display(intel_dp);
1969 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1970 	u32 val;
1971 
1972 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1973 
1974 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1975 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1976 	intel_dp->psr.busy_frontbuffer_bits = 0;
1977 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1978 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1979 	/* DC5/DC6 requires at least 6 idle frames */
1980 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1981 	intel_dp->psr.dc3co_exit_delay = val;
1982 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1983 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1984 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1985 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1986 	intel_dp->psr.req_psr2_sdp_prior_scanline =
1987 		crtc_state->req_psr2_sdp_prior_scanline;
1988 
1989 	if (!psr_interrupt_error_check(intel_dp))
1990 		return;
1991 
1992 	if (intel_dp->psr.panel_replay_enabled)
1993 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
1994 	else
1995 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
1996 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
1997 
1998 	/*
1999 	 * Enabling here only for PSR. Panel Replay enable bit is already
2000 	 * written at this point. See
2001 	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2002 	 *  - Selective Update
2003 	 *  - Region Early Transport
2004 	 *  - Selective Update Region Scanline Capture
2005 	 *  - VSC_SDP_CRC
2006 	 *  - HPD on different Errors
2007 	 *  - CRC verification
2008 	 * are written for PSR and Panel Replay here.
2009 	 */
2010 	intel_psr_enable_sink(intel_dp, crtc_state);
2011 
2012 	if (intel_dp_is_edp(intel_dp))
2013 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2014 
2015 	intel_psr_enable_source(intel_dp, crtc_state);
2016 	intel_dp->psr.enabled = true;
2017 	intel_dp->psr.pause_counter = 0;
2018 
2019 	/*
2020 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2021 	 * training is complete as we never continue to PSR enable with an
2022 	 * untrained link. Link_ok is kept set until the first short pulse
2023 	 * interrupt. This is targeted to work around panels reporting a bad
2024 	 * link after PSR is enabled.
2025 	 */
2026 	intel_dp->psr.link_ok = true;
2027 
2028 	intel_psr_activate(intel_dp);
2029 }
2030 
2031 static void intel_psr_exit(struct intel_dp *intel_dp)
2032 {
2033 	struct intel_display *display = to_intel_display(intel_dp);
2034 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2035 	u32 val;
2036 
2037 	if (!intel_dp->psr.active) {
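		/* PSR was never activated: just sanity check that the HW enable bits are clear. */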
2038 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2039 			val = intel_de_read(display,
2040 					    EDP_PSR2_CTL(display, cpu_transcoder));
2041 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2042 		}
2043 
2044 		val = intel_de_read(display,
2045 				    psr_ctl_reg(display, cpu_transcoder));
2046 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2047 
2048 		return;
2049 	}
2050 
2051 	if (intel_dp->psr.panel_replay_enabled) {
2052 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2053 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2054 	} else if (intel_dp->psr.sel_update_enabled) {
2055 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2056 
2057 		val = intel_de_rmw(display,
2058 				   EDP_PSR2_CTL(display, cpu_transcoder),
2059 				   EDP_PSR2_ENABLE, 0);
2060 
2061 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2062 	} else {
2063 		val = intel_de_rmw(display,
2064 				   psr_ctl_reg(display, cpu_transcoder),
2065 				   EDP_PSR_ENABLE, 0);
2066 
2067 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2068 	}
2069 	intel_dp->psr.active = false;
2070 }
2071 
2072 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2073 {
2074 	struct intel_display *display = to_intel_display(intel_dp);
2075 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2076 	i915_reg_t psr_status;
2077 	u32 psr_status_mask;
2078 
2079 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2080 					  intel_dp->psr.panel_replay_enabled)) {
2081 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2082 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2083 	} else {
2084 		psr_status = psr_status_reg(display, cpu_transcoder);
2085 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2086 	}
2087 
2088 	/* Wait till PSR is idle */
2089 	if (intel_de_wait_for_clear(display, psr_status,
2090 				    psr_status_mask, 2000))
2091 		drm_err(display->drm, "Timed out waiting PSR idle state\n");
2092 }
2093 
2094 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2095 {
2096 	struct intel_display *display = to_intel_display(intel_dp);
2097 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2098 
2099 	lockdep_assert_held(&intel_dp->psr.lock);
2100 
2101 	if (!intel_dp->psr.enabled)
2102 		return;
2103 
2104 	if (intel_dp->psr.panel_replay_enabled)
2105 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2106 	else
2107 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2108 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2109 
2110 	intel_psr_exit(intel_dp);
2111 	intel_psr_wait_exit_locked(intel_dp);
2112 
2113 	/*
2114 	 * Wa_16013835468
2115 	 * Wa_14015648006
2116 	 */
2117 	if (DISPLAY_VER(display) >= 11)
2118 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2119 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2120 
2121 	if (intel_dp->psr.sel_update_enabled) {
2122 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2123 		if (!intel_dp->psr.panel_replay_enabled &&
2124 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2125 			intel_de_rmw(display,
2126 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2127 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2128 		else if (display->platform.alderlake_p)
2129 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2130 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2131 	}
2132 
2133 	if (intel_dp_is_edp(intel_dp))
2134 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2135 
2136 	/* Panel Replay on eDP is always using ALPM aux less. */
2137 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2138 		intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2139 			     ALPM_CTL_ALPM_ENABLE |
2140 			     ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2141 
2142 		intel_de_rmw(display,
2143 			     PORT_ALPM_CTL(cpu_transcoder),
2144 			     PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2145 	}
2146 
2147 	/* Disable PSR on Sink */
2148 	if (!intel_dp->psr.panel_replay_enabled) {
2149 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2150 
2151 		if (intel_dp->psr.sel_update_enabled)
2152 			drm_dp_dpcd_writeb(&intel_dp->aux,
2153 					   DP_RECEIVER_ALPM_CONFIG, 0);
2154 	}
2155 
2156 	intel_dp->psr.enabled = false;
2157 	intel_dp->psr.panel_replay_enabled = false;
2158 	intel_dp->psr.sel_update_enabled = false;
2159 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2160 	intel_dp->psr.su_region_et_enabled = false;
2161 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2162 }
2163 
2164 /**
2165  * intel_psr_disable - Disable PSR
2166  * @intel_dp: Intel DP
2167  * @old_crtc_state: old CRTC state
2168  *
2169  * This function needs to be called before disabling pipe.
2170  */
2171 void intel_psr_disable(struct intel_dp *intel_dp,
2172 		       const struct intel_crtc_state *old_crtc_state)
2173 {
2174 	struct intel_display *display = to_intel_display(intel_dp);
2175 
2176 	if (!old_crtc_state->has_psr)
2177 		return;
2178 
2179 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2180 			!CAN_PANEL_REPLAY(intel_dp)))
2181 		return;
2182 
2183 	mutex_lock(&intel_dp->psr.lock);
2184 
2185 	intel_psr_disable_locked(intel_dp);
2186 
2187 	intel_dp->psr.link_ok = false;
2188 
2189 	mutex_unlock(&intel_dp->psr.lock);
2190 	cancel_work_sync(&intel_dp->psr.work);
2191 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2192 }
2193 
2194 /**
2195  * intel_psr_pause - Pause PSR
2196  * @intel_dp: Intel DP
2197  *
2198  * This function needs to be called after enabling PSR.
2199  */
2200 void intel_psr_pause(struct intel_dp *intel_dp)
2201 {
2202 	struct intel_psr *psr = &intel_dp->psr;
2203 
2204 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2205 		return;
2206 
2207 	mutex_lock(&psr->lock);
2208 
2209 	if (!psr->enabled) {
2210 		mutex_unlock(&psr->lock);
2211 		return;
2212 	}
2213 
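	/*
	 * Only the first pause actually deactivates PSR; nested pauses just
	 * bump the counter.
	 */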
2214 	if (intel_dp->psr.pause_counter++ == 0) {
2215 		intel_psr_exit(intel_dp);
2216 		intel_psr_wait_exit_locked(intel_dp);
2217 	}
2218 
2219 	mutex_unlock(&psr->lock);
2220 
2221 	cancel_work_sync(&psr->work);
2222 	cancel_delayed_work_sync(&psr->dc3co_work);
2223 }
2224 
2225 /**
2226  * intel_psr_resume - Resume PSR
2227  * @intel_dp: Intel DP
2228  *
2229  * This function needs to be called after pausing PSR.
2230  */
2231 void intel_psr_resume(struct intel_dp *intel_dp)
2232 {
2233 	struct intel_display *display = to_intel_display(intel_dp);
2234 	struct intel_psr *psr = &intel_dp->psr;
2235 
2236 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2237 		return;
2238 
2239 	mutex_lock(&psr->lock);
2240 
2241 	if (!psr->enabled)
2242 		goto out;
2243 
2244 	if (!psr->pause_counter) {
2245 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2246 		goto out;
2247 	}
2248 
2249 	if (--intel_dp->psr.pause_counter == 0)
2250 		intel_psr_activate(intel_dp);
2251 
2252 out:
2253 	mutex_unlock(&psr->lock);
2254 }
2255 
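/*
 * Illustrative pause/resume pairing (a sketch, not lifted from any
 * particular caller):
 *
 *	intel_psr_pause(intel_dp);
 *	... touch state that must not race with PSR/Panel Replay activation ...
 *	intel_psr_resume(intel_dp);
 *
 * Calls may nest via psr.pause_counter; PSR is re-activated only once the
 * last outstanding pause has been resumed.
 */
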
2256 /**
2257  * intel_psr_needs_block_dc_vblank - Check if block dc entry is needed
2258  * @crtc_state: CRTC status
2259  *
2260  * We need to block DC6 entry in case of Panel Replay, as enabling VBI doesn't
2261  * prevent it in that case. Panel Replay switches the main link off on DC
2262  * entry, which means vblank interrupts are not fired and is a problem if
2263  * user-space is polling for vblank events.
2264  */
2265 bool intel_psr_needs_block_dc_vblank(const struct intel_crtc_state *crtc_state)
2266 {
2267 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2268 	struct intel_encoder *encoder;
2269 
2270 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2271 		struct intel_dp *intel_dp;
2272 
2273 		if (!intel_encoder_is_dp(encoder))
2274 			continue;
2275 
2276 		intel_dp = enc_to_intel_dp(encoder);
2277 
2278 		if (intel_dp_is_edp(intel_dp) &&
2279 		    CAN_PANEL_REPLAY(intel_dp))
2280 			return true;
2281 	}
2282 
2283 	return false;
2284 }
2285 
2286 /**
2287  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2288  * @dsb: DSB context
2289  * @state: the atomic state
2290  * @crtc: the CRTC
2291  *
2292  * Generate PSR "Frame Change" event.
2293  */
2294 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2295 					  struct intel_atomic_state *state,
2296 					  struct intel_crtc *crtc)
2297 {
2298 	const struct intel_crtc_state *crtc_state =
2299 		intel_pre_commit_crtc_state(state, crtc);
2300 	struct intel_display *display = to_intel_display(crtc);
2301 
2302 	if (crtc_state->has_psr)
2303 		intel_de_write_dsb(display, dsb,
2304 				   CURSURFLIVE(display, crtc->pipe), 0);
2305 }
2306 
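/*
 * ADL-P and display 14+ use a different PSR2_MAN_TRK_CTL bit layout than
 * earlier platforms, so the helpers below return the bit definitions
 * matching the running hardware.
 */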
2307 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2308 {
2309 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2310 		PSR2_MAN_TRK_CTL_ENABLE;
2311 }
2312 
2313 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2314 {
2315 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2316 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2317 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2318 }
2319 
2320 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2321 {
2322 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2323 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2324 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2325 }
2326 
2327 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2328 {
2329 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2330 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2331 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2332 }
2333 
2334 static void intel_psr_force_update(struct intel_dp *intel_dp)
2335 {
2336 	struct intel_display *display = to_intel_display(intel_dp);
2337 
2338 	/*
2339 	 * Display WA #0884: skl+
2340 	 * This documented WA for bxt can be safely applied
2341 	 * broadly so we can force HW tracking to exit PSR
2342 	 * instead of disabling and re-enabling.
2343 	 * Workaround tells us to write 0 to CUR_SURFLIVE_A,
2344 	 * but it makes more sense to write to the currently active
2345 	 * pipe.
2346 	 *
2347 	 * This workaround does not exist for platforms with display 10 or newer,
2348 	 * but testing proved that it works up to display 13; for anything newer
2349 	 * than that further testing will be needed.
2350 	 */
2351 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2352 }
2353 
2354 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2355 					  const struct intel_crtc_state *crtc_state)
2356 {
2357 	struct intel_display *display = to_intel_display(crtc_state);
2358 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2359 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2360 	struct intel_encoder *encoder;
2361 
2362 	if (!crtc_state->enable_psr2_sel_fetch)
2363 		return;
2364 
2365 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2366 					     crtc_state->uapi.encoder_mask) {
2367 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2368 
2369 		if (!dsb)
2370 			lockdep_assert_held(&intel_dp->psr.lock);
2371 
2372 		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2373 			return;
2374 		break;
2375 	}
2376 
2377 	intel_de_write_dsb(display, dsb,
2378 			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2379 			   crtc_state->psr2_man_track_ctl);
2380 
2381 	if (!crtc_state->enable_psr2_su_region_et)
2382 		return;
2383 
2384 	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2385 			   crtc_state->pipe_srcsz_early_tpt);
2386 }
2387 
2388 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2389 				  bool full_update)
2390 {
2391 	struct intel_display *display = to_intel_display(crtc_state);
2392 	u32 val = man_trk_ctl_enable_bit_get(display);
2393 
2394 	/* SF partial frame enable has to be set even on full update */
2395 	val |= man_trk_ctl_partial_frame_bit_get(display);
2396 
2397 	if (full_update) {
2398 		val |= man_trk_ctl_continuos_full_frame(display);
2399 		goto exit;
2400 	}
2401 
2402 	if (crtc_state->psr2_su_area.y1 == -1)
2403 		goto exit;
2404 
2405 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
2406 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2407 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2408 	} else {
2409 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2410 			    crtc_state->psr2_su_area.y1 % 4 ||
2411 			    crtc_state->psr2_su_area.y2 % 4);
2412 
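		/*
		 * Older platforms program the SU region in blocks of 4 lines,
		 * hence the alignment WARN above and the divide by 4 (plus 1,
		 * as the block addressing appears to be 1-based) below.
		 */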
2413 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2414 			crtc_state->psr2_su_area.y1 / 4 + 1);
2415 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2416 			crtc_state->psr2_su_area.y2 / 4 + 1);
2417 	}
2418 exit:
2419 	crtc_state->psr2_man_track_ctl = val;
2420 }
2421 
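/*
 * PIPE_SRCSZ_ERLY_TPT value used with early transport: the SU area size,
 * minus one, encoded like a pipe source size. Returns 0 when early transport
 * is not in use or a full update is being done.
 */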
2422 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2423 					  bool full_update)
2424 {
2425 	int width, height;
2426 
2427 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2428 		return 0;
2429 
2430 	width = drm_rect_width(&crtc_state->psr2_su_area);
2431 	height = drm_rect_height(&crtc_state->psr2_su_area);
2432 
2433 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2434 }
2435 
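/*
 * Grow the accumulated SU/damage area vertically with @damage_area after
 * clipping it against @pipe_src. A y1 of -1 in @overlap_damage_area means
 * "empty so far", in which case it simply takes over the damage y range.
 */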
2436 static void clip_area_update(struct drm_rect *overlap_damage_area,
2437 			     struct drm_rect *damage_area,
2438 			     struct drm_rect *pipe_src)
2439 {
2440 	if (!drm_rect_intersect(damage_area, pipe_src))
2441 		return;
2442 
2443 	if (overlap_damage_area->y1 == -1) {
2444 		overlap_damage_area->y1 = damage_area->y1;
2445 		overlap_damage_area->y2 = damage_area->y2;
2446 		return;
2447 	}
2448 
2449 	if (damage_area->y1 < overlap_damage_area->y1)
2450 		overlap_damage_area->y1 = damage_area->y1;
2451 
2452 	if (damage_area->y2 > overlap_damage_area->y2)
2453 		overlap_damage_area->y2 = damage_area->y2;
2454 }
2455 
2456 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2457 {
2458 	struct intel_display *display = to_intel_display(crtc_state);
2459 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2460 	u16 y_alignment;
2461 
2462 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2463 	if (crtc_state->dsc.compression_enable &&
2464 	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
2465 		y_alignment = vdsc_cfg->slice_height;
2466 	else
2467 		y_alignment = crtc_state->su_y_granularity;
2468 
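	/* Round y1 down and y2 up to the selected alignment. */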
2469 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2470 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2471 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2472 						y_alignment) + 1) * y_alignment;
2473 }
2474 
2475 /*
2476  * When early transport is in use we need to extend SU area to cover
2477  * cursor fully when cursor is in SU area.
2478  */
2479 static void
2480 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2481 				  struct intel_crtc *crtc,
2482 				  bool *cursor_in_su_area)
2483 {
2484 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2485 	struct intel_plane_state *new_plane_state;
2486 	struct intel_plane *plane;
2487 	int i;
2488 
2489 	if (!crtc_state->enable_psr2_su_region_et)
2490 		return;
2491 
2492 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2493 		struct drm_rect inter;
2494 
2495 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2496 			continue;
2497 
2498 		if (plane->id != PLANE_CURSOR)
2499 			continue;
2500 
2501 		if (!new_plane_state->uapi.visible)
2502 			continue;
2503 
2504 		inter = crtc_state->psr2_su_area;
2505 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2506 			continue;
2507 
2508 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2509 				 &crtc_state->pipe_src);
2510 		*cursor_in_su_area = true;
2511 	}
2512 }
2513 
2514 /*
2515  * TODO: Not clear how to handle planes with negative position;
2516  * also planes are not updated if they have a negative X
2517  * position, so for now do a full update in these cases.
2518  *
2519  * Plane scaling and rotation are not supported by selective fetch and both
2520  * properties can change without a modeset, so they need to be checked at
2521  * every atomic commit.
2522  */
2523 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2524 {
2525 	if (plane_state->uapi.dst.y1 < 0 ||
2526 	    plane_state->uapi.dst.x1 < 0 ||
2527 	    plane_state->scaler_id >= 0 ||
2528 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2529 		return false;
2530 
2531 	return true;
2532 }
2533 
2534 /*
2535  * Check for pipe properties that are not supported by selective fetch.
2536  *
2537  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2538  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2539  * enabled and going to the full update path.
2540  */
2541 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2542 {
2543 	if (crtc_state->scaler_state.scaler_id >= 0)
2544 		return false;
2545 
2546 	return true;
2547 }
2548 
2549 /* Wa 14019834836 */
2550 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2551 {
2552 	struct intel_display *display = to_intel_display(crtc_state);
2553 	struct intel_encoder *encoder;
2554 	int hactive_limit;
2555 
2556 	if (crtc_state->psr2_su_area.y1 != 0 ||
2557 	    crtc_state->psr2_su_area.y2 != 0)
2558 		return;
2559 
2560 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2561 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2562 	else
2563 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2564 
2565 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2566 		return;
2567 
2568 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2569 					     crtc_state->uapi.encoder_mask) {
2570 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2571 
2572 		if (!intel_dp_is_edp(intel_dp) &&
2573 		    intel_dp->psr.panel_replay_enabled &&
2574 		    intel_dp->psr.sel_update_enabled) {
2575 			crtc_state->psr2_su_area.y2++;
2576 			return;
2577 		}
2578 	}
2579 }
2580 
2581 static void
2582 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2583 {
2584 	struct intel_display *display = to_intel_display(crtc_state);
2585 
2586 	/* Wa_14014971492 */
2587 	if (!crtc_state->has_panel_replay &&
2588 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2589 	      display->platform.alderlake_p || display->platform.tigerlake)) &&
2590 	    crtc_state->splitter.enable)
2591 		crtc_state->psr2_su_area.y1 = 0;
2592 
2593 	/* Wa 14019834836 */
2594 	if (DISPLAY_VER(display) == 30)
2595 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2596 }
2597 
2598 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2599 				struct intel_crtc *crtc)
2600 {
2601 	struct intel_display *display = to_intel_display(state);
2602 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2603 	struct intel_plane_state *new_plane_state, *old_plane_state;
2604 	struct intel_plane *plane;
2605 	bool full_update = false, cursor_in_su_area = false;
2606 	int i, ret;
2607 
2608 	if (!crtc_state->enable_psr2_sel_fetch)
2609 		return 0;
2610 
2611 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2612 		full_update = true;
2613 		goto skip_sel_fetch_set_loop;
2614 	}
2615 
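	/*
	 * Start with the full pipe width and an "empty" vertical range
	 * (y1/y2 == -1); the loop below accumulates the damaged y range of
	 * all affected planes into psr2_su_area.
	 */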
2616 	crtc_state->psr2_su_area.x1 = 0;
2617 	crtc_state->psr2_su_area.y1 = -1;
2618 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2619 	crtc_state->psr2_su_area.y2 = -1;
2620 
2621 	/*
2622 	 * Calculate minimal selective fetch area of each plane and calculate
2623 	 * the pipe damaged area.
2624 	 * In the next loop the plane selective fetch area will actually be set
2625 	 * using whole pipe damaged area.
2626 	 */
2627 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2628 					     new_plane_state, i) {
2629 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2630 						      .x2 = INT_MAX };
2631 
2632 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2633 			continue;
2634 
2635 		if (!new_plane_state->uapi.visible &&
2636 		    !old_plane_state->uapi.visible)
2637 			continue;
2638 
2639 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2640 			full_update = true;
2641 			break;
2642 		}
2643 
2644 		/*
2645 		 * If visibility changed or the plane moved, mark the whole plane
2646 		 * area as damaged as it needs a complete redraw in both the old
2647 		 * and new position.
2648 		 */
2649 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2650 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2651 				     &old_plane_state->uapi.dst)) {
2652 			if (old_plane_state->uapi.visible) {
2653 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2654 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2655 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2656 						 &crtc_state->pipe_src);
2657 			}
2658 
2659 			if (new_plane_state->uapi.visible) {
2660 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2661 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2662 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2663 						 &crtc_state->pipe_src);
2664 			}
2665 			continue;
2666 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2667 			/* If alpha changed mark the whole plane area as damaged */
2668 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2669 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2670 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2671 					 &crtc_state->pipe_src);
2672 			continue;
2673 		}
2674 
2675 		src = drm_plane_state_src(&new_plane_state->uapi);
2676 		drm_rect_fp_to_int(&src, &src);
2677 
2678 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2679 						     &new_plane_state->uapi, &damaged_area))
2680 			continue;
2681 
2682 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2683 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2684 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2685 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2686 
2687 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2688 	}
2689 
2690 	/*
2691 	 * TODO: For now we are just using full update in case
2692 	 * selective fetch area calculation fails. To optimize this we
2693 	 * should identify cases where this happens and fix the area
2694 	 * calculation for those.
2695 	 */
2696 	if (crtc_state->psr2_su_area.y1 == -1) {
2697 		drm_info_once(display->drm,
2698 			      "Selective fetch area calculation failed in pipe %c\n",
2699 			      pipe_name(crtc->pipe));
2700 		full_update = true;
2701 	}
2702 
2703 	if (full_update)
2704 		goto skip_sel_fetch_set_loop;
2705 
2706 	intel_psr_apply_su_area_workarounds(crtc_state);
2707 
2708 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2709 	if (ret)
2710 		return ret;
2711 
2712 	/*
2713 	 * Adjust su area to cover cursor fully as necessary (early
2714 	 * transport). This needs to be done after
2715 	 * drm_atomic_add_affected_planes to ensure a visible cursor is added
2716 	 * to the affected planes even when the cursor is not updated by itself.
2717 	 */
2718 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2719 
2720 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2721 
2722 	/*
2723 	 * Now that we have the pipe damaged area check if it intersect with
2724 	 * every plane, if it does set the plane selective fetch area.
2725 	 */
2726 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2727 					     new_plane_state, i) {
2728 		struct drm_rect *sel_fetch_area, inter;
2729 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2730 
2731 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2732 		    !new_plane_state->uapi.visible)
2733 			continue;
2734 
2735 		inter = crtc_state->psr2_su_area;
2736 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2737 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2738 			sel_fetch_area->y1 = -1;
2739 			sel_fetch_area->y2 = -1;
2740 			/*
2741 			 * if plane sel fetch was previously enabled ->
2742 			 * disable it
2743 			 */
2744 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2745 				crtc_state->update_planes |= BIT(plane->id);
2746 
2747 			continue;
2748 		}
2749 
2750 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2751 			full_update = true;
2752 			break;
2753 		}
2754 
2755 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2756 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2757 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2758 		crtc_state->update_planes |= BIT(plane->id);
2759 
2760 		/*
2761 		 * Sel_fetch_area is calculated for UV plane. Use
2762 		 * same area for Y plane as well.
2763 		 */
2764 		if (linked) {
2765 			struct intel_plane_state *linked_new_plane_state;
2766 			struct drm_rect *linked_sel_fetch_area;
2767 
2768 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2769 			if (IS_ERR(linked_new_plane_state))
2770 				return PTR_ERR(linked_new_plane_state);
2771 
2772 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2773 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2774 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2775 			crtc_state->update_planes |= BIT(linked->id);
2776 		}
2777 	}
2778 
2779 skip_sel_fetch_set_loop:
2780 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2781 	crtc_state->pipe_srcsz_early_tpt =
2782 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2783 	return 0;
2784 }
2785 
2786 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2787 				struct intel_crtc *crtc)
2788 {
2789 	struct intel_display *display = to_intel_display(state);
2790 	const struct intel_crtc_state *old_crtc_state =
2791 		intel_atomic_get_old_crtc_state(state, crtc);
2792 	const struct intel_crtc_state *new_crtc_state =
2793 		intel_atomic_get_new_crtc_state(state, crtc);
2794 	struct intel_encoder *encoder;
2795 
2796 	if (!HAS_PSR(display))
2797 		return;
2798 
2799 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2800 					     old_crtc_state->uapi.encoder_mask) {
2801 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2802 		struct intel_psr *psr = &intel_dp->psr;
2803 
2804 		mutex_lock(&psr->lock);
2805 
2806 		if (psr->enabled) {
2807 			/*
2808 			 * Reasons to disable:
2809 			 * - PSR disabled in new state
2810 			 * - All planes will go inactive
2811 			 * - Changing between PSR versions
2812 			 * - Region Early Transport changing
2813 			 * - Display WA #1136: skl, bxt
2814 			 */
2815 			if (intel_crtc_needs_modeset(new_crtc_state) ||
2816 			    !new_crtc_state->has_psr ||
2817 			    !new_crtc_state->active_planes ||
2818 			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
2819 			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
2820 			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
2821 			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
2822 				intel_psr_disable_locked(intel_dp);
2823 			else if (new_crtc_state->wm_level_disabled)
2824 				/* Wa_14015648006 */
2825 				wm_optimization_wa(intel_dp, new_crtc_state);
2826 		}
2827 
2828 		mutex_unlock(&psr->lock);
2829 	}
2830 }
2831 
2832 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2833 				 struct intel_crtc *crtc)
2834 {
2835 	struct intel_display *display = to_intel_display(state);
2836 	const struct intel_crtc_state *crtc_state =
2837 		intel_atomic_get_new_crtc_state(state, crtc);
2838 	struct intel_encoder *encoder;
2839 
2840 	if (!crtc_state->has_psr)
2841 		return;
2842 
2843 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2844 					     crtc_state->uapi.encoder_mask) {
2845 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2846 		struct intel_psr *psr = &intel_dp->psr;
2847 		bool keep_disabled = false;
2848 
2849 		mutex_lock(&psr->lock);
2850 
2851 		drm_WARN_ON(display->drm,
2852 			    psr->enabled && !crtc_state->active_planes);
2853 
2854 		keep_disabled |= psr->sink_not_reliable;
2855 		keep_disabled |= !crtc_state->active_planes;
2856 
2857 		/* Display WA #1136: skl, bxt */
2858 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2859 			crtc_state->wm_level_disabled;
2860 
2861 		if (!psr->enabled && !keep_disabled)
2862 			intel_psr_enable_locked(intel_dp, crtc_state);
2863 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2864 			/* Wa_14015648006 */
2865 			wm_optimization_wa(intel_dp, crtc_state);
2866 
2867 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2868 		if (crtc_state->crc_enabled && psr->enabled)
2869 			intel_psr_force_update(intel_dp);
2870 
2871 		/*
2872 		 * Clear possible busy bits in case we have an
2873 		 * invalidate -> flip -> flush sequence.
2874 		 */
2875 		intel_dp->psr.busy_frontbuffer_bits = 0;
2876 
2877 		mutex_unlock(&psr->lock);
2878 	}
2879 }
2880 
2881 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2882 {
2883 	struct intel_display *display = to_intel_display(intel_dp);
2884 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2885 
2886 	/*
2887 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
2888 	 * As all higher states have bit 4 of the PSR2 state set we can just wait for
2889 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2890 	 */
2891 	return intel_de_wait_for_clear(display,
2892 				       EDP_PSR2_STATUS(display, cpu_transcoder),
2893 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2894 }
2895 
2896 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2897 {
2898 	struct intel_display *display = to_intel_display(intel_dp);
2899 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2900 
2901 	/*
2902 	 * From bspec: Panel Self Refresh (BDW+)
2903 	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2904 	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2905 	 * defensive enough to cover everything.
2906 	 */
2907 	return intel_de_wait_for_clear(display,
2908 				       psr_status_reg(display, cpu_transcoder),
2909 				       EDP_PSR_STATUS_STATE_MASK, 50);
2910 }
2911 
2912 /**
2913  * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
2914  * @new_crtc_state: new CRTC state
2915  *
2916  * This function is expected to be called from pipe_update_start() where it is
2917  * not expected to race with PSR enable or disable.
2918  */
2919 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2920 {
2921 	struct intel_display *display = to_intel_display(new_crtc_state);
2922 	struct intel_encoder *encoder;
2923 
2924 	if (!new_crtc_state->has_psr)
2925 		return;
2926 
2927 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2928 					     new_crtc_state->uapi.encoder_mask) {
2929 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2930 		int ret;
2931 
2932 		lockdep_assert_held(&intel_dp->psr.lock);
2933 
2934 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
2935 			continue;
2936 
2937 		if (intel_dp->psr.sel_update_enabled)
2938 			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2939 		else
2940 			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2941 
2942 		if (ret)
2943 			drm_err(display->drm,
2944 				"PSR wait timed out, atomic update may fail\n");
2945 	}
2946 }
2947 
2948 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2949 {
2950 	struct intel_display *display = to_intel_display(intel_dp);
2951 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2952 	i915_reg_t reg;
2953 	u32 mask;
2954 	int err;
2955 
2956 	if (!intel_dp->psr.enabled)
2957 		return false;
2958 
2959 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2960 					  intel_dp->psr.panel_replay_enabled)) {
2961 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2962 		mask = EDP_PSR2_STATUS_STATE_MASK;
2963 	} else {
2964 		reg = psr_status_reg(display, cpu_transcoder);
2965 		mask = EDP_PSR_STATUS_STATE_MASK;
2966 	}
2967 
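	/*
	 * The wait is done with the lock dropped (presumably so frontbuffer
	 * flush/invalidate callbacks are not blocked for up to 50 ms); the
	 * re-check below catches PSR being disabled in the meantime.
	 */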
2968 	mutex_unlock(&intel_dp->psr.lock);
2969 
2970 	err = intel_de_wait_for_clear(display, reg, mask, 50);
2971 	if (err)
2972 		drm_err(display->drm,
2973 			"Timed out waiting for PSR Idle for re-enable\n");
2974 
2975 	/* After the unlocked wait, verify that PSR is still wanted! */
2976 	mutex_lock(&intel_dp->psr.lock);
2977 	return err == 0 && intel_dp->psr.enabled;
2978 }
2979 
2980 static int intel_psr_fastset_force(struct intel_display *display)
2981 {
2982 	struct drm_connector_list_iter conn_iter;
2983 	struct drm_modeset_acquire_ctx ctx;
2984 	struct drm_atomic_state *state;
2985 	struct drm_connector *conn;
2986 	int err = 0;
2987 
2988 	state = drm_atomic_state_alloc(display->drm);
2989 	if (!state)
2990 		return -ENOMEM;
2991 
2992 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
2993 
2994 	state->acquire_ctx = &ctx;
2995 	to_intel_atomic_state(state)->internal = true;
2996 
2997 retry:
2998 	drm_connector_list_iter_begin(display->drm, &conn_iter);
2999 	drm_for_each_connector_iter(conn, &conn_iter) {
3000 		struct drm_connector_state *conn_state;
3001 		struct drm_crtc_state *crtc_state;
3002 
3003 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3004 			continue;
3005 
3006 		conn_state = drm_atomic_get_connector_state(state, conn);
3007 		if (IS_ERR(conn_state)) {
3008 			err = PTR_ERR(conn_state);
3009 			break;
3010 		}
3011 
3012 		if (!conn_state->crtc)
3013 			continue;
3014 
3015 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3016 		if (IS_ERR(crtc_state)) {
3017 			err = PTR_ERR(crtc_state);
3018 			break;
3019 		}
3020 
3021 		/* Mark mode as changed to trigger a pipe->update() */
3022 		crtc_state->mode_changed = true;
3023 	}
3024 	drm_connector_list_iter_end(&conn_iter);
3025 
3026 	if (err == 0)
3027 		err = drm_atomic_commit(state);
3028 
3029 	if (err == -EDEADLK) {
3030 		drm_atomic_state_clear(state);
3031 		err = drm_modeset_backoff(&ctx);
3032 		if (!err)
3033 			goto retry;
3034 	}
3035 
3036 	drm_modeset_drop_locks(&ctx);
3037 	drm_modeset_acquire_fini(&ctx);
3038 	drm_atomic_state_put(state);
3039 
3040 	return err;
3041 }
3042 
3043 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3044 {
3045 	struct intel_display *display = to_intel_display(intel_dp);
3046 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3047 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3048 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3049 	u32 old_mode, old_disable_bits;
3050 	int ret;
3051 
3052 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3053 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3054 		    I915_PSR_DEBUG_MODE_MASK) ||
3055 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3056 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3057 		return -EINVAL;
3058 	}
3059 
3060 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3061 	if (ret)
3062 		return ret;
3063 
3064 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3065 	old_disable_bits = intel_dp->psr.debug &
3066 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3067 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3068 
3069 	intel_dp->psr.debug = val;
3070 
3071 	/*
3072 	 * Do it right away if it's already enabled, otherwise it will be done
3073 	 * when enabling the source.
3074 	 */
3075 	if (intel_dp->psr.enabled)
3076 		psr_irq_control(intel_dp);
3077 
3078 	mutex_unlock(&intel_dp->psr.lock);
3079 
3080 	if (old_mode != mode || old_disable_bits != disable_bits)
3081 		ret = intel_psr_fastset_force(display);
3082 
3083 	return ret;
3084 }
3085 
3086 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3087 {
3088 	struct intel_psr *psr = &intel_dp->psr;
3089 
3090 	intel_psr_disable_locked(intel_dp);
3091 	psr->sink_not_reliable = true;
3092 	/* let's make sure that the sink is awake */
3093 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3094 }
3095 
3096 static void intel_psr_work(struct work_struct *work)
3097 {
3098 	struct intel_dp *intel_dp =
3099 		container_of(work, typeof(*intel_dp), psr.work);
3100 
3101 	mutex_lock(&intel_dp->psr.lock);
3102 
3103 	if (!intel_dp->psr.enabled)
3104 		goto unlock;
3105 
3106 	if (READ_ONCE(intel_dp->psr.irq_aux_error))
3107 		intel_psr_handle_irq(intel_dp);
3108 
3109 	/*
3110 	 * We have to make sure PSR is ready for re-enable,
3111 	 * otherwise it stays disabled until the next full enable/disable cycle.
3112 	 * PSR might take some time to get fully disabled
3113 	 * and be ready for re-enable.
3114 	 */
3115 	if (!__psr_wait_for_idle_locked(intel_dp))
3116 		goto unlock;
3117 
3118 	/*
3119 	 * The delayed work can race with an invalidate hence we need to
3120 	 * recheck. Since psr_flush first clears this and then reschedules we
3121 	 * won't ever miss a flush when bailing out here.
3122 	 */
3123 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3124 		goto unlock;
3125 
3126 	intel_psr_activate(intel_dp);
3127 unlock:
3128 	mutex_unlock(&intel_dp->psr.lock);
3129 }
3130 
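/*
 * Request a full frame fetch: via the dedicated LNL_SFF_CTL register on
 * LNL and newer, via PSR2_MAN_TRK_CTL (single + continuous full frame bits)
 * on older platforms.
 */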
3131 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3132 {
3133 	struct intel_display *display = to_intel_display(intel_dp);
3134 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3135 
3136 	if (!intel_dp->psr.psr2_sel_fetch_enabled)
3137 		return;
3138 
3139 	if (DISPLAY_VER(display) >= 20)
3140 		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3141 			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3142 	else
3143 		intel_de_write(display,
3144 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3145 			       man_trk_ctl_enable_bit_get(display) |
3146 			       man_trk_ctl_partial_frame_bit_get(display) |
3147 			       man_trk_ctl_single_full_frame_bit_get(display) |
3148 			       man_trk_ctl_continuos_full_frame(display));
3149 }
3150 
3151 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3152 {
3153 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3154 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3155 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3156 			intel_psr_configure_full_frame_update(intel_dp);
3157 		}
3158 
3159 		intel_psr_force_update(intel_dp);
3160 	} else {
3161 		intel_psr_exit(intel_dp);
3162 	}
3163 }
3164 
3165 /**
3166  * intel_psr_invalidate - Invalidate PSR
3167  * @display: display device
3168  * @frontbuffer_bits: frontbuffer plane tracking bits
3169  * @origin: which operation caused the invalidate
3170  *
3171  * Since the hardware frontbuffer tracking has gaps we need to integrate
3172  * with the software frontbuffer tracking. This function gets called every
3173  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3174  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3175  *
3176  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3177  */
3178 void intel_psr_invalidate(struct intel_display *display,
3179 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3180 {
3181 	struct intel_encoder *encoder;
3182 
3183 	if (origin == ORIGIN_FLIP)
3184 		return;
3185 
3186 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3187 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3188 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3189 
3190 		mutex_lock(&intel_dp->psr.lock);
3191 		if (!intel_dp->psr.enabled) {
3192 			mutex_unlock(&intel_dp->psr.lock);
3193 			continue;
3194 		}
3195 
3196 		pipe_frontbuffer_bits &=
3197 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3198 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3199 
3200 		if (pipe_frontbuffer_bits)
3201 			_psr_invalidate_handle(intel_dp);
3202 
3203 		mutex_unlock(&intel_dp->psr.lock);
3204 	}
3205 }
3206 /*
3207  * When we completely rely on PSR2 S/W tracking in the future,
3208  * intel_psr_flush() will invalidate and flush the PSR for the ORIGIN_FLIP
3209  * event as well, therefore tgl_dc3co_flush_locked() will need to be changed
3210  * accordingly.
3211  */
3212 static void
3213 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3214 		       enum fb_op_origin origin)
3215 {
3216 	struct intel_display *display = to_intel_display(intel_dp);
3217 	struct drm_i915_private *i915 = to_i915(display->drm);
3218 
3219 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3220 	    !intel_dp->psr.active)
3221 		return;
3222 
3223 	/*
3224 	 * Every frontbuffer flush/flip event pushes the delayed work's timeout
3225 	 * out; when the delayed work finally runs, the display has been idle.
3226 	 */
3227 	if (!(frontbuffer_bits &
3228 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3229 		return;
3230 
3231 	tgl_psr2_enable_dc3co(intel_dp);
3232 	mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3233 			 intel_dp->psr.dc3co_exit_delay);
3234 }
3235 
3236 static void _psr_flush_handle(struct intel_dp *intel_dp)
3237 {
3238 	struct intel_display *display = to_intel_display(intel_dp);
3239 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3240 
3241 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3242 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3243 			/* can we turn CFF off? */
3244 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
3245 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3246 		}
3247 
3248 		/*
3249 		 * Still keep the CFF bit enabled, as we don't have a proper SU
3250 		 * configuration in case an update is sent for any reason after
3251 		 * the SFF bit gets cleared by the HW on the next vblank.
3252 		 *
3253 		 * NOTE: Setting the CFF bit is not needed from LunarLake onwards,
3254 		 * as there is a dedicated register for the SFF bit and we are not
3255 		 * overwriting the existing SU configuration.
3256 		 */
3257 		intel_psr_configure_full_frame_update(intel_dp);
3258 	}
3259 
3260 	intel_psr_force_update(intel_dp);
3261 
3262 	if (!intel_dp->psr.psr2_sel_fetch_enabled && !intel_dp->psr.active &&
3263 	    !intel_dp->psr.busy_frontbuffer_bits)
3264 		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3265 }
3266 
3267 /**
3268  * intel_psr_flush - Flush PSR
3269  * @display: display device
3270  * @frontbuffer_bits: frontbuffer plane tracking bits
3271  * @origin: which operation caused the flush
3272  *
3273  * Since the hardware frontbuffer tracking has gaps we need to integrate
3274  * with the software frontbuffer tracking. This function gets called every
3275  * time frontbuffer rendering has completed and flushed out to memory. PSR
3276  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3277  *
3278  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3279  */
3280 void intel_psr_flush(struct intel_display *display,
3281 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3282 {
3283 	struct intel_encoder *encoder;
3284 
3285 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3286 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3287 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3288 
3289 		mutex_lock(&intel_dp->psr.lock);
3290 		if (!intel_dp->psr.enabled) {
3291 			mutex_unlock(&intel_dp->psr.lock);
3292 			continue;
3293 		}
3294 
3295 		pipe_frontbuffer_bits &=
3296 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3297 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3298 
3299 		/*
3300 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3301 		 * we have to ensure that the PSR is not activated until
3302 		 * intel_psr_resume() is called.
3303 		 */
3304 		if (intel_dp->psr.pause_counter)
3305 			goto unlock;
3306 
3307 		if (origin == ORIGIN_FLIP ||
3308 		    (origin == ORIGIN_CURSOR_UPDATE &&
3309 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3310 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3311 			goto unlock;
3312 		}
3313 
3314 		if (pipe_frontbuffer_bits == 0)
3315 			goto unlock;
3316 
3317 		/* By definition flush = invalidate + flush */
3318 		_psr_flush_handle(intel_dp);
3319 unlock:
3320 		mutex_unlock(&intel_dp->psr.lock);
3321 	}
3322 }
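
/*
 * A minimal sketch of the flip-side behaviour (illustrative only): a flush
 * arriving with ORIGIN_FLIP, e.g.
 *
 *	intel_psr_flush(display, INTEL_FRONTBUFFER_ALL_MASK(pipe), ORIGIN_FLIP);
 *
 * only exercises the DC3CO exitline path via tgl_dc3co_flush_locked() and
 * leaves PSR exit/re-entry to the selective fetch / hardware tracking of
 * the flip itself.
 */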
3323 
3324 /**
3325  * intel_psr_init - Init basic PSR work and mutex.
3326  * @intel_dp: Intel DP
3327  *
3328  * This function is called after the connector has been initialized
3329  * (connector initialization handles the connector capabilities) and it
3330  * initializes the basic PSR state for each DP encoder.
3331  */
3332 void intel_psr_init(struct intel_dp *intel_dp)
3333 {
3334 	struct intel_display *display = to_intel_display(intel_dp);
3335 	struct intel_connector *connector = intel_dp->attached_connector;
3336 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3337 
3338 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3339 		return;
3340 
3341 	/*
3342 	 * The HSW spec explicitly says PSR is tied to port A.
3343 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3344 	 * but on BDW, GEN9 and GEN11 the HW team has only validated the eDP
3345 	 * transcoder.
3346 	 * For now support only one instance of PSR on BDW, GEN9 and GEN11 and
3347 	 * keep it hardcoded to PORT_A for those platforms.
3348 	 * GEN12 supports an instance of the PSR registers per transcoder.
3349 	 */
3350 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3351 		drm_dbg_kms(display->drm,
3352 			    "PSR condition failed: Port not supported\n");
3353 		return;
3354 	}
3355 
3356 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3357 	    DISPLAY_VER(display) >= 20)
3358 		intel_dp->psr.source_panel_replay_support = true;
3359 
3360 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3361 		intel_dp->psr.source_support = true;
3362 
3363 	/* Set the link_standby vs. link_off default */
3364 	if (DISPLAY_VER(display) < 12)
3365 		/* For platforms up to TGL, respect the VBT again */
3366 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3367 
3368 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3369 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3370 	mutex_init(&intel_dp->psr.lock);
3371 }
3372 
3373 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3374 					   u8 *status, u8 *error_status)
3375 {
3376 	struct drm_dp_aux *aux = &intel_dp->aux;
3377 	int ret;
3378 	unsigned int offset;
3379 
3380 	offset = intel_dp->psr.panel_replay_enabled ?
3381 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3382 
3383 	ret = drm_dp_dpcd_readb(aux, offset, status);
3384 	if (ret != 1)
3385 		return ret;
3386 
3387 	offset = intel_dp->psr.panel_replay_enabled ?
3388 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3389 
3390 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3391 	if (ret != 1)
3392 		return ret;
3393 
3394 	*status = *status & DP_PSR_SINK_STATE_MASK;
3395 
3396 	return 0;
3397 }
3398 
3399 static void psr_alpm_check(struct intel_dp *intel_dp)
3400 {
3401 	struct intel_display *display = to_intel_display(intel_dp);
3402 	struct drm_dp_aux *aux = &intel_dp->aux;
3403 	struct intel_psr *psr = &intel_dp->psr;
3404 	u8 val;
3405 	int r;
3406 
3407 	if (!psr->sel_update_enabled)
3408 		return;
3409 
3410 	r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3411 	if (r != 1) {
3412 		drm_err(display->drm, "Error reading ALPM status\n");
3413 		return;
3414 	}
3415 
3416 	if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3417 		intel_psr_disable_locked(intel_dp);
3418 		psr->sink_not_reliable = true;
3419 		drm_dbg_kms(display->drm,
3420 			    "ALPM lock timeout error, disabling PSR\n");
3421 
3422 		/* Clearing error */
3423 		drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3424 	}
3425 }
3426 
3427 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3428 {
3429 	struct intel_display *display = to_intel_display(intel_dp);
3430 	struct intel_psr *psr = &intel_dp->psr;
3431 	u8 val;
3432 	int r;
3433 
3434 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3435 	if (r != 1) {
3436 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3437 		return;
3438 	}
3439 
3440 	if (val & DP_PSR_CAPS_CHANGE) {
3441 		intel_psr_disable_locked(intel_dp);
3442 		psr->sink_not_reliable = true;
3443 		drm_dbg_kms(display->drm,
3444 			    "Sink PSR capability changed, disabling PSR\n");
3445 
3446 		/* Clearing it */
3447 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3448 	}
3449 }
3450 
3451 /*
3452  * The following error bits are common to PSR and Panel Replay:
3453  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3454  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3455  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3456  * so this function relies on the PSR definitions.
3457  */
3458 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3459 {
3460 	struct intel_display *display = to_intel_display(intel_dp);
3461 	struct intel_psr *psr = &intel_dp->psr;
3462 	u8 status, error_status;
3463 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3464 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3465 			  DP_PSR_LINK_CRC_ERROR;
3466 
3467 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3468 		return;
3469 
3470 	mutex_lock(&psr->lock);
3471 
3472 	psr->link_ok = false;
3473 
3474 	if (!psr->enabled)
3475 		goto exit;
3476 
3477 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3478 		drm_err(display->drm,
3479 			"Error reading PSR status or error status\n");
3480 		goto exit;
3481 	}
3482 
3483 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3484 	    (error_status & errors)) {
3485 		intel_psr_disable_locked(intel_dp);
3486 		psr->sink_not_reliable = true;
3487 	}
3488 
3489 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3490 	    !error_status)
3491 		drm_dbg_kms(display->drm,
3492 			    "PSR sink internal error, disabling PSR\n");
3493 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3494 		drm_dbg_kms(display->drm,
3495 			    "PSR RFB storage error, disabling PSR\n");
3496 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3497 		drm_dbg_kms(display->drm,
3498 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3499 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3500 		drm_dbg_kms(display->drm,
3501 			    "PSR Link CRC error, disabling PSR\n");
3502 
3503 	if (error_status & ~errors)
3504 		drm_err(display->drm,
3505 			"PSR_ERROR_STATUS unhandled errors %x\n",
3506 			error_status & ~errors);
3507 	/* clear status register */
3508 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3509 
3510 	if (!psr->panel_replay_enabled) {
3511 		psr_alpm_check(intel_dp);
3512 		psr_capability_changed_check(intel_dp);
3513 	}
3514 
3515 exit:
3516 	mutex_unlock(&psr->lock);
3517 }
3518 
3519 bool intel_psr_enabled(struct intel_dp *intel_dp)
3520 {
3521 	bool ret;
3522 
3523 	if (!CAN_PSR(intel_dp))
3524 		return false;
3525 
3526 	mutex_lock(&intel_dp->psr.lock);
3527 	ret = intel_dp->psr.enabled;
3528 	mutex_unlock(&intel_dp->psr.lock);
3529 
3530 	return ret;
3531 }
3532 
3533 /**
3534  * intel_psr_link_ok - return psr->link_ok
3535  * @intel_dp: struct intel_dp
3536  *
3537  * We are seeing unexpected link re-trainings with some panels, caused by the
3538  * panel reporting a bad link status after PSR is enabled. Code checking the
3539  * link status can call this to decide whether a bad link status reported by
3540  * the panel can be ignored, i.e. if the panel reports a bad link but
3541  * intel_psr_link_ok() says the link is ok, the caller should rely on the latter.
3542  *
3543  * Return value of link_ok
3544  */
3545 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3546 {
3547 	bool ret;
3548 
3549 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3550 	    !intel_dp_is_edp(intel_dp))
3551 		return false;
3552 
3553 	mutex_lock(&intel_dp->psr.lock);
3554 	ret = intel_dp->psr.link_ok;
3555 	mutex_unlock(&intel_dp->psr.lock);
3556 
3557 	return ret;
3558 }
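
/*
 * Illustrative sketch of the intended use (the surrounding retrain check is
 * an assumption, not code from this file): link re-train logic can mask out
 * a bad link status reported by the sink while PSR owns the link, e.g.
 *
 *	if (!drm_dp_channel_eq_ok(link_status, lane_count) &&
 *	    !intel_psr_link_ok(intel_dp))
 *		... schedule link retraining ...
 */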
3559 
3560 /**
3561  * intel_psr_lock - grab PSR lock
3562  * @crtc_state: the crtc state
3563  *
3564  * This is initially meant to be used around a CRTC update, when
3565  * vblank sensitive registers are updated and we need to grab the lock
3566  * beforehand to avoid vblank evasion.
3567  */
3568 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3569 {
3570 	struct intel_display *display = to_intel_display(crtc_state);
3571 	struct intel_encoder *encoder;
3572 
3573 	if (!crtc_state->has_psr)
3574 		return;
3575 
3576 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3577 					     crtc_state->uapi.encoder_mask) {
3578 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3579 
3580 		mutex_lock(&intel_dp->psr.lock);
3581 		break;
3582 	}
3583 }
3584 
3585 /**
3586  * intel_psr_unlock - release PSR lock
3587  * @crtc_state: the crtc state
3588  *
3589  * Release the PSR lock that was held during pipe update.
3590  */
3591 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3592 {
3593 	struct intel_display *display = to_intel_display(crtc_state);
3594 	struct intel_encoder *encoder;
3595 
3596 	if (!crtc_state->has_psr)
3597 		return;
3598 
3599 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3600 					     crtc_state->uapi.encoder_mask) {
3601 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3602 
3603 		mutex_unlock(&intel_dp->psr.lock);
3604 		break;
3605 	}
3606 }
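
/*
 * Illustrative locking sketch (the surrounding pipe-update steps are
 * assumptions, not code from this file): the expected pattern around a
 * pipe update is
 *
 *	intel_psr_lock(new_crtc_state);
 *	... vblank evasion and vblank sensitive register writes ...
 *	intel_psr_unlock(new_crtc_state);
 */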
3607 
3608 static void
3609 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3610 {
3611 	struct intel_display *display = to_intel_display(intel_dp);
3612 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3613 	const char *status = "unknown";
3614 	u32 val, status_val;
3615 
3616 	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
3617 	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
3618 		static const char * const live_status[] = {
3619 			"IDLE",
3620 			"CAPTURE",
3621 			"CAPTURE_FS",
3622 			"SLEEP",
3623 			"BUFON_FW",
3624 			"ML_UP",
3625 			"SU_STANDBY",
3626 			"FAST_SLEEP",
3627 			"DEEP_SLEEP",
3628 			"BUF_ON",
3629 			"TG_ON"
3630 		};
3631 		val = intel_de_read(display,
3632 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3633 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3634 		if (status_val < ARRAY_SIZE(live_status))
3635 			status = live_status[status_val];
3636 	} else {
3637 		static const char * const live_status[] = {
3638 			"IDLE",
3639 			"SRDONACK",
3640 			"SRDENT",
3641 			"BUFOFF",
3642 			"BUFON",
3643 			"AUXACK",
3644 			"SRDOFFACK",
3645 			"SRDENT_ON",
3646 		};
3647 		val = intel_de_read(display,
3648 				    psr_status_reg(display, cpu_transcoder));
3649 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3650 		if (status_val < ARRAY_SIZE(live_status))
3651 			status = live_status[status_val];
3652 	}
3653 
3654 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3655 }
3656 
3657 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3658 				      struct seq_file *m)
3659 {
3660 	struct intel_psr *psr = &intel_dp->psr;
3661 
3662 	seq_printf(m, "Sink support: PSR = %s",
3663 		   str_yes_no(psr->sink_support));
3664 
3665 	if (psr->sink_support)
3666 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3667 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3668 		seq_printf(m, " (Early Transport)");
3669 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3670 	seq_printf(m, ", Panel Replay Selective Update = %s",
3671 		   str_yes_no(psr->sink_panel_replay_su_support));
3672 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3673 		seq_printf(m, " (Early Transport)");
3674 	seq_printf(m, "\n");
3675 }
3676 
3677 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3678 				 struct seq_file *m)
3679 {
3680 	struct intel_psr *psr = &intel_dp->psr;
3681 	const char *status, *mode, *region_et;
3682 
3683 	if (psr->enabled)
3684 		status = " enabled";
3685 	else
3686 		status = "disabled";
3687 
3688 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
3689 		mode = "Panel Replay Selective Update";
3690 	else if (psr->panel_replay_enabled)
3691 		mode = "Panel Replay";
3692 	else if (psr->sel_update_enabled)
3693 		mode = "PSR2";
3694 	else if (psr->enabled)
3695 		mode = "PSR1";
3696 	else
3697 		mode = "";
3698 
3699 	if (psr->su_region_et_enabled)
3700 		region_et = " (Early Transport)";
3701 	else
3702 		region_et = "";
3703 
3704 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3705 }
3706 
3707 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3708 {
3709 	struct intel_display *display = to_intel_display(intel_dp);
3710 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3711 	struct intel_psr *psr = &intel_dp->psr;
3712 	struct ref_tracker *wakeref;
3713 	bool enabled;
3714 	u32 val, psr2_ctl;
3715 
3716 	intel_psr_sink_capability(intel_dp, m);
3717 
3718 	if (!(psr->sink_support || psr->sink_panel_replay_support))
3719 		return 0;
3720 
3721 	wakeref = intel_display_rpm_get(display);
3722 	mutex_lock(&psr->lock);
3723 
3724 	intel_psr_print_mode(intel_dp, m);
3725 
3726 	if (!psr->enabled) {
3727 		seq_printf(m, "PSR sink not reliable: %s\n",
3728 			   str_yes_no(psr->sink_not_reliable));
3729 
3730 		goto unlock;
3731 	}
3732 
3733 	if (psr->panel_replay_enabled) {
3734 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3735 
3736 		if (intel_dp_is_edp(intel_dp))
3737 			psr2_ctl = intel_de_read(display,
3738 						 EDP_PSR2_CTL(display,
3739 							      cpu_transcoder));
3740 
3741 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3742 	} else if (psr->sel_update_enabled) {
3743 		val = intel_de_read(display,
3744 				    EDP_PSR2_CTL(display, cpu_transcoder));
3745 		enabled = val & EDP_PSR2_ENABLE;
3746 	} else {
3747 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3748 		enabled = val & EDP_PSR_ENABLE;
3749 	}
3750 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3751 		   str_enabled_disabled(enabled), val);
3752 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3753 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
3754 			   psr2_ctl);
3755 	psr_source_status(intel_dp, m);
3756 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3757 		   psr->busy_frontbuffer_bits);
3758 
3759 	/*
3760 	 * SKL+ Perf counter is reset to 0 every time DC state is entered
3761 	 */
3762 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3763 	seq_printf(m, "Performance counter: %u\n",
3764 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3765 
3766 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
3767 		seq_printf(m, "Last attempted entry at: %lld\n",
3768 			   psr->last_entry_attempt);
3769 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3770 	}
3771 
3772 	if (psr->sel_update_enabled) {
3773 		u32 su_frames_val[3];
3774 		int frame;
3775 
3776 		/*
3777 		 * Reading all 3 registers beforehand to minimize crossing a
3778 		 * frame boundary between register reads
3779 		 */
3780 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3781 			val = intel_de_read(display,
3782 					    PSR2_SU_STATUS(display, cpu_transcoder, frame));
3783 			su_frames_val[frame / 3] = val;
3784 		}
3785 
3786 		seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3787 
3788 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3789 			u32 su_blocks;
3790 
3791 			su_blocks = su_frames_val[frame / 3] &
3792 				    PSR2_SU_STATUS_MASK(frame);
3793 			su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3794 			seq_printf(m, "%d\t%d\n", frame, su_blocks);
3795 		}
3796 
3797 		seq_printf(m, "PSR2 selective fetch: %s\n",
3798 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3799 	}
3800 
3801 unlock:
3802 	mutex_unlock(&psr->lock);
3803 	intel_display_rpm_put(display, wakeref);
3804 
3805 	return 0;
3806 }
3807 
3808 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3809 {
3810 	struct intel_display *display = m->private;
3811 	struct intel_dp *intel_dp = NULL;
3812 	struct intel_encoder *encoder;
3813 
3814 	if (!HAS_PSR(display))
3815 		return -ENODEV;
3816 
3817 	/* Find the first EDP which supports PSR */
3818 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3819 		intel_dp = enc_to_intel_dp(encoder);
3820 		break;
3821 	}
3822 
3823 	if (!intel_dp)
3824 		return -ENODEV;
3825 
3826 	return intel_psr_status(m, intel_dp);
3827 }
3828 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3829 
3830 static int
3831 i915_edp_psr_debug_set(void *data, u64 val)
3832 {
3833 	struct intel_display *display = data;
3834 	struct intel_encoder *encoder;
3835 	int ret = -ENODEV;
3836 
3837 	if (!HAS_PSR(display))
3838 		return ret;
3839 
3840 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3841 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3842 
3843 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3844 
3845 		// TODO: split into each transcoder's PSR debug state
3846 		with_intel_display_rpm(display)
3847 			ret = intel_psr_debug_set(intel_dp, val);
3848 	}
3849 
3850 	return ret;
3851 }
3852 
3853 static int
3854 i915_edp_psr_debug_get(void *data, u64 *val)
3855 {
3856 	struct intel_display *display = data;
3857 	struct intel_encoder *encoder;
3858 
3859 	if (!HAS_PSR(display))
3860 		return -ENODEV;
3861 
3862 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3863 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3864 
3865 		// TODO: split into each transcoder's PSR debug state
3866 		*val = READ_ONCE(intel_dp->psr.debug);
3867 		return 0;
3868 	}
3869 
3870 	return -ENODEV;
3871 }
3872 
3873 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3874 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3875 			"%llu\n");
3876 
3877 void intel_psr_debugfs_register(struct intel_display *display)
3878 {
3879 	struct drm_minor *minor = display->drm->primary;
3880 
3881 	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3882 			    display, &i915_edp_psr_debug_fops);
3883 
3884 	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3885 			    display, &i915_edp_psr_status_fops);
3886 }
3887 
3888 static const char *psr_mode_str(struct intel_dp *intel_dp)
3889 {
3890 	if (intel_dp->psr.panel_replay_enabled)
3891 		return "PANEL-REPLAY";
3892 	else if (intel_dp->psr.enabled)
3893 		return "PSR";
3894 
3895 	return "unknown";
3896 }
3897 
3898 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3899 {
3900 	struct intel_connector *connector = m->private;
3901 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3902 	static const char * const sink_status[] = {
3903 		"inactive",
3904 		"transition to active, capture and display",
3905 		"active, display from RFB",
3906 		"active, capture and display on sink device timings",
3907 		"transition to inactive, capture and display, timing re-sync",
3908 		"reserved",
3909 		"reserved",
3910 		"sink internal error",
3911 	};
3912 	const char *str;
3913 	int ret;
3914 	u8 status, error_status;
3915 
3916 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3917 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3918 		return -ENODEV;
3919 	}
3920 
3921 	if (connector->base.status != connector_status_connected)
3922 		return -ENODEV;
3923 
3924 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3925 	if (ret)
3926 		return ret;
3927 
3928 	status &= DP_PSR_SINK_STATE_MASK;
3929 	if (status < ARRAY_SIZE(sink_status))
3930 		str = sink_status[status];
3931 	else
3932 		str = "unknown";
3933 
3934 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3935 
3936 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3937 
3938 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3939 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3940 			    DP_PSR_LINK_CRC_ERROR))
3941 		seq_puts(m, ":\n");
3942 	else
3943 		seq_puts(m, "\n");
3944 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3945 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3946 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3947 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3948 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3949 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3950 
3951 	return ret;
3952 }
3953 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3954 
3955 static int i915_psr_status_show(struct seq_file *m, void *data)
3956 {
3957 	struct intel_connector *connector = m->private;
3958 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3959 
3960 	return intel_psr_status(m, intel_dp);
3961 }
3962 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3963 
3964 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3965 {
3966 	struct intel_display *display = to_intel_display(connector);
3967 	struct dentry *root = connector->base.debugfs_entry;
3968 
3969 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3970 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
3971 		return;
3972 
3973 	debugfs_create_file("i915_psr_sink_status", 0444, root,
3974 			    connector, &i915_psr_sink_status_fops);
3975 
3976 	if (HAS_PSR(display) || HAS_DP20(display))
3977 		debugfs_create_file("i915_psr_status", 0444, root,
3978 				    connector, &i915_psr_status_fops);
3979 }
3980