xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision 390db60f8e2bd21fae544917eb3a8618265c058c)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 #include <drm/drm_vblank.h>
30 
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_regs.h"
40 #include "intel_display_rpm.h"
41 #include "intel_display_types.h"
42 #include "intel_dmc.h"
43 #include "intel_dp.h"
44 #include "intel_dp_aux.h"
45 #include "intel_dsb.h"
46 #include "intel_frontbuffer.h"
47 #include "intel_hdmi.h"
48 #include "intel_psr.h"
49 #include "intel_psr_regs.h"
50 #include "intel_snps_phy.h"
51 #include "intel_step.h"
52 #include "intel_vblank.h"
53 #include "intel_vrr.h"
54 #include "skl_universal_plane.h"
55 
56 /**
57  * DOC: Panel Self Refresh (PSR/SRD)
58  *
59  * Since Haswell the display controller supports Panel Self-Refresh on display
60  * panels which have a remote frame buffer (RFB) implemented according to the
61  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower standby
62  * states when the system is idle but the display is on, as it eliminates display
63  * refresh requests to DDR memory completely as long as the frame buffer for that
64  * display is unchanged.
65  *
66  * Panel Self Refresh must be supported by both Hardware (source) and
67  * Panel (sink).
68  *
69  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
70  * to power down the link and memory controller. For DSI panels the same idea
71  * is called "manual mode".
72  *
73  * The implementation uses the hardware-based PSR support which automatically
74  * enters/exits self-refresh mode. The hardware takes care of sending the
75  * required DP aux message and could even retrain the link (that part isn't
76  * enabled yet though). The hardware also keeps track of any frontbuffer
77  * changes to know when to exit self-refresh mode again. Unfortunately that
78  * part doesn't work too well, hence why the i915 PSR support uses the
79  * software frontbuffer tracking to make sure it doesn't miss a screen
80  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
81  * get called by the frontbuffer tracking code. Note that because of locking
82  * issues the self-refresh re-enable code is done from a work queue, which
83  * must be correctly synchronized/cancelled when shutting down the pipe.
84  *
85  * DC3CO (DC3 clock off)
86  *
87  * On top of PSR2, GEN12 adds an intermediate power savings state that turns
88  * the clock off automatically during the PSR2 idle state.
89  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
90  * entry/exit allows the HW to enter a low-power state even when page flipping
91  * periodically (for instance a 30fps video playback scenario).
92  *
93  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was in
94  * it), so DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run after
95  * 6 frames. If no other flip occurs and that work is executed, DC3CO is
96  * disabled and PSR2 is configured to enter deep sleep, resetting again in case
97  * of another flip.
98  * Front buffer modifications do not trigger DC3CO activation on purpose as it
99  * would bring a lot of complexity and most modern systems will only
100  * use page flips.
101  */
102 
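/*
 * Illustrative sketch, not part of the original file: roughly how the
 * flip-driven DC3CO dance described above could be wired up. It reuses
 * tgl_psr2_enable_dc3co() and the psr.dc3co_work handler defined further
 * down in this file; the helper name, the use of display->wq.unordered and
 * the psr.dc3co_exit_delay field (a jiffies delay corresponding to ~6
 * frames) are assumptions made for this example only.
 */
#if 0	/* example only, not compiled */
static void example_dc3co_flip_handler(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* A flip kicked PSR2 out of deep sleep: allow DC3CO instead ... */
	tgl_psr2_enable_dc3co(intel_dp);

	/* ... and (re)arm the timer that falls back to deep sleep. */
	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
			 intel_dp->psr.dc3co_exit_delay);
}
#endif
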
103 /*
104  * Description of PSR mask bits:
105  *
106  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
107  *
108  *  When unmasked (nearly) all display register writes (eg. even
109  *  SWF) trigger a PSR exit. Some registers are excluded from this
110  *  and they have a more specific mask (described below). On icl+
111  *  this bit no longer exists and is effectively always set.
112  *
113  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
114  *
115  *  When unmasked (nearly) all pipe/plane register writes
116  *  trigger a PSR exit. Some plane registers are excluded from this
117  *  and they have a more specific mask (described below).
118  *
119  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
120  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
121  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
122  *
123  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
124  *  SPR_SURF/CURBASE are not included in this and instead are
125  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
126  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
127  *
128  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
129  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
130  *
131  *  When unmasked PSR is blocked as long as the sprite
132  *  plane is enabled. skl+ with their universal planes no
133  *  longer have a mask bit like this, and no plane being
134  * enabled blocks PSR.
135  *
136  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
137  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
138  *
139  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
140  *  this bit doesn't exist, but CURPOS is included in the
141  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
142  *
143  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
144  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
145  *
146  *  When unmasked PSR is blocked as long as vblank and/or vsync
147  *  interrupt is unmasked in IMR *and* enabled in IER.
148  *
149  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
150  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
151  *
152  *  Selects whether PSR exit generates an extra vblank before
153  *  the first frame is transmitted. Also note the opposite polarity
154  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
155  *  unmasked==do not generate the extra vblank).
156  *
157  *  With DC states enabled the extra vblank happens after link training,
158  *  with DC states disabled it happens immediately upon PSR exit trigger.
159  *  No idea as of now why there is a difference. HSW/BDW (which don't
160  *  even have DMC) always generate it after link training. Go figure.
161  *
162  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
163  *  and thus won't latch until the first vblank. So with DC states
164  *  enabled the register effectively uses the reset value during DC5
165  *  exit+PSR exit sequence, and thus the bit does nothing until
166  *  latched by the vblank that it was trying to prevent from being
167  *  generated in the first place. So we should probably call this
168  *  one a chicken/egg bit instead on skl+.
169  *
170  *  In standby mode (as opposed to link-off) this makes no difference
171  *  as the timing generator keeps running the whole time generating
172  *  normal periodic vblanks.
173  *
174  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
175  *  and doing so makes the behaviour match the skl+ reset value.
176  *
177  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
178  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
179  *
180  *  On BDW without this bit set no vblanks whatsoever are
181  *  generated after PSR exit. On HSW this has no apparent effect.
182  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
183  *
184  * The rest of the bits are more self-explanatory and/or
185  * irrelevant for normal operation.
186  *
187  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
188  * has_sel_update:
189  *
190  *  has_psr (alone):					PSR1
191  *  has_psr + has_sel_update:				PSR2
192  *  has_psr + has_panel_replay:				Panel Replay
193  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
194  *
195  * Description of some intel_psr variables. enabled, panel_replay_enabled,
196  * sel_update_enabled
197  *
198  *  enabled (alone):						PSR1
199  *  enabled + sel_update_enabled:				PSR2
200  *  enabled + panel_replay_enabled:				Panel Replay
201  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
202  */
203 
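/*
 * Illustrative sketch, not part of the original file: the has_psr /
 * has_sel_update / has_panel_replay table above expressed as a tiny
 * helper, purely to show how the flag combinations map to the mode names.
 * The helper itself is hypothetical and unused.
 */
#if 0	/* example only, not compiled */
static const char *example_psr_mode_name(const struct intel_crtc_state *crtc_state)
{
	if (!crtc_state->has_psr)
		return "none";

	if (crtc_state->has_panel_replay)
		return crtc_state->has_sel_update ?
			"Panel Replay Selective Update" : "Panel Replay";

	return crtc_state->has_sel_update ? "PSR2" : "PSR1";
}
#endif
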
204 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
205 			   (intel_dp)->psr.source_support)
206 
207 bool intel_encoder_can_psr(struct intel_encoder *encoder)
208 {
209 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
210 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
211 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
212 	else
213 		return false;
214 }
215 
216 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
217 				  const struct intel_crtc_state *crtc_state)
218 {
219 	/*
220 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
221 	 * the output is enabled. For non-eDP outputs the main link is always
222 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
223 	 * for eDP.
224 	 *
225 	 * TODO:
226 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
227 	 *   the ALPM with main-link off mode is not enabled.
228 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
229 	 *   main-link off mode is added for it and this mode gets enabled.
230 	 */
231 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
232 	       intel_encoder_can_psr(encoder);
233 }
234 
235 static bool psr_global_enabled(struct intel_dp *intel_dp)
236 {
237 	struct intel_connector *connector = intel_dp->attached_connector;
238 
239 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
240 	case I915_PSR_DEBUG_DEFAULT:
241 		return intel_dp_is_edp(intel_dp) ?
242 			connector->panel.vbt.psr.enable : true;
243 	case I915_PSR_DEBUG_DISABLE:
244 		return false;
245 	default:
246 		return true;
247 	}
248 }
249 
250 static bool sel_update_global_enabled(struct intel_dp *intel_dp)
251 {
252 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
253 	case I915_PSR_DEBUG_DISABLE:
254 	case I915_PSR_DEBUG_FORCE_PSR1:
255 		return false;
256 	default:
257 		return true;
258 	}
259 }
260 
261 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
262 {
263 	struct intel_display *display = to_intel_display(intel_dp);
264 
265 	return !(intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE) &&
266 		display->params.enable_panel_replay;
267 }
268 
269 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
270 {
271 	struct intel_display *display = to_intel_display(intel_dp);
272 
273 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
274 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
275 }
276 
277 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
278 {
279 	struct intel_display *display = to_intel_display(intel_dp);
280 
281 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
282 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
283 }
284 
285 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
286 {
287 	struct intel_display *display = to_intel_display(intel_dp);
288 
289 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
290 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
291 }
292 
293 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
294 {
295 	struct intel_display *display = to_intel_display(intel_dp);
296 
297 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
298 		EDP_PSR_MASK(intel_dp->psr.transcoder);
299 }
300 
301 static i915_reg_t psr_ctl_reg(struct intel_display *display,
302 			      enum transcoder cpu_transcoder)
303 {
304 	if (DISPLAY_VER(display) >= 8)
305 		return EDP_PSR_CTL(display, cpu_transcoder);
306 	else
307 		return HSW_SRD_CTL;
308 }
309 
310 static i915_reg_t psr_debug_reg(struct intel_display *display,
311 				enum transcoder cpu_transcoder)
312 {
313 	if (DISPLAY_VER(display) >= 8)
314 		return EDP_PSR_DEBUG(display, cpu_transcoder);
315 	else
316 		return HSW_SRD_DEBUG;
317 }
318 
319 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
320 				   enum transcoder cpu_transcoder)
321 {
322 	if (DISPLAY_VER(display) >= 8)
323 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
324 	else
325 		return HSW_SRD_PERF_CNT;
326 }
327 
328 static i915_reg_t psr_status_reg(struct intel_display *display,
329 				 enum transcoder cpu_transcoder)
330 {
331 	if (DISPLAY_VER(display) >= 8)
332 		return EDP_PSR_STATUS(display, cpu_transcoder);
333 	else
334 		return HSW_SRD_STATUS;
335 }
336 
337 static i915_reg_t psr_imr_reg(struct intel_display *display,
338 			      enum transcoder cpu_transcoder)
339 {
340 	if (DISPLAY_VER(display) >= 12)
341 		return TRANS_PSR_IMR(display, cpu_transcoder);
342 	else
343 		return EDP_PSR_IMR;
344 }
345 
346 static i915_reg_t psr_iir_reg(struct intel_display *display,
347 			      enum transcoder cpu_transcoder)
348 {
349 	if (DISPLAY_VER(display) >= 12)
350 		return TRANS_PSR_IIR(display, cpu_transcoder);
351 	else
352 		return EDP_PSR_IIR;
353 }
354 
355 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
356 				  enum transcoder cpu_transcoder)
357 {
358 	if (DISPLAY_VER(display) >= 8)
359 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
360 	else
361 		return HSW_SRD_AUX_CTL;
362 }
363 
364 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
365 				   enum transcoder cpu_transcoder, int i)
366 {
367 	if (DISPLAY_VER(display) >= 8)
368 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
369 	else
370 		return HSW_SRD_AUX_DATA(i);
371 }
372 
373 static void psr_irq_control(struct intel_dp *intel_dp)
374 {
375 	struct intel_display *display = to_intel_display(intel_dp);
376 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
377 	u32 mask;
378 
379 	if (intel_dp->psr.panel_replay_enabled)
380 		return;
381 
382 	mask = psr_irq_psr_error_bit_get(intel_dp);
383 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
384 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
385 			psr_irq_pre_entry_bit_get(intel_dp);
386 
387 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
388 		     psr_irq_mask_get(intel_dp), ~mask);
389 }
390 
391 static void psr_event_print(struct intel_display *display,
392 			    u32 val, bool sel_update_enabled)
393 {
394 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
395 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
396 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
397 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
398 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
399 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
400 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
401 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
402 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
403 	if (val & PSR_EVENT_GRAPHICS_RESET)
404 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
405 	if (val & PSR_EVENT_PCH_INTERRUPT)
406 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
407 	if (val & PSR_EVENT_MEMORY_UP)
408 		drm_dbg_kms(display->drm, "\tMemory up\n");
409 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
410 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
411 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
412 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
413 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
414 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
415 	if (val & PSR_EVENT_REGISTER_UPDATE)
416 		drm_dbg_kms(display->drm, "\tRegister updated\n");
417 	if (val & PSR_EVENT_HDCP_ENABLE)
418 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
419 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
420 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
421 	if (val & PSR_EVENT_VBI_ENABLE)
422 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
423 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
424 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
425 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
426 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
427 }
428 
429 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
430 {
431 	struct intel_display *display = to_intel_display(intel_dp);
432 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
433 	ktime_t time_ns =  ktime_get();
434 
435 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
436 		intel_dp->psr.last_entry_attempt = time_ns;
437 		drm_dbg_kms(display->drm,
438 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
439 			    transcoder_name(cpu_transcoder));
440 	}
441 
442 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
443 		intel_dp->psr.last_exit = time_ns;
444 		drm_dbg_kms(display->drm,
445 			    "[transcoder %s] PSR exit completed\n",
446 			    transcoder_name(cpu_transcoder));
447 
448 		if (DISPLAY_VER(display) >= 9) {
449 			u32 val;
450 
451 			val = intel_de_rmw(display,
452 					   PSR_EVENT(display, cpu_transcoder),
453 					   0, 0);
454 
455 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
456 		}
457 	}
458 
459 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
460 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
461 			 transcoder_name(cpu_transcoder));
462 
463 		intel_dp->psr.irq_aux_error = true;
464 
465 		/*
466 		 * If this interrupt is not masked it will keep firing
467 		 * so fast that it prevents the scheduled work from
468 		 * running.
469 		 * Also, after a PSR error we don't want to arm PSR
470 		 * again, so we don't care about unmasking the interrupt
471 		 * or clearing irq_aux_error.
472 		 */
473 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
474 			     0, psr_irq_psr_error_bit_get(intel_dp));
475 
476 		queue_work(display->wq.unordered, &intel_dp->psr.work);
477 	}
478 }
479 
480 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
481 {
482 	struct intel_display *display = to_intel_display(intel_dp);
483 	u8 val = 8; /* assume the worst if we can't read the value */
484 
485 	if (drm_dp_dpcd_readb(&intel_dp->aux,
486 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
487 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
488 	else
489 		drm_dbg_kms(display->drm,
490 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
491 	return val;
492 }
493 
494 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
495 {
496 	u8 su_capability = 0;
497 
498 	if (intel_dp->psr.sink_panel_replay_su_support) {
499 		if (drm_dp_dpcd_read_byte(&intel_dp->aux,
500 					  DP_PANEL_REPLAY_CAP_CAPABILITY,
501 					  &su_capability) < 0)
502 			return 0;
503 	} else {
504 		su_capability = intel_dp->psr_dpcd[1];
505 	}
506 
507 	return su_capability;
508 }
509 
510 static unsigned int
511 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
512 {
513 	return intel_dp->psr.sink_panel_replay_su_support ?
514 		DP_PANEL_REPLAY_CAP_X_GRANULARITY :
515 		DP_PSR2_SU_X_GRANULARITY;
516 }
517 
518 static unsigned int
519 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
520 {
521 	return intel_dp->psr.sink_panel_replay_su_support ?
522 		DP_PANEL_REPLAY_CAP_Y_GRANULARITY :
523 		DP_PSR2_SU_Y_GRANULARITY;
524 }
525 
526 /*
527  * Note: Bits related to granularity are the same in panel replay and psr
528  * registers. Rely on PSR definitions on these "common" bits.
529  */
530 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
531 {
532 	struct intel_display *display = to_intel_display(intel_dp);
533 	ssize_t r;
534 	u16 w;
535 	u8 y;
536 
537 	/*
538 	 * TODO: Do we need to take into account panel supporting both PSR and
539 	 * Panel replay?
540 	 */
541 
542 	/*
543 	 * If the sink doesn't have specific granularity requirements, set legacy
544 	 * ones.
545 	 */
546 	if (!(intel_dp_get_su_capability(intel_dp) &
547 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
548 		/* As PSR2 HW sends full lines, we do not care about x granularity */
549 		w = 4;
550 		y = 4;
551 		goto exit;
552 	}
553 
554 	r = drm_dp_dpcd_read(&intel_dp->aux,
555 			     intel_dp_get_su_x_granularity_offset(intel_dp),
556 			     &w, 2);
557 	if (r != 2)
558 		drm_dbg_kms(display->drm,
559 			    "Unable to read selective update x granularity\n");
560 	/*
561 	 * Spec says that if the value read is 0 the default granularity should
562 	 * be used instead.
563 	 */
564 	if (r != 2 || w == 0)
565 		w = 4;
566 
567 	r = drm_dp_dpcd_read(&intel_dp->aux,
568 			     intel_dp_get_su_y_granularity_offset(intel_dp),
569 			     &y, 1);
570 	if (r != 1) {
571 		drm_dbg_kms(display->drm,
572 			    "Unable to read selective update y granularity\n");
573 		y = 4;
574 	}
575 	if (y == 0)
576 		y = 1;
577 
578 exit:
579 	intel_dp->psr.su_w_granularity = w;
580 	intel_dp->psr.su_y_granularity = y;
581 }
582 
583 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
584 {
585 	struct intel_display *display = to_intel_display(intel_dp);
586 	int ret;
587 
588 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
589 				    &intel_dp->pr_dpcd, sizeof(intel_dp->pr_dpcd));
590 	if (ret < 0)
591 		return;
592 
593 	if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
594 	      DP_PANEL_REPLAY_SUPPORT))
595 		return;
596 
597 	if (intel_dp_is_edp(intel_dp)) {
598 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
599 			drm_dbg_kms(display->drm,
600 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
601 			return;
602 		}
603 
604 		if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
605 		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
606 			drm_dbg_kms(display->drm,
607 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
608 			return;
609 		}
610 	}
611 
612 	intel_dp->psr.sink_panel_replay_support = true;
613 
614 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
615 	    DP_PANEL_REPLAY_SU_SUPPORT)
616 		intel_dp->psr.sink_panel_replay_su_support = true;
617 
618 	drm_dbg_kms(display->drm,
619 		    "Panel replay %sis supported by panel\n",
620 		    intel_dp->psr.sink_panel_replay_su_support ?
621 		    "selective_update " : "");
622 }
623 
624 static void _psr_init_dpcd(struct intel_dp *intel_dp)
625 {
626 	struct intel_display *display = to_intel_display(intel_dp);
627 	int ret;
628 
629 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
630 				    sizeof(intel_dp->psr_dpcd));
631 	if (ret < 0)
632 		return;
633 
634 	if (!intel_dp->psr_dpcd[0])
635 		return;
636 
637 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
638 		    intel_dp->psr_dpcd[0]);
639 
640 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
641 		drm_dbg_kms(display->drm,
642 			    "PSR support not currently available for this panel\n");
643 		return;
644 	}
645 
646 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
647 		drm_dbg_kms(display->drm,
648 			    "Panel lacks power state control, PSR cannot be enabled\n");
649 		return;
650 	}
651 
652 	intel_dp->psr.sink_support = true;
653 	intel_dp->psr.sink_sync_latency =
654 		intel_dp_get_sink_sync_latency(intel_dp);
655 
656 	if (DISPLAY_VER(display) >= 9 &&
657 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
658 		bool y_req = intel_dp->psr_dpcd[1] &
659 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
660 
661 		/*
662 		 * All panels that support PSR version 03h (PSR2 +
663 		 * Y-coordinate) can handle Y-coordinates in the VSC but we are
664 		 * only sure that it is going to be used when required by the
665 		 * panel. This way the panel is capable of doing selective
666 		 * updates without an aux frame sync.
667 		 *
668 		 * To support PSR version 02h and PSR version 03h panels
669 		 * without the Y-coordinate requirement we would need to enable
670 		 * GTC first.
671 		 */
672 		intel_dp->psr.sink_psr2_support = y_req &&
673 			intel_alpm_aux_wake_supported(intel_dp);
674 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
675 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
676 	}
677 }
678 
679 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
680 {
681 	_psr_init_dpcd(intel_dp);
682 
683 	_panel_replay_init_dpcd(intel_dp);
684 
685 	if (intel_dp->psr.sink_psr2_support ||
686 	    intel_dp->psr.sink_panel_replay_su_support)
687 		intel_dp_get_su_granularity(intel_dp);
688 }
689 
690 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
691 {
692 	struct intel_display *display = to_intel_display(intel_dp);
693 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
694 	u32 aux_clock_divider, aux_ctl;
695 	/* write DP_SET_POWER=D0 */
696 	static const u8 aux_msg[] = {
697 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
698 		[1] = (DP_SET_POWER >> 8) & 0xff,
699 		[2] = DP_SET_POWER & 0xff,
700 		[3] = 1 - 1,
701 		[4] = DP_SET_POWER_D0,
702 	};
703 	int i;
704 
705 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
706 	for (i = 0; i < sizeof(aux_msg); i += 4)
707 		intel_de_write(display,
708 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
709 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
710 
711 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
712 
713 	/* Start with bits set for DDI_AUX_CTL register */
714 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
715 					     aux_clock_divider);
716 
717 	/* Select only valid bits for SRD_AUX_CTL */
718 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
719 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
720 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
721 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
722 
723 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
724 		       aux_ctl);
725 }
726 
727 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
728 {
729 	struct intel_display *display = to_intel_display(intel_dp);
730 
731 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
732 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
733 		return false;
734 
735 	return panel_replay ?
736 		intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
737 		DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
738 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED;
739 }
740 
741 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
742 				      const struct intel_crtc_state *crtc_state)
743 {
744 	u8 val = DP_PANEL_REPLAY_ENABLE |
745 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
746 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
747 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
748 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
749 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
750 
751 	if (crtc_state->has_sel_update)
752 		val |= DP_PANEL_REPLAY_SU_ENABLE;
753 
754 	if (crtc_state->enable_psr2_su_region_et)
755 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
756 
757 	if (crtc_state->req_psr2_sdp_prior_scanline)
758 		panel_replay_config2 |=
759 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
760 
761 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
762 
763 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
764 			   panel_replay_config2);
765 }
766 
767 static void _psr_enable_sink(struct intel_dp *intel_dp,
768 			     const struct intel_crtc_state *crtc_state)
769 {
770 	struct intel_display *display = to_intel_display(intel_dp);
771 	u8 val = 0;
772 
773 	if (crtc_state->has_sel_update) {
774 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
775 	} else {
776 		if (intel_dp->psr.link_standby)
777 			val |= DP_PSR_MAIN_LINK_ACTIVE;
778 
779 		if (DISPLAY_VER(display) >= 8)
780 			val |= DP_PSR_CRC_VERIFICATION;
781 	}
782 
783 	if (crtc_state->req_psr2_sdp_prior_scanline)
784 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
785 
786 	if (crtc_state->enable_psr2_su_region_et)
787 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
788 
789 	if (intel_dp->psr.entry_setup_frames > 0)
790 		val |= DP_PSR_FRAME_CAPTURE;
791 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
792 
793 	val |= DP_PSR_ENABLE;
794 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
795 }
796 
797 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
798 				  const struct intel_crtc_state *crtc_state)
799 {
800 	intel_alpm_enable_sink(intel_dp, crtc_state);
801 
802 	crtc_state->has_panel_replay ?
803 		_panel_replay_enable_sink(intel_dp, crtc_state) :
804 		_psr_enable_sink(intel_dp, crtc_state);
805 
806 	if (intel_dp_is_edp(intel_dp))
807 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
808 }
809 
810 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
811 {
812 	if (CAN_PANEL_REPLAY(intel_dp))
813 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
814 				   DP_PANEL_REPLAY_ENABLE);
815 }
816 
817 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
818 {
819 	struct intel_display *display = to_intel_display(intel_dp);
820 	struct intel_connector *connector = intel_dp->attached_connector;
821 	u32 val = 0;
822 
823 	if (DISPLAY_VER(display) >= 11)
824 		val |= EDP_PSR_TP4_TIME_0us;
825 
826 	if (display->params.psr_safest_params) {
827 		val |= EDP_PSR_TP1_TIME_2500us;
828 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
829 		goto check_tp3_sel;
830 	}
831 
832 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
833 		val |= EDP_PSR_TP1_TIME_0us;
834 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
835 		val |= EDP_PSR_TP1_TIME_100us;
836 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
837 		val |= EDP_PSR_TP1_TIME_500us;
838 	else
839 		val |= EDP_PSR_TP1_TIME_2500us;
840 
841 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
842 		val |= EDP_PSR_TP2_TP3_TIME_0us;
843 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
844 		val |= EDP_PSR_TP2_TP3_TIME_100us;
845 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
846 		val |= EDP_PSR_TP2_TP3_TIME_500us;
847 	else
848 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
849 
850 	/*
851 	 * WA 0479: hsw,bdw
852 	 * "Do not skip both TP1 and TP2/TP3"
853 	 */
854 	if (DISPLAY_VER(display) < 9 &&
855 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
856 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
857 		val |= EDP_PSR_TP2_TP3_TIME_100us;
858 
859 check_tp3_sel:
860 	if (intel_dp_source_supports_tps3(display) &&
861 	    drm_dp_tps3_supported(intel_dp->dpcd))
862 		val |= EDP_PSR_TP_TP1_TP3;
863 	else
864 		val |= EDP_PSR_TP_TP1_TP2;
865 
866 	return val;
867 }
868 
869 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
870 {
871 	struct intel_display *display = to_intel_display(intel_dp);
872 	struct intel_connector *connector = intel_dp->attached_connector;
873 	int idle_frames;
874 
875 	/* Let's use 6 as the minimum to cover all known cases including the
876 	 * off-by-one issue that HW has in some cases.
877 	 */
878 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
879 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
880 
881 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
882 		idle_frames = 0xf;
883 
884 	return idle_frames;
885 }
886 
887 static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
888 {
889 	struct intel_display *display = to_intel_display(intel_dp);
890 	u32 current_dc_state = intel_display_power_get_current_dc_state(display);
891 	struct drm_vblank_crtc *vblank = &display->drm->vblank[intel_dp->psr.pipe];
892 
893 	return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
894 		current_dc_state != DC_STATE_EN_UPTO_DC6) ||
895 		intel_dp->psr.active_non_psr_pipes ||
896 		READ_ONCE(vblank->enabled);
897 }
898 
899 static void hsw_activate_psr1(struct intel_dp *intel_dp)
900 {
901 	struct intel_display *display = to_intel_display(intel_dp);
902 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
903 	u32 max_sleep_time = 0x1f;
904 	u32 val = EDP_PSR_ENABLE;
905 
906 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
907 
908 	if (DISPLAY_VER(display) < 20)
909 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
910 
911 	if (display->platform.haswell)
912 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
913 
914 	if (intel_dp->psr.link_standby)
915 		val |= EDP_PSR_LINK_STANDBY;
916 
917 	val |= intel_psr1_get_tp_time(intel_dp);
918 
919 	if (DISPLAY_VER(display) >= 8)
920 		val |= EDP_PSR_CRC_ENABLE;
921 
922 	if (DISPLAY_VER(display) >= 20)
923 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
924 
925 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
926 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
927 
928 	/* Wa_16025596647 */
929 	if ((DISPLAY_VER(display) == 20 ||
930 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
931 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
932 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
933 								       intel_dp->psr.pipe,
934 								       true);
935 }
936 
937 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
938 {
939 	struct intel_display *display = to_intel_display(intel_dp);
940 	struct intel_connector *connector = intel_dp->attached_connector;
941 	u32 val = 0;
942 
943 	if (display->params.psr_safest_params)
944 		return EDP_PSR2_TP2_TIME_2500us;
945 
946 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
947 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
948 		val |= EDP_PSR2_TP2_TIME_50us;
949 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
950 		val |= EDP_PSR2_TP2_TIME_100us;
951 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
952 		val |= EDP_PSR2_TP2_TIME_500us;
953 	else
954 		val |= EDP_PSR2_TP2_TIME_2500us;
955 
956 	return val;
957 }
958 
959 static int psr2_block_count_lines(struct intel_dp *intel_dp)
960 {
961 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
962 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
963 }
964 
965 static int psr2_block_count(struct intel_dp *intel_dp)
966 {
967 	return psr2_block_count_lines(intel_dp) / 4;
968 }
969 
970 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
971 {
972 	u8 frames_before_su_entry;
973 
974 	frames_before_su_entry = max_t(u8,
975 				       intel_dp->psr.sink_sync_latency + 1,
976 				       2);
977 
978 	/* Entry setup frames must be at least 1 less than frames before SU entry */
979 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
980 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
981 
982 	return frames_before_su_entry;
983 }
984 
985 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
986 {
987 	struct intel_display *display = to_intel_display(intel_dp);
988 	struct intel_psr *psr = &intel_dp->psr;
989 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
990 
991 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
992 		u32 val = psr->su_region_et_enabled ?
993 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
994 
995 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
996 			val |= EDP_PSR2_SU_SDP_SCANLINE;
997 
998 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
999 			       val);
1000 	}
1001 
1002 	intel_de_rmw(display,
1003 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1004 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1005 
1006 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1007 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1008 }
1009 
1010 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1011 {
1012 	struct intel_display *display = to_intel_display(intel_dp);
1013 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1014 	u32 val = EDP_PSR2_ENABLE;
1015 	u32 psr_val = 0;
1016 	u8 idle_frames;
1017 
1018 	/* Wa_16025596647 */
1019 	if ((DISPLAY_VER(display) == 20 ||
1020 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1021 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
1022 		idle_frames = 0;
1023 	else
1024 		idle_frames = psr_compute_idle_frames(intel_dp);
1025 	val |= EDP_PSR2_IDLE_FRAMES(idle_frames);
1026 
1027 	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
1028 		val |= EDP_SU_TRACK_ENABLE;
1029 
1030 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1031 		val |= EDP_Y_COORDINATE_ENABLE;
1032 
1033 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1034 
1035 	val |= intel_psr2_get_tp_time(intel_dp);
1036 
1037 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1038 		if (psr2_block_count(intel_dp) > 2)
1039 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1040 		else
1041 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1042 	}
1043 
1044 	/* Wa_22012278275:adl-p */
1045 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1046 		static const u8 map[] = {
1047 			2, /* 5 lines */
1048 			1, /* 6 lines */
1049 			0, /* 7 lines */
1050 			3, /* 8 lines */
1051 			6, /* 9 lines */
1052 			5, /* 10 lines */
1053 			4, /* 11 lines */
1054 			7, /* 12 lines */
1055 		};
1056 		/*
1057 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1058 		 * comments below for more information
1059 		 */
1060 		int tmp;
1061 
1062 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1063 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1064 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1065 
1066 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1067 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1068 	} else if (DISPLAY_VER(display) >= 20) {
1069 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1070 	} else if (DISPLAY_VER(display) >= 12) {
1071 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1072 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1073 	} else if (DISPLAY_VER(display) >= 9) {
1074 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1075 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1076 	}
1077 
1078 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1079 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1080 
1081 	if (DISPLAY_VER(display) >= 20)
1082 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1083 
1084 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1085 		u32 tmp;
1086 
1087 		tmp = intel_de_read(display,
1088 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1089 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1090 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1091 		intel_de_write(display,
1092 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1093 	}
1094 
1095 	if (intel_dp->psr.su_region_et_enabled)
1096 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1097 
1098 	/*
1099 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec is
1100 	 * recommending to keep this bit unset while PSR2 is enabled.
1101 	 */
1102 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1103 
1104 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1105 }
1106 
1107 static bool
1108 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1109 {
1110 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1111 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1112 	else if (DISPLAY_VER(display) >= 12)
1113 		return cpu_transcoder == TRANSCODER_A;
1114 	else if (DISPLAY_VER(display) >= 9)
1115 		return cpu_transcoder == TRANSCODER_EDP;
1116 	else
1117 		return false;
1118 }
1119 
1120 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1121 {
1122 	if (!crtc_state->hw.active)
1123 		return 0;
1124 
1125 	return DIV_ROUND_UP(1000 * 1000,
1126 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1127 }
1128 
1129 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1130 				     u32 idle_frames)
1131 {
1132 	struct intel_display *display = to_intel_display(intel_dp);
1133 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1134 
1135 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1136 		     EDP_PSR2_IDLE_FRAMES_MASK,
1137 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1138 }
1139 
1140 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1141 {
1142 	struct intel_display *display = to_intel_display(intel_dp);
1143 
1144 	psr2_program_idle_frames(intel_dp, 0);
1145 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1146 }
1147 
1148 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1149 {
1150 	struct intel_display *display = to_intel_display(intel_dp);
1151 
1152 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1153 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1154 }
1155 
1156 static void tgl_dc3co_disable_work(struct work_struct *work)
1157 {
1158 	struct intel_dp *intel_dp =
1159 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1160 
1161 	mutex_lock(&intel_dp->psr.lock);
1162 	/* If delayed work is pending, it is not idle */
1163 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1164 		goto unlock;
1165 
1166 	tgl_psr2_disable_dc3co(intel_dp);
1167 unlock:
1168 	mutex_unlock(&intel_dp->psr.lock);
1169 }
1170 
1171 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1172 {
1173 	if (!intel_dp->psr.dc3co_exitline)
1174 		return;
1175 
1176 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1177 	/* Before PSR2 exit disallow dc3co */
1178 	tgl_psr2_disable_dc3co(intel_dp);
1179 }
1180 
1181 static bool
1182 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1183 			      struct intel_crtc_state *crtc_state)
1184 {
1185 	struct intel_display *display = to_intel_display(intel_dp);
1186 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1187 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1188 	enum port port = dig_port->base.port;
1189 
1190 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1191 		return pipe <= PIPE_B && port <= PORT_B;
1192 	else
1193 		return pipe == PIPE_A && port == PORT_A;
1194 }
1195 
1196 static void
1197 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1198 				  struct intel_crtc_state *crtc_state)
1199 {
1200 	struct intel_display *display = to_intel_display(intel_dp);
1201 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1202 	struct i915_power_domains *power_domains = &display->power.domains;
1203 	u32 exit_scanlines;
1204 
1205 	/*
1206 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1207 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1208 	 * is applied. B.Specs:49196
1209 	 */
1210 	return;
1211 
1212 	/*
1213 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1214 	 * TODO: when the issue is addressed, this restriction should be removed.
1215 	 */
1216 	if (crtc_state->enable_psr2_sel_fetch)
1217 		return;
1218 
1219 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1220 		return;
1221 
1222 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1223 		return;
1224 
1225 	/* Wa_16011303918:adl-p */
1226 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1227 		return;
1228 
1229 	/*
1230 	 * DC3CO Exit time 200us B.Spec 49196
1231 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1232 	 */
1233 	exit_scanlines =
1234 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1235 
1236 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1237 		return;
1238 
1239 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1240 }
1241 
1242 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1243 					      struct intel_crtc_state *crtc_state)
1244 {
1245 	struct intel_display *display = to_intel_display(intel_dp);
1246 
1247 	if (!display->params.enable_psr2_sel_fetch &&
1248 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1249 		drm_dbg_kms(display->drm,
1250 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1251 		return false;
1252 	}
1253 
1254 	if (crtc_state->uapi.async_flip) {
1255 		drm_dbg_kms(display->drm,
1256 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1257 		return false;
1258 	}
1259 
1260 	return crtc_state->enable_psr2_sel_fetch = true;
1261 }
1262 
1263 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1264 				   struct intel_crtc_state *crtc_state)
1265 {
1266 	struct intel_display *display = to_intel_display(intel_dp);
1267 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1268 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1269 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1270 	u16 y_granularity = 0;
1271 
1272 	/* PSR2 HW only sends full lines so we only need to validate the width */
1273 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1274 		return false;
1275 
1276 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1277 		return false;
1278 
1279 	/* HW tracking is only aligned to 4 lines */
1280 	if (!crtc_state->enable_psr2_sel_fetch)
1281 		return intel_dp->psr.su_y_granularity == 4;
1282 
1283 	/*
1284 	 * adl_p and mtl platforms have 1 line granularity.
1285 	 * For other platforms with SW tracking we can adjust the y coordinates
1286 	 * to match the sink requirement if it is a multiple of 4.
1287 	 */
1288 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1289 		y_granularity = intel_dp->psr.su_y_granularity;
1290 	else if (intel_dp->psr.su_y_granularity <= 2)
1291 		y_granularity = 4;
1292 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1293 		y_granularity = intel_dp->psr.su_y_granularity;
1294 
1295 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1296 		return false;
1297 
1298 	if (crtc_state->dsc.compression_enable &&
1299 	    vdsc_cfg->slice_height % y_granularity)
1300 		return false;
1301 
1302 	crtc_state->su_y_granularity = y_granularity;
1303 	return true;
1304 }
1305 
1306 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1307 							struct intel_crtc_state *crtc_state)
1308 {
1309 	struct intel_display *display = to_intel_display(intel_dp);
1310 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1311 	u32 hblank_total, hblank_ns, req_ns;
1312 
1313 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1314 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1315 
1316 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1317 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1318 
1319 	if ((hblank_ns - req_ns) > 100)
1320 		return true;
1321 
1322 	/* Not supported <13 / Wa_22012279113:adl-p */
1323 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1324 		return false;
1325 
1326 	crtc_state->req_psr2_sdp_prior_scanline = true;
1327 	return true;
1328 }
1329 
1330 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1331 					const struct drm_display_mode *adjusted_mode)
1332 {
1333 	struct intel_display *display = to_intel_display(intel_dp);
1334 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1335 	int entry_setup_frames = 0;
1336 
1337 	if (psr_setup_time < 0) {
1338 		drm_dbg_kms(display->drm,
1339 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1340 			    intel_dp->psr_dpcd[1]);
1341 		return -ETIME;
1342 	}
1343 
1344 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1345 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1346 		if (DISPLAY_VER(display) >= 20) {
1347 			/* setup entry frames can be up to 3 frames */
1348 			entry_setup_frames = 1;
1349 			drm_dbg_kms(display->drm,
1350 				    "PSR setup entry frames %d\n",
1351 				    entry_setup_frames);
1352 		} else {
1353 			drm_dbg_kms(display->drm,
1354 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1355 				    psr_setup_time);
1356 			return -ETIME;
1357 		}
1358 	}
1359 
1360 	return entry_setup_frames;
1361 }
1362 
1363 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1364 				       const struct intel_crtc_state *crtc_state,
1365 				       bool aux_less)
1366 {
1367 	struct intel_display *display = to_intel_display(intel_dp);
1368 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1369 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1370 	int wake_lines;
1371 
1372 	if (aux_less)
1373 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1374 	else
1375 		wake_lines = DISPLAY_VER(display) < 20 ?
1376 			psr2_block_count_lines(intel_dp) :
1377 			intel_dp->alpm_parameters.io_wake_lines;
1378 
1379 	if (crtc_state->req_psr2_sdp_prior_scanline)
1380 		vblank -= 1;
1381 
1382 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1383 	if (vblank < wake_lines)
1384 		return false;
1385 
1386 	return true;
1387 }
1388 
1389 static bool alpm_config_valid(struct intel_dp *intel_dp,
1390 			      const struct intel_crtc_state *crtc_state,
1391 			      bool aux_less)
1392 {
1393 	struct intel_display *display = to_intel_display(intel_dp);
1394 
1395 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1396 		drm_dbg_kms(display->drm,
1397 			    "PSR2/Panel Replay not enabled, Unable to use long enough wake times\n");
1398 		return false;
1399 	}
1400 
1401 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1402 		drm_dbg_kms(display->drm,
1403 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1404 		return false;
1405 	}
1406 
1407 	return true;
1408 }
1409 
1410 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1411 				    struct intel_crtc_state *crtc_state)
1412 {
1413 	struct intel_display *display = to_intel_display(intel_dp);
1414 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1415 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1416 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1417 
1418 	if (!intel_dp->psr.sink_psr2_support || display->params.enable_psr == 1)
1419 		return false;
1420 
1421 	/* JSL and EHL only support eDP 1.3 */
1422 	if (display->platform.jasperlake || display->platform.elkhartlake) {
1423 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1424 		return false;
1425 	}
1426 
1427 	/* Wa_16011181250 */
1428 	if (display->platform.rocketlake || display->platform.alderlake_s ||
1429 	    display->platform.dg2) {
1430 		drm_dbg_kms(display->drm,
1431 			    "PSR2 is defeatured for this platform\n");
1432 		return false;
1433 	}
1434 
1435 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1436 		drm_dbg_kms(display->drm,
1437 			    "PSR2 not completely functional in this stepping\n");
1438 		return false;
1439 	}
1440 
1441 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1442 		drm_dbg_kms(display->drm,
1443 			    "PSR2 not supported in transcoder %s\n",
1444 			    transcoder_name(crtc_state->cpu_transcoder));
1445 		return false;
1446 	}
1447 
1448 	/*
1449 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1450 	 * resolution requires DSC to be enabled, priority is given to DSC
1451 	 * over PSR2.
1452 	 */
1453 	if (crtc_state->dsc.compression_enable &&
1454 	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
1455 		drm_dbg_kms(display->drm,
1456 			    "PSR2 cannot be enabled since DSC is enabled\n");
1457 		return false;
1458 	}
1459 
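	/*
	 * Per-generation PSR2 limits on resolution and pipe bpp. From display
	 * version 20 onwards there is no fixed limit beyond the current mode.
	 */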
1460 	if (DISPLAY_VER(display) >= 20) {
1461 		psr_max_h = crtc_hdisplay;
1462 		psr_max_v = crtc_vdisplay;
1463 		max_bpp = crtc_state->pipe_bpp;
1464 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1465 		psr_max_h = 5120;
1466 		psr_max_v = 3200;
1467 		max_bpp = 30;
1468 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1469 		psr_max_h = 4096;
1470 		psr_max_v = 2304;
1471 		max_bpp = 24;
1472 	} else if (DISPLAY_VER(display) == 9) {
1473 		psr_max_h = 3640;
1474 		psr_max_v = 2304;
1475 		max_bpp = 24;
1476 	}
1477 
1478 	if (crtc_state->pipe_bpp > max_bpp) {
1479 		drm_dbg_kms(display->drm,
1480 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1481 			    crtc_state->pipe_bpp, max_bpp);
1482 		return false;
1483 	}
1484 
1485 	/* Wa_16011303918:adl-p */
1486 	if (crtc_state->vrr.enable &&
1487 	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1488 		drm_dbg_kms(display->drm,
1489 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1490 		return false;
1491 	}
1492 
1493 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1494 		return false;
1495 
1496 	if (!crtc_state->enable_psr2_sel_fetch &&
1497 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1498 		drm_dbg_kms(display->drm,
1499 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1500 			    crtc_hdisplay, crtc_vdisplay,
1501 			    psr_max_h, psr_max_v);
1502 		return false;
1503 	}
1504 
1505 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1506 
1507 	return true;
1508 }
1509 
1510 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1511 					  struct intel_crtc_state *crtc_state)
1512 {
1513 	struct intel_display *display = to_intel_display(intel_dp);
1514 
1515 	if (HAS_PSR2_SEL_FETCH(display) &&
1516 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1517 	    !HAS_PSR_HW_TRACKING(display)) {
1518 		drm_dbg_kms(display->drm,
1519 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1520 		goto unsupported;
1521 	}
1522 
1523 	if (!sel_update_global_enabled(intel_dp)) {
1524 		drm_dbg_kms(display->drm,
1525 			    "Selective update disabled by flag\n");
1526 		goto unsupported;
1527 	}
1528 
1529 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1530 		goto unsupported;
1531 
1532 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1533 		drm_dbg_kms(display->drm,
1534 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1535 		goto unsupported;
1536 	}
1537 
1538 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1539 					     !intel_dp->psr.sink_panel_replay_su_support))
1540 		goto unsupported;
1541 
1542 	if (crtc_state->crc_enabled) {
1543 		drm_dbg_kms(display->drm,
1544 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1545 		goto unsupported;
1546 	}
1547 
1548 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1549 		drm_dbg_kms(display->drm,
1550 			    "Selective update not enabled, SU granularity not compatible\n");
1551 		goto unsupported;
1552 	}
1553 
1554 	crtc_state->enable_psr2_su_region_et =
1555 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1556 
1557 	return true;
1558 
1559 unsupported:
1560 	crtc_state->enable_psr2_sel_fetch = false;
1561 	return false;
1562 }
1563 
1564 static bool _psr_compute_config(struct intel_dp *intel_dp,
1565 				struct intel_crtc_state *crtc_state)
1566 {
1567 	struct intel_display *display = to_intel_display(intel_dp);
1568 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1569 	int entry_setup_frames;
1570 
1571 	if (!CAN_PSR(intel_dp) || !display->params.enable_psr)
1572 		return false;
1573 
1574 	/*
1575 	 * Currently PSR doesn't work reliably with VRR enabled.
1576 	 */
1577 	if (crtc_state->vrr.enable)
1578 		return false;
1579 
1580 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1581 
1582 	if (entry_setup_frames >= 0) {
1583 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1584 	} else {
1585 		drm_dbg_kms(display->drm,
1586 			    "PSR condition failed: PSR setup timing not met\n");
1587 		return false;
1588 	}
1589 
1590 	return true;
1591 }
1592 
1593 static bool
1594 _panel_replay_compute_config(struct intel_dp *intel_dp,
1595 			     const struct intel_crtc_state *crtc_state,
1596 			     const struct drm_connector_state *conn_state)
1597 {
1598 	struct intel_display *display = to_intel_display(intel_dp);
1599 	struct intel_connector *connector =
1600 		to_intel_connector(conn_state->connector);
1601 	struct intel_hdcp *hdcp = &connector->hdcp;
1602 
1603 	if (!CAN_PANEL_REPLAY(intel_dp))
1604 		return false;
1605 
1606 	if (!panel_replay_global_enabled(intel_dp)) {
1607 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1608 		return false;
1609 	}
1610 
1611 	if (crtc_state->crc_enabled) {
1612 		drm_dbg_kms(display->drm,
1613 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1614 		return false;
1615 	}
1616 
1617 	if (!intel_dp_is_edp(intel_dp))
1618 		return true;
1619 
1620 	/* Remaining checks are for eDP only */
1621 
1622 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
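	/* Panel Replay on eDP is only allowed on pipes A and B */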
1623 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1624 		return false;
1625 
1626 	/* 128b/132b Panel Replay is not supported on eDP */
1627 	if (intel_dp_is_uhbr(crtc_state)) {
1628 		drm_dbg_kms(display->drm,
1629 			    "Panel Replay is not supported with 128b/132b\n");
1630 		return false;
1631 	}
1632 
1633 	/* HW will not allow Panel Replay on eDP when HDCP enabled */
1634 	if (conn_state->content_protection ==
1635 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1636 	    (conn_state->content_protection ==
1637 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1638 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1639 		drm_dbg_kms(display->drm,
1640 			    "Panel Replay is not supported with HDCP\n");
1641 		return false;
1642 	}
1643 
1644 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1645 		return false;
1646 
1647 	return true;
1648 }
1649 
1650 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1651 					   struct intel_crtc_state *crtc_state)
1652 {
1653 	struct intel_display *display = to_intel_display(intel_dp);
1654 
1655 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1656 		!crtc_state->has_sel_update);
1657 }
1658 
1659 void intel_psr_compute_config(struct intel_dp *intel_dp,
1660 			      struct intel_crtc_state *crtc_state,
1661 			      struct drm_connector_state *conn_state)
1662 {
1663 	struct intel_display *display = to_intel_display(intel_dp);
1664 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1665 	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
1666 	struct intel_crtc *crtc;
1667 	u8 active_pipes = 0;
1668 
1669 	if (!psr_global_enabled(intel_dp)) {
1670 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1671 		return;
1672 	}
1673 
1674 	if (intel_dp->psr.sink_not_reliable) {
1675 		drm_dbg_kms(display->drm,
1676 			    "PSR sink implementation is not reliable\n");
1677 		return;
1678 	}
1679 
1680 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1681 		drm_dbg_kms(display->drm,
1682 			    "PSR condition failed: Interlaced mode enabled\n");
1683 		return;
1684 	}
1685 
1686 	/*
1687 	 * FIXME figure out what is wrong with PSR+joiner and
1688 	 * fix it. Presumably something related to the fact that
1689 	 * PSR is a transcoder level feature.
1690 	 */
1691 	if (crtc_state->joiner_pipes) {
1692 		drm_dbg_kms(display->drm,
1693 			    "PSR disabled due to joiner\n");
1694 		return;
1695 	}
1696 
1697 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1698 								    crtc_state,
1699 								    conn_state);
1700 
1701 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1702 		_psr_compute_config(intel_dp, crtc_state);
1703 
1704 	if (!crtc_state->has_psr)
1705 		return;
1706 
1707 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1708 
1709 	/* Wa_18037818876 */
1710 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1711 		crtc_state->has_psr = false;
1712 		drm_dbg_kms(display->drm,
1713 			    "PSR disabled to workaround PSR FSM hang issue\n");
1714 	}
1715 
1716 	/* Rest is for Wa_16025596647 */
1717 	if (DISPLAY_VER(display) != 20 &&
1718 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
1719 		return;
1720 
1721 	/* Not needed by Panel Replay */
1722 	if (crtc_state->has_panel_replay)
1723 		return;
1724 
1725 	/* We ignore possible secondary PSR/Panel Replay capable eDP */
1726 	for_each_intel_crtc(display->drm, crtc)
1727 		active_pipes |= crtc->active ? BIT(crtc->pipe) : 0;
1728 
1729 	active_pipes = intel_calc_active_pipes(state, active_pipes);
1730 
1731 	crtc_state->active_non_psr_pipes = active_pipes &
1732 		~BIT(to_intel_crtc(crtc_state->uapi.crtc)->pipe);
1733 }
1734 
1735 void intel_psr_get_config(struct intel_encoder *encoder,
1736 			  struct intel_crtc_state *pipe_config)
1737 {
1738 	struct intel_display *display = to_intel_display(encoder);
1739 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1740 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1741 	struct intel_dp *intel_dp;
1742 	u32 val;
1743 
1744 	if (!dig_port)
1745 		return;
1746 
1747 	intel_dp = &dig_port->dp;
1748 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1749 		return;
1750 
1751 	mutex_lock(&intel_dp->psr.lock);
1752 	if (!intel_dp->psr.enabled)
1753 		goto unlock;
1754 
1755 	if (intel_dp->psr.panel_replay_enabled) {
1756 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1757 	} else {
1758 		/*
1759 		 * Not possible to read the EDP_PSR/PSR2_CTL registers, as PSR
1760 		 * gets enabled/disabled because of frontbuffer tracking and others.
1761 		 */
1762 		pipe_config->has_psr = true;
1763 	}
1764 
1765 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1766 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1767 
1768 	if (!intel_dp->psr.sel_update_enabled)
1769 		goto unlock;
1770 
1771 	if (HAS_PSR2_SEL_FETCH(display)) {
1772 		val = intel_de_read(display,
1773 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1774 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1775 			pipe_config->enable_psr2_sel_fetch = true;
1776 	}
1777 
1778 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1779 
1780 	if (DISPLAY_VER(display) >= 12) {
1781 		val = intel_de_read(display,
1782 				    TRANS_EXITLINE(display, cpu_transcoder));
1783 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1784 	}
1785 unlock:
1786 	mutex_unlock(&intel_dp->psr.lock);
1787 }
1788 
1789 static void intel_psr_activate(struct intel_dp *intel_dp)
1790 {
1791 	struct intel_display *display = to_intel_display(intel_dp);
1792 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1793 
1794 	drm_WARN_ON(display->drm,
1795 		    transcoder_has_psr2(display, cpu_transcoder) &&
1796 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1797 
1798 	drm_WARN_ON(display->drm,
1799 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1800 
1801 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1802 
1803 	drm_WARN_ON(display->drm, !intel_dp->psr.enabled);
1804 
1805 	lockdep_assert_held(&intel_dp->psr.lock);
1806 
1807 	/* psr1, psr2 and panel-replay are mutually exclusive. */
1808 	if (intel_dp->psr.panel_replay_enabled)
1809 		dg2_activate_panel_replay(intel_dp);
1810 	else if (intel_dp->psr.sel_update_enabled)
1811 		hsw_activate_psr2(intel_dp);
1812 	else
1813 		hsw_activate_psr1(intel_dp);
1814 
1815 	intel_dp->psr.active = true;
1816 }
1817 
1818 /*
1819  * Wa_16013835468
1820  * Wa_14015648006
1821  */
1822 static void wm_optimization_wa(struct intel_dp *intel_dp,
1823 			       const struct intel_crtc_state *crtc_state)
1824 {
1825 	struct intel_display *display = to_intel_display(intel_dp);
1826 	enum pipe pipe = intel_dp->psr.pipe;
1827 	bool activate = false;
1828 
1829 	/* Wa_14015648006 */
1830 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1831 		activate = true;
1832 
1833 	/* Wa_16013835468 */
1834 	if (DISPLAY_VER(display) == 12 &&
1835 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1836 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1837 		activate = true;
1838 
1839 	if (activate)
1840 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1841 			     0, LATENCY_REPORTING_REMOVED(pipe));
1842 	else
1843 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1844 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1845 }
1846 
1847 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1848 				    const struct intel_crtc_state *crtc_state)
1849 {
1850 	struct intel_display *display = to_intel_display(intel_dp);
1851 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1852 	u32 mask = 0;
1853 
1854 	/*
1855 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1856 	 * SKL+ use hardcoded values for PSR AUX transactions.
1857 	 */
1858 	if (DISPLAY_VER(display) < 9)
1859 		hsw_psr_setup_aux(intel_dp);
1860 
1861 	/*
1862 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1863 	 * mask LPSP to avoid a dependency on other drivers that might block
1864 	 * runtime_pm. Besides preventing other hw tracking issues, we can now
1865 	 * rely on frontbuffer tracking.
1866 	 *
1867 	 * From bspec prior to LunarLake:
1868 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1869 	 * panel replay mode.
1870 	 *
1871 	 * From bspec beyond LunarLake:
1872 	 * Panel Replay on DP: No bits are applicable
1873 	 * Panel Replay on eDP: All bits are applicable
1874 	 */
1875 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1876 		mask = EDP_PSR_DEBUG_MASK_HPD;
1877 
1878 	if (intel_dp_is_edp(intel_dp)) {
1879 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1880 
1881 		/*
1882 		 * For some unknown reason on HSW non-ULT (or at least on
1883 		 * Dell Latitude E6540) external displays start to flicker
1884 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1885 		 * higher than should be possible with an external display.
1886 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1887 		 * when external displays are active.
1888 		 */
1889 		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
1890 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1891 
1892 		if (DISPLAY_VER(display) < 20)
1893 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1894 
1895 		/*
1896 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1897 		 * registers in order to keep the CURSURFLIVE tricks working :(
1898 		 */
1899 		if (IS_DISPLAY_VER(display, 9, 10))
1900 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1901 
1902 		/* allow PSR with sprite enabled */
1903 		if (display->platform.haswell)
1904 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1905 	}
1906 
1907 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1908 
1909 	psr_irq_control(intel_dp);
1910 
1911 	/*
1912 	 * TODO: if future platforms support DC3CO in more than one
1913 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1914 	 */
1915 	if (intel_dp->psr.dc3co_exitline)
1916 		intel_de_rmw(display,
1917 			     TRANS_EXITLINE(display, cpu_transcoder),
1918 			     EXITLINE_MASK,
1919 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1920 
1921 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1922 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1923 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1924 			     IGNORE_PSR2_HW_TRACKING : 0);
1925 
1926 	/*
1927 	 * Wa_16013835468
1928 	 * Wa_14015648006
1929 	 */
1930 	wm_optimization_wa(intel_dp, crtc_state);
1931 
1932 	if (intel_dp->psr.sel_update_enabled) {
1933 		if (DISPLAY_VER(display) == 9)
1934 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1935 				     PSR2_VSC_ENABLE_PROG_HEADER |
1936 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1937 
1938 		/*
1939 		 * Wa_16014451276:adlp,mtl[a0,b0]
1940 		 * All supported adlp panels have 1-based X granularity; this may
1941 		 * cause issues if non-supported panels are used.
1942 		 */
1943 		if (!intel_dp->psr.panel_replay_enabled &&
1944 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1945 		     display->platform.alderlake_p))
1946 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1947 				     0, ADLP_1_BASED_X_GRANULARITY);
1948 
1949 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1950 		if (!intel_dp->psr.panel_replay_enabled &&
1951 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1952 			intel_de_rmw(display,
1953 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1954 				     0,
1955 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1956 		else if (display->platform.alderlake_p)
1957 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1958 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1959 	}
1960 
1961 	/* Wa_16025596647 */
1962 	if ((DISPLAY_VER(display) == 20 ||
1963 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1964 	    !intel_dp->psr.panel_replay_enabled)
1965 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true);
1966 
1967 	intel_alpm_configure(intel_dp, crtc_state);
1968 }
1969 
1970 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1971 {
1972 	struct intel_display *display = to_intel_display(intel_dp);
1973 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1974 	u32 val;
1975 
1976 	if (intel_dp->psr.panel_replay_enabled)
1977 		goto no_err;
1978 
1979 	/*
1980 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1981 	 * will still keep the error set even after the reset done in the
1982 	 * irq_preinstall and irq_uninstall hooks.
1983 	 * Enabling PSR in this situation causes the screen to freeze the
1984 	 * first time the PSR HW tries to activate, so let's keep PSR disabled
1985 	 * to avoid any rendering problems.
1986 	 */
1987 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1988 	val &= psr_irq_psr_error_bit_get(intel_dp);
1989 	if (val) {
1990 		intel_dp->psr.sink_not_reliable = true;
1991 		drm_dbg_kms(display->drm,
1992 			    "PSR interruption error set, not enabling PSR\n");
1993 		return false;
1994 	}
1995 
1996 no_err:
1997 	return true;
1998 }
1999 
2000 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
2001 				    const struct intel_crtc_state *crtc_state)
2002 {
2003 	struct intel_display *display = to_intel_display(intel_dp);
2004 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2005 	u32 val;
2006 
2007 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
2008 
2009 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
2010 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
2011 	intel_dp->psr.busy_frontbuffer_bits = 0;
2012 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
2013 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
2014 	/* DC5/DC6 requires at least 6 idle frames */
2015 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
2016 	intel_dp->psr.dc3co_exit_delay = val;
2017 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
2018 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
2019 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
2020 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2021 	intel_dp->psr.req_psr2_sdp_prior_scanline =
2022 		crtc_state->req_psr2_sdp_prior_scanline;
2023 	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
2024 	intel_dp->psr.pkg_c_latency_used = crtc_state->pkg_c_latency_used;
2025 
2026 	if (!psr_interrupt_error_check(intel_dp))
2027 		return;
2028 
2029 	if (intel_dp->psr.panel_replay_enabled)
2030 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
2031 	else
2032 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
2033 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2034 
2035 	/*
2036 	 * Enabling sink PSR/Panel Replay here only for PSR. Panel Replay enable
2037 	 * bit is already written at this point. Sink ALPM is enabled here for
2038 	 * PSR and Panel Replay. See
2039 	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2040 	 *  - Selective Update
2041 	 *  - Region Early Transport
2042 	 *  - Selective Update Region Scanline Capture
2043 	 *  - VSC_SDP_CRC
2044 	 *  - HPD on different Errors
2045 	 *  - CRC verification
2046 	 * are written for PSR and Panel Replay here.
2047 	 */
2048 	intel_psr_enable_sink(intel_dp, crtc_state);
2049 
2050 	if (intel_dp_is_edp(intel_dp))
2051 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2052 
2053 	intel_psr_enable_source(intel_dp, crtc_state);
2054 	intel_dp->psr.enabled = true;
2055 	intel_dp->psr.pause_counter = 0;
2056 
2057 	/*
2058 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2059 	 * training is complete as we never continue to PSR enable with an
2060 	 * untrained link. Link_ok is kept set until the first short pulse
2061 	 * interrupt. This is targeted at working around panels that report a
2062 	 * bad link after PSR is enabled.
2063 	 */
2064 	intel_dp->psr.link_ok = true;
2065 
2066 	intel_psr_activate(intel_dp);
2067 }
2068 
2069 static void intel_psr_exit(struct intel_dp *intel_dp)
2070 {
2071 	struct intel_display *display = to_intel_display(intel_dp);
2072 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2073 	u32 val;
2074 
2075 	if (!intel_dp->psr.active) {
2076 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2077 			val = intel_de_read(display,
2078 					    EDP_PSR2_CTL(display, cpu_transcoder));
2079 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2080 		}
2081 
2082 		val = intel_de_read(display,
2083 				    psr_ctl_reg(display, cpu_transcoder));
2084 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2085 
2086 		return;
2087 	}
2088 
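	/* Disable whichever of Panel Replay, PSR2 or PSR1 is currently active */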
2089 	if (intel_dp->psr.panel_replay_enabled) {
2090 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2091 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2092 	} else if (intel_dp->psr.sel_update_enabled) {
2093 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2094 
2095 		val = intel_de_rmw(display,
2096 				   EDP_PSR2_CTL(display, cpu_transcoder),
2097 				   EDP_PSR2_ENABLE, 0);
2098 
2099 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2100 	} else {
2101 		if ((DISPLAY_VER(display) == 20 ||
2102 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2103 			intel_dp->psr.pkg_c_latency_used)
2104 			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
2105 								       intel_dp->psr.pipe,
2106 								       false);
2107 
2108 		val = intel_de_rmw(display,
2109 				   psr_ctl_reg(display, cpu_transcoder),
2110 				   EDP_PSR_ENABLE, 0);
2111 
2112 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2113 	}
2114 	intel_dp->psr.active = false;
2115 }
2116 
2117 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2118 {
2119 	struct intel_display *display = to_intel_display(intel_dp);
2120 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2121 	i915_reg_t psr_status;
2122 	u32 psr_status_mask;
2123 
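
	/*
	 * Selective update and eDP Panel Replay report their state in
	 * EDP_PSR2_STATUS; otherwise the PSR1 status register is used.
	 */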
2124 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2125 					  intel_dp->psr.panel_replay_enabled)) {
2126 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2127 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2128 	} else {
2129 		psr_status = psr_status_reg(display, cpu_transcoder);
2130 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2131 	}
2132 
2133 	/* Wait till PSR is idle */
2134 	if (intel_de_wait_for_clear(display, psr_status,
2135 				    psr_status_mask, 2000))
2136 		drm_err(display->drm, "Timed out waiting for PSR idle state\n");
2137 }
2138 
2139 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2140 {
2141 	struct intel_display *display = to_intel_display(intel_dp);
2142 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2143 
2144 	lockdep_assert_held(&intel_dp->psr.lock);
2145 
2146 	if (!intel_dp->psr.enabled)
2147 		return;
2148 
2149 	if (intel_dp->psr.panel_replay_enabled)
2150 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2151 	else
2152 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2153 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2154 
2155 	intel_psr_exit(intel_dp);
2156 	intel_psr_wait_exit_locked(intel_dp);
2157 
2158 	/*
2159 	 * Wa_16013835468
2160 	 * Wa_14015648006
2161 	 */
2162 	if (DISPLAY_VER(display) >= 11)
2163 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2164 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2165 
2166 	if (intel_dp->psr.sel_update_enabled) {
2167 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2168 		if (!intel_dp->psr.panel_replay_enabled &&
2169 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2170 			intel_de_rmw(display,
2171 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2172 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2173 		else if (display->platform.alderlake_p)
2174 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2175 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2176 	}
2177 
2178 	if (intel_dp_is_edp(intel_dp))
2179 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2180 
2181 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp))
2182 		intel_alpm_disable(intel_dp);
2183 
2184 	/* Disable PSR on Sink */
2185 	if (!intel_dp->psr.panel_replay_enabled) {
2186 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2187 
2188 		if (intel_dp->psr.sel_update_enabled)
2189 			drm_dp_dpcd_writeb(&intel_dp->aux,
2190 					   DP_RECEIVER_ALPM_CONFIG, 0);
2191 	}
2192 
2193 	/* Wa_16025596647 */
2194 	if ((DISPLAY_VER(display) == 20 ||
2195 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2196 	    !intel_dp->psr.panel_replay_enabled)
2197 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false);
2198 
2199 	intel_dp->psr.enabled = false;
2200 	intel_dp->psr.panel_replay_enabled = false;
2201 	intel_dp->psr.sel_update_enabled = false;
2202 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2203 	intel_dp->psr.su_region_et_enabled = false;
2204 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2205 	intel_dp->psr.active_non_psr_pipes = 0;
2206 	intel_dp->psr.pkg_c_latency_used = 0;
2207 }
2208 
2209 /**
2210  * intel_psr_disable - Disable PSR
2211  * @intel_dp: Intel DP
2212  * @old_crtc_state: old CRTC state
2213  *
2214  * This function needs to be called before disabling pipe.
2215  */
2216 void intel_psr_disable(struct intel_dp *intel_dp,
2217 		       const struct intel_crtc_state *old_crtc_state)
2218 {
2219 	struct intel_display *display = to_intel_display(intel_dp);
2220 
2221 	if (!old_crtc_state->has_psr)
2222 		return;
2223 
2224 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2225 			!CAN_PANEL_REPLAY(intel_dp)))
2226 		return;
2227 
2228 	mutex_lock(&intel_dp->psr.lock);
2229 
2230 	intel_psr_disable_locked(intel_dp);
2231 
2232 	intel_dp->psr.link_ok = false;
2233 
2234 	mutex_unlock(&intel_dp->psr.lock);
2235 	cancel_work_sync(&intel_dp->psr.work);
2236 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2237 }
2238 
2239 /**
2240  * intel_psr_pause - Pause PSR
2241  * @intel_dp: Intel DP
2242  *
2243  * This function needs to be called after enabling PSR.
2244  */
2245 void intel_psr_pause(struct intel_dp *intel_dp)
2246 {
2247 	struct intel_psr *psr = &intel_dp->psr;
2248 
2249 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2250 		return;
2251 
2252 	mutex_lock(&psr->lock);
2253 
2254 	if (!psr->enabled) {
2255 		mutex_unlock(&psr->lock);
2256 		return;
2257 	}
2258 
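	/*
	 * Only the first pause actually exits PSR; nested pauses just bump the
	 * counter.
	 */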
2259 	if (intel_dp->psr.pause_counter++ == 0) {
2260 		intel_psr_exit(intel_dp);
2261 		intel_psr_wait_exit_locked(intel_dp);
2262 	}
2263 
2264 	mutex_unlock(&psr->lock);
2265 
2266 	cancel_work_sync(&psr->work);
2267 	cancel_delayed_work_sync(&psr->dc3co_work);
2268 }
2269 
2270 /**
2271  * intel_psr_resume - Resume PSR
2272  * @intel_dp: Intel DP
2273  *
2274  * This function needs to be called after pausing PSR.
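 *
 * A minimal usage sketch (assuming the caller holds a valid intel_dp and PSR
 * has already been enabled):
 *
 *	intel_psr_pause(intel_dp);
 *	... touch hardware that conflicts with PSR ...
 *	intel_psr_resume(intel_dp);
 *
 * Pause/resume calls nest: PSR is re-activated only when the last resume
 * balances the outstanding pauses.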
2275  */
2276 void intel_psr_resume(struct intel_dp *intel_dp)
2277 {
2278 	struct intel_display *display = to_intel_display(intel_dp);
2279 	struct intel_psr *psr = &intel_dp->psr;
2280 
2281 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2282 		return;
2283 
2284 	mutex_lock(&psr->lock);
2285 
2286 	if (!psr->enabled)
2287 		goto out;
2288 
2289 	if (!psr->pause_counter) {
2290 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2291 		goto out;
2292 	}
2293 
2294 	if (--intel_dp->psr.pause_counter == 0)
2295 		intel_psr_activate(intel_dp);
2296 
2297 out:
2298 	mutex_unlock(&psr->lock);
2299 }
2300 
2301 /**
2302  * intel_psr_needs_vblank_notification - Check if PSR needs vblank enable/disable
2303  * notification.
2304  * @crtc_state: CRTC state
2305  *
2306  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2307  * prevent it in that case. Panel Replay switches the main link off on DC
2308  * entry, which means vblank interrupts are not fired and is a problem if
2309  * user-space is polling for vblank events. Also Wa_16025596647 needs to know
2310  * when vblank is enabled/disabled.
2311  */
2312 bool intel_psr_needs_vblank_notification(const struct intel_crtc_state *crtc_state)
2313 {
2314 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2315 	struct intel_display *display = to_intel_display(crtc_state);
2316 	struct intel_encoder *encoder;
2317 
2318 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2319 		struct intel_dp *intel_dp;
2320 
2321 		if (!intel_encoder_is_dp(encoder))
2322 			continue;
2323 
2324 		intel_dp = enc_to_intel_dp(encoder);
2325 
2326 		if (!intel_dp_is_edp(intel_dp))
2327 			continue;
2328 
2329 		if (CAN_PANEL_REPLAY(intel_dp))
2330 			return true;
2331 
2332 		if ((DISPLAY_VER(display) == 20 ||
2333 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2334 		    CAN_PSR(intel_dp))
2335 			return true;
2336 	}
2337 
2338 	return false;
2339 }
2340 
2341 /**
2342  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2343  * @dsb: DSB context
2344  * @state: the atomic state
2345  * @crtc: the CRTC
2346  *
2347  * Generate PSR "Frame Change" event.
2348  */
2349 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2350 					  struct intel_atomic_state *state,
2351 					  struct intel_crtc *crtc)
2352 {
2353 	const struct intel_crtc_state *crtc_state =
2354 		intel_pre_commit_crtc_state(state, crtc);
2355 	struct intel_display *display = to_intel_display(crtc);
2356 
2357 	if (crtc_state->has_psr)
2358 		intel_de_write_dsb(display, dsb,
2359 				   CURSURFLIVE(display, crtc->pipe), 0);
2360 }
2361 
2362 /**
2363  * intel_psr_min_vblank_delay - Minimum vblank delay needed by PSR
2364  * @crtc_state: the crtc state
2365  *
2366  * Return minimum vblank delay needed by PSR.
2367  */
2368 int intel_psr_min_vblank_delay(const struct intel_crtc_state *crtc_state)
2369 {
2370 	struct intel_display *display = to_intel_display(crtc_state);
2371 
2372 	if (!crtc_state->has_psr)
2373 		return 0;
2374 
2375 	/* Wa_14015401596 */
2376 	if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14))
2377 		return 1;
2378 
2379 	/* Rest is for SRD_STATUS needed on LunarLake and onwards */
2380 	if (DISPLAY_VER(display) < 20)
2381 		return 0;
2382 
2383 	/*
2384 	 * Comment on SRD_STATUS register in Bspec for LunarLake and onwards:
2385 	 *
2386 	 * To deterministically capture the transition of the state machine
2387 	 * going from SRDOFFACK to IDLE, the delayed V. Blank should be at least
2388 	 * one line after the non-delayed V. Blank.
2389 	 *
2390 	 * Legacy TG: TRANS_SET_CONTEXT_LATENCY > 0
2391 	 * VRR TG: TRANS_VRR_CTL[ VRR Guardband ] < (TRANS_VRR_VMAX[ VRR Vmax ]
2392 	 * - TRANS_VTOTAL[ Vertical Active ])
2393 	 *
2394 	 * SRD_STATUS is used only by PSR1 on PantherLake.
2395 	 * SRD_STATUS is used by PSR1 and Panel Replay DP on LunarLake.
2396 	 */
2397 
2398 	if (DISPLAY_VER(display) >= 30 && (crtc_state->has_panel_replay ||
2399 					   crtc_state->has_sel_update))
2400 		return 0;
2401 	else if (DISPLAY_VER(display) < 30 && (crtc_state->has_sel_update ||
2402 					       intel_crtc_has_type(crtc_state,
2403 								   INTEL_OUTPUT_EDP)))
2404 		return 0;
2405 	else
2406 		return 1;
2407 }
2408 
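/*
 * On ADL-P and display 14+ the PSR2_MAN_TRK_CTL enable bit is not used and the
 * SF frame bits use a different encoding; these helpers hide the difference.
 */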
2409 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2410 {
2411 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2412 		PSR2_MAN_TRK_CTL_ENABLE;
2413 }
2414 
2415 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2416 {
2417 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2418 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2419 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2420 }
2421 
2422 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2423 {
2424 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2425 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2426 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2427 }
2428 
2429 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2430 {
2431 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2432 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2433 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2434 }
2435 
2436 static void intel_psr_force_update(struct intel_dp *intel_dp)
2437 {
2438 	struct intel_display *display = to_intel_display(intel_dp);
2439 
2440 	/*
2441 	 * Display WA #0884: skl+
2442 	 * This documented WA for bxt can be safely applied
2443 	 * broadly so we can force HW tracking to exit PSR
2444 	 * instead of disabling and re-enabling.
2445 	 * The workaround tells us to write 0 to CUR_SURFLIVE_A,
2446 	 * but it makes more sense to write to the currently active
2447 	 * pipe.
2448 	 *
2449 	 * This workaround does not exist for platforms with display 10 or
2450 	 * newer, but testing proved that it works up to display 13; for
2451 	 * anything newer, testing will be needed.
2452 	 */
2453 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2454 }
2455 
2456 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2457 					  const struct intel_crtc_state *crtc_state)
2458 {
2459 	struct intel_display *display = to_intel_display(crtc_state);
2460 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2461 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2462 	struct intel_encoder *encoder;
2463 
2464 	if (!crtc_state->enable_psr2_sel_fetch)
2465 		return;
2466 
2467 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2468 					     crtc_state->uapi.encoder_mask) {
2469 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2470 
2471 		if (!dsb)
2472 			lockdep_assert_held(&intel_dp->psr.lock);
2473 
2474 		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2475 			return;
2476 		break;
2477 	}
2478 
2479 	intel_de_write_dsb(display, dsb,
2480 			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2481 			   crtc_state->psr2_man_track_ctl);
2482 
2483 	if (!crtc_state->enable_psr2_su_region_et)
2484 		return;
2485 
2486 	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2487 			   crtc_state->pipe_srcsz_early_tpt);
2488 }
2489 
2490 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2491 				  bool full_update)
2492 {
2493 	struct intel_display *display = to_intel_display(crtc_state);
2494 	u32 val = man_trk_ctl_enable_bit_get(display);
2495 
2496 	/* SF partial frame enable has to be set even on full update */
2497 	val |= man_trk_ctl_partial_frame_bit_get(display);
2498 
2499 	if (full_update) {
2500 		val |= man_trk_ctl_continuos_full_frame(display);
2501 		goto exit;
2502 	}
2503 
2504 	if (crtc_state->psr2_su_area.y1 == -1)
2505 		goto exit;
2506 
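	/*
	 * ADL-P and display 14+ program the SU region start/end addresses as
	 * raw scanlines, while older platforms use 4-line blocks.
	 */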
2507 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
2508 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2509 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2510 	} else {
2511 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2512 			    crtc_state->psr2_su_area.y1 % 4 ||
2513 			    crtc_state->psr2_su_area.y2 % 4);
2514 
2515 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2516 			crtc_state->psr2_su_area.y1 / 4 + 1);
2517 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2518 			crtc_state->psr2_su_area.y2 / 4 + 1);
2519 	}
2520 exit:
2521 	crtc_state->psr2_man_track_ctl = val;
2522 }
2523 
2524 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2525 					  bool full_update)
2526 {
2527 	int width, height;
2528 
2529 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2530 		return 0;
2531 
2532 	width = drm_rect_width(&crtc_state->psr2_su_area);
2533 	height = drm_rect_height(&crtc_state->psr2_su_area);
2534 
2535 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2536 }
2537 
2538 static void clip_area_update(struct drm_rect *overlap_damage_area,
2539 			     struct drm_rect *damage_area,
2540 			     struct drm_rect *pipe_src)
2541 {
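	/* Clip the damage to the pipe; only the vertical extent is accumulated */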
2542 	if (!drm_rect_intersect(damage_area, pipe_src))
2543 		return;
2544 
2545 	if (overlap_damage_area->y1 == -1) {
2546 		overlap_damage_area->y1 = damage_area->y1;
2547 		overlap_damage_area->y2 = damage_area->y2;
2548 		return;
2549 	}
2550 
2551 	if (damage_area->y1 < overlap_damage_area->y1)
2552 		overlap_damage_area->y1 = damage_area->y1;
2553 
2554 	if (damage_area->y2 > overlap_damage_area->y2)
2555 		overlap_damage_area->y2 = damage_area->y2;
2556 }
2557 
2558 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2559 {
2560 	struct intel_display *display = to_intel_display(crtc_state);
2561 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2562 	u16 y_alignment;
2563 
2564 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2565 	if (crtc_state->dsc.compression_enable &&
2566 	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
2567 		y_alignment = vdsc_cfg->slice_height;
2568 	else
2569 		y_alignment = crtc_state->su_y_granularity;
2570 
2571 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2572 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2573 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2574 						y_alignment) + 1) * y_alignment;
2575 }
2576 
2577 /*
2578  * When early transport is in use we need to extend the SU area to cover the
2579  * cursor fully when the cursor is in the SU area.
2580  */
2581 static void
2582 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2583 				  struct intel_crtc *crtc,
2584 				  bool *cursor_in_su_area)
2585 {
2586 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2587 	struct intel_plane_state *new_plane_state;
2588 	struct intel_plane *plane;
2589 	int i;
2590 
2591 	if (!crtc_state->enable_psr2_su_region_et)
2592 		return;
2593 
2594 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2595 		struct drm_rect inter;
2596 
2597 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2598 			continue;
2599 
2600 		if (plane->id != PLANE_CURSOR)
2601 			continue;
2602 
2603 		if (!new_plane_state->uapi.visible)
2604 			continue;
2605 
2606 		inter = crtc_state->psr2_su_area;
2607 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2608 			continue;
2609 
2610 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2611 				 &crtc_state->pipe_src);
2612 		*cursor_in_su_area = true;
2613 	}
2614 }
2615 
2616 /*
2617  * TODO: It is not clear how to handle planes with a negative position;
2618  * also, planes are not updated if they have a negative X
2619  * position, so for now we do a full update in these cases.
2620  *
2621  * Plane scaling and rotation are not supported by selective fetch and both
2622  * properties can change without a modeset, so they need to be checked at
2623  * every atomic commit.
2624  */
2625 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2626 {
2627 	if (plane_state->uapi.dst.y1 < 0 ||
2628 	    plane_state->uapi.dst.x1 < 0 ||
2629 	    plane_state->scaler_id >= 0 ||
2630 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2631 		return false;
2632 
2633 	return true;
2634 }
2635 
2636 /*
2637  * Check for pipe properties that are not supported by selective fetch.
2638  *
2639  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2640  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2641  * enabled and going to the full update path.
2642  */
2643 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2644 {
2645 	if (crtc_state->scaler_state.scaler_id >= 0)
2646 		return false;
2647 
2648 	return true;
2649 }
2650 
2651 /* Wa 14019834836 */
2652 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2653 {
2654 	struct intel_display *display = to_intel_display(crtc_state);
2655 	struct intel_encoder *encoder;
2656 	int hactive_limit;
2657 
2658 	if (crtc_state->psr2_su_area.y1 != 0 ||
2659 	    crtc_state->psr2_su_area.y2 != 0)
2660 		return;
2661 
2662 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2663 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2664 	else
2665 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2666 
2667 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2668 		return;
2669 
2670 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2671 					     crtc_state->uapi.encoder_mask) {
2672 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2673 
2674 		if (!intel_dp_is_edp(intel_dp) &&
2675 		    intel_dp->psr.panel_replay_enabled &&
2676 		    intel_dp->psr.sel_update_enabled) {
2677 			crtc_state->psr2_su_area.y2++;
2678 			return;
2679 		}
2680 	}
2681 }
2682 
2683 static void
2684 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2685 {
2686 	struct intel_display *display = to_intel_display(crtc_state);
2687 
2688 	/* Wa_14014971492 */
2689 	if (!crtc_state->has_panel_replay &&
2690 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2691 	      display->platform.alderlake_p || display->platform.tigerlake)) &&
2692 	    crtc_state->splitter.enable)
2693 		crtc_state->psr2_su_area.y1 = 0;
2694 
2695 	/* Wa 14019834836 */
2696 	if (DISPLAY_VER(display) == 30)
2697 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2698 }
2699 
2700 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2701 				struct intel_crtc *crtc)
2702 {
2703 	struct intel_display *display = to_intel_display(state);
2704 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2705 	struct intel_plane_state *new_plane_state, *old_plane_state;
2706 	struct intel_plane *plane;
2707 	bool full_update = false, cursor_in_su_area = false;
2708 	int i, ret;
2709 
2710 	if (!crtc_state->enable_psr2_sel_fetch)
2711 		return 0;
2712 
2713 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2714 		full_update = true;
2715 		goto skip_sel_fetch_set_loop;
2716 	}
2717 
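	/* Start with an empty SU area: y1/y2 == -1 means no damage collected yet */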
2718 	crtc_state->psr2_su_area.x1 = 0;
2719 	crtc_state->psr2_su_area.y1 = -1;
2720 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2721 	crtc_state->psr2_su_area.y2 = -1;
2722 
2723 	/*
2724 	 * Calculate minimal selective fetch area of each plane and calculate
2725 	 * the pipe damaged area.
2726 	 * In the next loop the plane selective fetch area will actually be set
2727 	 * using the whole pipe damaged area.
2728 	 */
2729 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2730 					     new_plane_state, i) {
2731 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2732 						      .x2 = INT_MAX };
2733 
2734 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2735 			continue;
2736 
2737 		if (!new_plane_state->uapi.visible &&
2738 		    !old_plane_state->uapi.visible)
2739 			continue;
2740 
2741 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2742 			full_update = true;
2743 			break;
2744 		}
2745 
2746 		/*
2747 		 * If the visibility changed or the plane moved, mark the whole
2748 		 * plane area as damaged as it needs a complete redraw in the
2749 		 * new and old positions.
2750 		 */
2751 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2752 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2753 				     &old_plane_state->uapi.dst)) {
2754 			if (old_plane_state->uapi.visible) {
2755 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2756 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2757 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2758 						 &crtc_state->pipe_src);
2759 			}
2760 
2761 			if (new_plane_state->uapi.visible) {
2762 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2763 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2764 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2765 						 &crtc_state->pipe_src);
2766 			}
2767 			continue;
2768 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2769 			/* If alpha changed mark the whole plane area as damaged */
2770 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2771 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2772 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2773 					 &crtc_state->pipe_src);
2774 			continue;
2775 		}
2776 
2777 		src = drm_plane_state_src(&new_plane_state->uapi);
2778 		drm_rect_fp_to_int(&src, &src);
2779 
2780 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2781 						     &new_plane_state->uapi, &damaged_area))
2782 			continue;
2783 
2784 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2785 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2786 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2787 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2788 
2789 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2790 	}
2791 
2792 	/*
2793 	 * TODO: For now we are just using full update in case
2794 	 * selective fetch area calculation fails. To optimize this we
2795 	 * should identify cases where this happens and fix the area
2796 	 * calculation for those.
2797 	 */
2798 	if (crtc_state->psr2_su_area.y1 == -1) {
2799 		drm_info_once(display->drm,
2800 			      "Selective fetch area calculation failed in pipe %c\n",
2801 			      pipe_name(crtc->pipe));
2802 		full_update = true;
2803 	}
2804 
2805 	if (full_update)
2806 		goto skip_sel_fetch_set_loop;
2807 
2808 	intel_psr_apply_su_area_workarounds(crtc_state);
2809 
2810 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2811 	if (ret)
2812 		return ret;
2813 
2814 	/*
2815 	 * Adjust the SU area to cover the cursor fully as necessary (early
2816 	 * transport). This needs to be done after
2817 	 * drm_atomic_add_affected_planes to ensure the visible cursor is added
2818 	 * into the affected planes even when the cursor itself is not updated.
2819 	 */
2820 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2821 
2822 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2823 
2824 	/*
2825 	 * Now that we have the pipe damaged area, check if it intersects with
2826 	 * every plane; if it does, set the plane selective fetch area.
2827 	 */
2828 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2829 					     new_plane_state, i) {
2830 		struct drm_rect *sel_fetch_area, inter;
2831 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2832 
2833 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2834 		    !new_plane_state->uapi.visible)
2835 			continue;
2836 
2837 		inter = crtc_state->psr2_su_area;
2838 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2839 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2840 			sel_fetch_area->y1 = -1;
2841 			sel_fetch_area->y2 = -1;
2842 			/*
2843 			 * if plane sel fetch was previously enabled ->
2844 			 * disable it
2845 			 */
2846 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2847 				crtc_state->update_planes |= BIT(plane->id);
2848 
2849 			continue;
2850 		}
2851 
2852 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2853 			full_update = true;
2854 			break;
2855 		}
2856 
2857 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2858 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2859 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2860 		crtc_state->update_planes |= BIT(plane->id);
2861 
2862 		/*
2863 		 * Sel_fetch_area is calculated for the UV plane. Use the
2864 		 * same area for the Y plane as well.
2865 		 */
2866 		if (linked) {
2867 			struct intel_plane_state *linked_new_plane_state;
2868 			struct drm_rect *linked_sel_fetch_area;
2869 
2870 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2871 			if (IS_ERR(linked_new_plane_state))
2872 				return PTR_ERR(linked_new_plane_state);
2873 
2874 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2875 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2876 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2877 			crtc_state->update_planes |= BIT(linked->id);
2878 		}
2879 	}
2880 
2881 skip_sel_fetch_set_loop:
2882 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2883 	crtc_state->pipe_srcsz_early_tpt =
2884 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2885 	return 0;
2886 }
2887 
2888 void intel_psr2_panic_force_full_update(struct intel_display *display,
2889 					struct intel_crtc_state *crtc_state)
2890 {
2891 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2892 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2893 	u32 val = man_trk_ctl_enable_bit_get(display);
2894 
2895 	/* SF partial frame enable has to be set even on full update */
2896 	val |= man_trk_ctl_partial_frame_bit_get(display);
2897 	val |= man_trk_ctl_continuos_full_frame(display);
2898 
2899 	/* Directly write the register */
2900 	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);
2901 
2902 	if (!crtc_state->enable_psr2_su_region_et)
2903 		return;
2904 
2905 	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
2906 }
2907 
2908 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2909 				struct intel_crtc *crtc)
2910 {
2911 	struct intel_display *display = to_intel_display(state);
2912 	const struct intel_crtc_state *old_crtc_state =
2913 		intel_atomic_get_old_crtc_state(state, crtc);
2914 	const struct intel_crtc_state *new_crtc_state =
2915 		intel_atomic_get_new_crtc_state(state, crtc);
2916 	struct intel_encoder *encoder;
2917 
2918 	if (!HAS_PSR(display))
2919 		return;
2920 
2921 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2922 					     old_crtc_state->uapi.encoder_mask) {
2923 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2924 		struct intel_psr *psr = &intel_dp->psr;
2925 
2926 		mutex_lock(&psr->lock);
2927 
2928 		if (psr->enabled) {
2929 			/*
2930 			 * Reasons to disable:
2931 			 * - PSR disabled in new state
2932 			 * - All planes will go inactive
2933 			 * - Changing between PSR versions
2934 			 * - Region Early Transport changing
2935 			 * - Display WA #1136: skl, bxt
2936 			 */
2937 			if (intel_crtc_needs_modeset(new_crtc_state) ||
2938 			    !new_crtc_state->has_psr ||
2939 			    !new_crtc_state->active_planes ||
2940 			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
2941 			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
2942 			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
2943 			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
2944 				intel_psr_disable_locked(intel_dp);
2945 			else if (new_crtc_state->wm_level_disabled)
2946 				/* Wa_14015648006 */
2947 				wm_optimization_wa(intel_dp, new_crtc_state);
2948 		}
2949 
2950 		mutex_unlock(&psr->lock);
2951 	}
2952 }
2953 
2954 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2955 				 struct intel_crtc *crtc)
2956 {
2957 	struct intel_display *display = to_intel_display(state);
2958 	const struct intel_crtc_state *crtc_state =
2959 		intel_atomic_get_new_crtc_state(state, crtc);
2960 	struct intel_encoder *encoder;
2961 
2962 	if (!crtc_state->has_psr)
2963 		return;
2964 
2965 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2966 					     crtc_state->uapi.encoder_mask) {
2967 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2968 		struct intel_psr *psr = &intel_dp->psr;
2969 		bool keep_disabled = false;
2970 
2971 		mutex_lock(&psr->lock);
2972 
2973 		drm_WARN_ON(display->drm,
2974 			    psr->enabled && !crtc_state->active_planes);
2975 
2976 		keep_disabled |= psr->sink_not_reliable;
2977 		keep_disabled |= !crtc_state->active_planes;
2978 
2979 		/* Display WA #1136: skl, bxt */
2980 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2981 			crtc_state->wm_level_disabled;
2982 
2983 		if (!psr->enabled && !keep_disabled)
2984 			intel_psr_enable_locked(intel_dp, crtc_state);
2985 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2986 			/* Wa_14015648006 */
2987 			wm_optimization_wa(intel_dp, crtc_state);
2988 
2989 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2990 		if (crtc_state->crc_enabled && psr->enabled)
2991 			intel_psr_force_update(intel_dp);
2992 
2993 		/*
2994 		 * Clear possible busy bits in case we have
2995 		 * invalidate -> flip -> flush sequence.
2996 		 */
2997 		intel_dp->psr.busy_frontbuffer_bits = 0;
2998 
2999 		mutex_unlock(&psr->lock);
3000 	}
3001 }
3002 
3003 /*
3004  * From bspec: Panel Self Refresh (BDW+)
3005  * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
3006  * exit training time + 1.5 ms of aux channel handshake. 50 ms is
3007  * defensive enough to cover everything.
3008  */
3009 #define PSR_IDLE_TIMEOUT_MS 50
3010 
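/*
 * Worked example of the budget above (illustrative): at 60 Hz one refresh
 * period is ~16.7 ms, so the worst case is ~16.7 + 6 + 1.5 = ~24.2 ms; even a
 * 30 Hz panel stays within the timeout at ~33.3 + 6 + 1.5 = ~40.8 ms.
 */
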
3011 static int
3012 _psr2_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3013 				   struct intel_dsb *dsb)
3014 {
3015 	struct intel_display *display = to_intel_display(new_crtc_state);
3016 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3017 
3018 	/*
3019 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
3020 	 * As all higher states have bit 4 of the PSR2 state set we can just wait for
3021 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
3022 	 */
3023 	if (dsb) {
3024 		intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder),
3025 			       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 0, 200,
3026 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3027 		return true;
3028 	}
3029 
3030 	return intel_de_wait_for_clear(display,
3031 				       EDP_PSR2_STATUS(display, cpu_transcoder),
3032 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP,
3033 				       PSR_IDLE_TIMEOUT_MS);
3034 }
3035 
3036 static int
3037 _psr1_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3038 				   struct intel_dsb *dsb)
3039 {
3040 	struct intel_display *display = to_intel_display(new_crtc_state);
3041 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3042 
3043 	if (dsb) {
3044 		intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder),
3045 			       EDP_PSR_STATUS_STATE_MASK, 0, 200,
3046 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3047 		return true;
3048 	}
3049 
3050 	return intel_de_wait_for_clear(display,
3051 				       psr_status_reg(display, cpu_transcoder),
3052 				       EDP_PSR_STATUS_STATE_MASK,
3053 				       PSR_IDLE_TIMEOUT_MS);
3054 }
3055 
3056 /**
3057  * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
3058  * @new_crtc_state: new CRTC state
3059  *
3060  * This function is expected to be called from pipe_update_start() where it is
3061  * not expected to race with PSR enable or disable.
3062  */
3063 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
3064 {
3065 	struct intel_display *display = to_intel_display(new_crtc_state);
3066 	struct intel_encoder *encoder;
3067 
3068 	if (!new_crtc_state->has_psr)
3069 		return;
3070 
3071 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3072 					     new_crtc_state->uapi.encoder_mask) {
3073 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3074 		int ret;
3075 
3076 		lockdep_assert_held(&intel_dp->psr.lock);
3077 
3078 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3079 			continue;
3080 
3081 		if (intel_dp->psr.sel_update_enabled)
3082 			ret = _psr2_ready_for_pipe_update_locked(new_crtc_state,
3083 								 NULL);
3084 		else
3085 			ret = _psr1_ready_for_pipe_update_locked(new_crtc_state,
3086 								 NULL);
3087 
3088 		if (ret)
3089 			drm_err(display->drm,
3090 				"PSR wait timed out, atomic update may fail\n");
3091 	}
3092 }
3093 
3094 void intel_psr_wait_for_idle_dsb(struct intel_dsb *dsb,
3095 				 const struct intel_crtc_state *new_crtc_state)
3096 {
3097 	if (!new_crtc_state->has_psr || new_crtc_state->has_panel_replay)
3098 		return;
3099 
3100 	if (new_crtc_state->has_sel_update)
3101 		_psr2_ready_for_pipe_update_locked(new_crtc_state, dsb);
3102 	else
3103 		_psr1_ready_for_pipe_update_locked(new_crtc_state, dsb);
3104 }
3105 
3106 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
3107 {
3108 	struct intel_display *display = to_intel_display(intel_dp);
3109 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3110 	i915_reg_t reg;
3111 	u32 mask;
3112 	int err;
3113 
3114 	if (!intel_dp->psr.enabled)
3115 		return false;
3116 
3117 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3118 					  intel_dp->psr.panel_replay_enabled)) {
3119 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
3120 		mask = EDP_PSR2_STATUS_STATE_MASK;
3121 	} else {
3122 		reg = psr_status_reg(display, cpu_transcoder);
3123 		mask = EDP_PSR_STATUS_STATE_MASK;
3124 	}
3125 
3126 	mutex_unlock(&intel_dp->psr.lock);
3127 
3128 	err = intel_de_wait_for_clear(display, reg, mask, 50);
3129 	if (err)
3130 		drm_err(display->drm,
3131 			"Timed out waiting for PSR Idle for re-enable\n");
3132 
3133 	/* After the unlocked wait, verify that PSR is still wanted! */
3134 	mutex_lock(&intel_dp->psr.lock);
3135 	return err == 0 && intel_dp->psr.enabled && !intel_dp->psr.pause_counter;
3136 }
3137 
3138 static int intel_psr_fastset_force(struct intel_display *display)
3139 {
3140 	struct drm_connector_list_iter conn_iter;
3141 	struct drm_modeset_acquire_ctx ctx;
3142 	struct drm_atomic_state *state;
3143 	struct drm_connector *conn;
3144 	int err = 0;
3145 
3146 	state = drm_atomic_state_alloc(display->drm);
3147 	if (!state)
3148 		return -ENOMEM;
3149 
3150 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
3151 
3152 	state->acquire_ctx = &ctx;
3153 	to_intel_atomic_state(state)->internal = true;
3154 
3155 retry:
3156 	drm_connector_list_iter_begin(display->drm, &conn_iter);
3157 	drm_for_each_connector_iter(conn, &conn_iter) {
3158 		struct drm_connector_state *conn_state;
3159 		struct drm_crtc_state *crtc_state;
3160 
3161 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3162 			continue;
3163 
3164 		conn_state = drm_atomic_get_connector_state(state, conn);
3165 		if (IS_ERR(conn_state)) {
3166 			err = PTR_ERR(conn_state);
3167 			break;
3168 		}
3169 
3170 		if (!conn_state->crtc)
3171 			continue;
3172 
3173 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3174 		if (IS_ERR(crtc_state)) {
3175 			err = PTR_ERR(crtc_state);
3176 			break;
3177 		}
3178 
3179 		/* Mark mode as changed to trigger a pipe->update() */
3180 		crtc_state->mode_changed = true;
3181 	}
3182 	drm_connector_list_iter_end(&conn_iter);
3183 
3184 	if (err == 0)
3185 		err = drm_atomic_commit(state);
3186 
3187 	if (err == -EDEADLK) {
3188 		drm_atomic_state_clear(state);
3189 		err = drm_modeset_backoff(&ctx);
3190 		if (!err)
3191 			goto retry;
3192 	}
3193 
3194 	drm_modeset_drop_locks(&ctx);
3195 	drm_modeset_acquire_fini(&ctx);
3196 	drm_atomic_state_put(state);
3197 
3198 	return err;
3199 }
3200 
3201 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3202 {
3203 	struct intel_display *display = to_intel_display(intel_dp);
3204 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3205 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3206 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3207 	u32 old_mode, old_disable_bits;
3208 	int ret;
3209 
3210 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3211 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3212 		    I915_PSR_DEBUG_MODE_MASK) ||
3213 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3214 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3215 		return -EINVAL;
3216 	}
3217 
3218 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3219 	if (ret)
3220 		return ret;
3221 
3222 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3223 	old_disable_bits = intel_dp->psr.debug &
3224 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3225 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3226 
3227 	intel_dp->psr.debug = val;
3228 
3229 	/*
3230 	 * Do it right away if it's already enabled, otherwise it will be done
3231 	 * when enabling the source.
3232 	 */
3233 	if (intel_dp->psr.enabled)
3234 		psr_irq_control(intel_dp);
3235 
3236 	mutex_unlock(&intel_dp->psr.lock);
3237 
3238 	if (old_mode != mode || old_disable_bits != disable_bits)
3239 		ret = intel_psr_fastset_force(display);
3240 
3241 	return ret;
3242 }
3243 
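/*
 * Minimal usage sketch for intel_psr_debug_set() above (illustrative only,
 * hence kept under #if 0): compose a value from the flags validated by that
 * function and apply it, mirroring what the i915_edp_psr_debug debugfs write
 * handler further below does. "example_psr_debug_disable_panel_replay" is a
 * hypothetical helper, not part of this file.
 */
#if 0
static int example_psr_debug_disable_panel_replay(struct intel_dp *intel_dp)
{
	/* Keep IRQ debug logging enabled and opt out of Panel Replay. */
	u64 val = I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_PANEL_REPLAY_DISABLE;

	/* Bits outside the supported masks are rejected with -EINVAL. */
	return intel_psr_debug_set(intel_dp, val);
}
#endif
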
3244 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3245 {
3246 	struct intel_psr *psr = &intel_dp->psr;
3247 
3248 	intel_psr_disable_locked(intel_dp);
3249 	psr->sink_not_reliable = true;
3250 	/* let's make sure that the sink is awake */
3251 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3252 }
3253 
3254 static void intel_psr_work(struct work_struct *work)
3255 {
3256 	struct intel_dp *intel_dp =
3257 		container_of(work, typeof(*intel_dp), psr.work);
3258 
3259 	mutex_lock(&intel_dp->psr.lock);
3260 
3261 	if (!intel_dp->psr.enabled)
3262 		goto unlock;
3263 
3264 	if (READ_ONCE(intel_dp->psr.irq_aux_error)) {
3265 		intel_psr_handle_irq(intel_dp);
3266 		goto unlock;
3267 	}
3268 
3269 	if (intel_dp->psr.pause_counter)
3270 		goto unlock;
3271 
3272 	/*
3273 	 * otherwise it stays disabled until the next full enable/disable cycle.
3274 	 * otherwise it keeps disabled until next full enable/disable cycle.
3275 	 * PSR might take some time to get fully disabled
3276 	 * and be ready for re-enable.
3277 	 */
3278 	if (!__psr_wait_for_idle_locked(intel_dp))
3279 		goto unlock;
3280 
3281 	/*
3282 	 * The delayed work can race with an invalidate hence we need to
3283 	 * recheck. Since psr_flush first clears this and then reschedules we
3284 	 * won't ever miss a flush when bailing out here.
3285 	 */
3286 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3287 		goto unlock;
3288 
3289 	intel_psr_activate(intel_dp);
3290 unlock:
3291 	mutex_unlock(&intel_dp->psr.lock);
3292 }
3293 
3294 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3295 {
3296 	struct intel_display *display = to_intel_display(intel_dp);
3297 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3298 
3299 	if (!intel_dp->psr.psr2_sel_fetch_enabled)
3300 		return;
3301 
3302 	if (DISPLAY_VER(display) >= 20)
3303 		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3304 			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3305 	else
3306 		intel_de_write(display,
3307 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3308 			       man_trk_ctl_enable_bit_get(display) |
3309 			       man_trk_ctl_partial_frame_bit_get(display) |
3310 			       man_trk_ctl_single_full_frame_bit_get(display) |
3311 			       man_trk_ctl_continuos_full_frame(display));
3312 }
3313 
3314 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3315 {
3316 	struct intel_display *display = to_intel_display(intel_dp);
3317 
3318 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3319 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3320 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3321 			intel_psr_configure_full_frame_update(intel_dp);
3322 		}
3323 
3324 		intel_psr_force_update(intel_dp);
3325 	} else {
3326 		intel_psr_exit(intel_dp);
3327 	}
3328 }
3329 
3330 /**
3331  * intel_psr_invalidate - Invalidate PSR
3332  * @display: display device
3333  * @frontbuffer_bits: frontbuffer plane tracking bits
3334  * @origin: which operation caused the invalidate
3335  *
3336  * Since the hardware frontbuffer tracking has gaps we need to integrate
3337  * with the software frontbuffer tracking. This function gets called every
3338  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3339  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3340  *
3341  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3342  */
3343 void intel_psr_invalidate(struct intel_display *display,
3344 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3345 {
3346 	struct intel_encoder *encoder;
3347 
3348 	if (origin == ORIGIN_FLIP)
3349 		return;
3350 
3351 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3352 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3353 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3354 
3355 		mutex_lock(&intel_dp->psr.lock);
3356 		if (!intel_dp->psr.enabled) {
3357 			mutex_unlock(&intel_dp->psr.lock);
3358 			continue;
3359 		}
3360 
3361 		pipe_frontbuffer_bits &=
3362 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3363 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3364 
3365 		if (pipe_frontbuffer_bits)
3366 			_psr_invalidate_handle(intel_dp);
3367 
3368 		mutex_unlock(&intel_dp->psr.lock);
3369 	}
3370 }
3371 /*
3372  * Once we completely rely on PSR2 S/W tracking, intel_psr_flush() will
3373  * invalidate and flush PSR for ORIGIN_FLIP events as well, so
3374  * tgl_dc3co_flush_locked() will need to be changed accordingly at that
3375  * point.
3376  */
3377 static void
3378 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3379 		       enum fb_op_origin origin)
3380 {
3381 	struct intel_display *display = to_intel_display(intel_dp);
3382 
3383 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3384 	    !intel_dp->psr.active)
3385 		return;
3386 
3387 	/*
3388 	 * Every frontbuffer flush/flip event pushes back the delayed work; when
3389 	 * the delayed work finally runs it means the display has been idle.
3390 	 */
3391 	if (!(frontbuffer_bits &
3392 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3393 		return;
3394 
3395 	tgl_psr2_enable_dc3co(intel_dp);
3396 	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
3397 			 intel_dp->psr.dc3co_exit_delay);
3398 }
3399 
3400 static void _psr_flush_handle(struct intel_dp *intel_dp)
3401 {
3402 	struct intel_display *display = to_intel_display(intel_dp);
3403 
3404 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3405 		/* Selective fetch prior LNL */
3406 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3407 			/* can we turn CFF off? */
3408 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
3409 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3410 		}
3411 
3412 		/*
3413 		 * Still keep cff bit enabled as we don't have proper SU
3414 		 * configuration in case update is sent for any reason after
3415 		 * sff bit gets cleared by the HW on next vblank.
3416 		 *
3417 		 * NOTE: Setting cff bit is not needed for LunarLake onwards as
3418 		 * we have our own register for the SFF bit and we are not overwriting
3419 		 * the existing SU configuration.
3420 		 */
3421 		intel_psr_configure_full_frame_update(intel_dp);
3422 
3423 		intel_psr_force_update(intel_dp);
3424 	} else if (!intel_dp->psr.psr2_sel_fetch_enabled) {
3425 		/*
3426 		 * PSR1 on all platforms
3427 		 * PSR2 HW tracking
3428 		 * Panel Replay Full frame update
3429 		 */
3430 		intel_psr_force_update(intel_dp);
3431 	} else {
3432 		/* Selective update LNL onwards */
3433 		intel_psr_exit(intel_dp);
3434 	}
3435 
3436 	if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3437 		queue_work(display->wq.unordered, &intel_dp->psr.work);
3438 }
3439 
3440 /**
3441  * intel_psr_flush - Flush PSR
3442  * @display: display device
3443  * @frontbuffer_bits: frontbuffer plane tracking bits
3444  * @origin: which operation caused the flush
3445  *
3446  * Since the hardware frontbuffer tracking has gaps we need to integrate
3447  * with the software frontbuffer tracking. This function gets called every
3448  * time frontbuffer rendering has completed and flushed out to memory. PSR
3449  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3450  *
3451  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3452  */
3453 void intel_psr_flush(struct intel_display *display,
3454 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3455 {
3456 	struct intel_encoder *encoder;
3457 
3458 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3459 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3460 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3461 
3462 		mutex_lock(&intel_dp->psr.lock);
3463 		if (!intel_dp->psr.enabled) {
3464 			mutex_unlock(&intel_dp->psr.lock);
3465 			continue;
3466 		}
3467 
3468 		pipe_frontbuffer_bits &=
3469 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3470 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3471 
3472 		/*
3473 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3474 		 * we have to ensure that the PSR is not activated until
3475 		 * intel_psr_resume() is called.
3476 		 */
3477 		if (intel_dp->psr.pause_counter)
3478 			goto unlock;
3479 
3480 		if (origin == ORIGIN_FLIP ||
3481 		    (origin == ORIGIN_CURSOR_UPDATE &&
3482 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3483 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3484 			goto unlock;
3485 		}
3486 
3487 		if (pipe_frontbuffer_bits == 0)
3488 			goto unlock;
3489 
3490 		/* By definition flush = invalidate + flush */
3491 		_psr_flush_handle(intel_dp);
3492 unlock:
3493 		mutex_unlock(&intel_dp->psr.lock);
3494 	}
3495 }
3496 
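/*
 * Minimal sketch (illustrative, kept under #if 0) of how the software
 * frontbuffer tracking is expected to pair intel_psr_invalidate() and
 * intel_psr_flush() above: invalidate before CPU rendering dirties the
 * frontbuffer, flush once the data has reached memory. The helper name is
 * hypothetical; the real callers live in the frontbuffer tracking code.
 */
#if 0
static void example_frontbuffer_cpu_write(struct intel_display *display,
					  unsigned int frontbuffer_bits)
{
	/* Rendering is about to start: PSR must stop relying on the stale RFB. */
	intel_psr_invalidate(display, frontbuffer_bits, ORIGIN_CPU);

	/* ... CPU rendering into the frontbuffer happens here ... */

	/* Data is back in memory: PSR may be re-activated. */
	intel_psr_flush(display, frontbuffer_bits, ORIGIN_CPU);
}
#endif
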
3497 /**
3498  * intel_psr_init - Init basic PSR work and mutex.
3499  * @intel_dp: Intel DP
3500  *
3501  * This function is called after connector initialization (which handles
3502  * the connector capabilities) and initializes the basic PSR state for
3503  * each DP encoder.
3504  */
3505 void intel_psr_init(struct intel_dp *intel_dp)
3506 {
3507 	struct intel_display *display = to_intel_display(intel_dp);
3508 	struct intel_connector *connector = intel_dp->attached_connector;
3509 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3510 
3511 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3512 		return;
3513 
3514 	/*
3515 	 * HSW spec explicitly says PSR is tied to port A.
3516 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3517 	 * but on BDW, GEN9 and GEN11 the HW team has only validated the eDP
3518 	 * transcoder.
3519 	 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3520 	 * so let's keep it hardcoded to PORT_A there.
3521 	 * GEN12 supports an instance of the PSR registers per transcoder.
3522 	 */
3523 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3524 		drm_dbg_kms(display->drm,
3525 			    "PSR condition failed: Port not supported\n");
3526 		return;
3527 	}
3528 
3529 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3530 	    DISPLAY_VER(display) >= 20)
3531 		intel_dp->psr.source_panel_replay_support = true;
3532 
3533 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3534 		intel_dp->psr.source_support = true;
3535 
3536 	/* Set link_standby x link_off defaults */
3537 	if (DISPLAY_VER(display) < 12)
3538 		/* For new platforms up to TGL let's respect VBT back again */
3539 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3540 
3541 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3542 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3543 	mutex_init(&intel_dp->psr.lock);
3544 }
3545 
3546 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3547 					   u8 *status, u8 *error_status)
3548 {
3549 	struct drm_dp_aux *aux = &intel_dp->aux;
3550 	int ret;
3551 	unsigned int offset;
3552 
3553 	offset = intel_dp->psr.panel_replay_enabled ?
3554 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3555 
3556 	ret = drm_dp_dpcd_readb(aux, offset, status);
3557 	if (ret != 1)
3558 		return ret;
3559 
3560 	offset = intel_dp->psr.panel_replay_enabled ?
3561 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3562 
3563 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3564 	if (ret != 1)
3565 		return ret;
3566 
3567 	*status = *status & DP_PSR_SINK_STATE_MASK;
3568 
3569 	return 0;
3570 }
3571 
3572 static void psr_alpm_check(struct intel_dp *intel_dp)
3573 {
3574 	struct intel_psr *psr = &intel_dp->psr;
3575 
3576 	if (!psr->sel_update_enabled)
3577 		return;
3578 
3579 	if (intel_alpm_get_error(intel_dp)) {
3580 		intel_psr_disable_locked(intel_dp);
3581 		psr->sink_not_reliable = true;
3582 	}
3583 }
3584 
3585 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3586 {
3587 	struct intel_display *display = to_intel_display(intel_dp);
3588 	struct intel_psr *psr = &intel_dp->psr;
3589 	u8 val;
3590 	int r;
3591 
3592 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3593 	if (r != 1) {
3594 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3595 		return;
3596 	}
3597 
3598 	if (val & DP_PSR_CAPS_CHANGE) {
3599 		intel_psr_disable_locked(intel_dp);
3600 		psr->sink_not_reliable = true;
3601 		drm_dbg_kms(display->drm,
3602 			    "Sink PSR capability changed, disabling PSR\n");
3603 
3604 		/* Clearing it */
3605 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3606 	}
3607 }
3608 
3609 /*
3610  * On common bits:
3611  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3612  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3613  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3614  * this function is relying on PSR definitions
3615  * this function relies on the PSR definitions
3616 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3617 {
3618 	struct intel_display *display = to_intel_display(intel_dp);
3619 	struct intel_psr *psr = &intel_dp->psr;
3620 	u8 status, error_status;
3621 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3622 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3623 			  DP_PSR_LINK_CRC_ERROR;
3624 
3625 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3626 		return;
3627 
3628 	mutex_lock(&psr->lock);
3629 
3630 	psr->link_ok = false;
3631 
3632 	if (!psr->enabled)
3633 		goto exit;
3634 
3635 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3636 		drm_err(display->drm,
3637 			"Error reading PSR status or error status\n");
3638 		goto exit;
3639 	}
3640 
3641 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3642 	    (error_status & errors)) {
3643 		intel_psr_disable_locked(intel_dp);
3644 		psr->sink_not_reliable = true;
3645 	}
3646 
3647 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3648 	    !error_status)
3649 		drm_dbg_kms(display->drm,
3650 			    "PSR sink internal error, disabling PSR\n");
3651 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3652 		drm_dbg_kms(display->drm,
3653 			    "PSR RFB storage error, disabling PSR\n");
3654 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3655 		drm_dbg_kms(display->drm,
3656 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3657 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3658 		drm_dbg_kms(display->drm,
3659 			    "PSR Link CRC error, disabling PSR\n");
3660 
3661 	if (error_status & ~errors)
3662 		drm_err(display->drm,
3663 			"PSR_ERROR_STATUS unhandled errors %x\n",
3664 			error_status & ~errors);
3665 	/* clear status register */
3666 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3667 
3668 	if (!psr->panel_replay_enabled) {
3669 		psr_alpm_check(intel_dp);
3670 		psr_capability_changed_check(intel_dp);
3671 	}
3672 
3673 exit:
3674 	mutex_unlock(&psr->lock);
3675 }
3676 
3677 bool intel_psr_enabled(struct intel_dp *intel_dp)
3678 {
3679 	bool ret;
3680 
3681 	if (!CAN_PSR(intel_dp))
3682 		return false;
3683 
3684 	mutex_lock(&intel_dp->psr.lock);
3685 	ret = intel_dp->psr.enabled;
3686 	mutex_unlock(&intel_dp->psr.lock);
3687 
3688 	return ret;
3689 }
3690 
3691 /**
3692  * intel_psr_link_ok - return psr->link_ok
3693  * @intel_dp: struct intel_dp
3694  *
3695  * We are seeing unexpected link re-trainings with some panels, caused by the
3696  * panel reporting a bad link status after PSR is enabled. Code checking link
3697  * status can call this to decide whether it can ignore a bad link status
3698  * reported by the panel, i.e. if the panel reports a bad link while
3699  * intel_psr_link_ok() says the link is ok, the caller should rely on the latter.
3700  *
3701  * Return value of link_ok
3702  */
3703 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3704 {
3705 	bool ret;
3706 
3707 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3708 	    !intel_dp_is_edp(intel_dp))
3709 		return false;
3710 
3711 	mutex_lock(&intel_dp->psr.lock);
3712 	ret = intel_dp->psr.link_ok;
3713 	mutex_unlock(&intel_dp->psr.lock);
3714 
3715 	return ret;
3716 }
3717 
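/*
 * Illustrative sketch (kept under #if 0) of the intended use described above:
 * a link status check that trusts intel_psr_link_ok() over what the panel
 * reports. "channel_eq_looks_bad" stands in for the caller's own DPCD based
 * check and is hypothetical.
 */
#if 0
static bool example_needs_link_retrain(struct intel_dp *intel_dp,
				       bool channel_eq_looks_bad)
{
	/* Ignore a bad status from the panel while PSR says the link is ok. */
	if (channel_eq_looks_bad && intel_psr_link_ok(intel_dp))
		return false;

	return channel_eq_looks_bad;
}
#endif
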
3718 /**
3719  * intel_psr_lock - grab PSR lock
3720  * @crtc_state: the crtc state
3721  *
3722  * This is initially meant to be used around the CRTC update, when
3723  * vblank sensitive registers are updated and we need to grab the lock
3724  * before that to avoid vblank evasion.
3725  */
3726 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3727 {
3728 	struct intel_display *display = to_intel_display(crtc_state);
3729 	struct intel_encoder *encoder;
3730 
3731 	if (!crtc_state->has_psr)
3732 		return;
3733 
3734 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3735 					     crtc_state->uapi.encoder_mask) {
3736 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3737 
3738 		mutex_lock(&intel_dp->psr.lock);
3739 		break;
3740 	}
3741 }
3742 
3743 /**
3744  * intel_psr_unlock - release PSR lock
3745  * @crtc_state: the crtc state
3746  *
3747  * Release the PSR lock that was held during pipe update.
3748  */
3749 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3750 {
3751 	struct intel_display *display = to_intel_display(crtc_state);
3752 	struct intel_encoder *encoder;
3753 
3754 	if (!crtc_state->has_psr)
3755 		return;
3756 
3757 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3758 					     crtc_state->uapi.encoder_mask) {
3759 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3760 
3761 		mutex_unlock(&intel_dp->psr.lock);
3762 		break;
3763 	}
3764 }
3765 
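/*
 * Minimal sketch (illustrative, kept under #if 0) of the pairing described
 * above: take the PSR lock before the vblank evasion critical section of a
 * pipe update and release it afterwards.
 * "program_vblank_sensitive_registers" is a hypothetical stand-in for the
 * actual pipe update code.
 */
#if 0
static void example_pipe_update(const struct intel_crtc_state *new_crtc_state)
{
	intel_psr_lock(new_crtc_state);

	/* PSR must be idle before the new state is latched at vblank. */
	intel_psr_wait_for_idle_locked(new_crtc_state);

	/* program_vblank_sensitive_registers(new_crtc_state); */

	intel_psr_unlock(new_crtc_state);
}
#endif
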
3766 /* Wa_16025596647 */
3767 static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
3768 {
3769 	struct intel_display *display = to_intel_display(intel_dp);
3770 	bool dc5_dc6_blocked;
3771 
3772 	if (!intel_dp->psr.active || !intel_dp->psr.pkg_c_latency_used)
3773 		return;
3774 
3775 	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
3776 
3777 	if (intel_dp->psr.sel_update_enabled)
3778 		psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
3779 					 psr_compute_idle_frames(intel_dp));
3780 	else
3781 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
3782 								       intel_dp->psr.pipe,
3783 								       dc5_dc6_blocked);
3784 }
3785 
3786 static void psr_dc5_dc6_wa_work(struct work_struct *work)
3787 {
3788 	struct intel_display *display = container_of(work, typeof(*display),
3789 						     psr_dc5_dc6_wa_work);
3790 	struct intel_encoder *encoder;
3791 
3792 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3793 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3794 
3795 		mutex_lock(&intel_dp->psr.lock);
3796 
3797 		if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled &&
3798 		    !intel_dp->psr.pkg_c_latency_used)
3799 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3800 
3801 		mutex_unlock(&intel_dp->psr.lock);
3802 	}
3803 }
3804 
3805 /**
3806  * intel_psr_notify_dc5_dc6 - Notify PSR about enable/disable dc5/dc6
3807  * @display: intel display struct
3808  *
3809  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to schedule
3810  * psr_dc5_dc6_wa_work used for applying/removing the workaround.
3811  */
3812 void intel_psr_notify_dc5_dc6(struct intel_display *display)
3813 {
3814 	if (DISPLAY_VER(display) != 20 &&
3815 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3816 		return;
3817 
3818 	schedule_work(&display->psr_dc5_dc6_wa_work);
3819 }
3820 
3821 /**
3822  * intel_psr_dc5_dc6_wa_init - Init work for underrun on idle PSR HW bug wa
3823  * @display: intel atomic state
3824  * @display: intel display struct
3825  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to init
3826  * psr_dc5_dc6_wa_work used for applying the workaround.
3827  */
3828 void intel_psr_dc5_dc6_wa_init(struct intel_display *display)
3829 {
3830 	if (DISPLAY_VER(display) != 20 &&
3831 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3832 		return;
3833 
3834 	INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work);
3835 }
3836 
3837 /**
3838  * intel_psr_notify_pipe_change - Notify PSR about enable/disable of a pipe
3839  * @state: intel atomic state
3840  * @crtc: intel crtc
3841  * @enable: enable/disable
3842  *
3843  * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
3844  * apply/remove the workaround when a pipe is getting enabled/disabled.
3845  */
3846 void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
3847 				  struct intel_crtc *crtc, bool enable)
3848 {
3849 	struct intel_display *display = to_intel_display(state);
3850 	struct intel_encoder *encoder;
3851 
3852 	if (DISPLAY_VER(display) != 20 &&
3853 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3854 		return;
3855 
3856 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3857 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3858 		u8 active_non_psr_pipes;
3859 
3860 		mutex_lock(&intel_dp->psr.lock);
3861 
3862 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3863 			goto unlock;
3864 
3865 		active_non_psr_pipes = intel_dp->psr.active_non_psr_pipes;
3866 
3867 		if (enable)
3868 			active_non_psr_pipes |= BIT(crtc->pipe);
3869 		else
3870 			active_non_psr_pipes &= ~BIT(crtc->pipe);
3871 
3872 		if (active_non_psr_pipes == intel_dp->psr.active_non_psr_pipes)
3873 			goto unlock;
3874 
3875 		if ((enable && intel_dp->psr.active_non_psr_pipes) ||
3876 		    (!enable && !intel_dp->psr.active_non_psr_pipes) ||
3877 		    !intel_dp->psr.pkg_c_latency_used) {
3878 			intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3879 			goto unlock;
3880 		}
3881 
3882 		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3883 
3884 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3885 unlock:
3886 		mutex_unlock(&intel_dp->psr.lock);
3887 	}
3888 }
3889 
3890 /**
3891  * intel_psr_notify_vblank_enable_disable - Notify PSR about enable/disable of vblank
3892  * @display: intel display struct
3893  * @enable: enable/disable
3894  *
3895  * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
3896  * apply/remove the workaround when vblank is getting enabled/disabled.
3897  */
3898 void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
3899 					    bool enable)
3900 {
3901 	struct intel_encoder *encoder;
3902 
3903 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3904 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3905 
3906 		mutex_lock(&intel_dp->psr.lock);
3907 		if (intel_dp->psr.panel_replay_enabled) {
3908 			mutex_unlock(&intel_dp->psr.lock);
3909 			break;
3910 		}
3911 
3912 		if (intel_dp->psr.enabled && intel_dp->psr.pkg_c_latency_used)
3913 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3914 
3915 		mutex_unlock(&intel_dp->psr.lock);
3916 		return;
3917 	}
3918 
3919 	/*
3920 	 * NOTE: intel_display_power_set_target_dc_state is used
3921 	 * only by PSR code for DC3CO handling. The DC3CO target
3922 	 * state is currently disabled in PSR code. If DC3CO is
3923 	 * taken into use we need to take that into account here
3924 	 * as well.
3925 	 */
3926 	intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE :
3927 						DC_STATE_EN_UPTO_DC6);
3928 }
3929 
3930 static void
3931 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3932 {
3933 	struct intel_display *display = to_intel_display(intel_dp);
3934 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3935 	const char *status = "unknown";
3936 	u32 val, status_val;
3937 
3938 	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
3939 	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
3940 		static const char * const live_status[] = {
3941 			"IDLE",
3942 			"CAPTURE",
3943 			"CAPTURE_FS",
3944 			"SLEEP",
3945 			"BUFON_FW",
3946 			"ML_UP",
3947 			"SU_STANDBY",
3948 			"FAST_SLEEP",
3949 			"DEEP_SLEEP",
3950 			"BUF_ON",
3951 			"TG_ON"
3952 		};
3953 		val = intel_de_read(display,
3954 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3955 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3956 		if (status_val < ARRAY_SIZE(live_status))
3957 			status = live_status[status_val];
3958 	} else {
3959 		static const char * const live_status[] = {
3960 			"IDLE",
3961 			"SRDONACK",
3962 			"SRDENT",
3963 			"BUFOFF",
3964 			"BUFON",
3965 			"AUXACK",
3966 			"SRDOFFACK",
3967 			"SRDENT_ON",
3968 		};
3969 		val = intel_de_read(display,
3970 				    psr_status_reg(display, cpu_transcoder));
3971 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3972 		if (status_val < ARRAY_SIZE(live_status))
3973 			status = live_status[status_val];
3974 	}
3975 
3976 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3977 }
3978 
3979 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3980 				      struct seq_file *m)
3981 {
3982 	struct intel_psr *psr = &intel_dp->psr;
3983 
3984 	seq_printf(m, "Sink support: PSR = %s",
3985 		   str_yes_no(psr->sink_support));
3986 
3987 	if (psr->sink_support)
3988 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3989 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3990 		seq_printf(m, " (Early Transport)");
3991 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3992 	seq_printf(m, ", Panel Replay Selective Update = %s",
3993 		   str_yes_no(psr->sink_panel_replay_su_support));
3994 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
3995 	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3996 		seq_printf(m, " (Early Transport)");
3997 	seq_printf(m, "\n");
3998 }
3999 
4000 static void intel_psr_print_mode(struct intel_dp *intel_dp,
4001 				 struct seq_file *m)
4002 {
4003 	struct intel_psr *psr = &intel_dp->psr;
4004 	const char *status, *mode, *region_et;
4005 
4006 	if (psr->enabled)
4007 		status = " enabled";
4008 	else
4009 		status = "disabled";
4010 
4011 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
4012 		mode = "Panel Replay Selective Update";
4013 	else if (psr->panel_replay_enabled)
4014 		mode = "Panel Replay";
4015 	else if (psr->sel_update_enabled)
4016 		mode = "PSR2";
4017 	else if (psr->enabled)
4018 		mode = "PSR1";
4019 	else
4020 		mode = "";
4021 
4022 	if (psr->su_region_et_enabled)
4023 		region_et = " (Early Transport)";
4024 	else
4025 		region_et = "";
4026 
4027 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
4028 }
4029 
4030 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
4031 {
4032 	struct intel_display *display = to_intel_display(intel_dp);
4033 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
4034 	struct intel_psr *psr = &intel_dp->psr;
4035 	struct ref_tracker *wakeref;
4036 	bool enabled;
4037 	u32 val, psr2_ctl;
4038 
4039 	intel_psr_sink_capability(intel_dp, m);
4040 
4041 	if (!(psr->sink_support || psr->sink_panel_replay_support))
4042 		return 0;
4043 
4044 	wakeref = intel_display_rpm_get(display);
4045 	mutex_lock(&psr->lock);
4046 
4047 	intel_psr_print_mode(intel_dp, m);
4048 
4049 	if (!psr->enabled) {
4050 		seq_printf(m, "PSR sink not reliable: %s\n",
4051 			   str_yes_no(psr->sink_not_reliable));
4052 
4053 		goto unlock;
4054 	}
4055 
4056 	if (psr->panel_replay_enabled) {
4057 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
4058 
4059 		if (intel_dp_is_edp(intel_dp))
4060 			psr2_ctl = intel_de_read(display,
4061 						 EDP_PSR2_CTL(display,
4062 							      cpu_transcoder));
4063 
4064 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
4065 	} else if (psr->sel_update_enabled) {
4066 		val = intel_de_read(display,
4067 				    EDP_PSR2_CTL(display, cpu_transcoder));
4068 		enabled = val & EDP_PSR2_ENABLE;
4069 	} else {
4070 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
4071 		enabled = val & EDP_PSR_ENABLE;
4072 	}
4073 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
4074 		   str_enabled_disabled(enabled), val);
4075 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
4076 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
4077 			   psr2_ctl);
4078 	psr_source_status(intel_dp, m);
4079 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
4080 		   psr->busy_frontbuffer_bits);
4081 
4082 	/*
4083 	 * SKL+ Perf counter is reset to 0 everytime DC state is entered
4084 	 * SKL+ Perf counter is reset to 0 every time a DC state is entered
4085 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
4086 	seq_printf(m, "Performance counter: %u\n",
4087 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
4088 
4089 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
4090 		seq_printf(m, "Last attempted entry at: %lld\n",
4091 			   psr->last_entry_attempt);
4092 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
4093 	}
4094 
4095 	if (psr->sel_update_enabled) {
4096 		u32 su_frames_val[3];
4097 		int frame;
4098 
4099 		/*
4100 		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
4101 		 * (it returns zeros only) and it has been removed on Xe2_LPD.
4102 		 */
4103 		if (DISPLAY_VER(display) < 13) {
4104 			/*
4105 			 * Reading all 3 registers before hand to minimize crossing a
4106 			 * frame boundary between register reads
4107 			 */
4108 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
4109 				val = intel_de_read(display,
4110 						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
4111 				su_frames_val[frame / 3] = val;
4112 			}
4113 
4114 			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
4115 
4116 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
4117 				u32 su_blocks;
4118 
4119 				su_blocks = su_frames_val[frame / 3] &
4120 					PSR2_SU_STATUS_MASK(frame);
4121 				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
4122 				seq_printf(m, "%d\t%d\n", frame, su_blocks);
4123 			}
4124 		}
4125 
4126 		seq_printf(m, "PSR2 selective fetch: %s\n",
4127 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
4128 	}
4129 
4130 unlock:
4131 	mutex_unlock(&psr->lock);
4132 	intel_display_rpm_put(display, wakeref);
4133 
4134 	return 0;
4135 }
4136 
4137 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
4138 {
4139 	struct intel_display *display = m->private;
4140 	struct intel_dp *intel_dp = NULL;
4141 	struct intel_encoder *encoder;
4142 
4143 	if (!HAS_PSR(display))
4144 		return -ENODEV;
4145 
4146 	/* Find the first EDP which supports PSR */
4147 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4148 		intel_dp = enc_to_intel_dp(encoder);
4149 		break;
4150 	}
4151 
4152 	if (!intel_dp)
4153 		return -ENODEV;
4154 
4155 	return intel_psr_status(m, intel_dp);
4156 }
4157 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
4158 
4159 static int
4160 i915_edp_psr_debug_set(void *data, u64 val)
4161 {
4162 	struct intel_display *display = data;
4163 	struct intel_encoder *encoder;
4164 	int ret = -ENODEV;
4165 
4166 	if (!HAS_PSR(display))
4167 		return ret;
4168 
4169 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4170 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4171 
4172 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
4173 
4174 		// TODO: split to each transcoder's PSR debug state
4175 		with_intel_display_rpm(display)
4176 			ret = intel_psr_debug_set(intel_dp, val);
4177 	}
4178 
4179 	return ret;
4180 }
4181 
4182 static int
4183 i915_edp_psr_debug_get(void *data, u64 *val)
4184 {
4185 	struct intel_display *display = data;
4186 	struct intel_encoder *encoder;
4187 
4188 	if (!HAS_PSR(display))
4189 		return -ENODEV;
4190 
4191 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4192 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4193 
4194 		// TODO: split to each transcoder's PSR debug state
4195 		*val = READ_ONCE(intel_dp->psr.debug);
4196 		return 0;
4197 	}
4198 
4199 	return -ENODEV;
4200 }
4201 
4202 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
4203 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
4204 			"%llu\n");
4205 
4206 void intel_psr_debugfs_register(struct intel_display *display)
4207 {
4208 	struct dentry *debugfs_root = display->drm->debugfs_root;
4209 
4210 	debugfs_create_file("i915_edp_psr_debug", 0644, debugfs_root,
4211 			    display, &i915_edp_psr_debug_fops);
4212 
4213 	debugfs_create_file("i915_edp_psr_status", 0444, debugfs_root,
4214 			    display, &i915_edp_psr_status_fops);
4215 }
4216 
4217 static const char *psr_mode_str(struct intel_dp *intel_dp)
4218 {
4219 	if (intel_dp->psr.panel_replay_enabled)
4220 		return "PANEL-REPLAY";
4221 	else if (intel_dp->psr.enabled)
4222 		return "PSR";
4223 
4224 	return "unknown";
4225 }
4226 
4227 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
4228 {
4229 	struct intel_connector *connector = m->private;
4230 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4231 	static const char * const sink_status[] = {
4232 		"inactive",
4233 		"transition to active, capture and display",
4234 		"active, display from RFB",
4235 		"active, capture and display on sink device timings",
4236 		"transition to inactive, capture and display, timing re-sync",
4237 		"reserved",
4238 		"reserved",
4239 		"sink internal error",
4240 	};
4241 	const char *str;
4242 	int ret;
4243 	u8 status, error_status;
4244 
4245 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
4246 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
4247 		return -ENODEV;
4248 	}
4249 
4250 	if (connector->base.status != connector_status_connected)
4251 		return -ENODEV;
4252 
4253 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
4254 	if (ret)
4255 		return ret;
4256 
4257 	status &= DP_PSR_SINK_STATE_MASK;
4258 	if (status < ARRAY_SIZE(sink_status))
4259 		str = sink_status[status];
4260 	else
4261 		str = "unknown";
4262 
4263 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
4264 
4265 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
4266 
4267 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
4268 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
4269 			    DP_PSR_LINK_CRC_ERROR))
4270 		seq_puts(m, ":\n");
4271 	else
4272 		seq_puts(m, "\n");
4273 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
4274 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
4275 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
4276 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
4277 	if (error_status & DP_PSR_LINK_CRC_ERROR)
4278 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
4279 
4280 	return ret;
4281 }
4282 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
4283 
4284 static int i915_psr_status_show(struct seq_file *m, void *data)
4285 {
4286 	struct intel_connector *connector = m->private;
4287 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4288 
4289 	return intel_psr_status(m, intel_dp);
4290 }
4291 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
4292 
4293 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
4294 {
4295 	struct intel_display *display = to_intel_display(connector);
4296 	struct dentry *root = connector->base.debugfs_entry;
4297 
4298 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
4299 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
4300 		return;
4301 
4302 	debugfs_create_file("i915_psr_sink_status", 0444, root,
4303 			    connector, &i915_psr_sink_status_fops);
4304 
4305 	if (HAS_PSR(display) || HAS_DP20(display))
4306 		debugfs_create_file("i915_psr_status", 0444, root,
4307 				    connector, &i915_psr_status_fops);
4308 }
4309 
4310 bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_state *crtc_state)
4311 {
4312 	/*
4313 	 * eDP Panel Replay always uses ALPM;
4314 	 * PSR2 uses ALPM but PSR1 doesn't.
4315 	 */
4316 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
4317 					     crtc_state->has_panel_replay);
4318 }
4319 
4320 bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
4321 				   const struct intel_crtc_state *crtc_state)
4322 {
4323 	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
4324 }
4325