xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision d5c1b4b43249bfa038df2f63e2d506bbf6e07df9)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 #include <drm/drm_vblank.h>
30 
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_regs.h"
40 #include "intel_display_rpm.h"
41 #include "intel_display_types.h"
42 #include "intel_dmc.h"
43 #include "intel_dp.h"
44 #include "intel_dp_aux.h"
45 #include "intel_dsb.h"
46 #include "intel_frontbuffer.h"
47 #include "intel_hdmi.h"
48 #include "intel_psr.h"
49 #include "intel_psr_regs.h"
50 #include "intel_snps_phy.h"
51 #include "intel_step.h"
52 #include "intel_vblank.h"
53 #include "intel_vrr.h"
54 #include "skl_universal_plane.h"
55 
56 /**
57  * DOC: Panel Self Refresh (PSR/SRD)
58  *
59  * Since Haswell the Display controller supports Panel Self-Refresh on display
60  * panels which have a remote frame buffer (RFB) implemented according to the
61  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
62  * standby states when the system is idle but the display is on, as it
63  * completely eliminates display refresh requests to DDR memory as long as
64  * the frame buffer for that display is unchanged.
65  *
66  * Panel Self Refresh must be supported by both Hardware (source) and
67  * Panel (sink).
68  *
69  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
70  * to power down the link and memory controller. For DSI panels the same idea
71  * is called "manual mode".
72  *
73  * The implementation uses the hardware-based PSR support which automatically
74  * enters/exits self-refresh mode. The hardware takes care of sending the
75  * required DP aux message and could even retrain the link (that part isn't
76  * enabled yet though). The hardware also keeps track of any frontbuffer
77  * changes to know when to exit self-refresh mode again. Unfortunately that
78  * part doesn't work too well, hence why the i915 PSR support uses the
79  * software frontbuffer tracking to make sure it doesn't miss a screen
80  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
81  * get called by the frontbuffer tracking code. Note that because of locking
82  * issues the self-refresh re-enable code is done from a work queue, which
83  * must be correctly synchronized/cancelled when shutting down the pipe.
84  *
85  * DC3CO (DC3 clock off)
86  *
87  * On top of PSR2, GEN12 adds an intermediate power saving state that turns
88  * the clock off automatically during the PSR2 idle state.
89  * The smaller overhead of DC3CO entry/exit vs. the overhead of PSR2 deep sleep
90  * entry/exit allows the HW to enter a low-power state even when page flipping
91  * periodically (for instance a 30fps video playback scenario).
92  *
93  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was
94  * in it), DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run
95  * after 6 frames. If no other flip occurs and that work executes, DC3CO is
96  * disabled and PSR2 is configured to enter deep sleep, resetting again in case
97  * of another flip.
98  * Front buffer modifications intentionally do not trigger DC3CO activation as
99  * that would bring a lot of complexity and most modern systems will only
100  * use page flips.
101  */
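/*
 * Illustrative only: a minimal sketch (not part of this file) of how the
 * frontbuffer tracking code is expected to drive the hooks mentioned above.
 * The exact prototypes live in intel_psr.h; the parameter names here are
 * assumptions for the example.
 *
 *	// CPU is about to write to the frontbuffer: force a PSR exit
 *	intel_psr_invalidate(display, frontbuffer_bits, ORIGIN_CPU);
 *
 *	// ... frontbuffer rendering happens here ...
 *
 *	// Rendering flushed: PSR re-enable is scheduled from a work queue
 *	intel_psr_flush(display, frontbuffer_bits, ORIGIN_CPU);
 */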
102 
103 /*
104  * Description of PSR mask bits:
105  *
106  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
107  *
108  *  When unmasked (nearly) all display register writes (eg. even
109  *  SWF) trigger a PSR exit. Some registers are excluded from this
110  *  and they have a more specific mask (described below). On icl+
111  *  this bit no longer exists and is effectively always set.
112  *
113  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
114  *
115  *  When unmasked (nearly) all pipe/plane register writes
116  *  trigger a PSR exit. Some plane registers are excluded from this
117  *  and they have a more specific mask (described below).
118  *
119  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
120  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
121  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
122  *
123  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
124  *  SPR_SURF/CURBASE are not included in this and instead are
125  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
126  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
127  *
128  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
129  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
130  *
131  *  When unmasked PSR is blocked as long as the sprite
132  *  plane is enabled. skl+ with their universal planes no
133  *  longer have a mask bit like this, and no plane being
134  *  enabled blocks PSR.
135  *
136  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
137  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
138  *
139  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
140  *  this bit doesn't exist but CURPOS is included in the
141  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
142  *
143  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
144  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
145  *
146  *  When unmasked PSR is blocked as long as vblank and/or vsync
147  *  interrupt is unmasked in IMR *and* enabled in IER.
148  *
149  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
150  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
151  *
152  *  Selects whether PSR exit generates an extra vblank before
153  *  the first frame is transmitted. Also note the opposite polarity
154  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
155  *  unmasked==do not generate the extra vblank).
156  *
157  *  With DC states enabled the extra vblank happens after link training,
158  *  with DC states disabled it happens immediately upon PSR exit trigger.
159  *  No idea as of now why there is a difference. HSW/BDW (which don't
160  *  even have DMC) always generate it after link training. Go figure.
161  *
162  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
163  *  and thus won't latch until the first vblank. So with DC states
164  *  enabled the register effectively uses the reset value during DC5
165  *  exit+PSR exit sequence, and thus the bit does nothing until
166  *  latched by the vblank that it was trying to prevent from being
167  *  generated in the first place. So we should probably call this
168  *  one a chicken/egg bit instead on skl+.
169  *
170  *  In standby mode (as opposed to link-off) this makes no difference
171  *  as the timing generator keeps running the whole time generating
172  *  normal periodic vblanks.
173  *
174  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
175  *  and doing so makes the behaviour match the skl+ reset value.
176  *
177  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
178  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
179  *
180  *  On BDW without this bit no vblanks whatsoever are
181  *  generated after PSR exit. On HSW this has no apparent effect.
182  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
183  *
184  * The rest of the bits are more self-explanatory and/or
185  * irrelevant for normal operation.
186  *
187  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
188  * has_sel_update:
189  *
190  *  has_psr (alone):					PSR1
191  *  has_psr + has_sel_update:				PSR2
192  *  has_psr + has_panel_replay:				Panel Replay
193  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
194  *
195  * Description of some intel_psr variables. enabled, panel_replay_enabled,
196  * sel_update_enabled
197  *
198  *  enabled (alone):						PSR1
199  *  enabled + sel_update_enabled:				PSR2
200  *  enabled + panel_replay_enabled:				Panel Replay
201  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
202  */
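/*
 * Illustrative only: a hedged sketch of how the crtc_state flags above map to
 * the operating mode. No such helper exists in the driver; it merely restates
 * the table for clarity.
 *
 *	static const char *psr_mode_name(const struct intel_crtc_state *crtc_state)
 *	{
 *		if (!crtc_state->has_psr)
 *			return "off";
 *		if (crtc_state->has_panel_replay)
 *			return crtc_state->has_sel_update ?
 *				"Panel Replay Selective Update" : "Panel Replay";
 *		return crtc_state->has_sel_update ? "PSR2" : "PSR1";
 *	}
 */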
203 
204 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
205 			   (intel_dp)->psr.source_support)
206 
207 bool intel_encoder_can_psr(struct intel_encoder *encoder)
208 {
209 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
210 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
211 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
212 	else
213 		return false;
214 }
215 
216 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
217 				  const struct intel_crtc_state *crtc_state)
218 {
219 	/*
220 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
221 	 * the output is enabled. For non-eDP outputs the main link is always
222 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
223 	 * for eDP.
224 	 *
225 	 * TODO:
226 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
227 	 *   the ALPM with main-link off mode is not enabled.
228 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
229 	 *   main-link off mode is added for it and this mode gets enabled.
230 	 */
231 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
232 	       intel_encoder_can_psr(encoder);
233 }
234 
235 static bool psr_global_enabled(struct intel_dp *intel_dp)
236 {
237 	struct intel_connector *connector = intel_dp->attached_connector;
238 
239 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
240 	case I915_PSR_DEBUG_DEFAULT:
241 		return intel_dp_is_edp(intel_dp) ?
242 			connector->panel.vbt.psr.enable : true;
243 	case I915_PSR_DEBUG_DISABLE:
244 		return false;
245 	default:
246 		return true;
247 	}
248 }
249 
250 static bool sel_update_global_enabled(struct intel_dp *intel_dp)
251 {
252 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
253 	case I915_PSR_DEBUG_DISABLE:
254 	case I915_PSR_DEBUG_FORCE_PSR1:
255 		return false;
256 	default:
257 		return true;
258 	}
259 }
260 
261 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
262 {
263 	struct intel_display *display = to_intel_display(intel_dp);
264 
265 	return !(intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE) &&
266 		display->params.enable_panel_replay;
267 }
268 
269 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
270 {
271 	struct intel_display *display = to_intel_display(intel_dp);
272 
273 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
274 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
275 }
276 
277 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
278 {
279 	struct intel_display *display = to_intel_display(intel_dp);
280 
281 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
282 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
283 }
284 
285 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
286 {
287 	struct intel_display *display = to_intel_display(intel_dp);
288 
289 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
290 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
291 }
292 
293 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
294 {
295 	struct intel_display *display = to_intel_display(intel_dp);
296 
297 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
298 		EDP_PSR_MASK(intel_dp->psr.transcoder);
299 }
300 
301 static i915_reg_t psr_ctl_reg(struct intel_display *display,
302 			      enum transcoder cpu_transcoder)
303 {
304 	if (DISPLAY_VER(display) >= 8)
305 		return EDP_PSR_CTL(display, cpu_transcoder);
306 	else
307 		return HSW_SRD_CTL;
308 }
309 
310 static i915_reg_t psr_debug_reg(struct intel_display *display,
311 				enum transcoder cpu_transcoder)
312 {
313 	if (DISPLAY_VER(display) >= 8)
314 		return EDP_PSR_DEBUG(display, cpu_transcoder);
315 	else
316 		return HSW_SRD_DEBUG;
317 }
318 
319 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
320 				   enum transcoder cpu_transcoder)
321 {
322 	if (DISPLAY_VER(display) >= 8)
323 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
324 	else
325 		return HSW_SRD_PERF_CNT;
326 }
327 
328 static i915_reg_t psr_status_reg(struct intel_display *display,
329 				 enum transcoder cpu_transcoder)
330 {
331 	if (DISPLAY_VER(display) >= 8)
332 		return EDP_PSR_STATUS(display, cpu_transcoder);
333 	else
334 		return HSW_SRD_STATUS;
335 }
336 
337 static i915_reg_t psr_imr_reg(struct intel_display *display,
338 			      enum transcoder cpu_transcoder)
339 {
340 	if (DISPLAY_VER(display) >= 12)
341 		return TRANS_PSR_IMR(display, cpu_transcoder);
342 	else
343 		return EDP_PSR_IMR;
344 }
345 
346 static i915_reg_t psr_iir_reg(struct intel_display *display,
347 			      enum transcoder cpu_transcoder)
348 {
349 	if (DISPLAY_VER(display) >= 12)
350 		return TRANS_PSR_IIR(display, cpu_transcoder);
351 	else
352 		return EDP_PSR_IIR;
353 }
354 
355 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
356 				  enum transcoder cpu_transcoder)
357 {
358 	if (DISPLAY_VER(display) >= 8)
359 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
360 	else
361 		return HSW_SRD_AUX_CTL;
362 }
363 
364 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
365 				   enum transcoder cpu_transcoder, int i)
366 {
367 	if (DISPLAY_VER(display) >= 8)
368 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
369 	else
370 		return HSW_SRD_AUX_DATA(i);
371 }
372 
373 static void psr_irq_control(struct intel_dp *intel_dp)
374 {
375 	struct intel_display *display = to_intel_display(intel_dp);
376 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
377 	u32 mask;
378 
379 	if (intel_dp->psr.panel_replay_enabled)
380 		return;
381 
382 	mask = psr_irq_psr_error_bit_get(intel_dp);
383 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
384 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
385 			psr_irq_pre_entry_bit_get(intel_dp);
386 
387 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
388 		     psr_irq_mask_get(intel_dp), ~mask);
389 }
390 
391 static void psr_event_print(struct intel_display *display,
392 			    u32 val, bool sel_update_enabled)
393 {
394 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
395 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
396 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
397 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
398 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
399 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
400 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
401 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
402 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
403 	if (val & PSR_EVENT_GRAPHICS_RESET)
404 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
405 	if (val & PSR_EVENT_PCH_INTERRUPT)
406 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
407 	if (val & PSR_EVENT_MEMORY_UP)
408 		drm_dbg_kms(display->drm, "\tMemory up\n");
409 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
410 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
411 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
412 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
413 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
414 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
415 	if (val & PSR_EVENT_REGISTER_UPDATE)
416 		drm_dbg_kms(display->drm, "\tRegister updated\n");
417 	if (val & PSR_EVENT_HDCP_ENABLE)
418 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
419 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
420 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
421 	if (val & PSR_EVENT_VBI_ENABLE)
422 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
423 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
424 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
425 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
426 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
427 }
428 
429 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
430 {
431 	struct intel_display *display = to_intel_display(intel_dp);
432 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
433 	ktime_t time_ns =  ktime_get();
434 
435 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
436 		intel_dp->psr.last_entry_attempt = time_ns;
437 		drm_dbg_kms(display->drm,
438 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
439 			    transcoder_name(cpu_transcoder));
440 	}
441 
442 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
443 		intel_dp->psr.last_exit = time_ns;
444 		drm_dbg_kms(display->drm,
445 			    "[transcoder %s] PSR exit completed\n",
446 			    transcoder_name(cpu_transcoder));
447 
448 		if (DISPLAY_VER(display) >= 9) {
449 			u32 val;
450 
451 			val = intel_de_rmw(display,
452 					   PSR_EVENT(display, cpu_transcoder),
453 					   0, 0);
454 
455 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
456 		}
457 	}
458 
459 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
460 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
461 			 transcoder_name(cpu_transcoder));
462 
463 		intel_dp->psr.irq_aux_error = true;
464 
465 		/*
466 		 * If this interrupt is not masked it will keep
467 		 * firing so fast that it prevents the scheduled
468 		 * work from running.
469 		 * Also after a PSR error, we don't want to arm PSR
470 		 * again so we don't care about unmasking the interrupt
471 		 * or clearing irq_aux_error.
472 		 */
473 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
474 			     0, psr_irq_psr_error_bit_get(intel_dp));
475 
476 		queue_work(display->wq.unordered, &intel_dp->psr.work);
477 	}
478 }
479 
480 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
481 {
482 	struct intel_display *display = to_intel_display(intel_dp);
483 	u8 val = 8; /* assume the worst if we can't read the value */
484 
485 	if (drm_dp_dpcd_readb(&intel_dp->aux,
486 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
487 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
488 	else
489 		drm_dbg_kms(display->drm,
490 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
491 	return val;
492 }
493 
494 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
495 {
496 	u8 su_capability = 0;
497 
498 	if (intel_dp->psr.sink_panel_replay_su_support) {
499 		if (drm_dp_dpcd_read_byte(&intel_dp->aux,
500 					  DP_PANEL_REPLAY_CAP_CAPABILITY,
501 					  &su_capability) < 0)
502 			return 0;
503 	} else {
504 		su_capability = intel_dp->psr_dpcd[1];
505 	}
506 
507 	return su_capability;
508 }
509 
510 static unsigned int
511 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
512 {
513 	return intel_dp->psr.sink_panel_replay_su_support ?
514 		DP_PANEL_REPLAY_CAP_X_GRANULARITY :
515 		DP_PSR2_SU_X_GRANULARITY;
516 }
517 
518 static unsigned int
519 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
520 {
521 	return intel_dp->psr.sink_panel_replay_su_support ?
522 		DP_PANEL_REPLAY_CAP_Y_GRANULARITY :
523 		DP_PSR2_SU_Y_GRANULARITY;
524 }
525 
526 /*
527  * Note: Bits related to granularity are the same in the Panel Replay and PSR
528  * registers. Rely on the PSR definitions for these "common" bits.
529  */
530 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
531 {
532 	struct intel_display *display = to_intel_display(intel_dp);
533 	ssize_t r;
534 	u16 w;
535 	u8 y;
536 
537 	/*
538 	 * TODO: Do we need to take into account panels supporting both PSR and
539 	 * Panel Replay?
540 	 */
541 
542 	/*
543 	 * If the sink doesn't have specific granularity requirements, set the
544 	 * legacy ones.
545 	 */
546 	if (!(intel_dp_get_su_capability(intel_dp) &
547 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
548 		/* As PSR2 HW sends full lines, we do not care about x granularity */
549 		w = 4;
550 		y = 4;
551 		goto exit;
552 	}
553 
554 	r = drm_dp_dpcd_read(&intel_dp->aux,
555 			     intel_dp_get_su_x_granularity_offset(intel_dp),
556 			     &w, 2);
557 	if (r != 2)
558 		drm_dbg_kms(display->drm,
559 			    "Unable to read selective update x granularity\n");
560 	/*
561 	 * Spec says that if the value read is 0 the default granularity should
562 	 * be used instead.
563 	 */
564 	if (r != 2 || w == 0)
565 		w = 4;
566 
567 	r = drm_dp_dpcd_read(&intel_dp->aux,
568 			     intel_dp_get_su_y_granularity_offset(intel_dp),
569 			     &y, 1);
570 	if (r != 1) {
571 		drm_dbg_kms(display->drm,
572 			    "Unable to read selective update y granularity\n");
573 		y = 4;
574 	}
575 	if (y == 0)
576 		y = 1;
577 
578 exit:
579 	intel_dp->psr.su_w_granularity = w;
580 	intel_dp->psr.su_y_granularity = y;
581 }
582 
583 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
584 {
585 	struct intel_display *display = to_intel_display(intel_dp);
586 	int ret;
587 
588 	/* TODO: Enable Panel Replay on MST once it's properly implemented. */
589 	if (intel_dp->mst_detect == DRM_DP_MST)
590 		return;
591 
592 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
593 				    &intel_dp->pr_dpcd, sizeof(intel_dp->pr_dpcd));
594 	if (ret < 0)
595 		return;
596 
597 	if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
598 	      DP_PANEL_REPLAY_SUPPORT))
599 		return;
600 
601 	if (intel_dp_is_edp(intel_dp)) {
602 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
603 			drm_dbg_kms(display->drm,
604 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
605 			return;
606 		}
607 
608 		if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
609 		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
610 			drm_dbg_kms(display->drm,
611 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
612 			return;
613 		}
614 	}
615 
616 	intel_dp->psr.sink_panel_replay_support = true;
617 
618 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
619 	    DP_PANEL_REPLAY_SU_SUPPORT)
620 		intel_dp->psr.sink_panel_replay_su_support = true;
621 
622 	drm_dbg_kms(display->drm,
623 		    "Panel replay %sis supported by panel\n",
624 		    intel_dp->psr.sink_panel_replay_su_support ?
625 		    "selective_update " : "");
626 }
627 
628 static void _psr_init_dpcd(struct intel_dp *intel_dp)
629 {
630 	struct intel_display *display = to_intel_display(intel_dp);
631 	int ret;
632 
633 	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
634 				    sizeof(intel_dp->psr_dpcd));
635 	if (ret < 0)
636 		return;
637 
638 	if (!intel_dp->psr_dpcd[0])
639 		return;
640 
641 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
642 		    intel_dp->psr_dpcd[0]);
643 
644 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
645 		drm_dbg_kms(display->drm,
646 			    "PSR support not currently available for this panel\n");
647 		return;
648 	}
649 
650 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
651 		drm_dbg_kms(display->drm,
652 			    "Panel lacks power state control, PSR cannot be enabled\n");
653 		return;
654 	}
655 
656 	intel_dp->psr.sink_support = true;
657 	intel_dp->psr.sink_sync_latency =
658 		intel_dp_get_sink_sync_latency(intel_dp);
659 
660 	if (DISPLAY_VER(display) >= 9 &&
661 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
662 		bool y_req = intel_dp->psr_dpcd[1] &
663 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
664 
665 		/*
666 		 * All panels that support PSR version 03h (PSR2 +
667 		 * Y-coordinate) can handle Y-coordinates in the VSC but we are
668 		 * only sure that it is going to be used when required by the
669 		 * panel. This way the panel is capable of doing selective
670 		 * updates without an aux frame sync.
671 		 *
672 		 * To support panels with PSR version 02h, and version 03h
673 		 * without the Y-coordinate requirement, we would need to
674 		 * enable GTC first.
675 		 */
676 		intel_dp->psr.sink_psr2_support = y_req &&
677 			intel_alpm_aux_wake_supported(intel_dp);
678 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
679 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
680 	}
681 }
682 
683 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
684 {
685 	_psr_init_dpcd(intel_dp);
686 
687 	_panel_replay_init_dpcd(intel_dp);
688 
689 	if (intel_dp->psr.sink_psr2_support ||
690 	    intel_dp->psr.sink_panel_replay_su_support)
691 		intel_dp_get_su_granularity(intel_dp);
692 }
693 
694 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
695 {
696 	struct intel_display *display = to_intel_display(intel_dp);
697 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
698 	u32 aux_clock_divider, aux_ctl;
699 	/* write DP_SET_POWER=D0 */
700 	static const u8 aux_msg[] = {
701 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
702 		[1] = (DP_SET_POWER >> 8) & 0xff,
703 		[2] = DP_SET_POWER & 0xff,
704 		[3] = 1 - 1,
705 		[4] = DP_SET_POWER_D0,
706 	};
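	/*
	 * The aux_msg[] above follows standard DP AUX native write framing:
	 * byte 0 = command in the high nibble plus address bits 19:16,
	 * bytes 1-2 = address bits 15:0, byte 3 = payload length minus one,
	 * byte 4 = the single data byte (DP_SET_POWER_D0).
	 */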
707 	int i;
708 
709 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
710 	for (i = 0; i < sizeof(aux_msg); i += 4)
711 		intel_de_write(display,
712 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
713 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
714 
715 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
716 
717 	/* Start with bits set for DDI_AUX_CTL register */
718 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
719 					     aux_clock_divider);
720 
721 	/* Select only valid bits for SRD_AUX_CTL */
722 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
723 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
724 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
725 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
726 
727 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
728 		       aux_ctl);
729 }
730 
731 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
732 {
733 	struct intel_display *display = to_intel_display(intel_dp);
734 
735 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
736 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
737 		return false;
738 
739 	return panel_replay ?
740 		intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
741 		DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
742 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED;
743 }
744 
745 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
746 				      const struct intel_crtc_state *crtc_state)
747 {
748 	u8 val = DP_PANEL_REPLAY_ENABLE |
749 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
750 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
751 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
752 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
753 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
754 
755 	if (crtc_state->has_sel_update)
756 		val |= DP_PANEL_REPLAY_SU_ENABLE;
757 
758 	if (crtc_state->enable_psr2_su_region_et)
759 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
760 
761 	if (crtc_state->req_psr2_sdp_prior_scanline)
762 		panel_replay_config2 |=
763 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
764 
765 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
766 
767 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
768 			   panel_replay_config2);
769 }
770 
771 static void _psr_enable_sink(struct intel_dp *intel_dp,
772 			     const struct intel_crtc_state *crtc_state)
773 {
774 	struct intel_display *display = to_intel_display(intel_dp);
775 	u8 val = 0;
776 
777 	if (crtc_state->has_sel_update) {
778 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
779 	} else {
780 		if (intel_dp->psr.link_standby)
781 			val |= DP_PSR_MAIN_LINK_ACTIVE;
782 
783 		if (DISPLAY_VER(display) >= 8)
784 			val |= DP_PSR_CRC_VERIFICATION;
785 	}
786 
787 	if (crtc_state->req_psr2_sdp_prior_scanline)
788 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
789 
790 	if (crtc_state->enable_psr2_su_region_et)
791 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
792 
793 	if (intel_dp->psr.entry_setup_frames > 0)
794 		val |= DP_PSR_FRAME_CAPTURE;
795 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
796 
797 	val |= DP_PSR_ENABLE;
798 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
799 }
800 
801 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
802 				  const struct intel_crtc_state *crtc_state)
803 {
804 	intel_alpm_enable_sink(intel_dp, crtc_state);
805 
806 	crtc_state->has_panel_replay ?
807 		_panel_replay_enable_sink(intel_dp, crtc_state) :
808 		_psr_enable_sink(intel_dp, crtc_state);
809 
810 	if (intel_dp_is_edp(intel_dp))
811 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
812 }
813 
814 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
815 {
816 	if (CAN_PANEL_REPLAY(intel_dp))
817 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
818 				   DP_PANEL_REPLAY_ENABLE);
819 }
820 
821 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
822 {
823 	struct intel_display *display = to_intel_display(intel_dp);
824 	struct intel_connector *connector = intel_dp->attached_connector;
825 	u32 val = 0;
826 
827 	if (DISPLAY_VER(display) >= 11)
828 		val |= EDP_PSR_TP4_TIME_0us;
829 
830 	if (display->params.psr_safest_params) {
831 		val |= EDP_PSR_TP1_TIME_2500us;
832 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
833 		goto check_tp3_sel;
834 	}
835 
836 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
837 		val |= EDP_PSR_TP1_TIME_0us;
838 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
839 		val |= EDP_PSR_TP1_TIME_100us;
840 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
841 		val |= EDP_PSR_TP1_TIME_500us;
842 	else
843 		val |= EDP_PSR_TP1_TIME_2500us;
844 
845 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
846 		val |= EDP_PSR_TP2_TP3_TIME_0us;
847 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
848 		val |= EDP_PSR_TP2_TP3_TIME_100us;
849 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
850 		val |= EDP_PSR_TP2_TP3_TIME_500us;
851 	else
852 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
853 
854 	/*
855 	 * WA 0479: hsw,bdw
856 	 * "Do not skip both TP1 and TP2/TP3"
857 	 */
858 	if (DISPLAY_VER(display) < 9 &&
859 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
860 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
861 		val |= EDP_PSR_TP2_TP3_TIME_100us;
862 
863 check_tp3_sel:
864 	if (intel_dp_source_supports_tps3(display) &&
865 	    drm_dp_tps3_supported(intel_dp->dpcd))
866 		val |= EDP_PSR_TP_TP1_TP3;
867 	else
868 		val |= EDP_PSR_TP_TP1_TP2;
869 
870 	return val;
871 }
872 
873 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
874 {
875 	struct intel_display *display = to_intel_display(intel_dp);
876 	struct intel_connector *connector = intel_dp->attached_connector;
877 	int idle_frames;
878 
879 	/* Let's use 6 as the minimum to cover all known cases including the
880 	 * off-by-one issue that HW has in some cases.
881 	 */
882 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
883 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
884 
885 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
886 		idle_frames = 0xf;
887 
888 	return idle_frames;
889 }
890 
891 static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
892 {
893 	struct intel_display *display = to_intel_display(intel_dp);
894 	u32 current_dc_state = intel_display_power_get_current_dc_state(display);
895 	struct intel_crtc *crtc = intel_crtc_for_pipe(display, intel_dp->psr.pipe);
896 	struct drm_vblank_crtc *vblank = drm_crtc_vblank_crtc(&crtc->base);
897 
898 	return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
899 		current_dc_state != DC_STATE_EN_UPTO_DC6) ||
900 		intel_dp->psr.active_non_psr_pipes ||
901 		READ_ONCE(vblank->enabled);
902 }
903 
904 static void hsw_activate_psr1(struct intel_dp *intel_dp)
905 {
906 	struct intel_display *display = to_intel_display(intel_dp);
907 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
908 	u32 max_sleep_time = 0x1f;
909 	u32 val = EDP_PSR_ENABLE;
910 
911 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
912 
913 	if (DISPLAY_VER(display) < 20)
914 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
915 
916 	if (display->platform.haswell)
917 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
918 
919 	if (intel_dp->psr.link_standby)
920 		val |= EDP_PSR_LINK_STANDBY;
921 
922 	val |= intel_psr1_get_tp_time(intel_dp);
923 
924 	if (DISPLAY_VER(display) >= 8)
925 		val |= EDP_PSR_CRC_ENABLE;
926 
927 	if (DISPLAY_VER(display) >= 20)
928 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
929 
930 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
931 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
932 
933 	/* Wa_16025596647 */
934 	if ((DISPLAY_VER(display) == 20 ||
935 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
936 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
937 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
938 								       intel_dp->psr.pipe,
939 								       true);
940 }
941 
942 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
943 {
944 	struct intel_display *display = to_intel_display(intel_dp);
945 	struct intel_connector *connector = intel_dp->attached_connector;
946 	u32 val = 0;
947 
948 	if (display->params.psr_safest_params)
949 		return EDP_PSR2_TP2_TIME_2500us;
950 
951 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
952 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
953 		val |= EDP_PSR2_TP2_TIME_50us;
954 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
955 		val |= EDP_PSR2_TP2_TIME_100us;
956 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
957 		val |= EDP_PSR2_TP2_TIME_500us;
958 	else
959 		val |= EDP_PSR2_TP2_TIME_2500us;
960 
961 	return val;
962 }
963 
964 static int psr2_block_count_lines(struct intel_dp *intel_dp)
965 {
966 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
967 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
968 }
969 
970 static int psr2_block_count(struct intel_dp *intel_dp)
971 {
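	/* Block count is expressed in units of 4 lines: 8 lines -> 2, 12 lines -> 3 */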
972 	return psr2_block_count_lines(intel_dp) / 4;
973 }
974 
975 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
976 {
977 	u8 frames_before_su_entry;
978 
979 	frames_before_su_entry = max_t(u8,
980 				       intel_dp->psr.sink_sync_latency + 1,
981 				       2);
982 
983 	/* Entry setup frames must be at least 1 less than frames before SU entry */
984 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
985 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
986 
987 	return frames_before_su_entry;
988 }
989 
990 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
991 {
992 	struct intel_display *display = to_intel_display(intel_dp);
993 	struct intel_psr *psr = &intel_dp->psr;
994 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
995 
996 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
997 		u32 val = psr->su_region_et_enabled ?
998 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
999 
1000 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1001 			val |= EDP_PSR2_SU_SDP_SCANLINE;
1002 
1003 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
1004 			       val);
1005 	}
1006 
1007 	intel_de_rmw(display,
1008 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1009 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1010 
1011 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1012 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1013 }
1014 
1015 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1016 {
1017 	struct intel_display *display = to_intel_display(intel_dp);
1018 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1019 	u32 val = EDP_PSR2_ENABLE;
1020 	u32 psr_val = 0;
1021 	u8 idle_frames;
1022 
1023 	/* Wa_16025596647 */
1024 	if ((DISPLAY_VER(display) == 20 ||
1025 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1026 	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
1027 		idle_frames = 0;
1028 	else
1029 		idle_frames = psr_compute_idle_frames(intel_dp);
1030 	val |= EDP_PSR2_IDLE_FRAMES(idle_frames);
1031 
1032 	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
1033 		val |= EDP_SU_TRACK_ENABLE;
1034 
1035 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1036 		val |= EDP_Y_COORDINATE_ENABLE;
1037 
1038 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1039 
1040 	val |= intel_psr2_get_tp_time(intel_dp);
1041 
1042 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1043 		if (psr2_block_count(intel_dp) > 2)
1044 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1045 		else
1046 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1047 	}
1048 
1049 	/* Wa_22012278275:adl-p */
1050 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1051 		static const u8 map[] = {
1052 			2, /* 5 lines */
1053 			1, /* 6 lines */
1054 			0, /* 7 lines */
1055 			3, /* 8 lines */
1056 			6, /* 9 lines */
1057 			5, /* 10 lines */
1058 			4, /* 11 lines */
1059 			7, /* 12 lines */
1060 		};
1061 		/*
1062 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1063 		 * comments below for more information
1064 		 */
1065 		int tmp;
1066 
1067 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1068 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1069 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1070 
1071 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1072 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1073 	} else if (DISPLAY_VER(display) >= 20) {
1074 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1075 	} else if (DISPLAY_VER(display) >= 12) {
1076 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1077 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1078 	} else if (DISPLAY_VER(display) >= 9) {
1079 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1080 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1081 	}
1082 
1083 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1084 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1085 
1086 	if (DISPLAY_VER(display) >= 20)
1087 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1088 
1089 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1090 		u32 tmp;
1091 
1092 		tmp = intel_de_read(display,
1093 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1094 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1095 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1096 		intel_de_write(display,
1097 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1098 	}
1099 
1100 	if (intel_dp->psr.su_region_et_enabled)
1101 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1102 
1103 	/*
1104 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec is
1105 	 * recommending to keep this bit unset while PSR2 is enabled.
1106 	 */
1107 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1108 
1109 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1110 }
1111 
1112 static bool
1113 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1114 {
1115 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1116 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1117 	else if (DISPLAY_VER(display) >= 12)
1118 		return cpu_transcoder == TRANSCODER_A;
1119 	else if (DISPLAY_VER(display) >= 9)
1120 		return cpu_transcoder == TRANSCODER_EDP;
1121 	else
1122 		return false;
1123 }
1124 
1125 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1126 {
1127 	if (!crtc_state->hw.active)
1128 		return 0;
1129 
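	/* Frame time in us, e.g. a 60 Hz mode gives DIV_ROUND_UP(1000000, 60) = 16667 us */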
1130 	return DIV_ROUND_UP(1000 * 1000,
1131 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1132 }
1133 
1134 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1135 				     u32 idle_frames)
1136 {
1137 	struct intel_display *display = to_intel_display(intel_dp);
1138 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1139 
1140 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1141 		     EDP_PSR2_IDLE_FRAMES_MASK,
1142 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1143 }
1144 
1145 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1146 {
1147 	struct intel_display *display = to_intel_display(intel_dp);
1148 
1149 	psr2_program_idle_frames(intel_dp, 0);
1150 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1151 }
1152 
1153 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1154 {
1155 	struct intel_display *display = to_intel_display(intel_dp);
1156 
1157 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1158 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1159 }
1160 
1161 static void tgl_dc3co_disable_work(struct work_struct *work)
1162 {
1163 	struct intel_dp *intel_dp =
1164 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1165 
1166 	mutex_lock(&intel_dp->psr.lock);
1167 	/* If delayed work is pending, it is not idle */
1168 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1169 		goto unlock;
1170 
1171 	tgl_psr2_disable_dc3co(intel_dp);
1172 unlock:
1173 	mutex_unlock(&intel_dp->psr.lock);
1174 }
1175 
1176 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1177 {
1178 	if (!intel_dp->psr.dc3co_exitline)
1179 		return;
1180 
1181 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1182 	/* Before PSR2 exit disallow dc3co */
1183 	tgl_psr2_disable_dc3co(intel_dp);
1184 }
1185 
1186 static bool
1187 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1188 			      struct intel_crtc_state *crtc_state)
1189 {
1190 	struct intel_display *display = to_intel_display(intel_dp);
1191 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1192 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1193 	enum port port = dig_port->base.port;
1194 
1195 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1196 		return pipe <= PIPE_B && port <= PORT_B;
1197 	else
1198 		return pipe == PIPE_A && port == PORT_A;
1199 }
1200 
1201 static void
1202 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1203 				  struct intel_crtc_state *crtc_state)
1204 {
1205 	struct intel_display *display = to_intel_display(intel_dp);
1206 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1207 	struct i915_power_domains *power_domains = &display->power.domains;
1208 	u32 exit_scanlines;
1209 
1210 	/*
1211 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1212 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1213 	 * is applied. B.Specs:49196
1214 	 */
1215 	return;
1216 
1217 	/*
1218 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch.
1219 	 * TODO: when the issue is addressed, this restriction should be removed.
1220 	 */
1221 	if (crtc_state->enable_psr2_sel_fetch)
1222 		return;
1223 
1224 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1225 		return;
1226 
1227 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1228 		return;
1229 
1230 	/* Wa_16011303918:adl-p */
1231 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1232 		return;
1233 
1234 	/*
1235 	 * DC3CO Exit time 200us B.Spec 49196
1236 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1237 	 */
1238 	exit_scanlines =
1239 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1240 
1241 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1242 		return;
1243 
1244 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1245 }
1246 
1247 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1248 					      struct intel_crtc_state *crtc_state)
1249 {
1250 	struct intel_display *display = to_intel_display(intel_dp);
1251 
1252 	if (!display->params.enable_psr2_sel_fetch &&
1253 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1254 		drm_dbg_kms(display->drm,
1255 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1256 		return false;
1257 	}
1258 
1259 	if (crtc_state->uapi.async_flip) {
1260 		drm_dbg_kms(display->drm,
1261 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1262 		return false;
1263 	}
1264 
1265 	return crtc_state->enable_psr2_sel_fetch = true;
1266 }
1267 
1268 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1269 				   struct intel_crtc_state *crtc_state)
1270 {
1271 	struct intel_display *display = to_intel_display(intel_dp);
1272 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1273 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1274 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1275 	u16 y_granularity = 0;
1276 
1277 	/* PSR2 HW only sends full lines so we only need to validate the width */
1278 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1279 		return false;
1280 
1281 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1282 		return false;
1283 
1284 	/* HW tracking is only aligned to 4 lines */
1285 	if (!crtc_state->enable_psr2_sel_fetch)
1286 		return intel_dp->psr.su_y_granularity == 4;
1287 
1288 	/*
1289 	 * adl_p and mtl platforms have 1 line granularity.
1290 	 * For other platforms with SW tracking we can adjust the y coordinates
1291 	 * to match the sink requirement if it is a multiple of 4.
1292 	 */
1293 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1294 		y_granularity = intel_dp->psr.su_y_granularity;
1295 	else if (intel_dp->psr.su_y_granularity <= 2)
1296 		y_granularity = 4;
1297 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1298 		y_granularity = intel_dp->psr.su_y_granularity;
1299 
1300 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1301 		return false;
1302 
1303 	if (crtc_state->dsc.compression_enable &&
1304 	    vdsc_cfg->slice_height % y_granularity)
1305 		return false;
1306 
1307 	crtc_state->su_y_granularity = y_granularity;
1308 	return true;
1309 }
1310 
1311 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1312 							struct intel_crtc_state *crtc_state)
1313 {
1314 	struct intel_display *display = to_intel_display(intel_dp);
1315 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1316 	u32 hblank_total, hblank_ns, req_ns;
1317 
1318 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1319 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1320 
1321 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
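	/* e.g. 4 lanes at 810 MHz symbol clock (illustrative): (60/4 + 11) * 1000 / 810 ~= 32 ns */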
1322 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1323 
1324 	if ((hblank_ns - req_ns) > 100)
1325 		return true;
1326 
1327 	/* Not supported <13 / Wa_22012279113:adl-p */
1328 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1329 		return false;
1330 
1331 	crtc_state->req_psr2_sdp_prior_scanline = true;
1332 	return true;
1333 }
1334 
1335 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1336 					const struct drm_display_mode *adjusted_mode)
1337 {
1338 	struct intel_display *display = to_intel_display(intel_dp);
1339 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1340 	int entry_setup_frames = 0;
1341 
1342 	if (psr_setup_time < 0) {
1343 		drm_dbg_kms(display->drm,
1344 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1345 			    intel_dp->psr_dpcd[1]);
1346 		return -ETIME;
1347 	}
1348 
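	/*
	 * The sink's PSR setup time has to fit into the vertical blank; if it
	 * does not, hardware from display version 20 onwards can compensate
	 * with entry setup frames instead of rejecting PSR.
	 */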
1349 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1350 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1351 		if (DISPLAY_VER(display) >= 20) {
1352 			/* setup entry frames can be up to 3 frames */
1353 			entry_setup_frames = 1;
1354 			drm_dbg_kms(display->drm,
1355 				    "PSR setup entry frames %d\n",
1356 				    entry_setup_frames);
1357 		} else {
1358 			drm_dbg_kms(display->drm,
1359 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1360 				    psr_setup_time);
1361 			return -ETIME;
1362 		}
1363 	}
1364 
1365 	return entry_setup_frames;
1366 }
1367 
1368 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1369 				       const struct intel_crtc_state *crtc_state,
1370 				       bool aux_less)
1371 {
1372 	struct intel_display *display = to_intel_display(intel_dp);
1373 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1374 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1375 	int wake_lines;
1376 
1377 	if (aux_less)
1378 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1379 	else
1380 		wake_lines = DISPLAY_VER(display) < 20 ?
1381 			psr2_block_count_lines(intel_dp) :
1382 			intel_dp->alpm_parameters.io_wake_lines;
1383 
1384 	if (crtc_state->req_psr2_sdp_prior_scanline)
1385 		vblank -= 1;
1386 
1387 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1388 	if (vblank < wake_lines)
1389 		return false;
1390 
1391 	return true;
1392 }
1393 
1394 static bool alpm_config_valid(struct intel_dp *intel_dp,
1395 			      const struct intel_crtc_state *crtc_state,
1396 			      bool aux_less)
1397 {
1398 	struct intel_display *display = to_intel_display(intel_dp);
1399 
1400 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1401 		drm_dbg_kms(display->drm,
1402 			    "PSR2/Panel Replay  not enabled, Unable to use long enough wake times\n");
1403 		return false;
1404 	}
1405 
1406 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1407 		drm_dbg_kms(display->drm,
1408 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1409 		return false;
1410 	}
1411 
1412 	return true;
1413 }
1414 
1415 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1416 				    struct intel_crtc_state *crtc_state)
1417 {
1418 	struct intel_display *display = to_intel_display(intel_dp);
1419 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1420 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1421 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1422 
1423 	if (!intel_dp->psr.sink_psr2_support || display->params.enable_psr == 1)
1424 		return false;
1425 
1426 	/* JSL and EHL only support eDP 1.3 */
1427 	if (display->platform.jasperlake || display->platform.elkhartlake) {
1428 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1429 		return false;
1430 	}
1431 
1432 	/* Wa_16011181250 */
1433 	if (display->platform.rocketlake || display->platform.alderlake_s ||
1434 	    display->platform.dg2) {
1435 		drm_dbg_kms(display->drm,
1436 			    "PSR2 is defeatured for this platform\n");
1437 		return false;
1438 	}
1439 
1440 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1441 		drm_dbg_kms(display->drm,
1442 			    "PSR2 not completely functional in this stepping\n");
1443 		return false;
1444 	}
1445 
1446 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1447 		drm_dbg_kms(display->drm,
1448 			    "PSR2 not supported in transcoder %s\n",
1449 			    transcoder_name(crtc_state->cpu_transcoder));
1450 		return false;
1451 	}
1452 
1453 	/*
1454 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1455 	 * resolution requires DSC to be enabled, priority is given to DSC
1456 	 * over PSR2.
1457 	 */
1458 	if (crtc_state->dsc.compression_enable &&
1459 	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
1460 		drm_dbg_kms(display->drm,
1461 			    "PSR2 cannot be enabled since DSC is enabled\n");
1462 		return false;
1463 	}
1464 
1465 	if (DISPLAY_VER(display) >= 20) {
1466 		psr_max_h = crtc_hdisplay;
1467 		psr_max_v = crtc_vdisplay;
1468 		max_bpp = crtc_state->pipe_bpp;
1469 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1470 		psr_max_h = 5120;
1471 		psr_max_v = 3200;
1472 		max_bpp = 30;
1473 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1474 		psr_max_h = 4096;
1475 		psr_max_v = 2304;
1476 		max_bpp = 24;
1477 	} else if (DISPLAY_VER(display) == 9) {
1478 		psr_max_h = 3640;
1479 		psr_max_v = 2304;
1480 		max_bpp = 24;
1481 	}
1482 
1483 	if (crtc_state->pipe_bpp > max_bpp) {
1484 		drm_dbg_kms(display->drm,
1485 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1486 			    crtc_state->pipe_bpp, max_bpp);
1487 		return false;
1488 	}
1489 
1490 	/* Wa_16011303918:adl-p */
1491 	if (crtc_state->vrr.enable &&
1492 	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1493 		drm_dbg_kms(display->drm,
1494 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1495 		return false;
1496 	}
1497 
1498 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1499 		return false;
1500 
1501 	if (!crtc_state->enable_psr2_sel_fetch &&
1502 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1503 		drm_dbg_kms(display->drm,
1504 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1505 			    crtc_hdisplay, crtc_vdisplay,
1506 			    psr_max_h, psr_max_v);
1507 		return false;
1508 	}
1509 
1510 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1511 
1512 	return true;
1513 }
1514 
1515 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1516 					  struct intel_crtc_state *crtc_state)
1517 {
1518 	struct intel_display *display = to_intel_display(intel_dp);
1519 
1520 	if (HAS_PSR2_SEL_FETCH(display) &&
1521 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1522 	    !HAS_PSR_HW_TRACKING(display)) {
1523 		drm_dbg_kms(display->drm,
1524 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1525 		goto unsupported;
1526 	}
1527 
1528 	if (!sel_update_global_enabled(intel_dp)) {
1529 		drm_dbg_kms(display->drm,
1530 			    "Selective update disabled by flag\n");
1531 		goto unsupported;
1532 	}
1533 
1534 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1535 		goto unsupported;
1536 
1537 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1538 		drm_dbg_kms(display->drm,
1539 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1540 		goto unsupported;
1541 	}
1542 
1543 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1544 					     !intel_dp->psr.sink_panel_replay_su_support))
1545 		goto unsupported;
1546 
1547 	if (crtc_state->crc_enabled) {
1548 		drm_dbg_kms(display->drm,
1549 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1550 		goto unsupported;
1551 	}
1552 
1553 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1554 		drm_dbg_kms(display->drm,
1555 			    "Selective update not enabled, SU granularity not compatible\n");
1556 		goto unsupported;
1557 	}
1558 
1559 	crtc_state->enable_psr2_su_region_et =
1560 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1561 
1562 	return true;
1563 
1564 unsupported:
1565 	crtc_state->enable_psr2_sel_fetch = false;
1566 	return false;
1567 }
1568 
1569 static bool _psr_compute_config(struct intel_dp *intel_dp,
1570 				struct intel_crtc_state *crtc_state)
1571 {
1572 	struct intel_display *display = to_intel_display(intel_dp);
1573 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1574 	int entry_setup_frames;
1575 
1576 	if (!CAN_PSR(intel_dp) || !display->params.enable_psr)
1577 		return false;
1578 
1579 	/*
1580 	 * Currently PSR doesn't work reliably with VRR enabled.
1581 	 */
1582 	if (crtc_state->vrr.enable)
1583 		return false;
1584 
1585 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1586 
1587 	if (entry_setup_frames >= 0) {
1588 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1589 	} else {
1590 		drm_dbg_kms(display->drm,
1591 			    "PSR condition failed: PSR setup timing not met\n");
1592 		return false;
1593 	}
1594 
1595 	return true;
1596 }
1597 
1598 static bool
1599 _panel_replay_compute_config(struct intel_dp *intel_dp,
1600 			     const struct intel_crtc_state *crtc_state,
1601 			     const struct drm_connector_state *conn_state)
1602 {
1603 	struct intel_display *display = to_intel_display(intel_dp);
1604 	struct intel_connector *connector =
1605 		to_intel_connector(conn_state->connector);
1606 	struct intel_hdcp *hdcp = &connector->hdcp;
1607 
1608 	if (!CAN_PANEL_REPLAY(intel_dp))
1609 		return false;
1610 
1611 	if (!panel_replay_global_enabled(intel_dp)) {
1612 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1613 		return false;
1614 	}
1615 
1616 	if (crtc_state->crc_enabled) {
1617 		drm_dbg_kms(display->drm,
1618 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1619 		return false;
1620 	}
1621 
1622 	if (!intel_dp_is_edp(intel_dp))
1623 		return true;
1624 
1625 	/* Remaining checks are for eDP only */
1626 
1627 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1628 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1629 		return false;
1630 
1631 	/* 128b/132b Panel Replay is not supported on eDP */
1632 	if (intel_dp_is_uhbr(crtc_state)) {
1633 		drm_dbg_kms(display->drm,
1634 			    "Panel Replay is not supported with 128b/132b\n");
1635 		return false;
1636 	}
1637 
1638 	/* HW will not allow Panel Replay on eDP when HDCP enabled */
1639 	if (conn_state->content_protection ==
1640 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1641 	    (conn_state->content_protection ==
1642 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1643 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1644 		drm_dbg_kms(display->drm,
1645 			    "Panel Replay is not supported with HDCP\n");
1646 		return false;
1647 	}
1648 
1649 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1650 		return false;
1651 
1652 	return true;
1653 }
1654 
1655 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1656 					   struct intel_crtc_state *crtc_state)
1657 {
1658 	struct intel_display *display = to_intel_display(intel_dp);
1659 
1660 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1661 		!crtc_state->has_sel_update);
1662 }
1663 
1664 void intel_psr_compute_config(struct intel_dp *intel_dp,
1665 			      struct intel_crtc_state *crtc_state,
1666 			      struct drm_connector_state *conn_state)
1667 {
1668 	struct intel_display *display = to_intel_display(intel_dp);
1669 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1670 	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
1671 	struct intel_crtc *crtc;
1672 	u8 active_pipes = 0;
1673 
1674 	if (!psr_global_enabled(intel_dp)) {
1675 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1676 		return;
1677 	}
1678 
1679 	if (intel_dp->psr.sink_not_reliable) {
1680 		drm_dbg_kms(display->drm,
1681 			    "PSR sink implementation is not reliable\n");
1682 		return;
1683 	}
1684 
1685 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1686 		drm_dbg_kms(display->drm,
1687 			    "PSR condition failed: Interlaced mode enabled\n");
1688 		return;
1689 	}
1690 
1691 	/*
1692 	 * FIXME figure out what is wrong with PSR+joiner and
1693 	 * fix it. Presumably something related to the fact that
1694 	 * PSR is a transcoder level feature.
1695 	 */
1696 	if (crtc_state->joiner_pipes) {
1697 		drm_dbg_kms(display->drm,
1698 			    "PSR disabled due to joiner\n");
1699 		return;
1700 	}
1701 
1702 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1703 								    crtc_state,
1704 								    conn_state);
1705 
1706 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1707 		_psr_compute_config(intel_dp, crtc_state);
1708 
1709 	if (!crtc_state->has_psr)
1710 		return;
1711 
1712 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1713 
1714 	/* Wa_18037818876 */
1715 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1716 		crtc_state->has_psr = false;
1717 		drm_dbg_kms(display->drm,
1718 			    "PSR disabled to workaround PSR FSM hang issue\n");
1719 	}
1720 
1721 	/* Rest is for Wa_16025596647 */
1722 	if (DISPLAY_VER(display) != 20 &&
1723 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
1724 		return;
1725 
1726 	/* Not needed by Panel Replay */
1727 	if (crtc_state->has_panel_replay)
1728 		return;
1729 
1730 	/* We ignore possible secondary PSR/Panel Replay capable eDP */
1731 	for_each_intel_crtc(display->drm, crtc)
1732 		active_pipes |= crtc->active ? BIT(crtc->pipe) : 0;
1733 
1734 	active_pipes = intel_calc_active_pipes(state, active_pipes);
1735 
1736 	crtc_state->active_non_psr_pipes = active_pipes &
1737 		~BIT(to_intel_crtc(crtc_state->uapi.crtc)->pipe);
1738 }
1739 
1740 void intel_psr_get_config(struct intel_encoder *encoder,
1741 			  struct intel_crtc_state *pipe_config)
1742 {
1743 	struct intel_display *display = to_intel_display(encoder);
1744 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1745 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1746 	struct intel_dp *intel_dp;
1747 	u32 val;
1748 
1749 	if (!dig_port)
1750 		return;
1751 
1752 	intel_dp = &dig_port->dp;
1753 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1754 		return;
1755 
1756 	mutex_lock(&intel_dp->psr.lock);
1757 	if (!intel_dp->psr.enabled)
1758 		goto unlock;
1759 
1760 	if (intel_dp->psr.panel_replay_enabled) {
1761 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1762 	} else {
1763 		/*
1764 		 * Not possible to read the EDP_PSR/PSR2_CTL registers here, as they
1765 		 * are toggled on/off by frontbuffer tracking and other mechanisms.
1766 		 */
1767 		pipe_config->has_psr = true;
1768 	}
1769 
1770 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1771 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1772 
1773 	if (!intel_dp->psr.sel_update_enabled)
1774 		goto unlock;
1775 
1776 	if (HAS_PSR2_SEL_FETCH(display)) {
1777 		val = intel_de_read(display,
1778 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1779 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1780 			pipe_config->enable_psr2_sel_fetch = true;
1781 	}
1782 
1783 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1784 
1785 	if (DISPLAY_VER(display) >= 12) {
1786 		val = intel_de_read(display,
1787 				    TRANS_EXITLINE(display, cpu_transcoder));
1788 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1789 	}
1790 unlock:
1791 	mutex_unlock(&intel_dp->psr.lock);
1792 }
1793 
1794 static void intel_psr_activate(struct intel_dp *intel_dp)
1795 {
1796 	struct intel_display *display = to_intel_display(intel_dp);
1797 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1798 
1799 	drm_WARN_ON(display->drm,
1800 		    transcoder_has_psr2(display, cpu_transcoder) &&
1801 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1802 
1803 	drm_WARN_ON(display->drm,
1804 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1805 
1806 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1807 
1808 	drm_WARN_ON(display->drm, !intel_dp->psr.enabled);
1809 
1810 	lockdep_assert_held(&intel_dp->psr.lock);
1811 
1812 	/* PSR1, PSR2 and Panel Replay are mutually exclusive. */
1813 	if (intel_dp->psr.panel_replay_enabled)
1814 		dg2_activate_panel_replay(intel_dp);
1815 	else if (intel_dp->psr.sel_update_enabled)
1816 		hsw_activate_psr2(intel_dp);
1817 	else
1818 		hsw_activate_psr1(intel_dp);
1819 
1820 	intel_dp->psr.active = true;
1821 }
1822 
1823 /*
1824  * Wa_16013835468
1825  * Wa_14015648006
1826  */
1827 static void wm_optimization_wa(struct intel_dp *intel_dp,
1828 			       const struct intel_crtc_state *crtc_state)
1829 {
1830 	struct intel_display *display = to_intel_display(intel_dp);
1831 	enum pipe pipe = intel_dp->psr.pipe;
1832 	bool activate = false;
1833 
1834 	/* Wa_14015648006 */
1835 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1836 		activate = true;
1837 
1838 	/* Wa_16013835468 */
1839 	if (DISPLAY_VER(display) == 12 &&
1840 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1841 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1842 		activate = true;
1843 
1844 	if (activate)
1845 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1846 			     0, LATENCY_REPORTING_REMOVED(pipe));
1847 	else
1848 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1849 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1850 }
1851 
1852 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1853 				    const struct intel_crtc_state *crtc_state)
1854 {
1855 	struct intel_display *display = to_intel_display(intel_dp);
1856 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1857 	u32 mask = 0;
1858 
1859 	/*
1860 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1861 	 * SKL+ use hardcoded values for PSR AUX transactions.
1862 	 */
1863 	if (DISPLAY_VER(display) < 9)
1864 		hsw_psr_setup_aux(intel_dp);
1865 
1866 	/*
1867 	 * Per spec: avoid continuous PSR exit by masking MEMUP and HPD. Also
1868 	 * mask LPSP to avoid a dependency on other drivers that might block
1869 	 * runtime_pm, besides preventing other HW tracking issues, now that we
1870 	 * can rely on frontbuffer tracking.
1871 	 *
1872 	 * From bspec prior to LunarLake:
1873 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1874 	 * panel replay mode.
1875 	 *
1876 	 * From bspec beyond LunarLake:
1877 	 * Panel Replay on DP: No bits are applicable
1878 	 * Panel Replay on eDP: All bits are applicable
1879 	 */
1880 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1881 		mask = EDP_PSR_DEBUG_MASK_HPD;
1882 
1883 	if (intel_dp_is_edp(intel_dp)) {
1884 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1885 
1886 		/*
1887 		 * For some unknown reason on HSW non-ULT (or at least on
1888 		 * Dell Latitude E6540) external displays start to flicker
1889 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1890 		 * higher than should be possible with an external display.
1891 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1892 		 * when external displays are active.
1893 		 */
1894 		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
1895 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1896 
1897 		if (DISPLAY_VER(display) < 20)
1898 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1899 
1900 		/*
1901 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1902 		 * registers in order to keep the CURSURFLIVE tricks working :(
1903 		 */
1904 		if (IS_DISPLAY_VER(display, 9, 10))
1905 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1906 
1907 		/* allow PSR with sprite enabled */
1908 		if (display->platform.haswell)
1909 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1910 	}
1911 
1912 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1913 
1914 	psr_irq_control(intel_dp);
1915 
1916 	/*
1917 	 * TODO: if future platforms support DC3CO in more than one
1918 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1919 	 */
1920 	if (intel_dp->psr.dc3co_exitline)
1921 		intel_de_rmw(display,
1922 			     TRANS_EXITLINE(display, cpu_transcoder),
1923 			     EXITLINE_MASK,
1924 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1925 
1926 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1927 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1928 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1929 			     IGNORE_PSR2_HW_TRACKING : 0);
1930 
1931 	/*
1932 	 * Wa_16013835468
1933 	 * Wa_14015648006
1934 	 */
1935 	wm_optimization_wa(intel_dp, crtc_state);
1936 
1937 	if (intel_dp->psr.sel_update_enabled) {
1938 		if (DISPLAY_VER(display) == 9)
1939 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1940 				     PSR2_VSC_ENABLE_PROG_HEADER |
1941 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1942 
1943 		/*
1944 		 * Wa_16014451276:adlp,mtl[a0,b0]
1945 		 * All supported adlp panels have 1-based X granularity; this may
1946 		 * cause issues if non-supported panels are used.
1947 		 */
1948 		if (!intel_dp->psr.panel_replay_enabled &&
1949 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1950 		     display->platform.alderlake_p))
1951 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1952 				     0, ADLP_1_BASED_X_GRANULARITY);
1953 
1954 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1955 		if (!intel_dp->psr.panel_replay_enabled &&
1956 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1957 			intel_de_rmw(display,
1958 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1959 				     0,
1960 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1961 		else if (display->platform.alderlake_p)
1962 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1963 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1964 	}
1965 
1966 	/* Wa_16025596647 */
1967 	if ((DISPLAY_VER(display) == 20 ||
1968 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1969 	    !intel_dp->psr.panel_replay_enabled)
1970 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true);
1971 
1972 	intel_alpm_configure(intel_dp, crtc_state);
1973 }
1974 
1975 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1976 {
1977 	struct intel_display *display = to_intel_display(intel_dp);
1978 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1979 	u32 val;
1980 
1981 	if (intel_dp->psr.panel_replay_enabled)
1982 		goto no_err;
1983 
1984 	/*
1985 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1986 	 * will still keep the error set even after the reset done in the
1987 	 * irq_preinstall and irq_uninstall hooks.
1988 	 * Enabling PSR in this situation causes the screen to freeze the
1989 	 * first time the PSR HW tries to activate, so let's keep PSR disabled
1990 	 * to avoid any rendering problems.
1991 	 */
1992 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1993 	val &= psr_irq_psr_error_bit_get(intel_dp);
1994 	if (val) {
1995 		intel_dp->psr.sink_not_reliable = true;
1996 		drm_dbg_kms(display->drm,
1997 			    "PSR interruption error set, not enabling PSR\n");
1998 		return false;
1999 	}
2000 
2001 no_err:
2002 	return true;
2003 }
2004 
2005 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
2006 				    const struct intel_crtc_state *crtc_state)
2007 {
2008 	struct intel_display *display = to_intel_display(intel_dp);
2009 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2010 	u32 val;
2011 
2012 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
2013 
2014 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
2015 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
2016 	intel_dp->psr.busy_frontbuffer_bits = 0;
2017 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
2018 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
2019 	/* DC5/DC6 requires at least 6 idle frames */
2020 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
2021 	intel_dp->psr.dc3co_exit_delay = val;
2022 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
2023 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
2024 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
2025 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2026 	intel_dp->psr.req_psr2_sdp_prior_scanline =
2027 		crtc_state->req_psr2_sdp_prior_scanline;
2028 	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
2029 	intel_dp->psr.pkg_c_latency_used = crtc_state->pkg_c_latency_used;
2030 
2031 	if (!psr_interrupt_error_check(intel_dp))
2032 		return;
2033 
2034 	if (intel_dp->psr.panel_replay_enabled)
2035 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
2036 	else
2037 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
2038 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2039 
2040 	/*
2041 	 * Enabling sink PSR/Panel Replay here only for PSR. Panel Replay enable
2042 	 * bit is already written at this point. Sink ALPM is enabled here for
2043 	 * PSR and Panel Replay. See
2044 	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2045 	 *  - Selective Update
2046 	 *  - Region Early Transport
2047 	 *  - Selective Update Region Scanline Capture
2048 	 *  - VSC_SDP_CRC
2049 	 *  - HPD on different Errors
2050 	 *  - CRC verification
2051 	 * are written for PSR and Panel Replay here.
2052 	 */
2053 	intel_psr_enable_sink(intel_dp, crtc_state);
2054 
2055 	if (intel_dp_is_edp(intel_dp))
2056 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2057 
2058 	intel_psr_enable_source(intel_dp, crtc_state);
2059 	intel_dp->psr.enabled = true;
2060 	intel_dp->psr.pause_counter = 0;
2061 
2062 	/*
2063 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2064 	 * training is complete as we never continue to PSR enable with an
2065 	 * untrained link. Link_ok is kept set until the first short pulse
2066 	 * interrupt. This is targeted at working around panels reporting a bad
2067 	 * link after PSR is enabled.
2068 	 */
2069 	intel_dp->psr.link_ok = true;
2070 
2071 	intel_psr_activate(intel_dp);
2072 }
2073 
2074 static void intel_psr_exit(struct intel_dp *intel_dp)
2075 {
2076 	struct intel_display *display = to_intel_display(intel_dp);
2077 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2078 	u32 val;
2079 
2080 	if (!intel_dp->psr.active) {
2081 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2082 			val = intel_de_read(display,
2083 					    EDP_PSR2_CTL(display, cpu_transcoder));
2084 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2085 		}
2086 
2087 		val = intel_de_read(display,
2088 				    psr_ctl_reg(display, cpu_transcoder));
2089 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2090 
2091 		return;
2092 	}
2093 
2094 	if (intel_dp->psr.panel_replay_enabled) {
2095 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2096 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2097 	} else if (intel_dp->psr.sel_update_enabled) {
2098 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2099 
2100 		val = intel_de_rmw(display,
2101 				   EDP_PSR2_CTL(display, cpu_transcoder),
2102 				   EDP_PSR2_ENABLE, 0);
2103 
2104 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2105 	} else {
2106 		if ((DISPLAY_VER(display) == 20 ||
2107 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2108 			intel_dp->psr.pkg_c_latency_used)
2109 			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
2110 								       intel_dp->psr.pipe,
2111 								       false);
2112 
2113 		val = intel_de_rmw(display,
2114 				   psr_ctl_reg(display, cpu_transcoder),
2115 				   EDP_PSR_ENABLE, 0);
2116 
2117 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2118 	}
2119 	intel_dp->psr.active = false;
2120 }
2121 
2122 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2123 {
2124 	struct intel_display *display = to_intel_display(intel_dp);
2125 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2126 	i915_reg_t psr_status;
2127 	u32 psr_status_mask;
2128 
2129 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2130 					  intel_dp->psr.panel_replay_enabled)) {
2131 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2132 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2133 	} else {
2134 		psr_status = psr_status_reg(display, cpu_transcoder);
2135 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2136 	}
2137 
2138 	/* Wait till PSR is idle */
2139 	if (intel_de_wait_for_clear(display, psr_status,
2140 				    psr_status_mask, 2000))
2141 		drm_err(display->drm, "Timed out waiting for PSR idle state\n");
2142 }
2143 
2144 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2145 {
2146 	struct intel_display *display = to_intel_display(intel_dp);
2147 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2148 
2149 	lockdep_assert_held(&intel_dp->psr.lock);
2150 
2151 	if (!intel_dp->psr.enabled)
2152 		return;
2153 
2154 	if (intel_dp->psr.panel_replay_enabled)
2155 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2156 	else
2157 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2158 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2159 
2160 	intel_psr_exit(intel_dp);
2161 	intel_psr_wait_exit_locked(intel_dp);
2162 
2163 	/*
2164 	 * Wa_16013835468
2165 	 * Wa_14015648006
2166 	 */
2167 	if (DISPLAY_VER(display) >= 11)
2168 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2169 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2170 
2171 	if (intel_dp->psr.sel_update_enabled) {
2172 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2173 		if (!intel_dp->psr.panel_replay_enabled &&
2174 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2175 			intel_de_rmw(display,
2176 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2177 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2178 		else if (display->platform.alderlake_p)
2179 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2180 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2181 	}
2182 
2183 	if (intel_dp_is_edp(intel_dp))
2184 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2185 
2186 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp))
2187 		intel_alpm_disable(intel_dp);
2188 
2189 	/* Disable PSR on Sink */
2190 	if (!intel_dp->psr.panel_replay_enabled) {
2191 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2192 
2193 		if (intel_dp->psr.sel_update_enabled)
2194 			drm_dp_dpcd_writeb(&intel_dp->aux,
2195 					   DP_RECEIVER_ALPM_CONFIG, 0);
2196 	}
2197 
2198 	/* Wa_16025596647 */
2199 	if ((DISPLAY_VER(display) == 20 ||
2200 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2201 	    !intel_dp->psr.panel_replay_enabled)
2202 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false);
2203 
2204 	intel_dp->psr.enabled = false;
2205 	intel_dp->psr.panel_replay_enabled = false;
2206 	intel_dp->psr.sel_update_enabled = false;
2207 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2208 	intel_dp->psr.su_region_et_enabled = false;
2209 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2210 	intel_dp->psr.active_non_psr_pipes = 0;
2211 	intel_dp->psr.pkg_c_latency_used = 0;
2212 }
2213 
2214 /**
2215  * intel_psr_disable - Disable PSR
2216  * @intel_dp: Intel DP
2217  * @old_crtc_state: old CRTC state
2218  *
2219  * This function needs to be called before disabling pipe.
2220  */
2221 void intel_psr_disable(struct intel_dp *intel_dp,
2222 		       const struct intel_crtc_state *old_crtc_state)
2223 {
2224 	struct intel_display *display = to_intel_display(intel_dp);
2225 
2226 	if (!old_crtc_state->has_psr)
2227 		return;
2228 
2229 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2230 			!CAN_PANEL_REPLAY(intel_dp)))
2231 		return;
2232 
2233 	mutex_lock(&intel_dp->psr.lock);
2234 
2235 	intel_psr_disable_locked(intel_dp);
2236 
2237 	intel_dp->psr.link_ok = false;
2238 
2239 	mutex_unlock(&intel_dp->psr.lock);
2240 	cancel_work_sync(&intel_dp->psr.work);
2241 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2242 }
2243 
2244 /**
2245  * intel_psr_pause - Pause PSR
2246  * @intel_dp: Intel DP
2247  *
2248  * This function needs to be called after enabling PSR.
2249  */
2250 void intel_psr_pause(struct intel_dp *intel_dp)
2251 {
2252 	struct intel_psr *psr = &intel_dp->psr;
2253 
2254 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2255 		return;
2256 
2257 	mutex_lock(&psr->lock);
2258 
2259 	if (!psr->enabled) {
2260 		mutex_unlock(&psr->lock);
2261 		return;
2262 	}
2263 
2264 	if (intel_dp->psr.pause_counter++ == 0) {
2265 		intel_psr_exit(intel_dp);
2266 		intel_psr_wait_exit_locked(intel_dp);
2267 	}
2268 
2269 	mutex_unlock(&psr->lock);
2270 
2271 	cancel_work_sync(&psr->work);
2272 	cancel_delayed_work_sync(&psr->dc3co_work);
2273 }
2274 
2275 /**
2276  * intel_psr_resume - Resume PSR
2277  * @intel_dp: Intel DP
2278  *
2279  * This function needs to be called after pausing PSR.
2280  */
2281 void intel_psr_resume(struct intel_dp *intel_dp)
2282 {
2283 	struct intel_display *display = to_intel_display(intel_dp);
2284 	struct intel_psr *psr = &intel_dp->psr;
2285 
2286 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2287 		return;
2288 
2289 	mutex_lock(&psr->lock);
2290 
2291 	if (!psr->enabled)
2292 		goto out;
2293 
2294 	if (!psr->pause_counter) {
2295 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2296 		goto out;
2297 	}
2298 
2299 	if (--intel_dp->psr.pause_counter == 0)
2300 		intel_psr_activate(intel_dp);
2301 
2302 out:
2303 	mutex_unlock(&psr->lock);
2304 }
2305 
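/*
 * A minimal usage sketch for the pause/resume pair above, with a hypothetical
 * caller. Pauses are reference counted via psr.pause_counter, so nested
 * pause/resume pairs are fine; PSR is only re-activated when the last resume
 * drops the counter back to zero:
 *
 *	intel_psr_pause(intel_dp);
 *	... poke hardware that must not race with PSR entry/exit ...
 *	intel_psr_resume(intel_dp);
 */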
2306 /**
2307  * intel_psr_needs_vblank_notification - Check if PSR needs vblank enable/disable
2308  * notification.
2309  * @crtc_state: CRTC status
2310  *
2311  * We need to block DC6 entry in case of Panel Replay, as enabling the VBI
2312  * doesn't prevent it in that case. Panel Replay switches the main link off on
2313  * DC entry, which means vblank interrupts are not fired; this is a problem if
2314  * user-space is polling for vblank events. Also, Wa_16025596647 needs to know
2315  * when vblank is enabled/disabled.
2316  */
2317 bool intel_psr_needs_vblank_notification(const struct intel_crtc_state *crtc_state)
2318 {
2319 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2320 	struct intel_display *display = to_intel_display(crtc_state);
2321 	struct intel_encoder *encoder;
2322 
2323 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2324 		struct intel_dp *intel_dp;
2325 
2326 		if (!intel_encoder_is_dp(encoder))
2327 			continue;
2328 
2329 		intel_dp = enc_to_intel_dp(encoder);
2330 
2331 		if (!intel_dp_is_edp(intel_dp))
2332 			continue;
2333 
2334 		if (CAN_PANEL_REPLAY(intel_dp))
2335 			return true;
2336 
2337 		if ((DISPLAY_VER(display) == 20 ||
2338 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2339 		    CAN_PSR(intel_dp))
2340 			return true;
2341 	}
2342 
2343 	return false;
2344 }
2345 
2346 /**
2347  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2348  * @dsb: DSB context
2349  * @state: the atomic state
2350  * @crtc: the CRTC
2351  *
2352  * Generate PSR "Frame Change" event.
2353  */
2354 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2355 					  struct intel_atomic_state *state,
2356 					  struct intel_crtc *crtc)
2357 {
2358 	const struct intel_crtc_state *crtc_state =
2359 		intel_pre_commit_crtc_state(state, crtc);
2360 	struct intel_display *display = to_intel_display(crtc);
2361 
2362 	if (crtc_state->has_psr)
2363 		intel_de_write_dsb(display, dsb,
2364 				   CURSURFLIVE(display, crtc->pipe), 0);
2365 }
2366 
2367 /**
2368  * intel_psr_min_vblank_delay - Minimum vblank delay needed by PSR
2369  * @crtc_state: the crtc state
2370  *
2371  * Return minimum vblank delay needed by PSR.
2372  */
2373 int intel_psr_min_vblank_delay(const struct intel_crtc_state *crtc_state)
2374 {
2375 	struct intel_display *display = to_intel_display(crtc_state);
2376 
2377 	if (!crtc_state->has_psr)
2378 		return 0;
2379 
2380 	/* Wa_14015401596 */
2381 	if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14))
2382 		return 1;
2383 
2384 	/* Rest is for SRD_STATUS needed on LunarLake and onwards */
2385 	if (DISPLAY_VER(display) < 20)
2386 		return 0;
2387 
2388 	/*
2389 	 * Comment on SRD_STATUS register in Bspec for LunarLake and onwards:
2390 	 *
2391 	 * To deterministically capture the transition of the state machine
2392 	 * going from SRDOFFACK to IDLE, the delayed V. Blank should be at least
2393 	 * one line after the non-delayed V. Blank.
2394 	 *
2395 	 * Legacy TG: TRANS_SET_CONTEXT_LATENCY > 0
2396 	 * VRR TG: TRANS_VRR_CTL[ VRR Guardband ] < (TRANS_VRR_VMAX[ VRR Vmax ]
2397 	 * - TRANS_VTOTAL[ Vertical Active ])
2398 	 *
2399 	 * SRD_STATUS is used only by PSR1 on PantherLake.
2400 	 * SRD_STATUS is used by PSR1 and Panel Replay DP on LunarLake.
2401 	 */
2402 
2403 	if (DISPLAY_VER(display) >= 30 && (crtc_state->has_panel_replay ||
2404 					   crtc_state->has_sel_update))
2405 		return 0;
2406 	else if (DISPLAY_VER(display) < 30 && (crtc_state->has_sel_update ||
2407 					       intel_crtc_has_type(crtc_state,
2408 								   INTEL_OUTPUT_EDP)))
2409 		return 0;
2410 	else
2411 		return 1;
2412 }
2413 
2414 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2415 {
2416 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2417 		PSR2_MAN_TRK_CTL_ENABLE;
2418 }
2419 
2420 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2421 {
2422 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2423 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2424 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2425 }
2426 
2427 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2428 {
2429 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2430 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2431 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2432 }
2433 
2434 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2435 {
2436 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2437 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2438 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2439 }
2440 
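/*
 * The helpers above paper over two register layouts: ADL-P and display 14+
 * use the ADLP_* field definitions, and man_trk_ctl_enable_bit_get() returns
 * 0 for them, presumably because those platforms have no separate manual
 * tracking enable bit to set.
 */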
2441 static void intel_psr_force_update(struct intel_dp *intel_dp)
2442 {
2443 	struct intel_display *display = to_intel_display(intel_dp);
2444 
2445 	/*
2446 	 * Display WA #0884: skl+
2447 	 * This documented WA for bxt can be safely applied
2448 	 * broadly so we can force HW tracking to exit PSR
2449 	 * instead of disabling and re-enabling.
2450 	 * The workaround tells us to write 0 to CUR_SURFLIVE_A,
2451 	 * but it makes more sense to write to the currently active
2452 	 * pipe.
2453 	 *
2454 	 * This workaround does not exist for platforms with display 10 or newer,
2455 	 * but testing proved that it works up to display 13; for anything newer,
2456 	 * testing will be needed.
2457 	 */
2458 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2459 }
2460 
2461 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2462 					  const struct intel_crtc_state *crtc_state)
2463 {
2464 	struct intel_display *display = to_intel_display(crtc_state);
2465 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2466 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2467 	struct intel_encoder *encoder;
2468 
2469 	if (!crtc_state->enable_psr2_sel_fetch)
2470 		return;
2471 
2472 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2473 					     crtc_state->uapi.encoder_mask) {
2474 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2475 
2476 		if (!dsb)
2477 			lockdep_assert_held(&intel_dp->psr.lock);
2478 
2479 		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2480 			return;
2481 		break;
2482 	}
2483 
2484 	intel_de_write_dsb(display, dsb,
2485 			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2486 			   crtc_state->psr2_man_track_ctl);
2487 
2488 	if (!crtc_state->enable_psr2_su_region_et)
2489 		return;
2490 
2491 	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2492 			   crtc_state->pipe_srcsz_early_tpt);
2493 }
2494 
2495 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2496 				  bool full_update)
2497 {
2498 	struct intel_display *display = to_intel_display(crtc_state);
2499 	u32 val = man_trk_ctl_enable_bit_get(display);
2500 
2501 	/* SF partial frame enable has to be set even on full update */
2502 	val |= man_trk_ctl_partial_frame_bit_get(display);
2503 
2504 	if (full_update) {
2505 		val |= man_trk_ctl_continuos_full_frame(display);
2506 		goto exit;
2507 	}
2508 
2509 	if (crtc_state->psr2_su_area.y1 == -1)
2510 		goto exit;
2511 
2512 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
2513 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2514 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2515 	} else {
2516 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2517 			    crtc_state->psr2_su_area.y1 % 4 ||
2518 			    crtc_state->psr2_su_area.y2 % 4);
2519 
2520 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2521 			crtc_state->psr2_su_area.y1 / 4 + 1);
2522 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2523 			crtc_state->psr2_su_area.y2 / 4 + 1);
2524 	}
2525 exit:
2526 	crtc_state->psr2_man_track_ctl = val;
2527 }
2528 
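/*
 * Illustrative example of the SU region encoding above (hypothetical damaged
 * area): on a pre-ADL-P platform, psr2_su_area.y1 = 100 and y2 = 200 (both
 * multiples of 4, as the drm_WARN_ON demands) are programmed as
 * SU_REGION_START_ADDR = 100 / 4 + 1 = 26 and SU_REGION_END_ADDR =
 * 200 / 4 + 1 = 51, i.e. apparently in 1-based blocks of 4 lines. On ADL-P
 * and display 14+ the raw line numbers are written instead, with the end
 * address being y2 - 1.
 */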
2529 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2530 					  bool full_update)
2531 {
2532 	int width, height;
2533 
2534 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2535 		return 0;
2536 
2537 	width = drm_rect_width(&crtc_state->psr2_su_area);
2538 	height = drm_rect_height(&crtc_state->psr2_su_area);
2539 
2540 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2541 }
2542 
2543 static void clip_area_update(struct drm_rect *overlap_damage_area,
2544 			     struct drm_rect *damage_area,
2545 			     struct drm_rect *pipe_src)
2546 {
2547 	if (!drm_rect_intersect(damage_area, pipe_src))
2548 		return;
2549 
2550 	if (overlap_damage_area->y1 == -1) {
2551 		overlap_damage_area->y1 = damage_area->y1;
2552 		overlap_damage_area->y2 = damage_area->y2;
2553 		return;
2554 	}
2555 
2556 	if (damage_area->y1 < overlap_damage_area->y1)
2557 		overlap_damage_area->y1 = damage_area->y1;
2558 
2559 	if (damage_area->y2 > overlap_damage_area->y2)
2560 		overlap_damage_area->y2 = damage_area->y2;
2561 }
2562 
2563 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2564 {
2565 	struct intel_display *display = to_intel_display(crtc_state);
2566 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2567 	u16 y_alignment;
2568 
2569 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2570 	if (crtc_state->dsc.compression_enable &&
2571 	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
2572 		y_alignment = vdsc_cfg->slice_height;
2573 	else
2574 		y_alignment = crtc_state->su_y_granularity;
2575 
2576 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2577 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2578 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2579 						y_alignment) + 1) * y_alignment;
2580 }
2581 
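/*
 * Illustrative example of the alignment above (hypothetical numbers): with
 * su_y_granularity = 4 and a damaged area of y1 = 10, y2 = 30, y1 is rounded
 * down to 8 (10 - 10 % 4) and y2 up to 32 ((30 / 4 + 1) * 4), so the SU
 * region always starts and ends on a granularity boundary.
 */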
2582 /*
2583  * When early transport is in use we need to extend SU area to cover
2584  * cursor fully when cursor is in SU area.
2585  */
2586 static void
2587 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2588 				  struct intel_crtc *crtc,
2589 				  bool *cursor_in_su_area)
2590 {
2591 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2592 	struct intel_plane_state *new_plane_state;
2593 	struct intel_plane *plane;
2594 	int i;
2595 
2596 	if (!crtc_state->enable_psr2_su_region_et)
2597 		return;
2598 
2599 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2600 		struct drm_rect inter;
2601 
2602 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2603 			continue;
2604 
2605 		if (plane->id != PLANE_CURSOR)
2606 			continue;
2607 
2608 		if (!new_plane_state->uapi.visible)
2609 			continue;
2610 
2611 		inter = crtc_state->psr2_su_area;
2612 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2613 			continue;
2614 
2615 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2616 				 &crtc_state->pipe_src);
2617 		*cursor_in_su_area = true;
2618 	}
2619 }
2620 
2621 /*
2622  * TODO: It is not clear how to handle planes with a negative position;
2623  * also, planes are not updated if they have a negative X position, so for
2624  * now a full update is done in these cases.
2625  *
2626  * Plane scaling and rotation are not supported by selective fetch, and both
2627  * properties can change without a modeset, so they need to be checked at
2628  * every atomic commit.
2629  */
2630 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2631 {
2632 	if (plane_state->uapi.dst.y1 < 0 ||
2633 	    plane_state->uapi.dst.x1 < 0 ||
2634 	    plane_state->scaler_id >= 0 ||
2635 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2636 		return false;
2637 
2638 	return true;
2639 }
2640 
2641 /*
2642  * Check for pipe properties that are not supported by selective fetch.
2643  *
2644  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2645  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2646  * enabled and going to the full update path.
2647  */
2648 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2649 {
2650 	if (crtc_state->scaler_state.scaler_id >= 0)
2651 		return false;
2652 
2653 	return true;
2654 }
2655 
2656 /* Wa 14019834836 */
2657 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2658 {
2659 	struct intel_display *display = to_intel_display(crtc_state);
2660 	struct intel_encoder *encoder;
2661 	int hactive_limit;
2662 
2663 	if (crtc_state->psr2_su_area.y1 != 0 ||
2664 	    crtc_state->psr2_su_area.y2 != 0)
2665 		return;
2666 
2667 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2668 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2669 	else
2670 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2671 
2672 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2673 		return;
2674 
2675 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2676 					     crtc_state->uapi.encoder_mask) {
2677 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2678 
2679 		if (!intel_dp_is_edp(intel_dp) &&
2680 		    intel_dp->psr.panel_replay_enabled &&
2681 		    intel_dp->psr.sel_update_enabled) {
2682 			crtc_state->psr2_su_area.y2++;
2683 			return;
2684 		}
2685 	}
2686 }
2687 
2688 static void
2689 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2690 {
2691 	struct intel_display *display = to_intel_display(crtc_state);
2692 
2693 	/* Wa_14014971492 */
2694 	if (!crtc_state->has_panel_replay &&
2695 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2696 	      display->platform.alderlake_p || display->platform.tigerlake)) &&
2697 	    crtc_state->splitter.enable)
2698 		crtc_state->psr2_su_area.y1 = 0;
2699 
2700 	/* Wa 14019834836 */
2701 	if (DISPLAY_VER(display) == 30)
2702 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2703 }
2704 
2705 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2706 				struct intel_crtc *crtc)
2707 {
2708 	struct intel_display *display = to_intel_display(state);
2709 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2710 	struct intel_plane_state *new_plane_state, *old_plane_state;
2711 	struct intel_plane *plane;
2712 	bool full_update = false, cursor_in_su_area = false;
2713 	int i, ret;
2714 
2715 	if (!crtc_state->enable_psr2_sel_fetch)
2716 		return 0;
2717 
2718 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2719 		full_update = true;
2720 		goto skip_sel_fetch_set_loop;
2721 	}
2722 
2723 	crtc_state->psr2_su_area.x1 = 0;
2724 	crtc_state->psr2_su_area.y1 = -1;
2725 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2726 	crtc_state->psr2_su_area.y2 = -1;
2727 
2728 	/*
2729 	 * Calculate the minimal selective fetch area of each plane and the
2730 	 * pipe damaged area.
2731 	 * In the next loop the plane selective fetch area will actually be set
2732 	 * using the whole pipe damaged area.
2733 	 */
2734 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2735 					     new_plane_state, i) {
2736 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2737 						      .x2 = INT_MAX };
2738 
2739 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2740 			continue;
2741 
2742 		if (!new_plane_state->uapi.visible &&
2743 		    !old_plane_state->uapi.visible)
2744 			continue;
2745 
2746 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2747 			full_update = true;
2748 			break;
2749 		}
2750 
2751 		/*
2752 		 * If the visibility changed or the plane moved, mark the whole plane
2753 		 * area as damaged, as it needs a complete redraw in both the old and
2754 		 * new positions.
2755 		 */
2756 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2757 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2758 				     &old_plane_state->uapi.dst)) {
2759 			if (old_plane_state->uapi.visible) {
2760 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2761 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2762 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2763 						 &crtc_state->pipe_src);
2764 			}
2765 
2766 			if (new_plane_state->uapi.visible) {
2767 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2768 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2769 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2770 						 &crtc_state->pipe_src);
2771 			}
2772 			continue;
2773 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2774 			/* If alpha changed mark the whole plane area as damaged */
2775 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2776 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2777 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2778 					 &crtc_state->pipe_src);
2779 			continue;
2780 		}
2781 
2782 		src = drm_plane_state_src(&new_plane_state->uapi);
2783 		drm_rect_fp_to_int(&src, &src);
2784 
2785 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2786 						     &new_plane_state->uapi, &damaged_area))
2787 			continue;
2788 
2789 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2790 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2791 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2792 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2793 
2794 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2795 	}
2796 
2797 	/*
2798 	 * TODO: For now we are just using full update in case
2799 	 * selective fetch area calculation fails. To optimize this we
2800 	 * should identify cases where this happens and fix the area
2801 	 * calculation for those.
2802 	 */
2803 	if (crtc_state->psr2_su_area.y1 == -1) {
2804 		drm_info_once(display->drm,
2805 			      "Selective fetch area calculation failed in pipe %c\n",
2806 			      pipe_name(crtc->pipe));
2807 		full_update = true;
2808 	}
2809 
2810 	if (full_update)
2811 		goto skip_sel_fetch_set_loop;
2812 
2813 	intel_psr_apply_su_area_workarounds(crtc_state);
2814 
2815 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2816 	if (ret)
2817 		return ret;
2818 
2819 	/*
2820 	 * Adjust su area to cover cursor fully as necessary (early
2821 	 * transport). This needs to be done after
2822 	 * drm_atomic_add_affected_planes to ensure visible cursor is added into
2823 	 * affected planes even when cursor is not updated by itself.
2824 	 */
2825 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2826 
2827 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2828 
2829 	/*
2830 	 * Now that we have the pipe damaged area, check if it intersects with
2831 	 * every plane; if it does, set the plane selective fetch area.
2832 	 */
2833 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2834 					     new_plane_state, i) {
2835 		struct drm_rect *sel_fetch_area, inter;
2836 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2837 
2838 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2839 		    !new_plane_state->uapi.visible)
2840 			continue;
2841 
2842 		inter = crtc_state->psr2_su_area;
2843 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2844 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2845 			sel_fetch_area->y1 = -1;
2846 			sel_fetch_area->y2 = -1;
2847 			/*
2848 			 * if plane sel fetch was previously enabled ->
2849 			 * disable it
2850 			 */
2851 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2852 				crtc_state->update_planes |= BIT(plane->id);
2853 
2854 			continue;
2855 		}
2856 
2857 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2858 			full_update = true;
2859 			break;
2860 		}
2861 
2862 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2863 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2864 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2865 		crtc_state->update_planes |= BIT(plane->id);
2866 
2867 		/*
2868 		 * Sel_fetch_area is calculated for UV plane. Use
2869 		 * same area for Y plane as well.
2870 		 */
2871 		if (linked) {
2872 			struct intel_plane_state *linked_new_plane_state;
2873 			struct drm_rect *linked_sel_fetch_area;
2874 
2875 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2876 			if (IS_ERR(linked_new_plane_state))
2877 				return PTR_ERR(linked_new_plane_state);
2878 
2879 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2880 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2881 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2882 			crtc_state->update_planes |= BIT(linked->id);
2883 		}
2884 	}
2885 
2886 skip_sel_fetch_set_loop:
2887 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2888 	crtc_state->pipe_srcsz_early_tpt =
2889 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2890 	return 0;
2891 }
2892 
2893 void intel_psr2_panic_force_full_update(struct intel_display *display,
2894 					struct intel_crtc_state *crtc_state)
2895 {
2896 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2897 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2898 	u32 val = man_trk_ctl_enable_bit_get(display);
2899 
2900 	/* SF partial frame enable has to be set even on full update */
2901 	val |= man_trk_ctl_partial_frame_bit_get(display);
2902 	val |= man_trk_ctl_continuos_full_frame(display);
2903 
2904 	/* Directly write the register */
2905 	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);
2906 
2907 	if (!crtc_state->enable_psr2_su_region_et)
2908 		return;
2909 
2910 	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
2911 }
2912 
2913 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2914 				struct intel_crtc *crtc)
2915 {
2916 	struct intel_display *display = to_intel_display(state);
2917 	const struct intel_crtc_state *old_crtc_state =
2918 		intel_atomic_get_old_crtc_state(state, crtc);
2919 	const struct intel_crtc_state *new_crtc_state =
2920 		intel_atomic_get_new_crtc_state(state, crtc);
2921 	struct intel_encoder *encoder;
2922 
2923 	if (!HAS_PSR(display))
2924 		return;
2925 
2926 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2927 					     old_crtc_state->uapi.encoder_mask) {
2928 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2929 		struct intel_psr *psr = &intel_dp->psr;
2930 
2931 		mutex_lock(&psr->lock);
2932 
2933 		if (psr->enabled) {
2934 			/*
2935 			 * Reasons to disable:
2936 			 * - PSR disabled in new state
2937 			 * - All planes will go inactive
2938 			 * - Changing between PSR versions
2939 			 * - Region Early Transport changing
2940 			 * - Display WA #1136: skl, bxt
2941 			 */
2942 			if (intel_crtc_needs_modeset(new_crtc_state) ||
2943 			    !new_crtc_state->has_psr ||
2944 			    !new_crtc_state->active_planes ||
2945 			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
2946 			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
2947 			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
2948 			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
2949 				intel_psr_disable_locked(intel_dp);
2950 			else if (new_crtc_state->wm_level_disabled)
2951 				/* Wa_14015648006 */
2952 				wm_optimization_wa(intel_dp, new_crtc_state);
2953 		}
2954 
2955 		mutex_unlock(&psr->lock);
2956 	}
2957 }
2958 
2959 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2960 				 struct intel_crtc *crtc)
2961 {
2962 	struct intel_display *display = to_intel_display(state);
2963 	const struct intel_crtc_state *crtc_state =
2964 		intel_atomic_get_new_crtc_state(state, crtc);
2965 	struct intel_encoder *encoder;
2966 
2967 	if (!crtc_state->has_psr)
2968 		return;
2969 
2970 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2971 					     crtc_state->uapi.encoder_mask) {
2972 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2973 		struct intel_psr *psr = &intel_dp->psr;
2974 		bool keep_disabled = false;
2975 
2976 		mutex_lock(&psr->lock);
2977 
2978 		drm_WARN_ON(display->drm,
2979 			    psr->enabled && !crtc_state->active_planes);
2980 
2981 		keep_disabled |= psr->sink_not_reliable;
2982 		keep_disabled |= !crtc_state->active_planes;
2983 
2984 		/* Display WA #1136: skl, bxt */
2985 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2986 			crtc_state->wm_level_disabled;
2987 
2988 		if (!psr->enabled && !keep_disabled)
2989 			intel_psr_enable_locked(intel_dp, crtc_state);
2990 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2991 			/* Wa_14015648006 */
2992 			wm_optimization_wa(intel_dp, crtc_state);
2993 
2994 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2995 		if (crtc_state->crc_enabled && psr->enabled)
2996 			intel_psr_force_update(intel_dp);
2997 
2998 		/*
2999 		 * Clear possible busy bits in case we have
3000 		 * invalidate -> flip -> flush sequence.
3001 		 */
3002 		intel_dp->psr.busy_frontbuffer_bits = 0;
3003 
3004 		mutex_unlock(&psr->lock);
3005 	}
3006 }
3007 
3008 /*
3009  * From bspec: Panel Self Refresh (BDW+)
3010  * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
3011  * exit training time + 1.5 ms of aux channel handshake. 50 ms is
3012  * defensive enough to cover everything.
3013  */
3014 #define PSR_IDLE_TIMEOUT_MS 50
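
/*
 * For a rough sanity check of the value above (plain arithmetic, not from
 * bspec): at 60 Hz the worst case is about 1000/60 + 6 + 1.5 ≈ 24.2 ms, and
 * even at 48 Hz it is only ~28.3 ms, so 50 ms leaves a comfortable margin.
 * The DSB variants below poll every 200 us, i.e.
 * PSR_IDLE_TIMEOUT_MS * 1000 / 200 = 250 iterations.
 */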
3015 
3016 static int
3017 _psr2_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3018 				   struct intel_dsb *dsb)
3019 {
3020 	struct intel_display *display = to_intel_display(new_crtc_state);
3021 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3022 
3023 	/*
3024 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
3025 	 * As all higher states have bit 4 of the PSR2 state set we can just wait for
3026 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
3027 	 */
3028 	if (dsb) {
3029 		intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder),
3030 			       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 0, 200,
3031 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3032 		return true;
3033 	}
3034 
3035 	return intel_de_wait_for_clear(display,
3036 				       EDP_PSR2_STATUS(display, cpu_transcoder),
3037 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP,
3038 				       PSR_IDLE_TIMEOUT_MS);
3039 }
3040 
3041 static int
3042 _psr1_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
3043 				   struct intel_dsb *dsb)
3044 {
3045 	struct intel_display *display = to_intel_display(new_crtc_state);
3046 	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
3047 
3048 	if (dsb) {
3049 		intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder),
3050 			       EDP_PSR_STATUS_STATE_MASK, 0, 200,
3051 			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
3052 		return true;
3053 	}
3054 
3055 	return intel_de_wait_for_clear(display,
3056 				       psr_status_reg(display, cpu_transcoder),
3057 				       EDP_PSR_STATUS_STATE_MASK,
3058 				       PSR_IDLE_TIMEOUT_MS);
3059 }
3060 
3061 /**
3062  * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
3063  * @new_crtc_state: new CRTC state
3064  *
3065  * This function is expected to be called from pipe_update_start() where it is
3066  * not expected to race with PSR enable or disable.
3067  */
3068 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
3069 {
3070 	struct intel_display *display = to_intel_display(new_crtc_state);
3071 	struct intel_encoder *encoder;
3072 
3073 	if (!new_crtc_state->has_psr)
3074 		return;
3075 
3076 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3077 					     new_crtc_state->uapi.encoder_mask) {
3078 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3079 		int ret;
3080 
3081 		lockdep_assert_held(&intel_dp->psr.lock);
3082 
3083 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3084 			continue;
3085 
3086 		if (intel_dp->psr.sel_update_enabled)
3087 			ret = _psr2_ready_for_pipe_update_locked(new_crtc_state,
3088 								 NULL);
3089 		else
3090 			ret = _psr1_ready_for_pipe_update_locked(new_crtc_state,
3091 								 NULL);
3092 
3093 		if (ret)
3094 			drm_err(display->drm,
3095 				"PSR wait timed out, atomic update may fail\n");
3096 	}
3097 }
3098 
3099 void intel_psr_wait_for_idle_dsb(struct intel_dsb *dsb,
3100 				 const struct intel_crtc_state *new_crtc_state)
3101 {
3102 	if (!new_crtc_state->has_psr || new_crtc_state->has_panel_replay)
3103 		return;
3104 
3105 	if (new_crtc_state->has_sel_update)
3106 		_psr2_ready_for_pipe_update_locked(new_crtc_state, dsb);
3107 	else
3108 		_psr1_ready_for_pipe_update_locked(new_crtc_state, dsb);
3109 }
3110 
3111 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
3112 {
3113 	struct intel_display *display = to_intel_display(intel_dp);
3114 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3115 	i915_reg_t reg;
3116 	u32 mask;
3117 	int err;
3118 
3119 	if (!intel_dp->psr.enabled)
3120 		return false;
3121 
3122 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3123 					  intel_dp->psr.panel_replay_enabled)) {
3124 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
3125 		mask = EDP_PSR2_STATUS_STATE_MASK;
3126 	} else {
3127 		reg = psr_status_reg(display, cpu_transcoder);
3128 		mask = EDP_PSR_STATUS_STATE_MASK;
3129 	}
3130 
3131 	mutex_unlock(&intel_dp->psr.lock);
3132 
3133 	err = intel_de_wait_for_clear(display, reg, mask, 50);
3134 	if (err)
3135 		drm_err(display->drm,
3136 			"Timed out waiting for PSR Idle for re-enable\n");
3137 
3138 	/* After the unlocked wait, verify that PSR is still wanted! */
3139 	mutex_lock(&intel_dp->psr.lock);
3140 	return err == 0 && intel_dp->psr.enabled && !intel_dp->psr.pause_counter;
3141 }
3142 
3143 static int intel_psr_fastset_force(struct intel_display *display)
3144 {
3145 	struct drm_connector_list_iter conn_iter;
3146 	struct drm_modeset_acquire_ctx ctx;
3147 	struct drm_atomic_state *state;
3148 	struct drm_connector *conn;
3149 	int err = 0;
3150 
3151 	state = drm_atomic_state_alloc(display->drm);
3152 	if (!state)
3153 		return -ENOMEM;
3154 
3155 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
3156 
3157 	state->acquire_ctx = &ctx;
3158 	to_intel_atomic_state(state)->internal = true;
3159 
3160 retry:
3161 	drm_connector_list_iter_begin(display->drm, &conn_iter);
3162 	drm_for_each_connector_iter(conn, &conn_iter) {
3163 		struct drm_connector_state *conn_state;
3164 		struct drm_crtc_state *crtc_state;
3165 
3166 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3167 			continue;
3168 
3169 		conn_state = drm_atomic_get_connector_state(state, conn);
3170 		if (IS_ERR(conn_state)) {
3171 			err = PTR_ERR(conn_state);
3172 			break;
3173 		}
3174 
3175 		if (!conn_state->crtc)
3176 			continue;
3177 
3178 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3179 		if (IS_ERR(crtc_state)) {
3180 			err = PTR_ERR(crtc_state);
3181 			break;
3182 		}
3183 
3184 		/* Mark mode as changed to trigger a pipe->update() */
3185 		crtc_state->mode_changed = true;
3186 	}
3187 	drm_connector_list_iter_end(&conn_iter);
3188 
3189 	if (err == 0)
3190 		err = drm_atomic_commit(state);
3191 
3192 	if (err == -EDEADLK) {
3193 		drm_atomic_state_clear(state);
3194 		err = drm_modeset_backoff(&ctx);
3195 		if (!err)
3196 			goto retry;
3197 	}
3198 
3199 	drm_modeset_drop_locks(&ctx);
3200 	drm_modeset_acquire_fini(&ctx);
3201 	drm_atomic_state_put(state);
3202 
3203 	return err;
3204 }
3205 
3206 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3207 {
3208 	struct intel_display *display = to_intel_display(intel_dp);
3209 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3210 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3211 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3212 	u32 old_mode, old_disable_bits;
3213 	int ret;
3214 
3215 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3216 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3217 		    I915_PSR_DEBUG_MODE_MASK) ||
3218 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3219 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3220 		return -EINVAL;
3221 	}
3222 
3223 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3224 	if (ret)
3225 		return ret;
3226 
3227 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3228 	old_disable_bits = intel_dp->psr.debug &
3229 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3230 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3231 
3232 	intel_dp->psr.debug = val;
3233 
3234 	/*
3235 	 * Do it right away if it's already enabled, otherwise it will be done
3236 	 * when enabling the source.
3237 	 */
3238 	if (intel_dp->psr.enabled)
3239 		psr_irq_control(intel_dp);
3240 
3241 	mutex_unlock(&intel_dp->psr.lock);
3242 
3243 	if (old_mode != mode || old_disable_bits != disable_bits)
3244 		ret = intel_psr_fastset_force(display);
3245 
3246 	return ret;
3247 }
3248 
3249 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3250 {
3251 	struct intel_psr *psr = &intel_dp->psr;
3252 
3253 	intel_psr_disable_locked(intel_dp);
3254 	psr->sink_not_reliable = true;
3255 	/* let's make sure that the sink is awake */
3256 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3257 }
3258 
3259 static void intel_psr_work(struct work_struct *work)
3260 {
3261 	struct intel_dp *intel_dp =
3262 		container_of(work, typeof(*intel_dp), psr.work);
3263 
3264 	mutex_lock(&intel_dp->psr.lock);
3265 
3266 	if (!intel_dp->psr.enabled)
3267 		goto unlock;
3268 
3269 	if (READ_ONCE(intel_dp->psr.irq_aux_error)) {
3270 		intel_psr_handle_irq(intel_dp);
3271 		goto unlock;
3272 	}
3273 
3274 	if (intel_dp->psr.pause_counter)
3275 		goto unlock;
3276 
3277 	/*
3278 	 * We have to make sure PSR is ready for re-enable,
3279 	 * otherwise it stays disabled until the next full enable/disable cycle.
3280 	 * PSR might take some time to get fully disabled
3281 	 * and be ready for re-enable.
3282 	 */
3283 	if (!__psr_wait_for_idle_locked(intel_dp))
3284 		goto unlock;
3285 
3286 	/*
3287 	 * The delayed work can race with an invalidate hence we need to
3288 	 * recheck. Since psr_flush first clears this and then reschedules we
3289 	 * won't ever miss a flush when bailing out here.
3290 	 */
3291 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3292 		goto unlock;
3293 
3294 	intel_psr_activate(intel_dp);
3295 unlock:
3296 	mutex_unlock(&intel_dp->psr.lock);
3297 }
3298 
3299 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3300 {
3301 	struct intel_display *display = to_intel_display(intel_dp);
3302 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3303 
3304 	if (!intel_dp->psr.psr2_sel_fetch_enabled)
3305 		return;
3306 
3307 	if (DISPLAY_VER(display) >= 20)
3308 		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3309 			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3310 	else
3311 		intel_de_write(display,
3312 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3313 			       man_trk_ctl_enable_bit_get(display) |
3314 			       man_trk_ctl_partial_frame_bit_get(display) |
3315 			       man_trk_ctl_single_full_frame_bit_get(display) |
3316 			       man_trk_ctl_continuos_full_frame(display));
3317 }
3318 
3319 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3320 {
3321 	struct intel_display *display = to_intel_display(intel_dp);
3322 
3323 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3324 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3325 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3326 			intel_psr_configure_full_frame_update(intel_dp);
3327 		}
3328 
3329 		intel_psr_force_update(intel_dp);
3330 	} else {
3331 		intel_psr_exit(intel_dp);
3332 	}
3333 }
3334 
3335 /**
3336  * intel_psr_invalidate - Invalidate PSR
3337  * @display: display device
3338  * @frontbuffer_bits: frontbuffer plane tracking bits
3339  * @origin: which operation caused the invalidate
3340  *
3341  * Since the hardware frontbuffer tracking has gaps we need to integrate
3342  * with the software frontbuffer tracking. This function gets called every
3343  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3344  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3345  *
3346  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3347  */
3348 void intel_psr_invalidate(struct intel_display *display,
3349 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3350 {
3351 	struct intel_encoder *encoder;
3352 
3353 	if (origin == ORIGIN_FLIP)
3354 		return;
3355 
3356 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3357 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3358 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3359 
3360 		mutex_lock(&intel_dp->psr.lock);
3361 		if (!intel_dp->psr.enabled) {
3362 			mutex_unlock(&intel_dp->psr.lock);
3363 			continue;
3364 		}
3365 
3366 		pipe_frontbuffer_bits &=
3367 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3368 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3369 
3370 		if (pipe_frontbuffer_bits)
3371 			_psr_invalidate_handle(intel_dp);
3372 
3373 		mutex_unlock(&intel_dp->psr.lock);
3374 	}
3375 }
3376 /*
3377  * Once we completely rely on PSR2 S/W tracking in the future,
3378  * intel_psr_flush() will also invalidate and flush the PSR for the
3379  * ORIGIN_FLIP event, therefore tgl_dc3co_flush_locked() will need to be
3380  * changed accordingly.
3381  */
3382 static void
3383 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3384 		       enum fb_op_origin origin)
3385 {
3386 	struct intel_display *display = to_intel_display(intel_dp);
3387 
3388 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3389 	    !intel_dp->psr.active)
3390 		return;
3391 
3392 	/*
3393 	 * At every frontbuffer flush/flip event, modify the delay of the delayed
3394 	 * work; when the delayed work finally runs, it means the display has been idle.
3395 	 */
3396 	if (!(frontbuffer_bits &
3397 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3398 		return;
3399 
3400 	tgl_psr2_enable_dc3co(intel_dp);
3401 	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
3402 			 intel_dp->psr.dc3co_exit_delay);
3403 }
3404 
3405 static void _psr_flush_handle(struct intel_dp *intel_dp)
3406 {
3407 	struct intel_display *display = to_intel_display(intel_dp);
3408 
3409 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3410 		/* Selective fetch prior to LNL */
3411 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3412 			/* can we turn CFF off? */
3413 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
3414 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3415 		}
3416 
3417 		/*
3418 		 * Still keep the CFF bit enabled, as we don't have a proper SU
3419 		 * configuration in case an update is sent for any reason after
3420 		 * the SFF bit gets cleared by the HW on the next vblank.
3421 		 *
3422 		 * NOTE: Setting the CFF bit is not needed from LunarLake onwards,
3423 		 * as we have our own register for the SFF bit and we are not
3424 		 * overwriting the existing SU configuration.
3425 		 */
3426 		intel_psr_configure_full_frame_update(intel_dp);
3427 
3428 		intel_psr_force_update(intel_dp);
3429 	} else if (!intel_dp->psr.psr2_sel_fetch_enabled) {
3430 		/*
3431 		 * PSR1 on all platforms
3432 		 * PSR2 HW tracking
3433 		 * Panel Replay Full frame update
3434 		 */
3435 		intel_psr_force_update(intel_dp);
3436 	} else {
3437 		/* Selective update LNL onwards */
3438 		intel_psr_exit(intel_dp);
3439 	}
3440 
3441 	if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3442 		queue_work(display->wq.unordered, &intel_dp->psr.work);
3443 }
3444 
3445 /**
3446  * intel_psr_flush - Flush PSR
3447  * @display: display device
3448  * @frontbuffer_bits: frontbuffer plane tracking bits
3449  * @origin: which operation caused the flush
3450  *
3451  * Since the hardware frontbuffer tracking has gaps we need to integrate
3452  * with the software frontbuffer tracking. This function gets called every
3453  * time frontbuffer rendering has completed and flushed out to memory. PSR
3454  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3455  *
3456  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3457  */
3458 void intel_psr_flush(struct intel_display *display,
3459 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3460 {
3461 	struct intel_encoder *encoder;
3462 
3463 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3464 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3465 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3466 
3467 		mutex_lock(&intel_dp->psr.lock);
3468 		if (!intel_dp->psr.enabled) {
3469 			mutex_unlock(&intel_dp->psr.lock);
3470 			continue;
3471 		}
3472 
3473 		pipe_frontbuffer_bits &=
3474 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3475 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3476 
3477 		/*
3478 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3479 		 * we have to ensure that the PSR is not activated until
3480 		 * intel_psr_resume() is called.
3481 		 */
3482 		if (intel_dp->psr.pause_counter)
3483 			goto unlock;
3484 
3485 		if (origin == ORIGIN_FLIP ||
3486 		    (origin == ORIGIN_CURSOR_UPDATE &&
3487 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3488 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3489 			goto unlock;
3490 		}
3491 
3492 		if (pipe_frontbuffer_bits == 0)
3493 			goto unlock;
3494 
3495 		/* By definition flush = invalidate + flush */
3496 		_psr_flush_handle(intel_dp);
3497 unlock:
3498 		mutex_unlock(&intel_dp->psr.lock);
3499 	}
3500 }
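
/*
 * Illustrative sketch (not driver code) of how the frontbuffer tracking core
 * is expected to pair the two entry points above around a CPU rendering
 * operation; only intel_psr_invalidate()/intel_psr_flush() and the macro used
 * here are real, the sequence itself is a simplified example:
 *
 *	unsigned int bits = INTEL_FRONTBUFFER_ALL_MASK(pipe);
 *
 *	intel_psr_invalidate(display, bits, ORIGIN_CPU);
 *	... CPU rendering into the frontbuffer ...
 *	intel_psr_flush(display, bits, ORIGIN_CPU);
 *
 * ORIGIN_FLIP is ignored by intel_psr_invalidate() and is routed through the
 * DC3CO path in intel_psr_flush(), as implemented above.
 */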
3501 
3502 /**
3503  * intel_psr_init - Init basic PSR work and mutex.
3504  * @intel_dp: Intel DP
3505  *
3506  * This function is called after initializing the connector
3507  * (connector initialization handles the connector capabilities)
3508  * and it initializes the basic PSR state for each DP encoder.
3509  */
3510 void intel_psr_init(struct intel_dp *intel_dp)
3511 {
3512 	struct intel_display *display = to_intel_display(intel_dp);
3513 	struct intel_connector *connector = intel_dp->attached_connector;
3514 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3515 
3516 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3517 		return;
3518 
3519 	/*
3520 	 * HSW spec explicitly says PSR is tied to port A.
3521 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3522 	 * but on BDW, GEN9 and GEN11 only the eDP transcoder has been validated
3523 	 * by the HW team.
3524 	 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3525 	 * so let's keep it hardcoded to PORT_A for those platforms.
3526 	 * GEN12 however supports an instance of the PSR registers per transcoder.
3527 	 */
3528 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3529 		drm_dbg_kms(display->drm,
3530 			    "PSR condition failed: Port not supported\n");
3531 		return;
3532 	}
3533 
3534 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3535 	    DISPLAY_VER(display) >= 20)
3536 		intel_dp->psr.source_panel_replay_support = true;
3537 
3538 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3539 		intel_dp->psr.source_support = true;
3540 
3541 	/* Set link_standby x link_off defaults */
3542 	if (DISPLAY_VER(display) < 12)
3543 		/* For new platforms up to TGL let's respect VBT back again */
3544 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3545 
3546 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3547 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3548 	mutex_init(&intel_dp->psr.lock);
3549 }
3550 
3551 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3552 					   u8 *status, u8 *error_status)
3553 {
3554 	struct drm_dp_aux *aux = &intel_dp->aux;
3555 	int ret;
3556 	unsigned int offset;
3557 
3558 	offset = intel_dp->psr.panel_replay_enabled ?
3559 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3560 
3561 	ret = drm_dp_dpcd_readb(aux, offset, status);
3562 	if (ret != 1)
3563 		return ret;
3564 
3565 	offset = intel_dp->psr.panel_replay_enabled ?
3566 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3567 
3568 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3569 	if (ret != 1)
3570 		return ret;
3571 
3572 	*status = *status & DP_PSR_SINK_STATE_MASK;
3573 
3574 	return 0;
3575 }
3576 
3577 static void psr_alpm_check(struct intel_dp *intel_dp)
3578 {
3579 	struct intel_psr *psr = &intel_dp->psr;
3580 
3581 	if (!psr->sel_update_enabled)
3582 		return;
3583 
3584 	if (intel_alpm_get_error(intel_dp)) {
3585 		intel_psr_disable_locked(intel_dp);
3586 		psr->sink_not_reliable = true;
3587 	}
3588 }
3589 
3590 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3591 {
3592 	struct intel_display *display = to_intel_display(intel_dp);
3593 	struct intel_psr *psr = &intel_dp->psr;
3594 	u8 val;
3595 	int r;
3596 
3597 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3598 	if (r != 1) {
3599 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3600 		return;
3601 	}
3602 
3603 	if (val & DP_PSR_CAPS_CHANGE) {
3604 		intel_psr_disable_locked(intel_dp);
3605 		psr->sink_not_reliable = true;
3606 		drm_dbg_kms(display->drm,
3607 			    "Sink PSR capability changed, disabling PSR\n");
3608 
3609 		/* Clearing it */
3610 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3611 	}
3612 }
3613 
3614 /*
3615  * On common bits:
3616  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3617  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3618  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3619  * this function relies on the PSR definitions.
3620  */
3621 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3622 {
3623 	struct intel_display *display = to_intel_display(intel_dp);
3624 	struct intel_psr *psr = &intel_dp->psr;
3625 	u8 status, error_status;
3626 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3627 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3628 			  DP_PSR_LINK_CRC_ERROR;
3629 
3630 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3631 		return;
3632 
3633 	mutex_lock(&psr->lock);
3634 
3635 	psr->link_ok = false;
3636 
3637 	if (!psr->enabled)
3638 		goto exit;
3639 
3640 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3641 		drm_err(display->drm,
3642 			"Error reading PSR status or error status\n");
3643 		goto exit;
3644 	}
3645 
3646 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3647 	    (error_status & errors)) {
3648 		intel_psr_disable_locked(intel_dp);
3649 		psr->sink_not_reliable = true;
3650 	}
3651 
3652 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3653 	    !error_status)
3654 		drm_dbg_kms(display->drm,
3655 			    "PSR sink internal error, disabling PSR\n");
3656 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3657 		drm_dbg_kms(display->drm,
3658 			    "PSR RFB storage error, disabling PSR\n");
3659 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3660 		drm_dbg_kms(display->drm,
3661 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3662 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3663 		drm_dbg_kms(display->drm,
3664 			    "PSR Link CRC error, disabling PSR\n");
3665 
3666 	if (error_status & ~errors)
3667 		drm_err(display->drm,
3668 			"PSR_ERROR_STATUS unhandled errors %x\n",
3669 			error_status & ~errors);
3670 	/* clear status register */
3671 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3672 
3673 	if (!psr->panel_replay_enabled) {
3674 		psr_alpm_check(intel_dp);
3675 		psr_capability_changed_check(intel_dp);
3676 	}
3677 
3678 exit:
3679 	mutex_unlock(&psr->lock);
3680 }
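
/*
 * A minimal sketch (not present in the driver) of how the equivalences listed
 * in the comment above intel_psr_short_pulse() could be asserted at build
 * time, assuming the generic BUILD_BUG_ON() helper:
 *
 *	BUILD_BUG_ON(DP_PSR_RFB_STORAGE_ERROR !=
 *		     DP_PANEL_REPLAY_RFB_STORAGE_ERROR);
 *	BUILD_BUG_ON(DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR !=
 *		     DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR);
 *	BUILD_BUG_ON(DP_PSR_LINK_CRC_ERROR != DP_PANEL_REPLAY_LINK_CRC_ERROR);
 */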
3681 
3682 bool intel_psr_enabled(struct intel_dp *intel_dp)
3683 {
3684 	bool ret;
3685 
3686 	if (!CAN_PSR(intel_dp))
3687 		return false;
3688 
3689 	mutex_lock(&intel_dp->psr.lock);
3690 	ret = intel_dp->psr.enabled;
3691 	mutex_unlock(&intel_dp->psr.lock);
3692 
3693 	return ret;
3694 }
3695 
3696 /**
3697  * intel_psr_link_ok - return psr->link_ok
3698  * @intel_dp: struct intel_dp
3699  *
3700  * We are seeing unexpected link re-trainings with some panels. This is caused
3701  * by the panel reporting a bad link status after PSR is enabled. Code checking
3702  * the link status can call this to decide whether to ignore a bad link status
3703  * reported by the panel, i.e. if the panel reports a bad link but
3704  * intel_psr_link_ok() says the link is ok, the caller should rely on the latter.
3705  *
3706  * Return value of link_ok
3707  */
3708 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3709 {
3710 	bool ret;
3711 
3712 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3713 	    !intel_dp_is_edp(intel_dp))
3714 		return false;
3715 
3716 	mutex_lock(&intel_dp->psr.lock);
3717 	ret = intel_dp->psr.link_ok;
3718 	mutex_unlock(&intel_dp->psr.lock);
3719 
3720 	return ret;
3721 }
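
/*
 * Hedged usage sketch (hypothetical caller, not driver code): link status
 * checking code would typically only act on a bad status reported by the
 * panel when PSR does not claim the link is fine, e.g.:
 *
 *	if (!drm_dp_channel_eq_ok(link_status, lane_count) &&
 *	    !intel_psr_link_ok(intel_dp))
 *		... schedule link re-training ...
 */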
3722 
3723 /**
3724  * intel_psr_lock - grab PSR lock
3725  * @crtc_state: the crtc state
3726  *
3727  * This is initially meant to be used around the CRTC update, when
3728  * vblank-sensitive registers are updated and we need to grab the lock
3729  * before that to avoid vblank evasion.
3730  */
3731 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3732 {
3733 	struct intel_display *display = to_intel_display(crtc_state);
3734 	struct intel_encoder *encoder;
3735 
3736 	if (!crtc_state->has_psr)
3737 		return;
3738 
3739 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3740 					     crtc_state->uapi.encoder_mask) {
3741 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3742 
3743 		mutex_lock(&intel_dp->psr.lock);
3744 		break;
3745 	}
3746 }
3747 
3748 /**
3749  * intel_psr_unlock - release PSR lock
3750  * @crtc_state: the crtc state
3751  *
3752  * Release the PSR lock that was held during pipe update.
3753  */
3754 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3755 {
3756 	struct intel_display *display = to_intel_display(crtc_state);
3757 	struct intel_encoder *encoder;
3758 
3759 	if (!crtc_state->has_psr)
3760 		return;
3761 
3762 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3763 					     crtc_state->uapi.encoder_mask) {
3764 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3765 
3766 		mutex_unlock(&intel_dp->psr.lock);
3767 		break;
3768 	}
3769 }
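
/*
 * Rough sketch of the expected call pattern around a pipe update (the steps
 * in between are simplified placeholders, only intel_psr_lock() and
 * intel_psr_unlock() are real):
 *
 *	intel_psr_lock(new_crtc_state);
 *	... vblank evasion and vblank sensitive register writes ...
 *	intel_psr_unlock(new_crtc_state);
 */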
3770 
3771 /* Wa_16025596647 */
3772 static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
3773 {
3774 	struct intel_display *display = to_intel_display(intel_dp);
3775 	bool dc5_dc6_blocked;
3776 
3777 	if (!intel_dp->psr.active || !intel_dp->psr.pkg_c_latency_used)
3778 		return;
3779 
3780 	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
3781 
3782 	if (intel_dp->psr.sel_update_enabled)
3783 		psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
3784 					 psr_compute_idle_frames(intel_dp));
3785 	else
3786 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
3787 								       intel_dp->psr.pipe,
3788 								       dc5_dc6_blocked);
3789 }
3790 
3791 static void psr_dc5_dc6_wa_work(struct work_struct *work)
3792 {
3793 	struct intel_display *display = container_of(work, typeof(*display),
3794 						     psr_dc5_dc6_wa_work);
3795 	struct intel_encoder *encoder;
3796 
3797 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3798 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3799 
3800 		mutex_lock(&intel_dp->psr.lock);
3801 
3802 		if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled &&
3803 		    !intel_dp->psr.pkg_c_latency_used)
3804 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3805 
3806 		mutex_unlock(&intel_dp->psr.lock);
3807 	}
3808 }
3809 
3810 /**
3811  * intel_psr_notify_dc5_dc6 - Notify PSR about enable/disable dc5/dc6
3812  * @display: intel display struct
3813  *
3814  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to schedule
3815  * psr_dc5_dc6_wa_work used for applying/removing the workaround.
3816  */
3817 void intel_psr_notify_dc5_dc6(struct intel_display *display)
3818 {
3819 	if (DISPLAY_VER(display) != 20 &&
3820 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3821 		return;
3822 
3823 	schedule_work(&display->psr_dc5_dc6_wa_work);
3824 }
3825 
3826 /**
3827  * intel_psr_dc5_dc6_wa_init - Init work for underrun on idle PSR HW bug wa
3828  * @display: intel display struct
3829  *
3830  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to init
3831  * psr_dc5_dc6_wa_work used for applying the workaround.
3832  */
3833 void intel_psr_dc5_dc6_wa_init(struct intel_display *display)
3834 {
3835 	if (DISPLAY_VER(display) != 20 &&
3836 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3837 		return;
3838 
3839 	INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work);
3840 }
3841 
3842 /**
3843  * intel_psr_notify_pipe_change - Notify PSR about enable/disable of a pipe
3844  * @state: intel atomic state
3845  * @crtc: intel crtc
3846  * @enable: enable/disable
3847  *
3848  * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
3849  * apply/remove the workaround when a pipe is getting enabled/disabled.
3850  */
3851 void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
3852 				  struct intel_crtc *crtc, bool enable)
3853 {
3854 	struct intel_display *display = to_intel_display(state);
3855 	struct intel_encoder *encoder;
3856 
3857 	if (DISPLAY_VER(display) != 20 &&
3858 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3859 		return;
3860 
3861 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3862 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3863 		u8 active_non_psr_pipes;
3864 
3865 		mutex_lock(&intel_dp->psr.lock);
3866 
3867 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3868 			goto unlock;
3869 
3870 		active_non_psr_pipes = intel_dp->psr.active_non_psr_pipes;
3871 
3872 		if (enable)
3873 			active_non_psr_pipes |= BIT(crtc->pipe);
3874 		else
3875 			active_non_psr_pipes &= ~BIT(crtc->pipe);
3876 
3877 		if (active_non_psr_pipes == intel_dp->psr.active_non_psr_pipes)
3878 			goto unlock;
3879 
3880 		if ((enable && intel_dp->psr.active_non_psr_pipes) ||
3881 		    (!enable && !intel_dp->psr.active_non_psr_pipes) ||
3882 		    !intel_dp->psr.pkg_c_latency_used) {
3883 			intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3884 			goto unlock;
3885 		}
3886 
3887 		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3888 
3889 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3890 unlock:
3891 		mutex_unlock(&intel_dp->psr.lock);
3892 	}
3893 }
3894 
3895 /**
3896  * intel_psr_notify_vblank_enable_disable - Notify PSR about enable/disable of vblank
3897  * @display: intel display struct
3898  * @enable: enable/disable
3899  *
3900  * This is targeted for the underrun on idle PSR HW bug (Wa_16025596647) to
3901  * apply/remove the workaround when vblank is getting enabled/disabled.
3902  */
3903 void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
3904 					    bool enable)
3905 {
3906 	struct intel_encoder *encoder;
3907 
3908 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3909 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3910 
3911 		mutex_lock(&intel_dp->psr.lock);
3912 		if (intel_dp->psr.panel_replay_enabled) {
3913 			mutex_unlock(&intel_dp->psr.lock);
3914 			break;
3915 		}
3916 
3917 		if (intel_dp->psr.enabled && intel_dp->psr.pkg_c_latency_used)
3918 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3919 
3920 		mutex_unlock(&intel_dp->psr.lock);
3921 		return;
3922 	}
3923 
3924 	/*
3925 	 * NOTE: intel_display_power_set_target_dc_state is used
3926 	 * only by PSR code for DC3CO handling. DC3CO target
3927 	 * state is currently disabled in PSR code. If DC3CO
3928 	 * is taken into use we need to take that into account here
3929 	 * as well.
3930 	 */
3931 	intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE :
3932 						DC_STATE_EN_UPTO_DC6);
3933 }
3934 
3935 static void
3936 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3937 {
3938 	struct intel_display *display = to_intel_display(intel_dp);
3939 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3940 	const char *status = "unknown";
3941 	u32 val, status_val;
3942 
3943 	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
3944 	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
3945 		static const char * const live_status[] = {
3946 			"IDLE",
3947 			"CAPTURE",
3948 			"CAPTURE_FS",
3949 			"SLEEP",
3950 			"BUFON_FW",
3951 			"ML_UP",
3952 			"SU_STANDBY",
3953 			"FAST_SLEEP",
3954 			"DEEP_SLEEP",
3955 			"BUF_ON",
3956 			"TG_ON"
3957 		};
3958 		val = intel_de_read(display,
3959 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3960 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3961 		if (status_val < ARRAY_SIZE(live_status))
3962 			status = live_status[status_val];
3963 	} else {
3964 		static const char * const live_status[] = {
3965 			"IDLE",
3966 			"SRDONACK",
3967 			"SRDENT",
3968 			"BUFOFF",
3969 			"BUFON",
3970 			"AUXACK",
3971 			"SRDOFFACK",
3972 			"SRDENT_ON",
3973 		};
3974 		val = intel_de_read(display,
3975 				    psr_status_reg(display, cpu_transcoder));
3976 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3977 		if (status_val < ARRAY_SIZE(live_status))
3978 			status = live_status[status_val];
3979 	}
3980 
3981 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3982 }
3983 
3984 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3985 				      struct seq_file *m)
3986 {
3987 	struct intel_psr *psr = &intel_dp->psr;
3988 
3989 	seq_printf(m, "Sink support: PSR = %s",
3990 		   str_yes_no(psr->sink_support));
3991 
3992 	if (psr->sink_support)
3993 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3994 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3995 		seq_printf(m, " (Early Transport)");
3996 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3997 	seq_printf(m, ", Panel Replay Selective Update = %s",
3998 		   str_yes_no(psr->sink_panel_replay_su_support));
3999 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
4000 	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
4001 		seq_printf(m, " (Early Transport)");
4002 	seq_printf(m, "\n");
4003 }
4004 
4005 static void intel_psr_print_mode(struct intel_dp *intel_dp,
4006 				 struct seq_file *m)
4007 {
4008 	struct intel_psr *psr = &intel_dp->psr;
4009 	const char *status, *mode, *region_et;
4010 
4011 	if (psr->enabled)
4012 		status = " enabled";
4013 	else
4014 		status = "disabled";
4015 
4016 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
4017 		mode = "Panel Replay Selective Update";
4018 	else if (psr->panel_replay_enabled)
4019 		mode = "Panel Replay";
4020 	else if (psr->sel_update_enabled)
4021 		mode = "PSR2";
4022 	else if (psr->enabled)
4023 		mode = "PSR1";
4024 	else
4025 		mode = "";
4026 
4027 	if (psr->su_region_et_enabled)
4028 		region_et = " (Early Transport)";
4029 	else
4030 		region_et = "";
4031 
4032 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
4033 }
4034 
4035 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
4036 {
4037 	struct intel_display *display = to_intel_display(intel_dp);
4038 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
4039 	struct intel_psr *psr = &intel_dp->psr;
4040 	struct ref_tracker *wakeref;
4041 	bool enabled;
4042 	u32 val, psr2_ctl;
4043 
4044 	intel_psr_sink_capability(intel_dp, m);
4045 
4046 	if (!(psr->sink_support || psr->sink_panel_replay_support))
4047 		return 0;
4048 
4049 	wakeref = intel_display_rpm_get(display);
4050 	mutex_lock(&psr->lock);
4051 
4052 	intel_psr_print_mode(intel_dp, m);
4053 
4054 	if (!psr->enabled) {
4055 		seq_printf(m, "PSR sink not reliable: %s\n",
4056 			   str_yes_no(psr->sink_not_reliable));
4057 
4058 		goto unlock;
4059 	}
4060 
4061 	if (psr->panel_replay_enabled) {
4062 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
4063 
4064 		if (intel_dp_is_edp(intel_dp))
4065 			psr2_ctl = intel_de_read(display,
4066 						 EDP_PSR2_CTL(display,
4067 							      cpu_transcoder));
4068 
4069 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
4070 	} else if (psr->sel_update_enabled) {
4071 		val = intel_de_read(display,
4072 				    EDP_PSR2_CTL(display, cpu_transcoder));
4073 		enabled = val & EDP_PSR2_ENABLE;
4074 	} else {
4075 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
4076 		enabled = val & EDP_PSR_ENABLE;
4077 	}
4078 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
4079 		   str_enabled_disabled(enabled), val);
4080 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
4081 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
4082 			   psr2_ctl);
4083 	psr_source_status(intel_dp, m);
4084 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
4085 		   psr->busy_frontbuffer_bits);
4086 
4087 	/*
4088 	 * SKL+ Perf counter is reset to 0 every time a DC state is entered
4089 	 */
4090 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
4091 	seq_printf(m, "Performance counter: %u\n",
4092 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
4093 
4094 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
4095 		seq_printf(m, "Last attempted entry at: %lld\n",
4096 			   psr->last_entry_attempt);
4097 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
4098 	}
4099 
4100 	if (psr->sel_update_enabled) {
4101 		u32 su_frames_val[3];
4102 		int frame;
4103 
4104 		/*
4105 		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
4106 		 * (it returns zeros only) and it has been removed on Xe2_LPD.
4107 		 */
4108 		if (DISPLAY_VER(display) < 13) {
4109 			/*
4110 			 * Reading all 3 registers beforehand to minimize crossing a
4111 			 * frame boundary between register reads.
4112 			 */
4113 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
4114 				val = intel_de_read(display,
4115 						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
4116 				su_frames_val[frame / 3] = val;
4117 			}
4118 
4119 			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
4120 
4121 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
4122 				u32 su_blocks;
4123 
4124 				su_blocks = su_frames_val[frame / 3] &
4125 					PSR2_SU_STATUS_MASK(frame);
4126 				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
4127 				seq_printf(m, "%d\t%d\n", frame, su_blocks);
4128 			}
4129 		}
4130 
4131 		seq_printf(m, "PSR2 selective fetch: %s\n",
4132 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
4133 	}
4134 
4135 unlock:
4136 	mutex_unlock(&psr->lock);
4137 	intel_display_rpm_put(display, wakeref);
4138 
4139 	return 0;
4140 }
4141 
4142 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
4143 {
4144 	struct intel_display *display = m->private;
4145 	struct intel_dp *intel_dp = NULL;
4146 	struct intel_encoder *encoder;
4147 
4148 	if (!HAS_PSR(display))
4149 		return -ENODEV;
4150 
4151 	/* Find the first EDP which supports PSR */
4152 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4153 		intel_dp = enc_to_intel_dp(encoder);
4154 		break;
4155 	}
4156 
4157 	if (!intel_dp)
4158 		return -ENODEV;
4159 
4160 	return intel_psr_status(m, intel_dp);
4161 }
4162 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
4163 
4164 static int
4165 i915_edp_psr_debug_set(void *data, u64 val)
4166 {
4167 	struct intel_display *display = data;
4168 	struct intel_encoder *encoder;
4169 	int ret = -ENODEV;
4170 
4171 	if (!HAS_PSR(display))
4172 		return ret;
4173 
4174 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4175 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4176 
4177 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
4178 
4179 		// TODO: split to each transcoder's PSR debug state
4180 		with_intel_display_rpm(display)
4181 			ret = intel_psr_debug_set(intel_dp, val);
4182 	}
4183 
4184 	return ret;
4185 }
4186 
4187 static int
4188 i915_edp_psr_debug_get(void *data, u64 *val)
4189 {
4190 	struct intel_display *display = data;
4191 	struct intel_encoder *encoder;
4192 
4193 	if (!HAS_PSR(display))
4194 		return -ENODEV;
4195 
4196 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4197 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4198 
4199 		// TODO: split to each transcoder's PSR debug state
4200 		*val = READ_ONCE(intel_dp->psr.debug);
4201 		return 0;
4202 	}
4203 
4204 	return -ENODEV;
4205 }
4206 
4207 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
4208 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
4209 			"%llu\n");
4210 
4211 void intel_psr_debugfs_register(struct intel_display *display)
4212 {
4213 	struct dentry *debugfs_root = display->drm->debugfs_root;
4214 
4215 	debugfs_create_file("i915_edp_psr_debug", 0644, debugfs_root,
4216 			    display, &i915_edp_psr_debug_fops);
4217 
4218 	debugfs_create_file("i915_edp_psr_status", 0444, debugfs_root,
4219 			    display, &i915_edp_psr_status_fops);
4220 }
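
/*
 * Illustrative userspace access to the nodes registered above, assuming the
 * default debugfs mount point and the first DRM device (exact paths depend on
 * the system):
 *
 *	# cat /sys/kernel/debug/dri/0/i915_edp_psr_status
 *	# echo <I915_PSR_DEBUG_* value> > /sys/kernel/debug/dri/0/i915_edp_psr_debug
 */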
4221 
4222 static const char *psr_mode_str(struct intel_dp *intel_dp)
4223 {
4224 	if (intel_dp->psr.panel_replay_enabled)
4225 		return "PANEL-REPLAY";
4226 	else if (intel_dp->psr.enabled)
4227 		return "PSR";
4228 
4229 	return "unknown";
4230 }
4231 
4232 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
4233 {
4234 	struct intel_connector *connector = m->private;
4235 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4236 	static const char * const sink_status[] = {
4237 		"inactive",
4238 		"transition to active, capture and display",
4239 		"active, display from RFB",
4240 		"active, capture and display on sink device timings",
4241 		"transition to inactive, capture and display, timing re-sync",
4242 		"reserved",
4243 		"reserved",
4244 		"sink internal error",
4245 	};
4246 	const char *str;
4247 	int ret;
4248 	u8 status, error_status;
4249 
4250 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
4251 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
4252 		return -ENODEV;
4253 	}
4254 
4255 	if (connector->base.status != connector_status_connected)
4256 		return -ENODEV;
4257 
4258 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
4259 	if (ret)
4260 		return ret;
4261 
4262 	status &= DP_PSR_SINK_STATE_MASK;
4263 	if (status < ARRAY_SIZE(sink_status))
4264 		str = sink_status[status];
4265 	else
4266 		str = "unknown";
4267 
4268 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
4269 
4270 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
4271 
4272 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
4273 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
4274 			    DP_PSR_LINK_CRC_ERROR))
4275 		seq_puts(m, ":\n");
4276 	else
4277 		seq_puts(m, "\n");
4278 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
4279 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
4280 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
4281 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
4282 	if (error_status & DP_PSR_LINK_CRC_ERROR)
4283 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
4284 
4285 	return ret;
4286 }
4287 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
4288 
4289 static int i915_psr_status_show(struct seq_file *m, void *data)
4290 {
4291 	struct intel_connector *connector = m->private;
4292 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4293 
4294 	return intel_psr_status(m, intel_dp);
4295 }
4296 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
4297 
4298 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
4299 {
4300 	struct intel_display *display = to_intel_display(connector);
4301 	struct dentry *root = connector->base.debugfs_entry;
4302 
4303 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
4304 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
4305 		return;
4306 
4307 	debugfs_create_file("i915_psr_sink_status", 0444, root,
4308 			    connector, &i915_psr_sink_status_fops);
4309 
4310 	if (HAS_PSR(display) || HAS_DP20(display))
4311 		debugfs_create_file("i915_psr_status", 0444, root,
4312 				    connector, &i915_psr_status_fops);
4313 }
4314 
4315 bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_state *crtc_state)
4316 {
4317 	/*
4318 	 * eDP Panel Replay always uses ALPM;
4319 	 * PSR2 uses ALPM but PSR1 doesn't.
4320 	 */
4321 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
4322 					     crtc_state->has_panel_replay);
4323 }
4324 
4325 bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
4326 				   const struct intel_crtc_state *crtc_state)
4327 {
4328 	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
4329 }
4330