xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision dfd4b508c8c6106083698a0dd5e35aecc7c48725)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 #include <drm/drm_vblank.h>
30 
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_regs.h"
40 #include "intel_display_rpm.h"
41 #include "intel_display_types.h"
42 #include "intel_dmc.h"
43 #include "intel_dp.h"
44 #include "intel_dp_aux.h"
45 #include "intel_frontbuffer.h"
46 #include "intel_hdmi.h"
47 #include "intel_psr.h"
48 #include "intel_psr_regs.h"
49 #include "intel_snps_phy.h"
50 #include "intel_step.h"
51 #include "intel_vblank.h"
52 #include "intel_vrr.h"
53 #include "skl_universal_plane.h"
54 
55 /**
56  * DOC: Panel Self Refresh (PSR/SRD)
57  *
58  * Since Haswell Display controller supports Panel Self-Refresh on display
59  * panels witch have a remote frame buffer (RFB) implemented according to PSR
60  * spec in eDP1.3. PSR feature allows the display to go to lower standby states
61  * when system is idle but display is on as it eliminates display refresh
62  * request to DDR memory completely as long as the frame buffer for that
63  * display is unchanged.
64  *
65  * Panel Self Refresh must be supported by both Hardware (source) and
66  * Panel (sink).
67  *
68  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
69  * to power down the link and memory controller. For DSI panels the same idea
70  * is called "manual mode".
71  *
72  * The implementation uses the hardware-based PSR support which automatically
73  * enters/exits self-refresh mode. The hardware takes care of sending the
74  * required DP aux message and could even retrain the link (that part isn't
75  * enabled yet though). The hardware also keeps track of any frontbuffer
76  * changes to know when to exit self-refresh mode again. Unfortunately that
77  * part doesn't work too well, hence why the i915 PSR support uses the
78  * software frontbuffer tracking to make sure it doesn't miss a screen
79  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
80  * get called by the frontbuffer tracking code. Note that because of locking
81  * issues the self-refresh re-enable code is done from a work queue, which
82  * must be correctly synchronized/cancelled when shutting down the pipe."
83  *
84  * DC3CO (DC3 clock off)
85  *
86  * On top of PSR2, GEN12 adds a intermediate power savings state that turns
87  * clock off automatically during PSR2 idle state.
88  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
89  * entry/exit allows the HW to enter a low-power state even when page flipping
90  * periodically (for instance a 30fps video playback scenario).
91  *
92  * Every time a flips occurs PSR2 will get out of deep sleep state(if it was),
93  * so DC3CO is enabled and tgl_dc3co_disable_work is schedule to run after 6
94  * frames, if no other flip occurs and the function above is executed, DC3CO is
95  * disabled and PSR2 is configured to enter deep sleep, resetting again in case
96  * of another flip.
97  * Front buffer modifications do not trigger DC3CO activation on purpose as it
98  * would bring a lot of complexity and most of the moderns systems will only
99  * use page flips.
100  */
101 
102 /*
103  * Description of PSR mask bits:
104  *
105  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
106  *
107  *  When unmasked (nearly) all display register writes (eg. even
108  *  SWF) trigger a PSR exit. Some registers are excluded from this
109  *  and they have a more specific mask (described below). On icl+
110  *  this bit no longer exists and is effectively always set.
111  *
112  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
113  *
114  *  When unmasked (nearly) all pipe/plane register writes
115  *  trigger a PSR exit. Some plane registers are excluded from this
116  *  and they have a more specific mask (described below).
117  *
118  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
119  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
120  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
121  *
122  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
123  *  SPR_SURF/CURBASE are not included in this and instead are
124  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
125  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
126  *
127  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
128  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
129  *
130  *  When unmasked PSR is blocked as long as the sprite
131  *  plane is enabled. skl+ with their universal planes no
132  *  longer have a mask bit like this, and no plane being
133  *  enabledb blocks PSR.
134  *
135  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
136  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
137  *
138  *  When umasked CURPOS writes trigger a PSR exit. On skl+
139  *  this doesn't exit but CURPOS is included in the
140  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
141  *
142  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
143  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
144  *
145  *  When unmasked PSR is blocked as long as vblank and/or vsync
146  *  interrupt is unmasked in IMR *and* enabled in IER.
147  *
148  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
149  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
150  *
151  *  Selectcs whether PSR exit generates an extra vblank before
152  *  the first frame is transmitted. Also note the opposite polarity
153  *  if the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
154  *  unmasked==do not generate the extra vblank).
155  *
156  *  With DC states enabled the extra vblank happens after link training,
157  *  with DC states disabled it happens immediately upuon PSR exit trigger.
158  *  No idea as of now why there is a difference. HSW/BDW (which don't
159  *  even have DMC) always generate it after link training. Go figure.
160  *
161  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
162  *  and thus won't latch until the first vblank. So with DC states
163  *  enabled the register effectively uses the reset value during DC5
164  *  exit+PSR exit sequence, and thus the bit does nothing until
165  *  latched by the vblank that it was trying to prevent from being
166  *  generated in the first place. So we should probably call this
167  *  one a chicken/egg bit instead on skl+.
168  *
169  *  In standby mode (as opposed to link-off) this makes no difference
170  *  as the timing generator keeps running the whole time generating
171  *  normal periodic vblanks.
172  *
173  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
174  *  and doing so makes the behaviour match the skl+ reset value.
175  *
176  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
177  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
178  *
179  *  On BDW without this bit is no vblanks whatsoever are
180  *  generated after PSR exit. On HSW this has no apparent effect.
181  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
182  *
183  * The rest of the bits are more self-explanatory and/or
184  * irrelevant for normal operation.
185  *
186  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
187  * has_sel_update:
188  *
189  *  has_psr (alone):					PSR1
190  *  has_psr + has_sel_update:				PSR2
191  *  has_psr + has_panel_replay:				Panel Replay
192  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
193  *
194  * Description of some intel_psr variables. enabled, panel_replay_enabled,
195  * sel_update_enabled
196  *
197  *  enabled (alone):						PSR1
198  *  enabled + sel_update_enabled:				PSR2
199  *  enabled + panel_replay_enabled:				Panel Replay
200  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
201  */
202 
203 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
204 			   (intel_dp)->psr.source_support)
205 
intel_encoder_can_psr(struct intel_encoder * encoder)206 bool intel_encoder_can_psr(struct intel_encoder *encoder)
207 {
208 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
209 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
210 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
211 	else
212 		return false;
213 }
214 
intel_psr_needs_aux_io_power(struct intel_encoder * encoder,const struct intel_crtc_state * crtc_state)215 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
216 				  const struct intel_crtc_state *crtc_state)
217 {
218 	/*
219 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
220 	 * the output is enabled. For non-eDP outputs the main link is always
221 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
222 	 * for eDP.
223 	 *
224 	 * TODO:
225 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
226 	 *   the ALPM with main-link off mode is not enabled.
227 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
228 	 *   main-link off mode is added for it and this mode gets enabled.
229 	 */
230 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
231 	       intel_encoder_can_psr(encoder);
232 }
233 
psr_global_enabled(struct intel_dp * intel_dp)234 static bool psr_global_enabled(struct intel_dp *intel_dp)
235 {
236 	struct intel_display *display = to_intel_display(intel_dp);
237 	struct intel_connector *connector = intel_dp->attached_connector;
238 
239 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
240 	case I915_PSR_DEBUG_DEFAULT:
241 		if (display->params.enable_psr == -1)
242 			return intel_dp_is_edp(intel_dp) ?
243 				connector->panel.vbt.psr.enable :
244 				true;
245 		return display->params.enable_psr;
246 	case I915_PSR_DEBUG_DISABLE:
247 		return false;
248 	default:
249 		return true;
250 	}
251 }
252 
psr2_global_enabled(struct intel_dp * intel_dp)253 static bool psr2_global_enabled(struct intel_dp *intel_dp)
254 {
255 	struct intel_display *display = to_intel_display(intel_dp);
256 
257 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
258 	case I915_PSR_DEBUG_DISABLE:
259 	case I915_PSR_DEBUG_FORCE_PSR1:
260 		return false;
261 	default:
262 		if (display->params.enable_psr == 1)
263 			return false;
264 		return true;
265 	}
266 }
267 
psr2_su_region_et_global_enabled(struct intel_dp * intel_dp)268 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
269 {
270 	struct intel_display *display = to_intel_display(intel_dp);
271 
272 	if (display->params.enable_psr != -1)
273 		return false;
274 
275 	return true;
276 }
277 
panel_replay_global_enabled(struct intel_dp * intel_dp)278 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
279 {
280 	struct intel_display *display = to_intel_display(intel_dp);
281 
282 	if ((display->params.enable_psr != -1) ||
283 	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
284 		return false;
285 	return true;
286 }
287 
psr_irq_psr_error_bit_get(struct intel_dp * intel_dp)288 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
289 {
290 	struct intel_display *display = to_intel_display(intel_dp);
291 
292 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
293 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
294 }
295 
psr_irq_post_exit_bit_get(struct intel_dp * intel_dp)296 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
297 {
298 	struct intel_display *display = to_intel_display(intel_dp);
299 
300 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
301 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
302 }
303 
psr_irq_pre_entry_bit_get(struct intel_dp * intel_dp)304 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
305 {
306 	struct intel_display *display = to_intel_display(intel_dp);
307 
308 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
309 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
310 }
311 
psr_irq_mask_get(struct intel_dp * intel_dp)312 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
313 {
314 	struct intel_display *display = to_intel_display(intel_dp);
315 
316 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
317 		EDP_PSR_MASK(intel_dp->psr.transcoder);
318 }
319 
psr_ctl_reg(struct intel_display * display,enum transcoder cpu_transcoder)320 static i915_reg_t psr_ctl_reg(struct intel_display *display,
321 			      enum transcoder cpu_transcoder)
322 {
323 	if (DISPLAY_VER(display) >= 8)
324 		return EDP_PSR_CTL(display, cpu_transcoder);
325 	else
326 		return HSW_SRD_CTL;
327 }
328 
psr_debug_reg(struct intel_display * display,enum transcoder cpu_transcoder)329 static i915_reg_t psr_debug_reg(struct intel_display *display,
330 				enum transcoder cpu_transcoder)
331 {
332 	if (DISPLAY_VER(display) >= 8)
333 		return EDP_PSR_DEBUG(display, cpu_transcoder);
334 	else
335 		return HSW_SRD_DEBUG;
336 }
337 
psr_perf_cnt_reg(struct intel_display * display,enum transcoder cpu_transcoder)338 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
339 				   enum transcoder cpu_transcoder)
340 {
341 	if (DISPLAY_VER(display) >= 8)
342 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
343 	else
344 		return HSW_SRD_PERF_CNT;
345 }
346 
psr_status_reg(struct intel_display * display,enum transcoder cpu_transcoder)347 static i915_reg_t psr_status_reg(struct intel_display *display,
348 				 enum transcoder cpu_transcoder)
349 {
350 	if (DISPLAY_VER(display) >= 8)
351 		return EDP_PSR_STATUS(display, cpu_transcoder);
352 	else
353 		return HSW_SRD_STATUS;
354 }
355 
psr_imr_reg(struct intel_display * display,enum transcoder cpu_transcoder)356 static i915_reg_t psr_imr_reg(struct intel_display *display,
357 			      enum transcoder cpu_transcoder)
358 {
359 	if (DISPLAY_VER(display) >= 12)
360 		return TRANS_PSR_IMR(display, cpu_transcoder);
361 	else
362 		return EDP_PSR_IMR;
363 }
364 
psr_iir_reg(struct intel_display * display,enum transcoder cpu_transcoder)365 static i915_reg_t psr_iir_reg(struct intel_display *display,
366 			      enum transcoder cpu_transcoder)
367 {
368 	if (DISPLAY_VER(display) >= 12)
369 		return TRANS_PSR_IIR(display, cpu_transcoder);
370 	else
371 		return EDP_PSR_IIR;
372 }
373 
psr_aux_ctl_reg(struct intel_display * display,enum transcoder cpu_transcoder)374 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
375 				  enum transcoder cpu_transcoder)
376 {
377 	if (DISPLAY_VER(display) >= 8)
378 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
379 	else
380 		return HSW_SRD_AUX_CTL;
381 }
382 
psr_aux_data_reg(struct intel_display * display,enum transcoder cpu_transcoder,int i)383 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
384 				   enum transcoder cpu_transcoder, int i)
385 {
386 	if (DISPLAY_VER(display) >= 8)
387 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
388 	else
389 		return HSW_SRD_AUX_DATA(i);
390 }
391 
psr_irq_control(struct intel_dp * intel_dp)392 static void psr_irq_control(struct intel_dp *intel_dp)
393 {
394 	struct intel_display *display = to_intel_display(intel_dp);
395 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
396 	u32 mask;
397 
398 	if (intel_dp->psr.panel_replay_enabled)
399 		return;
400 
401 	mask = psr_irq_psr_error_bit_get(intel_dp);
402 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
403 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
404 			psr_irq_pre_entry_bit_get(intel_dp);
405 
406 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
407 		     psr_irq_mask_get(intel_dp), ~mask);
408 }
409 
psr_event_print(struct intel_display * display,u32 val,bool sel_update_enabled)410 static void psr_event_print(struct intel_display *display,
411 			    u32 val, bool sel_update_enabled)
412 {
413 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
414 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
415 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
416 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
417 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
418 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
419 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
420 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
421 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
422 	if (val & PSR_EVENT_GRAPHICS_RESET)
423 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
424 	if (val & PSR_EVENT_PCH_INTERRUPT)
425 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
426 	if (val & PSR_EVENT_MEMORY_UP)
427 		drm_dbg_kms(display->drm, "\tMemory up\n");
428 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
429 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
430 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
431 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
432 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
433 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
434 	if (val & PSR_EVENT_REGISTER_UPDATE)
435 		drm_dbg_kms(display->drm, "\tRegister updated\n");
436 	if (val & PSR_EVENT_HDCP_ENABLE)
437 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
438 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
439 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
440 	if (val & PSR_EVENT_VBI_ENABLE)
441 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
442 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
443 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
444 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
445 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
446 }
447 
intel_psr_irq_handler(struct intel_dp * intel_dp,u32 psr_iir)448 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
449 {
450 	struct intel_display *display = to_intel_display(intel_dp);
451 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
452 	ktime_t time_ns =  ktime_get();
453 
454 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
455 		intel_dp->psr.last_entry_attempt = time_ns;
456 		drm_dbg_kms(display->drm,
457 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
458 			    transcoder_name(cpu_transcoder));
459 	}
460 
461 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
462 		intel_dp->psr.last_exit = time_ns;
463 		drm_dbg_kms(display->drm,
464 			    "[transcoder %s] PSR exit completed\n",
465 			    transcoder_name(cpu_transcoder));
466 
467 		if (DISPLAY_VER(display) >= 9) {
468 			u32 val;
469 
470 			val = intel_de_rmw(display,
471 					   PSR_EVENT(display, cpu_transcoder),
472 					   0, 0);
473 
474 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
475 		}
476 	}
477 
478 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
479 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
480 			 transcoder_name(cpu_transcoder));
481 
482 		intel_dp->psr.irq_aux_error = true;
483 
484 		/*
485 		 * If this interruption is not masked it will keep
486 		 * interrupting so fast that it prevents the scheduled
487 		 * work to run.
488 		 * Also after a PSR error, we don't want to arm PSR
489 		 * again so we don't care about unmask the interruption
490 		 * or unset irq_aux_error.
491 		 */
492 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
493 			     0, psr_irq_psr_error_bit_get(intel_dp));
494 
495 		queue_work(display->wq.unordered, &intel_dp->psr.work);
496 	}
497 }
498 
intel_dp_get_sink_sync_latency(struct intel_dp * intel_dp)499 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
500 {
501 	struct intel_display *display = to_intel_display(intel_dp);
502 	u8 val = 8; /* assume the worst if we can't read the value */
503 
504 	if (drm_dp_dpcd_readb(&intel_dp->aux,
505 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
506 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
507 	else
508 		drm_dbg_kms(display->drm,
509 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
510 	return val;
511 }
512 
intel_dp_get_su_capability(struct intel_dp * intel_dp)513 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
514 {
515 	u8 su_capability = 0;
516 
517 	if (intel_dp->psr.sink_panel_replay_su_support)
518 		drm_dp_dpcd_readb(&intel_dp->aux,
519 				  DP_PANEL_REPLAY_CAP_CAPABILITY,
520 				  &su_capability);
521 	else
522 		su_capability = intel_dp->psr_dpcd[1];
523 
524 	return su_capability;
525 }
526 
527 static unsigned int
intel_dp_get_su_x_granularity_offset(struct intel_dp * intel_dp)528 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
529 {
530 	return intel_dp->psr.sink_panel_replay_su_support ?
531 		DP_PANEL_REPLAY_CAP_X_GRANULARITY :
532 		DP_PSR2_SU_X_GRANULARITY;
533 }
534 
535 static unsigned int
intel_dp_get_su_y_granularity_offset(struct intel_dp * intel_dp)536 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
537 {
538 	return intel_dp->psr.sink_panel_replay_su_support ?
539 		DP_PANEL_REPLAY_CAP_Y_GRANULARITY :
540 		DP_PSR2_SU_Y_GRANULARITY;
541 }
542 
543 /*
544  * Note: Bits related to granularity are same in panel replay and psr
545  * registers. Rely on PSR definitions on these "common" bits.
546  */
intel_dp_get_su_granularity(struct intel_dp * intel_dp)547 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
548 {
549 	struct intel_display *display = to_intel_display(intel_dp);
550 	ssize_t r;
551 	u16 w;
552 	u8 y;
553 
554 	/*
555 	 * TODO: Do we need to take into account panel supporting both PSR and
556 	 * Panel replay?
557 	 */
558 
559 	/*
560 	 * If sink don't have specific granularity requirements set legacy
561 	 * ones.
562 	 */
563 	if (!(intel_dp_get_su_capability(intel_dp) &
564 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
565 		/* As PSR2 HW sends full lines, we do not care about x granularity */
566 		w = 4;
567 		y = 4;
568 		goto exit;
569 	}
570 
571 	r = drm_dp_dpcd_read(&intel_dp->aux,
572 			     intel_dp_get_su_x_granularity_offset(intel_dp),
573 			     &w, 2);
574 	if (r != 2)
575 		drm_dbg_kms(display->drm,
576 			    "Unable to read selective update x granularity\n");
577 	/*
578 	 * Spec says that if the value read is 0 the default granularity should
579 	 * be used instead.
580 	 */
581 	if (r != 2 || w == 0)
582 		w = 4;
583 
584 	r = drm_dp_dpcd_read(&intel_dp->aux,
585 			     intel_dp_get_su_y_granularity_offset(intel_dp),
586 			     &y, 1);
587 	if (r != 1) {
588 		drm_dbg_kms(display->drm,
589 			    "Unable to read selective update y granularity\n");
590 		y = 4;
591 	}
592 	if (y == 0)
593 		y = 1;
594 
595 exit:
596 	intel_dp->psr.su_w_granularity = w;
597 	intel_dp->psr.su_y_granularity = y;
598 }
599 
_panel_replay_init_dpcd(struct intel_dp * intel_dp)600 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
601 {
602 	struct intel_display *display = to_intel_display(intel_dp);
603 
604 	if (intel_dp_is_edp(intel_dp)) {
605 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
606 			drm_dbg_kms(display->drm,
607 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
608 			return;
609 		}
610 
611 		if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
612 		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
613 			drm_dbg_kms(display->drm,
614 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
615 			return;
616 		}
617 	}
618 
619 	intel_dp->psr.sink_panel_replay_support = true;
620 
621 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
622 	    DP_PANEL_REPLAY_SU_SUPPORT)
623 		intel_dp->psr.sink_panel_replay_su_support = true;
624 
625 	drm_dbg_kms(display->drm,
626 		    "Panel replay %sis supported by panel\n",
627 		    intel_dp->psr.sink_panel_replay_su_support ?
628 		    "selective_update " : "");
629 }
630 
_psr_init_dpcd(struct intel_dp * intel_dp)631 static void _psr_init_dpcd(struct intel_dp *intel_dp)
632 {
633 	struct intel_display *display = to_intel_display(intel_dp);
634 
635 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
636 		    intel_dp->psr_dpcd[0]);
637 
638 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
639 		drm_dbg_kms(display->drm,
640 			    "PSR support not currently available for this panel\n");
641 		return;
642 	}
643 
644 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
645 		drm_dbg_kms(display->drm,
646 			    "Panel lacks power state control, PSR cannot be enabled\n");
647 		return;
648 	}
649 
650 	intel_dp->psr.sink_support = true;
651 	intel_dp->psr.sink_sync_latency =
652 		intel_dp_get_sink_sync_latency(intel_dp);
653 
654 	if (DISPLAY_VER(display) >= 9 &&
655 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
656 		bool y_req = intel_dp->psr_dpcd[1] &
657 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
658 
659 		/*
660 		 * All panels that supports PSR version 03h (PSR2 +
661 		 * Y-coordinate) can handle Y-coordinates in VSC but we are
662 		 * only sure that it is going to be used when required by the
663 		 * panel. This way panel is capable to do selective update
664 		 * without a aux frame sync.
665 		 *
666 		 * To support PSR version 02h and PSR version 03h without
667 		 * Y-coordinate requirement panels we would need to enable
668 		 * GTC first.
669 		 */
670 		intel_dp->psr.sink_psr2_support = y_req &&
671 			intel_alpm_aux_wake_supported(intel_dp);
672 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
673 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
674 	}
675 }
676 
intel_psr_init_dpcd(struct intel_dp * intel_dp)677 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
678 {
679 	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
680 			 sizeof(intel_dp->psr_dpcd));
681 
682 	drm_dp_dpcd_read(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
683 			 &intel_dp->pr_dpcd, sizeof(intel_dp->pr_dpcd));
684 
685 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
686 	    DP_PANEL_REPLAY_SUPPORT)
687 		_panel_replay_init_dpcd(intel_dp);
688 
689 	if (intel_dp->psr_dpcd[0])
690 		_psr_init_dpcd(intel_dp);
691 
692 	if (intel_dp->psr.sink_psr2_support ||
693 	    intel_dp->psr.sink_panel_replay_su_support)
694 		intel_dp_get_su_granularity(intel_dp);
695 }
696 
hsw_psr_setup_aux(struct intel_dp * intel_dp)697 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
698 {
699 	struct intel_display *display = to_intel_display(intel_dp);
700 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
701 	u32 aux_clock_divider, aux_ctl;
702 	/* write DP_SET_POWER=D0 */
703 	static const u8 aux_msg[] = {
704 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
705 		[1] = (DP_SET_POWER >> 8) & 0xff,
706 		[2] = DP_SET_POWER & 0xff,
707 		[3] = 1 - 1,
708 		[4] = DP_SET_POWER_D0,
709 	};
710 	int i;
711 
712 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
713 	for (i = 0; i < sizeof(aux_msg); i += 4)
714 		intel_de_write(display,
715 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
716 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
717 
718 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
719 
720 	/* Start with bits set for DDI_AUX_CTL register */
721 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
722 					     aux_clock_divider);
723 
724 	/* Select only valid bits for SRD_AUX_CTL */
725 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
726 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
727 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
728 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
729 
730 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
731 		       aux_ctl);
732 }
733 
psr2_su_region_et_valid(struct intel_dp * intel_dp,bool panel_replay)734 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
735 {
736 	struct intel_display *display = to_intel_display(intel_dp);
737 
738 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
739 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
740 		return false;
741 
742 	return panel_replay ?
743 		intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
744 		DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
745 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
746 		psr2_su_region_et_global_enabled(intel_dp);
747 }
748 
_panel_replay_enable_sink(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)749 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
750 				      const struct intel_crtc_state *crtc_state)
751 {
752 	u8 val = DP_PANEL_REPLAY_ENABLE |
753 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
754 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
755 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
756 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
757 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
758 
759 	if (crtc_state->has_sel_update)
760 		val |= DP_PANEL_REPLAY_SU_ENABLE;
761 
762 	if (crtc_state->enable_psr2_su_region_et)
763 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
764 
765 	if (crtc_state->req_psr2_sdp_prior_scanline)
766 		panel_replay_config2 |=
767 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
768 
769 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
770 
771 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
772 			   panel_replay_config2);
773 }
774 
_psr_enable_sink(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)775 static void _psr_enable_sink(struct intel_dp *intel_dp,
776 			     const struct intel_crtc_state *crtc_state)
777 {
778 	struct intel_display *display = to_intel_display(intel_dp);
779 	u8 val = 0;
780 
781 	if (crtc_state->has_sel_update) {
782 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
783 	} else {
784 		if (intel_dp->psr.link_standby)
785 			val |= DP_PSR_MAIN_LINK_ACTIVE;
786 
787 		if (DISPLAY_VER(display) >= 8)
788 			val |= DP_PSR_CRC_VERIFICATION;
789 	}
790 
791 	if (crtc_state->req_psr2_sdp_prior_scanline)
792 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
793 
794 	if (crtc_state->enable_psr2_su_region_et)
795 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
796 
797 	if (intel_dp->psr.entry_setup_frames > 0)
798 		val |= DP_PSR_FRAME_CAPTURE;
799 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
800 
801 	val |= DP_PSR_ENABLE;
802 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
803 }
804 
intel_psr_enable_sink(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)805 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
806 				  const struct intel_crtc_state *crtc_state)
807 {
808 	intel_alpm_enable_sink(intel_dp, crtc_state);
809 
810 	crtc_state->has_panel_replay ?
811 		_panel_replay_enable_sink(intel_dp, crtc_state) :
812 		_psr_enable_sink(intel_dp, crtc_state);
813 
814 	if (intel_dp_is_edp(intel_dp))
815 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
816 }
817 
intel_psr_panel_replay_enable_sink(struct intel_dp * intel_dp)818 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
819 {
820 	if (CAN_PANEL_REPLAY(intel_dp))
821 		drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
822 				   DP_PANEL_REPLAY_ENABLE);
823 }
824 
intel_psr1_get_tp_time(struct intel_dp * intel_dp)825 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
826 {
827 	struct intel_display *display = to_intel_display(intel_dp);
828 	struct intel_connector *connector = intel_dp->attached_connector;
829 	u32 val = 0;
830 
831 	if (DISPLAY_VER(display) >= 11)
832 		val |= EDP_PSR_TP4_TIME_0us;
833 
834 	if (display->params.psr_safest_params) {
835 		val |= EDP_PSR_TP1_TIME_2500us;
836 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
837 		goto check_tp3_sel;
838 	}
839 
840 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
841 		val |= EDP_PSR_TP1_TIME_0us;
842 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
843 		val |= EDP_PSR_TP1_TIME_100us;
844 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
845 		val |= EDP_PSR_TP1_TIME_500us;
846 	else
847 		val |= EDP_PSR_TP1_TIME_2500us;
848 
849 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
850 		val |= EDP_PSR_TP2_TP3_TIME_0us;
851 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
852 		val |= EDP_PSR_TP2_TP3_TIME_100us;
853 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
854 		val |= EDP_PSR_TP2_TP3_TIME_500us;
855 	else
856 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
857 
858 	/*
859 	 * WA 0479: hsw,bdw
860 	 * "Do not skip both TP1 and TP2/TP3"
861 	 */
862 	if (DISPLAY_VER(display) < 9 &&
863 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
864 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
865 		val |= EDP_PSR_TP2_TP3_TIME_100us;
866 
867 check_tp3_sel:
868 	if (intel_dp_source_supports_tps3(display) &&
869 	    drm_dp_tps3_supported(intel_dp->dpcd))
870 		val |= EDP_PSR_TP_TP1_TP3;
871 	else
872 		val |= EDP_PSR_TP_TP1_TP2;
873 
874 	return val;
875 }
876 
psr_compute_idle_frames(struct intel_dp * intel_dp)877 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
878 {
879 	struct intel_display *display = to_intel_display(intel_dp);
880 	struct intel_connector *connector = intel_dp->attached_connector;
881 	int idle_frames;
882 
883 	/* Let's use 6 as the minimum to cover all known cases including the
884 	 * off-by-one issue that HW has in some cases.
885 	 */
886 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
887 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
888 
889 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
890 		idle_frames = 0xf;
891 
892 	return idle_frames;
893 }
894 
is_dc5_dc6_blocked(struct intel_dp * intel_dp)895 static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
896 {
897 	struct intel_display *display = to_intel_display(intel_dp);
898 	u32 current_dc_state = intel_display_power_get_current_dc_state(display);
899 	struct drm_vblank_crtc *vblank = &display->drm->vblank[intel_dp->psr.pipe];
900 
901 	return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
902 		current_dc_state != DC_STATE_EN_UPTO_DC6) ||
903 		intel_dp->psr.active_non_psr_pipes ||
904 		READ_ONCE(vblank->enabled);
905 }
906 
hsw_activate_psr1(struct intel_dp * intel_dp)907 static void hsw_activate_psr1(struct intel_dp *intel_dp)
908 {
909 	struct intel_display *display = to_intel_display(intel_dp);
910 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
911 	u32 max_sleep_time = 0x1f;
912 	u32 val = EDP_PSR_ENABLE;
913 
914 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
915 
916 	if (DISPLAY_VER(display) < 20)
917 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
918 
919 	if (display->platform.haswell)
920 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
921 
922 	if (intel_dp->psr.link_standby)
923 		val |= EDP_PSR_LINK_STANDBY;
924 
925 	val |= intel_psr1_get_tp_time(intel_dp);
926 
927 	if (DISPLAY_VER(display) >= 8)
928 		val |= EDP_PSR_CRC_ENABLE;
929 
930 	if (DISPLAY_VER(display) >= 20)
931 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
932 
933 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
934 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
935 
936 	/* Wa_16025596647 */
937 	if ((DISPLAY_VER(display) == 20 ||
938 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
939 	    is_dc5_dc6_blocked(intel_dp))
940 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
941 								       intel_dp->psr.pipe,
942 								       true);
943 }
944 
intel_psr2_get_tp_time(struct intel_dp * intel_dp)945 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
946 {
947 	struct intel_display *display = to_intel_display(intel_dp);
948 	struct intel_connector *connector = intel_dp->attached_connector;
949 	u32 val = 0;
950 
951 	if (display->params.psr_safest_params)
952 		return EDP_PSR2_TP2_TIME_2500us;
953 
954 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
955 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
956 		val |= EDP_PSR2_TP2_TIME_50us;
957 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
958 		val |= EDP_PSR2_TP2_TIME_100us;
959 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
960 		val |= EDP_PSR2_TP2_TIME_500us;
961 	else
962 		val |= EDP_PSR2_TP2_TIME_2500us;
963 
964 	return val;
965 }
966 
psr2_block_count_lines(struct intel_dp * intel_dp)967 static int psr2_block_count_lines(struct intel_dp *intel_dp)
968 {
969 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
970 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
971 }
972 
psr2_block_count(struct intel_dp * intel_dp)973 static int psr2_block_count(struct intel_dp *intel_dp)
974 {
975 	return psr2_block_count_lines(intel_dp) / 4;
976 }
977 
frames_before_su_entry(struct intel_dp * intel_dp)978 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
979 {
980 	u8 frames_before_su_entry;
981 
982 	frames_before_su_entry = max_t(u8,
983 				       intel_dp->psr.sink_sync_latency + 1,
984 				       2);
985 
986 	/* Entry setup frames must be at least 1 less than frames before SU entry */
987 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
988 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
989 
990 	return frames_before_su_entry;
991 }
992 
dg2_activate_panel_replay(struct intel_dp * intel_dp)993 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
994 {
995 	struct intel_display *display = to_intel_display(intel_dp);
996 	struct intel_psr *psr = &intel_dp->psr;
997 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
998 
999 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
1000 		u32 val = psr->su_region_et_enabled ?
1001 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
1002 
1003 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1004 			val |= EDP_PSR2_SU_SDP_SCANLINE;
1005 
1006 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
1007 			       val);
1008 	}
1009 
1010 	intel_de_rmw(display,
1011 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1012 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1013 
1014 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1015 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1016 }
1017 
hsw_activate_psr2(struct intel_dp * intel_dp)1018 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1019 {
1020 	struct intel_display *display = to_intel_display(intel_dp);
1021 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1022 	u32 val = EDP_PSR2_ENABLE;
1023 	u32 psr_val = 0;
1024 	u8 idle_frames;
1025 
1026 	/* Wa_16025596647 */
1027 	if ((DISPLAY_VER(display) == 20 ||
1028 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1029 	    is_dc5_dc6_blocked(intel_dp))
1030 		idle_frames = 0;
1031 	else
1032 		idle_frames = psr_compute_idle_frames(intel_dp);
1033 	val |= EDP_PSR2_IDLE_FRAMES(idle_frames);
1034 
1035 	if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
1036 		val |= EDP_SU_TRACK_ENABLE;
1037 
1038 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1039 		val |= EDP_Y_COORDINATE_ENABLE;
1040 
1041 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1042 
1043 	val |= intel_psr2_get_tp_time(intel_dp);
1044 
1045 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1046 		if (psr2_block_count(intel_dp) > 2)
1047 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1048 		else
1049 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1050 	}
1051 
1052 	/* Wa_22012278275:adl-p */
1053 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1054 		static const u8 map[] = {
1055 			2, /* 5 lines */
1056 			1, /* 6 lines */
1057 			0, /* 7 lines */
1058 			3, /* 8 lines */
1059 			6, /* 9 lines */
1060 			5, /* 10 lines */
1061 			4, /* 11 lines */
1062 			7, /* 12 lines */
1063 		};
1064 		/*
1065 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1066 		 * comments below for more information
1067 		 */
1068 		int tmp;
1069 
1070 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1071 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1072 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1073 
1074 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1075 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1076 	} else if (DISPLAY_VER(display) >= 20) {
1077 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1078 	} else if (DISPLAY_VER(display) >= 12) {
1079 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1080 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1081 	} else if (DISPLAY_VER(display) >= 9) {
1082 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1083 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1084 	}
1085 
1086 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1087 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1088 
1089 	if (DISPLAY_VER(display) >= 20)
1090 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1091 
1092 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1093 		u32 tmp;
1094 
1095 		tmp = intel_de_read(display,
1096 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1097 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1098 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1099 		intel_de_write(display,
1100 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1101 	}
1102 
1103 	if (intel_dp->psr.su_region_et_enabled)
1104 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1105 
1106 	/*
1107 	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec is
1108 	 * recommending keep this bit unset while PSR2 is enabled.
1109 	 */
1110 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1111 
1112 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1113 }
1114 
1115 static bool
transcoder_has_psr2(struct intel_display * display,enum transcoder cpu_transcoder)1116 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1117 {
1118 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1119 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1120 	else if (DISPLAY_VER(display) >= 12)
1121 		return cpu_transcoder == TRANSCODER_A;
1122 	else if (DISPLAY_VER(display) >= 9)
1123 		return cpu_transcoder == TRANSCODER_EDP;
1124 	else
1125 		return false;
1126 }
1127 
intel_get_frame_time_us(const struct intel_crtc_state * crtc_state)1128 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1129 {
1130 	if (!crtc_state->hw.active)
1131 		return 0;
1132 
1133 	return DIV_ROUND_UP(1000 * 1000,
1134 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1135 }
1136 
psr2_program_idle_frames(struct intel_dp * intel_dp,u32 idle_frames)1137 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1138 				     u32 idle_frames)
1139 {
1140 	struct intel_display *display = to_intel_display(intel_dp);
1141 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1142 
1143 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1144 		     EDP_PSR2_IDLE_FRAMES_MASK,
1145 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1146 }
1147 
tgl_psr2_enable_dc3co(struct intel_dp * intel_dp)1148 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1149 {
1150 	struct intel_display *display = to_intel_display(intel_dp);
1151 
1152 	psr2_program_idle_frames(intel_dp, 0);
1153 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1154 }
1155 
tgl_psr2_disable_dc3co(struct intel_dp * intel_dp)1156 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1157 {
1158 	struct intel_display *display = to_intel_display(intel_dp);
1159 
1160 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1161 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1162 }
1163 
tgl_dc3co_disable_work(struct work_struct * work)1164 static void tgl_dc3co_disable_work(struct work_struct *work)
1165 {
1166 	struct intel_dp *intel_dp =
1167 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1168 
1169 	mutex_lock(&intel_dp->psr.lock);
1170 	/* If delayed work is pending, it is not idle */
1171 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1172 		goto unlock;
1173 
1174 	tgl_psr2_disable_dc3co(intel_dp);
1175 unlock:
1176 	mutex_unlock(&intel_dp->psr.lock);
1177 }
1178 
tgl_disallow_dc3co_on_psr2_exit(struct intel_dp * intel_dp)1179 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1180 {
1181 	if (!intel_dp->psr.dc3co_exitline)
1182 		return;
1183 
1184 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1185 	/* Before PSR2 exit disallow dc3co*/
1186 	tgl_psr2_disable_dc3co(intel_dp);
1187 }
1188 
1189 static bool
dc3co_is_pipe_port_compatible(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1190 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1191 			      struct intel_crtc_state *crtc_state)
1192 {
1193 	struct intel_display *display = to_intel_display(intel_dp);
1194 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1195 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1196 	enum port port = dig_port->base.port;
1197 
1198 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1199 		return pipe <= PIPE_B && port <= PORT_B;
1200 	else
1201 		return pipe == PIPE_A && port == PORT_A;
1202 }
1203 
1204 static void
tgl_dc3co_exitline_compute_config(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1205 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1206 				  struct intel_crtc_state *crtc_state)
1207 {
1208 	struct intel_display *display = to_intel_display(intel_dp);
1209 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1210 	struct i915_power_domains *power_domains = &display->power.domains;
1211 	u32 exit_scanlines;
1212 
1213 	/*
1214 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1215 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1216 	 * is applied. B.Specs:49196
1217 	 */
1218 	return;
1219 
1220 	/*
1221 	 * DMC's DC3CO exit mechanism has an issue with Selective Fecth
1222 	 * TODO: when the issue is addressed, this restriction should be removed.
1223 	 */
1224 	if (crtc_state->enable_psr2_sel_fetch)
1225 		return;
1226 
1227 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1228 		return;
1229 
1230 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1231 		return;
1232 
1233 	/* Wa_16011303918:adl-p */
1234 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1235 		return;
1236 
1237 	/*
1238 	 * DC3CO Exit time 200us B.Spec 49196
1239 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1240 	 */
1241 	exit_scanlines =
1242 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1243 
1244 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1245 		return;
1246 
1247 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1248 }
1249 
intel_psr2_sel_fetch_config_valid(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1250 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1251 					      struct intel_crtc_state *crtc_state)
1252 {
1253 	struct intel_display *display = to_intel_display(intel_dp);
1254 
1255 	if (!display->params.enable_psr2_sel_fetch &&
1256 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1257 		drm_dbg_kms(display->drm,
1258 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1259 		return false;
1260 	}
1261 
1262 	if (crtc_state->uapi.async_flip) {
1263 		drm_dbg_kms(display->drm,
1264 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1265 		return false;
1266 	}
1267 
1268 	return crtc_state->enable_psr2_sel_fetch = true;
1269 }
1270 
psr2_granularity_check(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1271 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1272 				   struct intel_crtc_state *crtc_state)
1273 {
1274 	struct intel_display *display = to_intel_display(intel_dp);
1275 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1276 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1277 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1278 	u16 y_granularity = 0;
1279 
1280 	/* PSR2 HW only send full lines so we only need to validate the width */
1281 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1282 		return false;
1283 
1284 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1285 		return false;
1286 
1287 	/* HW tracking is only aligned to 4 lines */
1288 	if (!crtc_state->enable_psr2_sel_fetch)
1289 		return intel_dp->psr.su_y_granularity == 4;
1290 
1291 	/*
1292 	 * adl_p and mtl platforms have 1 line granularity.
1293 	 * For other platforms with SW tracking we can adjust the y coordinates
1294 	 * to match sink requirement if multiple of 4.
1295 	 */
1296 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)
1297 		y_granularity = intel_dp->psr.su_y_granularity;
1298 	else if (intel_dp->psr.su_y_granularity <= 2)
1299 		y_granularity = 4;
1300 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1301 		y_granularity = intel_dp->psr.su_y_granularity;
1302 
1303 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1304 		return false;
1305 
1306 	if (crtc_state->dsc.compression_enable &&
1307 	    vdsc_cfg->slice_height % y_granularity)
1308 		return false;
1309 
1310 	crtc_state->su_y_granularity = y_granularity;
1311 	return true;
1312 }
1313 
_compute_psr2_sdp_prior_scanline_indication(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1314 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1315 							struct intel_crtc_state *crtc_state)
1316 {
1317 	struct intel_display *display = to_intel_display(intel_dp);
1318 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1319 	u32 hblank_total, hblank_ns, req_ns;
1320 
1321 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1322 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1323 
1324 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1325 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1326 
1327 	if ((hblank_ns - req_ns) > 100)
1328 		return true;
1329 
1330 	/* Not supported <13 / Wa_22012279113:adl-p */
1331 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1332 		return false;
1333 
1334 	crtc_state->req_psr2_sdp_prior_scanline = true;
1335 	return true;
1336 }
1337 
intel_psr_entry_setup_frames(struct intel_dp * intel_dp,const struct drm_display_mode * adjusted_mode)1338 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1339 					const struct drm_display_mode *adjusted_mode)
1340 {
1341 	struct intel_display *display = to_intel_display(intel_dp);
1342 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1343 	int entry_setup_frames = 0;
1344 
1345 	if (psr_setup_time < 0) {
1346 		drm_dbg_kms(display->drm,
1347 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1348 			    intel_dp->psr_dpcd[1]);
1349 		return -ETIME;
1350 	}
1351 
1352 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1353 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1354 		if (DISPLAY_VER(display) >= 20) {
1355 			/* setup entry frames can be up to 3 frames */
1356 			entry_setup_frames = 1;
1357 			drm_dbg_kms(display->drm,
1358 				    "PSR setup entry frames %d\n",
1359 				    entry_setup_frames);
1360 		} else {
1361 			drm_dbg_kms(display->drm,
1362 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1363 				    psr_setup_time);
1364 			return -ETIME;
1365 		}
1366 	}
1367 
1368 	return entry_setup_frames;
1369 }
1370 
wake_lines_fit_into_vblank(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,bool aux_less)1371 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1372 				       const struct intel_crtc_state *crtc_state,
1373 				       bool aux_less)
1374 {
1375 	struct intel_display *display = to_intel_display(intel_dp);
1376 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1377 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1378 	int wake_lines;
1379 
1380 	if (aux_less)
1381 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1382 	else
1383 		wake_lines = DISPLAY_VER(display) < 20 ?
1384 			psr2_block_count_lines(intel_dp) :
1385 			intel_dp->alpm_parameters.io_wake_lines;
1386 
1387 	if (crtc_state->req_psr2_sdp_prior_scanline)
1388 		vblank -= 1;
1389 
1390 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1391 	if (vblank < wake_lines)
1392 		return false;
1393 
1394 	return true;
1395 }
1396 
alpm_config_valid(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,bool aux_less)1397 static bool alpm_config_valid(struct intel_dp *intel_dp,
1398 			      const struct intel_crtc_state *crtc_state,
1399 			      bool aux_less)
1400 {
1401 	struct intel_display *display = to_intel_display(intel_dp);
1402 
1403 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1404 		drm_dbg_kms(display->drm,
1405 			    "PSR2/Panel Replay  not enabled, Unable to use long enough wake times\n");
1406 		return false;
1407 	}
1408 
1409 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1410 		drm_dbg_kms(display->drm,
1411 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1412 		return false;
1413 	}
1414 
1415 	return true;
1416 }
1417 
intel_psr2_config_valid(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1418 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1419 				    struct intel_crtc_state *crtc_state)
1420 {
1421 	struct intel_display *display = to_intel_display(intel_dp);
1422 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1423 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1424 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1425 
1426 	if (!intel_dp->psr.sink_psr2_support)
1427 		return false;
1428 
1429 	/* JSL and EHL only supports eDP 1.3 */
1430 	if (display->platform.jasperlake || display->platform.elkhartlake) {
1431 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1432 		return false;
1433 	}
1434 
1435 	/* Wa_16011181250 */
1436 	if (display->platform.rocketlake || display->platform.alderlake_s ||
1437 	    display->platform.dg2) {
1438 		drm_dbg_kms(display->drm,
1439 			    "PSR2 is defeatured for this platform\n");
1440 		return false;
1441 	}
1442 
1443 	if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1444 		drm_dbg_kms(display->drm,
1445 			    "PSR2 not completely functional in this stepping\n");
1446 		return false;
1447 	}
1448 
1449 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1450 		drm_dbg_kms(display->drm,
1451 			    "PSR2 not supported in transcoder %s\n",
1452 			    transcoder_name(crtc_state->cpu_transcoder));
1453 		return false;
1454 	}
1455 
1456 	/*
1457 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1458 	 * resolution requires DSC to be enabled, priority is given to DSC
1459 	 * over PSR2.
1460 	 */
1461 	if (crtc_state->dsc.compression_enable &&
1462 	    (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) {
1463 		drm_dbg_kms(display->drm,
1464 			    "PSR2 cannot be enabled since DSC is enabled\n");
1465 		return false;
1466 	}
1467 
1468 	if (DISPLAY_VER(display) >= 20) {
1469 		psr_max_h = crtc_hdisplay;
1470 		psr_max_v = crtc_vdisplay;
1471 		max_bpp = crtc_state->pipe_bpp;
1472 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1473 		psr_max_h = 5120;
1474 		psr_max_v = 3200;
1475 		max_bpp = 30;
1476 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1477 		psr_max_h = 4096;
1478 		psr_max_v = 2304;
1479 		max_bpp = 24;
1480 	} else if (DISPLAY_VER(display) == 9) {
1481 		psr_max_h = 3640;
1482 		psr_max_v = 2304;
1483 		max_bpp = 24;
1484 	}
1485 
1486 	if (crtc_state->pipe_bpp > max_bpp) {
1487 		drm_dbg_kms(display->drm,
1488 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1489 			    crtc_state->pipe_bpp, max_bpp);
1490 		return false;
1491 	}
1492 
1493 	/* Wa_16011303918:adl-p */
1494 	if (crtc_state->vrr.enable &&
1495 	    display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1496 		drm_dbg_kms(display->drm,
1497 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1498 		return false;
1499 	}
1500 
1501 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1502 		return false;
1503 
1504 	if (!crtc_state->enable_psr2_sel_fetch &&
1505 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1506 		drm_dbg_kms(display->drm,
1507 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1508 			    crtc_hdisplay, crtc_vdisplay,
1509 			    psr_max_h, psr_max_v);
1510 		return false;
1511 	}
1512 
1513 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1514 
1515 	return true;
1516 }
1517 
intel_sel_update_config_valid(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1518 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1519 					  struct intel_crtc_state *crtc_state)
1520 {
1521 	struct intel_display *display = to_intel_display(intel_dp);
1522 
1523 	if (HAS_PSR2_SEL_FETCH(display) &&
1524 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1525 	    !HAS_PSR_HW_TRACKING(display)) {
1526 		drm_dbg_kms(display->drm,
1527 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1528 		goto unsupported;
1529 	}
1530 
1531 	if (!psr2_global_enabled(intel_dp)) {
1532 		drm_dbg_kms(display->drm,
1533 			    "Selective update disabled by flag\n");
1534 		goto unsupported;
1535 	}
1536 
1537 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1538 		goto unsupported;
1539 
1540 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1541 		drm_dbg_kms(display->drm,
1542 			    "Selective update not enabled, SDP indication do not fit in hblank\n");
1543 		goto unsupported;
1544 	}
1545 
1546 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1547 					     !intel_dp->psr.sink_panel_replay_su_support))
1548 		goto unsupported;
1549 
1550 	if (crtc_state->crc_enabled) {
1551 		drm_dbg_kms(display->drm,
1552 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1553 		goto unsupported;
1554 	}
1555 
1556 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1557 		drm_dbg_kms(display->drm,
1558 			    "Selective update not enabled, SU granularity not compatible\n");
1559 		goto unsupported;
1560 	}
1561 
1562 	crtc_state->enable_psr2_su_region_et =
1563 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1564 
1565 	return true;
1566 
1567 unsupported:
1568 	crtc_state->enable_psr2_sel_fetch = false;
1569 	return false;
1570 }
1571 
_psr_compute_config(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1572 static bool _psr_compute_config(struct intel_dp *intel_dp,
1573 				struct intel_crtc_state *crtc_state)
1574 {
1575 	struct intel_display *display = to_intel_display(intel_dp);
1576 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1577 	int entry_setup_frames;
1578 
1579 	if (!CAN_PSR(intel_dp))
1580 		return false;
1581 
1582 	/*
1583 	 * Currently PSR doesn't work reliably with VRR enabled.
1584 	 */
1585 	if (crtc_state->vrr.enable)
1586 		return false;
1587 
1588 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1589 
1590 	if (entry_setup_frames >= 0) {
1591 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1592 	} else {
1593 		drm_dbg_kms(display->drm,
1594 			    "PSR condition failed: PSR setup timing not met\n");
1595 		return false;
1596 	}
1597 
1598 	return true;
1599 }
1600 
1601 static bool
_panel_replay_compute_config(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)1602 _panel_replay_compute_config(struct intel_dp *intel_dp,
1603 			     const struct intel_crtc_state *crtc_state,
1604 			     const struct drm_connector_state *conn_state)
1605 {
1606 	struct intel_display *display = to_intel_display(intel_dp);
1607 	struct intel_connector *connector =
1608 		to_intel_connector(conn_state->connector);
1609 	struct intel_hdcp *hdcp = &connector->hdcp;
1610 
1611 	if (!CAN_PANEL_REPLAY(intel_dp))
1612 		return false;
1613 
1614 	if (!panel_replay_global_enabled(intel_dp)) {
1615 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1616 		return false;
1617 	}
1618 
1619 	if (crtc_state->crc_enabled) {
1620 		drm_dbg_kms(display->drm,
1621 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1622 		return false;
1623 	}
1624 
1625 	if (!intel_dp_is_edp(intel_dp))
1626 		return true;
1627 
1628 	/* Remaining checks are for eDP only */
1629 
1630 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1631 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1632 		return false;
1633 
1634 	/* 128b/132b Panel Replay is not supported on eDP */
1635 	if (intel_dp_is_uhbr(crtc_state)) {
1636 		drm_dbg_kms(display->drm,
1637 			    "Panel Replay is not supported with 128b/132b\n");
1638 		return false;
1639 	}
1640 
1641 	/* HW will not allow Panel Replay on eDP when HDCP enabled */
1642 	if (conn_state->content_protection ==
1643 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1644 	    (conn_state->content_protection ==
1645 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1646 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1647 		drm_dbg_kms(display->drm,
1648 			    "Panel Replay is not supported with HDCP\n");
1649 		return false;
1650 	}
1651 
1652 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1653 		return false;
1654 
1655 	return true;
1656 }
1657 
intel_psr_needs_wa_18037818876(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state)1658 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1659 					   struct intel_crtc_state *crtc_state)
1660 {
1661 	struct intel_display *display = to_intel_display(intel_dp);
1662 
1663 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1664 		!crtc_state->has_sel_update);
1665 }
1666 
intel_psr_compute_config(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state,struct drm_connector_state * conn_state)1667 void intel_psr_compute_config(struct intel_dp *intel_dp,
1668 			      struct intel_crtc_state *crtc_state,
1669 			      struct drm_connector_state *conn_state)
1670 {
1671 	struct intel_display *display = to_intel_display(intel_dp);
1672 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1673 	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->uapi.state);
1674 	struct intel_crtc *crtc;
1675 	u8 active_pipes = 0;
1676 
1677 	if (!psr_global_enabled(intel_dp)) {
1678 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1679 		return;
1680 	}
1681 
1682 	if (intel_dp->psr.sink_not_reliable) {
1683 		drm_dbg_kms(display->drm,
1684 			    "PSR sink implementation is not reliable\n");
1685 		return;
1686 	}
1687 
1688 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1689 		drm_dbg_kms(display->drm,
1690 			    "PSR condition failed: Interlaced mode enabled\n");
1691 		return;
1692 	}
1693 
1694 	/*
1695 	 * FIXME figure out what is wrong with PSR+joiner and
1696 	 * fix it. Presumably something related to the fact that
1697 	 * PSR is a transcoder level feature.
1698 	 */
1699 	if (crtc_state->joiner_pipes) {
1700 		drm_dbg_kms(display->drm,
1701 			    "PSR disabled due to joiner\n");
1702 		return;
1703 	}
1704 
1705 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1706 								    crtc_state,
1707 								    conn_state);
1708 
1709 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1710 		_psr_compute_config(intel_dp, crtc_state);
1711 
1712 	if (!crtc_state->has_psr)
1713 		return;
1714 
1715 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1716 
1717 	/* Wa_18037818876 */
1718 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1719 		crtc_state->has_psr = false;
1720 		drm_dbg_kms(display->drm,
1721 			    "PSR disabled to workaround PSR FSM hang issue\n");
1722 	}
1723 
1724 	/* Rest is for Wa_16025596647 */
1725 	if (DISPLAY_VER(display) != 20 &&
1726 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
1727 		return;
1728 
1729 	/* Not needed by Panel Replay  */
1730 	if (crtc_state->has_panel_replay)
1731 		return;
1732 
1733 	/* We ignore possible secondary PSR/Panel Replay capable eDP */
1734 	for_each_intel_crtc(display->drm, crtc)
1735 		active_pipes |= crtc->active ? BIT(crtc->pipe) : 0;
1736 
1737 	active_pipes = intel_calc_active_pipes(state, active_pipes);
1738 
1739 	crtc_state->active_non_psr_pipes = active_pipes &
1740 		~BIT(to_intel_crtc(crtc_state->uapi.crtc)->pipe);
1741 }
1742 
intel_psr_get_config(struct intel_encoder * encoder,struct intel_crtc_state * pipe_config)1743 void intel_psr_get_config(struct intel_encoder *encoder,
1744 			  struct intel_crtc_state *pipe_config)
1745 {
1746 	struct intel_display *display = to_intel_display(encoder);
1747 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1748 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1749 	struct intel_dp *intel_dp;
1750 	u32 val;
1751 
1752 	if (!dig_port)
1753 		return;
1754 
1755 	intel_dp = &dig_port->dp;
1756 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1757 		return;
1758 
1759 	mutex_lock(&intel_dp->psr.lock);
1760 	if (!intel_dp->psr.enabled)
1761 		goto unlock;
1762 
1763 	if (intel_dp->psr.panel_replay_enabled) {
1764 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1765 	} else {
1766 		/*
1767 		 * Not possible to read EDP_PSR/PSR2_CTL registers as it is
1768 		 * enabled/disabled because of frontbuffer tracking and others.
1769 		 */
1770 		pipe_config->has_psr = true;
1771 	}
1772 
1773 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1774 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1775 
1776 	if (!intel_dp->psr.sel_update_enabled)
1777 		goto unlock;
1778 
1779 	if (HAS_PSR2_SEL_FETCH(display)) {
1780 		val = intel_de_read(display,
1781 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1782 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1783 			pipe_config->enable_psr2_sel_fetch = true;
1784 	}
1785 
1786 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1787 
1788 	if (DISPLAY_VER(display) >= 12) {
1789 		val = intel_de_read(display,
1790 				    TRANS_EXITLINE(display, cpu_transcoder));
1791 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1792 	}
1793 unlock:
1794 	mutex_unlock(&intel_dp->psr.lock);
1795 }
1796 
intel_psr_activate(struct intel_dp * intel_dp)1797 static void intel_psr_activate(struct intel_dp *intel_dp)
1798 {
1799 	struct intel_display *display = to_intel_display(intel_dp);
1800 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1801 
1802 	drm_WARN_ON(display->drm,
1803 		    transcoder_has_psr2(display, cpu_transcoder) &&
1804 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1805 
1806 	drm_WARN_ON(display->drm,
1807 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1808 
1809 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1810 
1811 	lockdep_assert_held(&intel_dp->psr.lock);
1812 
1813 	/* psr1, psr2 and panel-replay are mutually exclusive.*/
1814 	if (intel_dp->psr.panel_replay_enabled)
1815 		dg2_activate_panel_replay(intel_dp);
1816 	else if (intel_dp->psr.sel_update_enabled)
1817 		hsw_activate_psr2(intel_dp);
1818 	else
1819 		hsw_activate_psr1(intel_dp);
1820 
1821 	intel_dp->psr.active = true;
1822 }
1823 
1824 /*
1825  * Wa_16013835468
1826  * Wa_14015648006
1827  */
wm_optimization_wa(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)1828 static void wm_optimization_wa(struct intel_dp *intel_dp,
1829 			       const struct intel_crtc_state *crtc_state)
1830 {
1831 	struct intel_display *display = to_intel_display(intel_dp);
1832 	enum pipe pipe = intel_dp->psr.pipe;
1833 	bool activate = false;
1834 
1835 	/* Wa_14015648006 */
1836 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1837 		activate = true;
1838 
1839 	/* Wa_16013835468 */
1840 	if (DISPLAY_VER(display) == 12 &&
1841 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1842 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1843 		activate = true;
1844 
1845 	if (activate)
1846 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1847 			     0, LATENCY_REPORTING_REMOVED(pipe));
1848 	else
1849 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1850 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1851 }
1852 
intel_psr_enable_source(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)1853 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1854 				    const struct intel_crtc_state *crtc_state)
1855 {
1856 	struct intel_display *display = to_intel_display(intel_dp);
1857 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1858 	u32 mask = 0;
1859 
1860 	/*
1861 	 * Only HSW and BDW have PSR AUX registers that need to be setup.
1862 	 * SKL+ use hardcoded values PSR AUX transactions
1863 	 */
1864 	if (DISPLAY_VER(display) < 9)
1865 		hsw_psr_setup_aux(intel_dp);
1866 
1867 	/*
1868 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD also
1869 	 * mask LPSP to avoid dependency on other drivers that might block
1870 	 * runtime_pm besides preventing  other hw tracking issues now we
1871 	 * can rely on frontbuffer tracking.
1872 	 *
1873 	 * From bspec prior LunarLake:
1874 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1875 	 * panel replay mode.
1876 	 *
1877 	 * From bspec beyod LunarLake:
1878 	 * Panel Replay on DP: No bits are applicable
1879 	 * Panel Replay on eDP: All bits are applicable
1880 	 */
1881 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1882 		mask = EDP_PSR_DEBUG_MASK_HPD;
1883 
1884 	if (intel_dp_is_edp(intel_dp)) {
1885 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1886 
1887 		/*
1888 		 * For some unknown reason on HSW non-ULT (or at least on
1889 		 * Dell Latitude E6540) external displays start to flicker
1890 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1891 		 * higher than should be possible with an external display.
1892 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1893 		 * when external displays are active.
1894 		 */
1895 		if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult)
1896 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1897 
1898 		if (DISPLAY_VER(display) < 20)
1899 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1900 
1901 		/*
1902 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1903 		 * registers in order to keep the CURSURFLIVE tricks working :(
1904 		 */
1905 		if (IS_DISPLAY_VER(display, 9, 10))
1906 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1907 
1908 		/* allow PSR with sprite enabled */
1909 		if (display->platform.haswell)
1910 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1911 	}
1912 
1913 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1914 
1915 	psr_irq_control(intel_dp);
1916 
1917 	/*
1918 	 * TODO: if future platforms supports DC3CO in more than one
1919 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1920 	 */
1921 	if (intel_dp->psr.dc3co_exitline)
1922 		intel_de_rmw(display,
1923 			     TRANS_EXITLINE(display, cpu_transcoder),
1924 			     EXITLINE_MASK,
1925 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1926 
1927 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1928 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1929 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1930 			     IGNORE_PSR2_HW_TRACKING : 0);
1931 
1932 	/*
1933 	 * Wa_16013835468
1934 	 * Wa_14015648006
1935 	 */
1936 	wm_optimization_wa(intel_dp, crtc_state);
1937 
1938 	if (intel_dp->psr.sel_update_enabled) {
1939 		if (DISPLAY_VER(display) == 9)
1940 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1941 				     PSR2_VSC_ENABLE_PROG_HEADER |
1942 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1943 
1944 		/*
1945 		 * Wa_16014451276:adlp,mtl[a0,b0]
1946 		 * All supported adlp panels have 1-based X granularity, this may
1947 		 * cause issues if non-supported panels are used.
1948 		 */
1949 		if (!intel_dp->psr.panel_replay_enabled &&
1950 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1951 		     display->platform.alderlake_p))
1952 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1953 				     0, ADLP_1_BASED_X_GRANULARITY);
1954 
1955 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1956 		if (!intel_dp->psr.panel_replay_enabled &&
1957 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1958 			intel_de_rmw(display,
1959 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1960 				     0,
1961 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1962 		else if (display->platform.alderlake_p)
1963 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1964 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1965 	}
1966 
1967 	/* Wa_16025596647 */
1968 	if ((DISPLAY_VER(display) == 20 ||
1969 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
1970 	    !intel_dp->psr.panel_replay_enabled)
1971 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true);
1972 
1973 	intel_alpm_configure(intel_dp, crtc_state);
1974 }
1975 
psr_interrupt_error_check(struct intel_dp * intel_dp)1976 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1977 {
1978 	struct intel_display *display = to_intel_display(intel_dp);
1979 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1980 	u32 val;
1981 
1982 	if (intel_dp->psr.panel_replay_enabled)
1983 		goto no_err;
1984 
1985 	/*
1986 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1987 	 * will still keep the error set even after the reset done in the
1988 	 * irq_preinstall and irq_uninstall hooks.
1989 	 * And enabling in this situation cause the screen to freeze in the
1990 	 * first time that PSR HW tries to activate so lets keep PSR disabled
1991 	 * to avoid any rendering problems.
1992 	 */
1993 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1994 	val &= psr_irq_psr_error_bit_get(intel_dp);
1995 	if (val) {
1996 		intel_dp->psr.sink_not_reliable = true;
1997 		drm_dbg_kms(display->drm,
1998 			    "PSR interruption error set, not enabling PSR\n");
1999 		return false;
2000 	}
2001 
2002 no_err:
2003 	return true;
2004 }
2005 
intel_psr_enable_locked(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)2006 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
2007 				    const struct intel_crtc_state *crtc_state)
2008 {
2009 	struct intel_display *display = to_intel_display(intel_dp);
2010 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2011 	u32 val;
2012 
2013 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
2014 
2015 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
2016 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
2017 	intel_dp->psr.busy_frontbuffer_bits = 0;
2018 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
2019 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
2020 	/* DC5/DC6 requires at least 6 idle frames */
2021 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
2022 	intel_dp->psr.dc3co_exit_delay = val;
2023 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
2024 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
2025 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
2026 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2027 	intel_dp->psr.req_psr2_sdp_prior_scanline =
2028 		crtc_state->req_psr2_sdp_prior_scanline;
2029 	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
2030 
2031 	if (!psr_interrupt_error_check(intel_dp))
2032 		return;
2033 
2034 	if (intel_dp->psr.panel_replay_enabled)
2035 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
2036 	else
2037 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
2038 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2039 
2040 	/*
2041 	 * Enabling sink PSR/Panel Replay here only for PSR. Panel Replay enable
2042 	 * bit is already written at this point. Sink ALPM is enabled here for
2043 	 * PSR and Panel Replay. See
2044 	 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2045 	 *  - Selective Update
2046 	 *  - Region Early Transport
2047 	 *  - Selective Update Region Scanline Capture
2048 	 *  - VSC_SDP_CRC
2049 	 *  - HPD on different Errors
2050 	 *  - CRC verification
2051 	 * are written for PSR and Panel Replay here.
2052 	 */
2053 	intel_psr_enable_sink(intel_dp, crtc_state);
2054 
2055 	if (intel_dp_is_edp(intel_dp))
2056 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2057 
2058 	intel_psr_enable_source(intel_dp, crtc_state);
2059 	intel_dp->psr.enabled = true;
2060 	intel_dp->psr.pause_counter = 0;
2061 
2062 	/*
2063 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2064 	 * training is complete as we never continue to PSR enable with
2065 	 * untrained link. Link_ok is kept as set until first short pulse
2066 	 * interrupt. This is targeted to workaround panels stating bad link
2067 	 * after PSR is enabled.
2068 	 */
2069 	intel_dp->psr.link_ok = true;
2070 
2071 	intel_psr_activate(intel_dp);
2072 }
2073 
intel_psr_exit(struct intel_dp * intel_dp)2074 static void intel_psr_exit(struct intel_dp *intel_dp)
2075 {
2076 	struct intel_display *display = to_intel_display(intel_dp);
2077 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2078 	u32 val;
2079 
2080 	if (!intel_dp->psr.active) {
2081 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2082 			val = intel_de_read(display,
2083 					    EDP_PSR2_CTL(display, cpu_transcoder));
2084 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2085 		}
2086 
2087 		val = intel_de_read(display,
2088 				    psr_ctl_reg(display, cpu_transcoder));
2089 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2090 
2091 		return;
2092 	}
2093 
2094 	if (intel_dp->psr.panel_replay_enabled) {
2095 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2096 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2097 	} else if (intel_dp->psr.sel_update_enabled) {
2098 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2099 
2100 		val = intel_de_rmw(display,
2101 				   EDP_PSR2_CTL(display, cpu_transcoder),
2102 				   EDP_PSR2_ENABLE, 0);
2103 
2104 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2105 	} else {
2106 		if (DISPLAY_VER(display) == 20 ||
2107 		    IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
2108 			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
2109 								       intel_dp->psr.pipe,
2110 								       false);
2111 
2112 		val = intel_de_rmw(display,
2113 				   psr_ctl_reg(display, cpu_transcoder),
2114 				   EDP_PSR_ENABLE, 0);
2115 
2116 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2117 	}
2118 	intel_dp->psr.active = false;
2119 }
2120 
intel_psr_wait_exit_locked(struct intel_dp * intel_dp)2121 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2122 {
2123 	struct intel_display *display = to_intel_display(intel_dp);
2124 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2125 	i915_reg_t psr_status;
2126 	u32 psr_status_mask;
2127 
2128 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2129 					  intel_dp->psr.panel_replay_enabled)) {
2130 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2131 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2132 	} else {
2133 		psr_status = psr_status_reg(display, cpu_transcoder);
2134 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2135 	}
2136 
2137 	/* Wait till PSR is idle */
2138 	if (intel_de_wait_for_clear(display, psr_status,
2139 				    psr_status_mask, 2000))
2140 		drm_err(display->drm, "Timed out waiting PSR idle state\n");
2141 }
2142 
intel_psr_disable_locked(struct intel_dp * intel_dp)2143 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2144 {
2145 	struct intel_display *display = to_intel_display(intel_dp);
2146 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2147 
2148 	lockdep_assert_held(&intel_dp->psr.lock);
2149 
2150 	if (!intel_dp->psr.enabled)
2151 		return;
2152 
2153 	if (intel_dp->psr.panel_replay_enabled)
2154 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2155 	else
2156 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2157 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2158 
2159 	intel_psr_exit(intel_dp);
2160 	intel_psr_wait_exit_locked(intel_dp);
2161 
2162 	/*
2163 	 * Wa_16013835468
2164 	 * Wa_14015648006
2165 	 */
2166 	if (DISPLAY_VER(display) >= 11)
2167 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2168 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2169 
2170 	if (intel_dp->psr.sel_update_enabled) {
2171 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2172 		if (!intel_dp->psr.panel_replay_enabled &&
2173 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2174 			intel_de_rmw(display,
2175 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2176 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2177 		else if (display->platform.alderlake_p)
2178 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2179 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2180 	}
2181 
2182 	if (intel_dp_is_edp(intel_dp))
2183 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2184 
2185 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp))
2186 		intel_alpm_disable(intel_dp);
2187 
2188 	/* Disable PSR on Sink */
2189 	if (!intel_dp->psr.panel_replay_enabled) {
2190 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2191 
2192 		if (intel_dp->psr.sel_update_enabled)
2193 			drm_dp_dpcd_writeb(&intel_dp->aux,
2194 					   DP_RECEIVER_ALPM_CONFIG, 0);
2195 	}
2196 
2197 	/* Wa_16025596647 */
2198 	if ((DISPLAY_VER(display) == 20 ||
2199 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2200 	    !intel_dp->psr.panel_replay_enabled)
2201 		intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false);
2202 
2203 	intel_dp->psr.enabled = false;
2204 	intel_dp->psr.panel_replay_enabled = false;
2205 	intel_dp->psr.sel_update_enabled = false;
2206 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2207 	intel_dp->psr.su_region_et_enabled = false;
2208 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2209 	intel_dp->psr.active_non_psr_pipes = 0;
2210 }
2211 
2212 /**
2213  * intel_psr_disable - Disable PSR
2214  * @intel_dp: Intel DP
2215  * @old_crtc_state: old CRTC state
2216  *
2217  * This function needs to be called before disabling pipe.
2218  */
intel_psr_disable(struct intel_dp * intel_dp,const struct intel_crtc_state * old_crtc_state)2219 void intel_psr_disable(struct intel_dp *intel_dp,
2220 		       const struct intel_crtc_state *old_crtc_state)
2221 {
2222 	struct intel_display *display = to_intel_display(intel_dp);
2223 
2224 	if (!old_crtc_state->has_psr)
2225 		return;
2226 
2227 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2228 			!CAN_PANEL_REPLAY(intel_dp)))
2229 		return;
2230 
2231 	mutex_lock(&intel_dp->psr.lock);
2232 
2233 	intel_psr_disable_locked(intel_dp);
2234 
2235 	intel_dp->psr.link_ok = false;
2236 
2237 	mutex_unlock(&intel_dp->psr.lock);
2238 	cancel_work_sync(&intel_dp->psr.work);
2239 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2240 }
2241 
2242 /**
2243  * intel_psr_pause - Pause PSR
2244  * @intel_dp: Intel DP
2245  *
2246  * This function need to be called after enabling psr.
2247  */
intel_psr_pause(struct intel_dp * intel_dp)2248 void intel_psr_pause(struct intel_dp *intel_dp)
2249 {
2250 	struct intel_psr *psr = &intel_dp->psr;
2251 
2252 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2253 		return;
2254 
2255 	mutex_lock(&psr->lock);
2256 
2257 	if (!psr->enabled) {
2258 		mutex_unlock(&psr->lock);
2259 		return;
2260 	}
2261 
2262 	if (intel_dp->psr.pause_counter++ == 0) {
2263 		intel_psr_exit(intel_dp);
2264 		intel_psr_wait_exit_locked(intel_dp);
2265 	}
2266 
2267 	mutex_unlock(&psr->lock);
2268 
2269 	cancel_work_sync(&psr->work);
2270 	cancel_delayed_work_sync(&psr->dc3co_work);
2271 }
2272 
2273 /**
2274  * intel_psr_resume - Resume PSR
2275  * @intel_dp: Intel DP
2276  *
2277  * This function need to be called after pausing psr.
2278  */
intel_psr_resume(struct intel_dp * intel_dp)2279 void intel_psr_resume(struct intel_dp *intel_dp)
2280 {
2281 	struct intel_display *display = to_intel_display(intel_dp);
2282 	struct intel_psr *psr = &intel_dp->psr;
2283 
2284 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2285 		return;
2286 
2287 	mutex_lock(&psr->lock);
2288 
2289 	if (!psr->enabled)
2290 		goto out;
2291 
2292 	if (!psr->pause_counter) {
2293 		drm_warn(display->drm, "Unbalanced PSR pause/resume!\n");
2294 		goto out;
2295 	}
2296 
2297 	if (--intel_dp->psr.pause_counter == 0)
2298 		intel_psr_activate(intel_dp);
2299 
2300 out:
2301 	mutex_unlock(&psr->lock);
2302 }
2303 
2304 /**
2305  * intel_psr_needs_vblank_notification - Check if PSR need vblank enable/disable
2306  * notification.
2307  * @crtc_state: CRTC status
2308  *
2309  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2310  * prevent it in case of Panel Replay. Panel Replay switches main link off on
2311  * DC entry. This means vblank interrupts are not fired and is a problem if
2312  * user-space is polling for vblank events. Also Wa_16025596647 needs
2313  * information when vblank is enabled/disabled.
2314  */
intel_psr_needs_vblank_notification(const struct intel_crtc_state * crtc_state)2315 bool intel_psr_needs_vblank_notification(const struct intel_crtc_state *crtc_state)
2316 {
2317 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2318 	struct intel_display *display = to_intel_display(crtc_state);
2319 	struct intel_encoder *encoder;
2320 
2321 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2322 		struct intel_dp *intel_dp;
2323 
2324 		if (!intel_encoder_is_dp(encoder))
2325 			continue;
2326 
2327 		intel_dp = enc_to_intel_dp(encoder);
2328 
2329 		if (!intel_dp_is_edp(intel_dp))
2330 			continue;
2331 
2332 		if (CAN_PANEL_REPLAY(intel_dp))
2333 			return true;
2334 
2335 		if ((DISPLAY_VER(display) == 20 ||
2336 		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
2337 		    CAN_PSR(intel_dp))
2338 			return true;
2339 	}
2340 
2341 	return false;
2342 }
2343 
2344 /**
2345  * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2346  * @dsb: DSB context
2347  * @state: the atomic state
2348  * @crtc: the CRTC
2349  *
2350  * Generate PSR "Frame Change" event.
2351  */
intel_psr_trigger_frame_change_event(struct intel_dsb * dsb,struct intel_atomic_state * state,struct intel_crtc * crtc)2352 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2353 					  struct intel_atomic_state *state,
2354 					  struct intel_crtc *crtc)
2355 {
2356 	const struct intel_crtc_state *crtc_state =
2357 		intel_pre_commit_crtc_state(state, crtc);
2358 	struct intel_display *display = to_intel_display(crtc);
2359 
2360 	if (crtc_state->has_psr)
2361 		intel_de_write_dsb(display, dsb,
2362 				   CURSURFLIVE(display, crtc->pipe), 0);
2363 }
2364 
2365 /**
2366  * intel_psr_min_vblank_delay - Minimum vblank delay needed by PSR
2367  * @crtc_state: the crtc state
2368  *
2369  * Return minimum vblank delay needed by PSR.
2370  */
intel_psr_min_vblank_delay(const struct intel_crtc_state * crtc_state)2371 int intel_psr_min_vblank_delay(const struct intel_crtc_state *crtc_state)
2372 {
2373 	struct intel_display *display = to_intel_display(crtc_state);
2374 
2375 	if (!crtc_state->has_psr)
2376 		return 0;
2377 
2378 	/* Wa_14015401596 */
2379 	if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14))
2380 		return 1;
2381 
2382 	/* Rest is for SRD_STATUS needed on LunarLake and onwards */
2383 	if (DISPLAY_VER(display) < 20)
2384 		return 0;
2385 
2386 	/*
2387 	 * Comment on SRD_STATUS register in Bspec for LunarLake and onwards:
2388 	 *
2389 	 * To deterministically capture the transition of the state machine
2390 	 * going from SRDOFFACK to IDLE, the delayed V. Blank should be at least
2391 	 * one line after the non-delayed V. Blank.
2392 	 *
2393 	 * Legacy TG: TRANS_SET_CONTEXT_LATENCY > 0
2394 	 * VRR TG: TRANS_VRR_CTL[ VRR Guardband ] < (TRANS_VRR_VMAX[ VRR Vmax ]
2395 	 * - TRANS_VTOTAL[ Vertical Active ])
2396 	 *
2397 	 * SRD_STATUS is used only by PSR1 on PantherLake.
2398 	 * SRD_STATUS is used by PSR1 and Panel Replay DP on LunarLake.
2399 	 */
2400 
2401 	if (DISPLAY_VER(display) >= 30 && (crtc_state->has_panel_replay ||
2402 					   crtc_state->has_sel_update))
2403 		return 0;
2404 	else if (DISPLAY_VER(display) < 30 && (crtc_state->has_sel_update ||
2405 					       intel_crtc_has_type(crtc_state,
2406 								   INTEL_OUTPUT_EDP)))
2407 		return 0;
2408 	else
2409 		return 1;
2410 }
2411 
man_trk_ctl_enable_bit_get(struct intel_display * display)2412 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2413 {
2414 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 :
2415 		PSR2_MAN_TRK_CTL_ENABLE;
2416 }
2417 
man_trk_ctl_single_full_frame_bit_get(struct intel_display * display)2418 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2419 {
2420 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2421 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2422 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2423 }
2424 
man_trk_ctl_partial_frame_bit_get(struct intel_display * display)2425 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2426 {
2427 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2428 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2429 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2430 }
2431 
man_trk_ctl_continuos_full_frame(struct intel_display * display)2432 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2433 {
2434 	return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ?
2435 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2436 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2437 }
2438 
intel_psr_force_update(struct intel_dp * intel_dp)2439 static void intel_psr_force_update(struct intel_dp *intel_dp)
2440 {
2441 	struct intel_display *display = to_intel_display(intel_dp);
2442 
2443 	/*
2444 	 * Display WA #0884: skl+
2445 	 * This documented WA for bxt can be safely applied
2446 	 * broadly so we can force HW tracking to exit PSR
2447 	 * instead of disabling and re-enabling.
2448 	 * Workaround tells us to write 0 to CUR_SURFLIVE_A,
2449 	 * but it makes more sense write to the current active
2450 	 * pipe.
2451 	 *
2452 	 * This workaround do not exist for platforms with display 10 or newer
2453 	 * but testing proved that it works for up display 13, for newer
2454 	 * than that testing will be needed.
2455 	 */
2456 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2457 }
2458 
intel_psr2_program_trans_man_trk_ctl(struct intel_dsb * dsb,const struct intel_crtc_state * crtc_state)2459 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2460 					  const struct intel_crtc_state *crtc_state)
2461 {
2462 	struct intel_display *display = to_intel_display(crtc_state);
2463 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2464 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2465 	struct intel_encoder *encoder;
2466 
2467 	if (!crtc_state->enable_psr2_sel_fetch)
2468 		return;
2469 
2470 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2471 					     crtc_state->uapi.encoder_mask) {
2472 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2473 
2474 		if (!dsb)
2475 			lockdep_assert_held(&intel_dp->psr.lock);
2476 
2477 		if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2478 			return;
2479 		break;
2480 	}
2481 
2482 	intel_de_write_dsb(display, dsb,
2483 			   PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2484 			   crtc_state->psr2_man_track_ctl);
2485 
2486 	if (!crtc_state->enable_psr2_su_region_et)
2487 		return;
2488 
2489 	intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2490 			   crtc_state->pipe_srcsz_early_tpt);
2491 }
2492 
psr2_man_trk_ctl_calc(struct intel_crtc_state * crtc_state,bool full_update)2493 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2494 				  bool full_update)
2495 {
2496 	struct intel_display *display = to_intel_display(crtc_state);
2497 	u32 val = man_trk_ctl_enable_bit_get(display);
2498 
2499 	/* SF partial frame enable has to be set even on full update */
2500 	val |= man_trk_ctl_partial_frame_bit_get(display);
2501 
2502 	if (full_update) {
2503 		val |= man_trk_ctl_continuos_full_frame(display);
2504 		goto exit;
2505 	}
2506 
2507 	if (crtc_state->psr2_su_area.y1 == -1)
2508 		goto exit;
2509 
2510 	if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) {
2511 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2512 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2513 	} else {
2514 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2515 			    crtc_state->psr2_su_area.y1 % 4 ||
2516 			    crtc_state->psr2_su_area.y2 % 4);
2517 
2518 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2519 			crtc_state->psr2_su_area.y1 / 4 + 1);
2520 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2521 			crtc_state->psr2_su_area.y2 / 4 + 1);
2522 	}
2523 exit:
2524 	crtc_state->psr2_man_track_ctl = val;
2525 }
2526 
psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state * crtc_state,bool full_update)2527 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2528 					  bool full_update)
2529 {
2530 	int width, height;
2531 
2532 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2533 		return 0;
2534 
2535 	width = drm_rect_width(&crtc_state->psr2_su_area);
2536 	height = drm_rect_height(&crtc_state->psr2_su_area);
2537 
2538 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2539 }
2540 
clip_area_update(struct drm_rect * overlap_damage_area,struct drm_rect * damage_area,struct drm_rect * pipe_src)2541 static void clip_area_update(struct drm_rect *overlap_damage_area,
2542 			     struct drm_rect *damage_area,
2543 			     struct drm_rect *pipe_src)
2544 {
2545 	if (!drm_rect_intersect(damage_area, pipe_src))
2546 		return;
2547 
2548 	if (overlap_damage_area->y1 == -1) {
2549 		overlap_damage_area->y1 = damage_area->y1;
2550 		overlap_damage_area->y2 = damage_area->y2;
2551 		return;
2552 	}
2553 
2554 	if (damage_area->y1 < overlap_damage_area->y1)
2555 		overlap_damage_area->y1 = damage_area->y1;
2556 
2557 	if (damage_area->y2 > overlap_damage_area->y2)
2558 		overlap_damage_area->y2 = damage_area->y2;
2559 }
2560 
intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state * crtc_state)2561 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2562 {
2563 	struct intel_display *display = to_intel_display(crtc_state);
2564 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2565 	u16 y_alignment;
2566 
2567 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2568 	if (crtc_state->dsc.compression_enable &&
2569 	    (display->platform.alderlake_p || DISPLAY_VER(display) >= 14))
2570 		y_alignment = vdsc_cfg->slice_height;
2571 	else
2572 		y_alignment = crtc_state->su_y_granularity;
2573 
2574 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2575 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2576 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2577 						y_alignment) + 1) * y_alignment;
2578 }
2579 
2580 /*
2581  * When early transport is in use we need to extend SU area to cover
2582  * cursor fully when cursor is in SU area.
2583  */
2584 static void
intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state * state,struct intel_crtc * crtc,bool * cursor_in_su_area)2585 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2586 				  struct intel_crtc *crtc,
2587 				  bool *cursor_in_su_area)
2588 {
2589 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2590 	struct intel_plane_state *new_plane_state;
2591 	struct intel_plane *plane;
2592 	int i;
2593 
2594 	if (!crtc_state->enable_psr2_su_region_et)
2595 		return;
2596 
2597 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2598 		struct drm_rect inter;
2599 
2600 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2601 			continue;
2602 
2603 		if (plane->id != PLANE_CURSOR)
2604 			continue;
2605 
2606 		if (!new_plane_state->uapi.visible)
2607 			continue;
2608 
2609 		inter = crtc_state->psr2_su_area;
2610 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2611 			continue;
2612 
2613 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2614 				 &crtc_state->pipe_src);
2615 		*cursor_in_su_area = true;
2616 	}
2617 }
2618 
2619 /*
2620  * TODO: Not clear how to handle planes with negative position,
2621  * also planes are not updated if they have a negative X
2622  * position so for now doing a full update in this cases
2623  *
2624  * Plane scaling and rotation is not supported by selective fetch and both
2625  * properties can change without a modeset, so need to be check at every
2626  * atomic commit.
2627  */
psr2_sel_fetch_plane_state_supported(const struct intel_plane_state * plane_state)2628 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2629 {
2630 	if (plane_state->uapi.dst.y1 < 0 ||
2631 	    plane_state->uapi.dst.x1 < 0 ||
2632 	    plane_state->scaler_id >= 0 ||
2633 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2634 		return false;
2635 
2636 	return true;
2637 }
2638 
2639 /*
2640  * Check for pipe properties that is not supported by selective fetch.
2641  *
2642  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2643  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2644  * enabled and going to the full update path.
2645  */
psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state * crtc_state)2646 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2647 {
2648 	if (crtc_state->scaler_state.scaler_id >= 0)
2649 		return false;
2650 
2651 	return true;
2652 }
2653 
2654 /* Wa 14019834836 */
intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state * crtc_state)2655 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2656 {
2657 	struct intel_display *display = to_intel_display(crtc_state);
2658 	struct intel_encoder *encoder;
2659 	int hactive_limit;
2660 
2661 	if (crtc_state->psr2_su_area.y1 != 0 ||
2662 	    crtc_state->psr2_su_area.y2 != 0)
2663 		return;
2664 
2665 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2666 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2667 	else
2668 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2669 
2670 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2671 		return;
2672 
2673 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2674 					     crtc_state->uapi.encoder_mask) {
2675 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2676 
2677 		if (!intel_dp_is_edp(intel_dp) &&
2678 		    intel_dp->psr.panel_replay_enabled &&
2679 		    intel_dp->psr.sel_update_enabled) {
2680 			crtc_state->psr2_su_area.y2++;
2681 			return;
2682 		}
2683 	}
2684 }
2685 
2686 static void
intel_psr_apply_su_area_workarounds(struct intel_crtc_state * crtc_state)2687 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2688 {
2689 	struct intel_display *display = to_intel_display(crtc_state);
2690 
2691 	/* Wa_14014971492 */
2692 	if (!crtc_state->has_panel_replay &&
2693 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2694 	      display->platform.alderlake_p || display->platform.tigerlake)) &&
2695 	    crtc_state->splitter.enable)
2696 		crtc_state->psr2_su_area.y1 = 0;
2697 
2698 	/* Wa 14019834836 */
2699 	if (DISPLAY_VER(display) == 30)
2700 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2701 }
2702 
intel_psr2_sel_fetch_update(struct intel_atomic_state * state,struct intel_crtc * crtc)2703 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2704 				struct intel_crtc *crtc)
2705 {
2706 	struct intel_display *display = to_intel_display(state);
2707 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2708 	struct intel_plane_state *new_plane_state, *old_plane_state;
2709 	struct intel_plane *plane;
2710 	bool full_update = false, cursor_in_su_area = false;
2711 	int i, ret;
2712 
2713 	if (!crtc_state->enable_psr2_sel_fetch)
2714 		return 0;
2715 
2716 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2717 		full_update = true;
2718 		goto skip_sel_fetch_set_loop;
2719 	}
2720 
2721 	crtc_state->psr2_su_area.x1 = 0;
2722 	crtc_state->psr2_su_area.y1 = -1;
2723 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2724 	crtc_state->psr2_su_area.y2 = -1;
2725 
2726 	/*
2727 	 * Calculate minimal selective fetch area of each plane and calculate
2728 	 * the pipe damaged area.
2729 	 * In the next loop the plane selective fetch area will actually be set
2730 	 * using whole pipe damaged area.
2731 	 */
2732 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2733 					     new_plane_state, i) {
2734 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2735 						      .x2 = INT_MAX };
2736 
2737 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2738 			continue;
2739 
2740 		if (!new_plane_state->uapi.visible &&
2741 		    !old_plane_state->uapi.visible)
2742 			continue;
2743 
2744 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2745 			full_update = true;
2746 			break;
2747 		}
2748 
2749 		/*
2750 		 * If visibility or plane moved, mark the whole plane area as
2751 		 * damaged as it needs to be complete redraw in the new and old
2752 		 * position.
2753 		 */
2754 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2755 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2756 				     &old_plane_state->uapi.dst)) {
2757 			if (old_plane_state->uapi.visible) {
2758 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2759 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2760 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2761 						 &crtc_state->pipe_src);
2762 			}
2763 
2764 			if (new_plane_state->uapi.visible) {
2765 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2766 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2767 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2768 						 &crtc_state->pipe_src);
2769 			}
2770 			continue;
2771 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2772 			/* If alpha changed mark the whole plane area as damaged */
2773 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2774 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2775 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2776 					 &crtc_state->pipe_src);
2777 			continue;
2778 		}
2779 
2780 		src = drm_plane_state_src(&new_plane_state->uapi);
2781 		drm_rect_fp_to_int(&src, &src);
2782 
2783 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2784 						     &new_plane_state->uapi, &damaged_area))
2785 			continue;
2786 
2787 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2788 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2789 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2790 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2791 
2792 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2793 	}
2794 
2795 	/*
2796 	 * TODO: For now we are just using full update in case
2797 	 * selective fetch area calculation fails. To optimize this we
2798 	 * should identify cases where this happens and fix the area
2799 	 * calculation for those.
2800 	 */
2801 	if (crtc_state->psr2_su_area.y1 == -1) {
2802 		drm_info_once(display->drm,
2803 			      "Selective fetch area calculation failed in pipe %c\n",
2804 			      pipe_name(crtc->pipe));
2805 		full_update = true;
2806 	}
2807 
2808 	if (full_update)
2809 		goto skip_sel_fetch_set_loop;
2810 
2811 	intel_psr_apply_su_area_workarounds(crtc_state);
2812 
2813 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2814 	if (ret)
2815 		return ret;
2816 
2817 	/*
2818 	 * Adjust su area to cover cursor fully as necessary (early
2819 	 * transport). This needs to be done after
2820 	 * drm_atomic_add_affected_planes to ensure visible cursor is added into
2821 	 * affected planes even when cursor is not updated by itself.
2822 	 */
2823 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2824 
2825 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2826 
2827 	/*
2828 	 * Now that we have the pipe damaged area check if it intersect with
2829 	 * every plane, if it does set the plane selective fetch area.
2830 	 */
2831 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2832 					     new_plane_state, i) {
2833 		struct drm_rect *sel_fetch_area, inter;
2834 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2835 
2836 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2837 		    !new_plane_state->uapi.visible)
2838 			continue;
2839 
2840 		inter = crtc_state->psr2_su_area;
2841 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2842 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2843 			sel_fetch_area->y1 = -1;
2844 			sel_fetch_area->y2 = -1;
2845 			/*
2846 			 * if plane sel fetch was previously enabled ->
2847 			 * disable it
2848 			 */
2849 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2850 				crtc_state->update_planes |= BIT(plane->id);
2851 
2852 			continue;
2853 		}
2854 
2855 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2856 			full_update = true;
2857 			break;
2858 		}
2859 
2860 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2861 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2862 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2863 		crtc_state->update_planes |= BIT(plane->id);
2864 
2865 		/*
2866 		 * Sel_fetch_area is calculated for UV plane. Use
2867 		 * same area for Y plane as well.
2868 		 */
2869 		if (linked) {
2870 			struct intel_plane_state *linked_new_plane_state;
2871 			struct drm_rect *linked_sel_fetch_area;
2872 
2873 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2874 			if (IS_ERR(linked_new_plane_state))
2875 				return PTR_ERR(linked_new_plane_state);
2876 
2877 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2878 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2879 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2880 			crtc_state->update_planes |= BIT(linked->id);
2881 		}
2882 	}
2883 
2884 skip_sel_fetch_set_loop:
2885 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2886 	crtc_state->pipe_srcsz_early_tpt =
2887 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2888 	return 0;
2889 }
2890 
intel_psr2_panic_force_full_update(struct intel_display * display,struct intel_crtc_state * crtc_state)2891 void intel_psr2_panic_force_full_update(struct intel_display *display,
2892 					struct intel_crtc_state *crtc_state)
2893 {
2894 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2895 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2896 	u32 val = man_trk_ctl_enable_bit_get(display);
2897 
2898 	/* SF partial frame enable has to be set even on full update */
2899 	val |= man_trk_ctl_partial_frame_bit_get(display);
2900 	val |= man_trk_ctl_continuos_full_frame(display);
2901 
2902 	/* Directly write the register */
2903 	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);
2904 
2905 	if (!crtc_state->enable_psr2_su_region_et)
2906 		return;
2907 
2908 	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
2909 }
2910 
intel_psr_pre_plane_update(struct intel_atomic_state * state,struct intel_crtc * crtc)2911 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2912 				struct intel_crtc *crtc)
2913 {
2914 	struct intel_display *display = to_intel_display(state);
2915 	const struct intel_crtc_state *old_crtc_state =
2916 		intel_atomic_get_old_crtc_state(state, crtc);
2917 	const struct intel_crtc_state *new_crtc_state =
2918 		intel_atomic_get_new_crtc_state(state, crtc);
2919 	struct intel_encoder *encoder;
2920 
2921 	if (!HAS_PSR(display))
2922 		return;
2923 
2924 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2925 					     old_crtc_state->uapi.encoder_mask) {
2926 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2927 		struct intel_psr *psr = &intel_dp->psr;
2928 
2929 		mutex_lock(&psr->lock);
2930 
2931 		if (psr->enabled) {
2932 			/*
2933 			 * Reasons to disable:
2934 			 * - PSR disabled in new state
2935 			 * - All planes will go inactive
2936 			 * - Changing between PSR versions
2937 			 * - Region Early Transport changing
2938 			 * - Display WA #1136: skl, bxt
2939 			 */
2940 			if (intel_crtc_needs_modeset(new_crtc_state) ||
2941 			    !new_crtc_state->has_psr ||
2942 			    !new_crtc_state->active_planes ||
2943 			    new_crtc_state->has_sel_update != psr->sel_update_enabled ||
2944 			    new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
2945 			    new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
2946 			    (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled))
2947 				intel_psr_disable_locked(intel_dp);
2948 			else if (new_crtc_state->wm_level_disabled)
2949 				/* Wa_14015648006 */
2950 				wm_optimization_wa(intel_dp, new_crtc_state);
2951 		}
2952 
2953 		mutex_unlock(&psr->lock);
2954 	}
2955 }
2956 
intel_psr_post_plane_update(struct intel_atomic_state * state,struct intel_crtc * crtc)2957 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2958 				 struct intel_crtc *crtc)
2959 {
2960 	struct intel_display *display = to_intel_display(state);
2961 	const struct intel_crtc_state *crtc_state =
2962 		intel_atomic_get_new_crtc_state(state, crtc);
2963 	struct intel_encoder *encoder;
2964 
2965 	if (!crtc_state->has_psr)
2966 		return;
2967 
2968 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2969 					     crtc_state->uapi.encoder_mask) {
2970 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2971 		struct intel_psr *psr = &intel_dp->psr;
2972 		bool keep_disabled = false;
2973 
2974 		mutex_lock(&psr->lock);
2975 
2976 		drm_WARN_ON(display->drm,
2977 			    psr->enabled && !crtc_state->active_planes);
2978 
2979 		keep_disabled |= psr->sink_not_reliable;
2980 		keep_disabled |= !crtc_state->active_planes;
2981 
2982 		/* Display WA #1136: skl, bxt */
2983 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2984 			crtc_state->wm_level_disabled;
2985 
2986 		if (!psr->enabled && !keep_disabled)
2987 			intel_psr_enable_locked(intel_dp, crtc_state);
2988 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2989 			/* Wa_14015648006 */
2990 			wm_optimization_wa(intel_dp, crtc_state);
2991 
2992 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2993 		if (crtc_state->crc_enabled && psr->enabled)
2994 			intel_psr_force_update(intel_dp);
2995 
2996 		/*
2997 		 * Clear possible busy bits in case we have
2998 		 * invalidate -> flip -> flush sequence.
2999 		 */
3000 		intel_dp->psr.busy_frontbuffer_bits = 0;
3001 
3002 		mutex_unlock(&psr->lock);
3003 	}
3004 }
3005 
_psr2_ready_for_pipe_update_locked(struct intel_dp * intel_dp)3006 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
3007 {
3008 	struct intel_display *display = to_intel_display(intel_dp);
3009 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3010 
3011 	/*
3012 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
3013 	 * As all higher states has bit 4 of PSR2 state set we can just wait for
3014 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
3015 	 */
3016 	return intel_de_wait_for_clear(display,
3017 				       EDP_PSR2_STATUS(display, cpu_transcoder),
3018 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
3019 }
3020 
_psr1_ready_for_pipe_update_locked(struct intel_dp * intel_dp)3021 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
3022 {
3023 	struct intel_display *display = to_intel_display(intel_dp);
3024 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3025 
3026 	/*
3027 	 * From bspec: Panel Self Refresh (BDW+)
3028 	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
3029 	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
3030 	 * defensive enough to cover everything.
3031 	 */
3032 	return intel_de_wait_for_clear(display,
3033 				       psr_status_reg(display, cpu_transcoder),
3034 				       EDP_PSR_STATUS_STATE_MASK, 50);
3035 }
3036 
3037 /**
3038  * intel_psr_wait_for_idle_locked - wait for PSR be ready for a pipe update
3039  * @new_crtc_state: new CRTC state
3040  *
3041  * This function is expected to be called from pipe_update_start() where it is
3042  * not expected to race with PSR enable or disable.
3043  */
intel_psr_wait_for_idle_locked(const struct intel_crtc_state * new_crtc_state)3044 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
3045 {
3046 	struct intel_display *display = to_intel_display(new_crtc_state);
3047 	struct intel_encoder *encoder;
3048 
3049 	if (!new_crtc_state->has_psr)
3050 		return;
3051 
3052 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3053 					     new_crtc_state->uapi.encoder_mask) {
3054 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3055 		int ret;
3056 
3057 		lockdep_assert_held(&intel_dp->psr.lock);
3058 
3059 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3060 			continue;
3061 
3062 		if (intel_dp->psr.sel_update_enabled)
3063 			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
3064 		else
3065 			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
3066 
3067 		if (ret)
3068 			drm_err(display->drm,
3069 				"PSR wait timed out, atomic update may fail\n");
3070 	}
3071 }
3072 
__psr_wait_for_idle_locked(struct intel_dp * intel_dp)3073 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
3074 {
3075 	struct intel_display *display = to_intel_display(intel_dp);
3076 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3077 	i915_reg_t reg;
3078 	u32 mask;
3079 	int err;
3080 
3081 	if (!intel_dp->psr.enabled)
3082 		return false;
3083 
3084 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3085 					  intel_dp->psr.panel_replay_enabled)) {
3086 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
3087 		mask = EDP_PSR2_STATUS_STATE_MASK;
3088 	} else {
3089 		reg = psr_status_reg(display, cpu_transcoder);
3090 		mask = EDP_PSR_STATUS_STATE_MASK;
3091 	}
3092 
3093 	mutex_unlock(&intel_dp->psr.lock);
3094 
3095 	err = intel_de_wait_for_clear(display, reg, mask, 50);
3096 	if (err)
3097 		drm_err(display->drm,
3098 			"Timed out waiting for PSR Idle for re-enable\n");
3099 
3100 	/* After the unlocked wait, verify that PSR is still wanted! */
3101 	mutex_lock(&intel_dp->psr.lock);
3102 	return err == 0 && intel_dp->psr.enabled;
3103 }
3104 
intel_psr_fastset_force(struct intel_display * display)3105 static int intel_psr_fastset_force(struct intel_display *display)
3106 {
3107 	struct drm_connector_list_iter conn_iter;
3108 	struct drm_modeset_acquire_ctx ctx;
3109 	struct drm_atomic_state *state;
3110 	struct drm_connector *conn;
3111 	int err = 0;
3112 
3113 	state = drm_atomic_state_alloc(display->drm);
3114 	if (!state)
3115 		return -ENOMEM;
3116 
3117 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
3118 
3119 	state->acquire_ctx = &ctx;
3120 	to_intel_atomic_state(state)->internal = true;
3121 
3122 retry:
3123 	drm_connector_list_iter_begin(display->drm, &conn_iter);
3124 	drm_for_each_connector_iter(conn, &conn_iter) {
3125 		struct drm_connector_state *conn_state;
3126 		struct drm_crtc_state *crtc_state;
3127 
3128 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3129 			continue;
3130 
3131 		conn_state = drm_atomic_get_connector_state(state, conn);
3132 		if (IS_ERR(conn_state)) {
3133 			err = PTR_ERR(conn_state);
3134 			break;
3135 		}
3136 
3137 		if (!conn_state->crtc)
3138 			continue;
3139 
3140 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3141 		if (IS_ERR(crtc_state)) {
3142 			err = PTR_ERR(crtc_state);
3143 			break;
3144 		}
3145 
3146 		/* Mark mode as changed to trigger a pipe->update() */
3147 		crtc_state->mode_changed = true;
3148 	}
3149 	drm_connector_list_iter_end(&conn_iter);
3150 
3151 	if (err == 0)
3152 		err = drm_atomic_commit(state);
3153 
3154 	if (err == -EDEADLK) {
3155 		drm_atomic_state_clear(state);
3156 		err = drm_modeset_backoff(&ctx);
3157 		if (!err)
3158 			goto retry;
3159 	}
3160 
3161 	drm_modeset_drop_locks(&ctx);
3162 	drm_modeset_acquire_fini(&ctx);
3163 	drm_atomic_state_put(state);
3164 
3165 	return err;
3166 }
3167 
intel_psr_debug_set(struct intel_dp * intel_dp,u64 val)3168 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3169 {
3170 	struct intel_display *display = to_intel_display(intel_dp);
3171 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3172 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3173 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3174 	u32 old_mode, old_disable_bits;
3175 	int ret;
3176 
3177 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3178 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3179 		    I915_PSR_DEBUG_MODE_MASK) ||
3180 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3181 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3182 		return -EINVAL;
3183 	}
3184 
3185 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3186 	if (ret)
3187 		return ret;
3188 
3189 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3190 	old_disable_bits = intel_dp->psr.debug &
3191 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3192 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3193 
3194 	intel_dp->psr.debug = val;
3195 
3196 	/*
3197 	 * Do it right away if it's already enabled, otherwise it will be done
3198 	 * when enabling the source.
3199 	 */
3200 	if (intel_dp->psr.enabled)
3201 		psr_irq_control(intel_dp);
3202 
3203 	mutex_unlock(&intel_dp->psr.lock);
3204 
3205 	if (old_mode != mode || old_disable_bits != disable_bits)
3206 		ret = intel_psr_fastset_force(display);
3207 
3208 	return ret;
3209 }
3210 
intel_psr_handle_irq(struct intel_dp * intel_dp)3211 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3212 {
3213 	struct intel_psr *psr = &intel_dp->psr;
3214 
3215 	intel_psr_disable_locked(intel_dp);
3216 	psr->sink_not_reliable = true;
3217 	/* let's make sure that sink is awaken */
3218 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3219 }
3220 
intel_psr_work(struct work_struct * work)3221 static void intel_psr_work(struct work_struct *work)
3222 {
3223 	struct intel_dp *intel_dp =
3224 		container_of(work, typeof(*intel_dp), psr.work);
3225 
3226 	mutex_lock(&intel_dp->psr.lock);
3227 
3228 	if (!intel_dp->psr.enabled)
3229 		goto unlock;
3230 
3231 	if (READ_ONCE(intel_dp->psr.irq_aux_error))
3232 		intel_psr_handle_irq(intel_dp);
3233 
3234 	/*
3235 	 * We have to make sure PSR is ready for re-enable
3236 	 * otherwise it keeps disabled until next full enable/disable cycle.
3237 	 * PSR might take some time to get fully disabled
3238 	 * and be ready for re-enable.
3239 	 */
3240 	if (!__psr_wait_for_idle_locked(intel_dp))
3241 		goto unlock;
3242 
3243 	/*
3244 	 * The delayed work can race with an invalidate hence we need to
3245 	 * recheck. Since psr_flush first clears this and then reschedules we
3246 	 * won't ever miss a flush when bailing out here.
3247 	 */
3248 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3249 		goto unlock;
3250 
3251 	intel_psr_activate(intel_dp);
3252 unlock:
3253 	mutex_unlock(&intel_dp->psr.lock);
3254 }
3255 
intel_psr_configure_full_frame_update(struct intel_dp * intel_dp)3256 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3257 {
3258 	struct intel_display *display = to_intel_display(intel_dp);
3259 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3260 
3261 	if (!intel_dp->psr.psr2_sel_fetch_enabled)
3262 		return;
3263 
3264 	if (DISPLAY_VER(display) >= 20)
3265 		intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3266 			       LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3267 	else
3268 		intel_de_write(display,
3269 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3270 			       man_trk_ctl_enable_bit_get(display) |
3271 			       man_trk_ctl_partial_frame_bit_get(display) |
3272 			       man_trk_ctl_single_full_frame_bit_get(display) |
3273 			       man_trk_ctl_continuos_full_frame(display));
3274 }
3275 
_psr_invalidate_handle(struct intel_dp * intel_dp)3276 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3277 {
3278 	struct intel_display *display = to_intel_display(intel_dp);
3279 
3280 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3281 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3282 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3283 			intel_psr_configure_full_frame_update(intel_dp);
3284 		}
3285 
3286 		intel_psr_force_update(intel_dp);
3287 	} else {
3288 		intel_psr_exit(intel_dp);
3289 	}
3290 }
3291 
3292 /**
3293  * intel_psr_invalidate - Invalidate PSR
3294  * @display: display device
3295  * @frontbuffer_bits: frontbuffer plane tracking bits
3296  * @origin: which operation caused the invalidate
3297  *
3298  * Since the hardware frontbuffer tracking has gaps we need to integrate
3299  * with the software frontbuffer tracking. This function gets called every
3300  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3301  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3302  *
3303  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits."
3304  */
intel_psr_invalidate(struct intel_display * display,unsigned frontbuffer_bits,enum fb_op_origin origin)3305 void intel_psr_invalidate(struct intel_display *display,
3306 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3307 {
3308 	struct intel_encoder *encoder;
3309 
3310 	if (origin == ORIGIN_FLIP)
3311 		return;
3312 
3313 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3314 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3315 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3316 
3317 		mutex_lock(&intel_dp->psr.lock);
3318 		if (!intel_dp->psr.enabled) {
3319 			mutex_unlock(&intel_dp->psr.lock);
3320 			continue;
3321 		}
3322 
3323 		pipe_frontbuffer_bits &=
3324 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3325 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3326 
3327 		if (pipe_frontbuffer_bits)
3328 			_psr_invalidate_handle(intel_dp);
3329 
3330 		mutex_unlock(&intel_dp->psr.lock);
3331 	}
3332 }
3333 /*
3334  * When we will be completely rely on PSR2 S/W tracking in future,
3335  * intel_psr_flush() will invalidate and flush the PSR for ORIGIN_FLIP
3336  * event also therefore tgl_dc3co_flush_locked() require to be changed
3337  * accordingly in future.
3338  */
3339 static void
tgl_dc3co_flush_locked(struct intel_dp * intel_dp,unsigned int frontbuffer_bits,enum fb_op_origin origin)3340 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3341 		       enum fb_op_origin origin)
3342 {
3343 	struct intel_display *display = to_intel_display(intel_dp);
3344 
3345 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3346 	    !intel_dp->psr.active)
3347 		return;
3348 
3349 	/*
3350 	 * At every frontbuffer flush flip event modified delay of delayed work,
3351 	 * when delayed work schedules that means display has been idle.
3352 	 */
3353 	if (!(frontbuffer_bits &
3354 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3355 		return;
3356 
3357 	tgl_psr2_enable_dc3co(intel_dp);
3358 	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
3359 			 intel_dp->psr.dc3co_exit_delay);
3360 }
3361 
_psr_flush_handle(struct intel_dp * intel_dp)3362 static void _psr_flush_handle(struct intel_dp *intel_dp)
3363 {
3364 	struct intel_display *display = to_intel_display(intel_dp);
3365 
3366 	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
3367 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3368 			/* can we turn CFF off? */
3369 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
3370 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3371 		}
3372 
3373 		/*
3374 		 * Still keep cff bit enabled as we don't have proper SU
3375 		 * configuration in case update is sent for any reason after
3376 		 * sff bit gets cleared by the HW on next vblank.
3377 		 *
3378 		 * NOTE: Setting cff bit is not needed for LunarLake onwards as
3379 		 * we have own register for SFF bit and we are not overwriting
3380 		 * existing SU configuration
3381 		 */
3382 		intel_psr_configure_full_frame_update(intel_dp);
3383 
3384 		intel_psr_force_update(intel_dp);
3385 	} else {
3386 		intel_psr_exit(intel_dp);
3387 	}
3388 
3389 	if ((!intel_dp->psr.psr2_sel_fetch_enabled || DISPLAY_VER(display) >= 20) &&
3390 	    !intel_dp->psr.busy_frontbuffer_bits)
3391 		queue_work(display->wq.unordered, &intel_dp->psr.work);
3392 }
3393 
3394 /**
3395  * intel_psr_flush - Flush PSR
3396  * @display: display device
3397  * @frontbuffer_bits: frontbuffer plane tracking bits
3398  * @origin: which operation caused the flush
3399  *
3400  * Since the hardware frontbuffer tracking has gaps we need to integrate
3401  * with the software frontbuffer tracking. This function gets called every
3402  * time frontbuffer rendering has completed and flushed out to memory. PSR
3403  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3404  *
3405  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3406  */
intel_psr_flush(struct intel_display * display,unsigned frontbuffer_bits,enum fb_op_origin origin)3407 void intel_psr_flush(struct intel_display *display,
3408 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3409 {
3410 	struct intel_encoder *encoder;
3411 
3412 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3413 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3414 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3415 
3416 		mutex_lock(&intel_dp->psr.lock);
3417 		if (!intel_dp->psr.enabled) {
3418 			mutex_unlock(&intel_dp->psr.lock);
3419 			continue;
3420 		}
3421 
3422 		pipe_frontbuffer_bits &=
3423 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3424 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3425 
3426 		/*
3427 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3428 		 * we have to ensure that the PSR is not activated until
3429 		 * intel_psr_resume() is called.
3430 		 */
3431 		if (intel_dp->psr.pause_counter)
3432 			goto unlock;
3433 
3434 		if (origin == ORIGIN_FLIP ||
3435 		    (origin == ORIGIN_CURSOR_UPDATE &&
3436 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3437 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3438 			goto unlock;
3439 		}
3440 
3441 		if (pipe_frontbuffer_bits == 0)
3442 			goto unlock;
3443 
3444 		/* By definition flush = invalidate + flush */
3445 		_psr_flush_handle(intel_dp);
3446 unlock:
3447 		mutex_unlock(&intel_dp->psr.lock);
3448 	}
3449 }
3450 
3451 /**
3452  * intel_psr_init - Init basic PSR work and mutex.
3453  * @intel_dp: Intel DP
3454  *
3455  * This function is called after the initializing connector.
3456  * (the initializing of connector treats the handling of connector capabilities)
3457  * And it initializes basic PSR stuff for each DP Encoder.
3458  */
intel_psr_init(struct intel_dp * intel_dp)3459 void intel_psr_init(struct intel_dp *intel_dp)
3460 {
3461 	struct intel_display *display = to_intel_display(intel_dp);
3462 	struct intel_connector *connector = intel_dp->attached_connector;
3463 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3464 
3465 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3466 		return;
3467 
3468 	/*
3469 	 * HSW spec explicitly says PSR is tied to port A.
3470 	 * BDW+ platforms have a instance of PSR registers per transcoder but
3471 	 * BDW, GEN9 and GEN11 are not validated by HW team in other transcoder
3472 	 * than eDP one.
3473 	 * For now it only supports one instance of PSR for BDW, GEN9 and GEN11.
3474 	 * So lets keep it hardcoded to PORT_A for BDW, GEN9 and GEN11.
3475 	 * But GEN12 supports a instance of PSR registers per transcoder.
3476 	 */
3477 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3478 		drm_dbg_kms(display->drm,
3479 			    "PSR condition failed: Port not supported\n");
3480 		return;
3481 	}
3482 
3483 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3484 	    DISPLAY_VER(display) >= 20)
3485 		intel_dp->psr.source_panel_replay_support = true;
3486 
3487 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3488 		intel_dp->psr.source_support = true;
3489 
3490 	/* Set link_standby x link_off defaults */
3491 	if (DISPLAY_VER(display) < 12)
3492 		/* For new platforms up to TGL let's respect VBT back again */
3493 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3494 
3495 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3496 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3497 	mutex_init(&intel_dp->psr.lock);
3498 }
3499 
psr_get_status_and_error_status(struct intel_dp * intel_dp,u8 * status,u8 * error_status)3500 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3501 					   u8 *status, u8 *error_status)
3502 {
3503 	struct drm_dp_aux *aux = &intel_dp->aux;
3504 	int ret;
3505 	unsigned int offset;
3506 
3507 	offset = intel_dp->psr.panel_replay_enabled ?
3508 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3509 
3510 	ret = drm_dp_dpcd_readb(aux, offset, status);
3511 	if (ret != 1)
3512 		return ret;
3513 
3514 	offset = intel_dp->psr.panel_replay_enabled ?
3515 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3516 
3517 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3518 	if (ret != 1)
3519 		return ret;
3520 
3521 	*status = *status & DP_PSR_SINK_STATE_MASK;
3522 
3523 	return 0;
3524 }
3525 
psr_alpm_check(struct intel_dp * intel_dp)3526 static void psr_alpm_check(struct intel_dp *intel_dp)
3527 {
3528 	struct intel_psr *psr = &intel_dp->psr;
3529 
3530 	if (!psr->sel_update_enabled)
3531 		return;
3532 
3533 	if (intel_alpm_get_error(intel_dp)) {
3534 		intel_psr_disable_locked(intel_dp);
3535 		psr->sink_not_reliable = true;
3536 	}
3537 }
3538 
psr_capability_changed_check(struct intel_dp * intel_dp)3539 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3540 {
3541 	struct intel_display *display = to_intel_display(intel_dp);
3542 	struct intel_psr *psr = &intel_dp->psr;
3543 	u8 val;
3544 	int r;
3545 
3546 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3547 	if (r != 1) {
3548 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3549 		return;
3550 	}
3551 
3552 	if (val & DP_PSR_CAPS_CHANGE) {
3553 		intel_psr_disable_locked(intel_dp);
3554 		psr->sink_not_reliable = true;
3555 		drm_dbg_kms(display->drm,
3556 			    "Sink PSR capability changed, disabling PSR\n");
3557 
3558 		/* Clearing it */
3559 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3560 	}
3561 }
3562 
3563 /*
3564  * On common bits:
3565  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3566  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3567  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3568  * this function is relying on PSR definitions
3569  */
intel_psr_short_pulse(struct intel_dp * intel_dp)3570 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3571 {
3572 	struct intel_display *display = to_intel_display(intel_dp);
3573 	struct intel_psr *psr = &intel_dp->psr;
3574 	u8 status, error_status;
3575 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3576 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3577 			  DP_PSR_LINK_CRC_ERROR;
3578 
3579 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3580 		return;
3581 
3582 	mutex_lock(&psr->lock);
3583 
3584 	psr->link_ok = false;
3585 
3586 	if (!psr->enabled)
3587 		goto exit;
3588 
3589 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3590 		drm_err(display->drm,
3591 			"Error reading PSR status or error status\n");
3592 		goto exit;
3593 	}
3594 
3595 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3596 	    (error_status & errors)) {
3597 		intel_psr_disable_locked(intel_dp);
3598 		psr->sink_not_reliable = true;
3599 	}
3600 
3601 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3602 	    !error_status)
3603 		drm_dbg_kms(display->drm,
3604 			    "PSR sink internal error, disabling PSR\n");
3605 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3606 		drm_dbg_kms(display->drm,
3607 			    "PSR RFB storage error, disabling PSR\n");
3608 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3609 		drm_dbg_kms(display->drm,
3610 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3611 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3612 		drm_dbg_kms(display->drm,
3613 			    "PSR Link CRC error, disabling PSR\n");
3614 
3615 	if (error_status & ~errors)
3616 		drm_err(display->drm,
3617 			"PSR_ERROR_STATUS unhandled errors %x\n",
3618 			error_status & ~errors);
3619 	/* clear status register */
3620 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3621 
3622 	if (!psr->panel_replay_enabled) {
3623 		psr_alpm_check(intel_dp);
3624 		psr_capability_changed_check(intel_dp);
3625 	}
3626 
3627 exit:
3628 	mutex_unlock(&psr->lock);
3629 }
3630 
intel_psr_enabled(struct intel_dp * intel_dp)3631 bool intel_psr_enabled(struct intel_dp *intel_dp)
3632 {
3633 	bool ret;
3634 
3635 	if (!CAN_PSR(intel_dp))
3636 		return false;
3637 
3638 	mutex_lock(&intel_dp->psr.lock);
3639 	ret = intel_dp->psr.enabled;
3640 	mutex_unlock(&intel_dp->psr.lock);
3641 
3642 	return ret;
3643 }
3644 
3645 /**
3646  * intel_psr_link_ok - return psr->link_ok
3647  * @intel_dp: struct intel_dp
3648  *
3649  * We are seeing unexpected link re-trainings with some panels. This is caused
3650  * by panel stating bad link status after PSR is enabled. Code checking link
3651  * status can call this to ensure it can ignore bad link status stated by the
3652  * panel I.e. if panel is stating bad link and intel_psr_link_ok is stating link
3653  * is ok caller should rely on latter.
3654  *
3655  * Return value of link_ok
3656  */
intel_psr_link_ok(struct intel_dp * intel_dp)3657 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3658 {
3659 	bool ret;
3660 
3661 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3662 	    !intel_dp_is_edp(intel_dp))
3663 		return false;
3664 
3665 	mutex_lock(&intel_dp->psr.lock);
3666 	ret = intel_dp->psr.link_ok;
3667 	mutex_unlock(&intel_dp->psr.lock);
3668 
3669 	return ret;
3670 }
3671 
3672 /**
3673  * intel_psr_lock - grab PSR lock
3674  * @crtc_state: the crtc state
3675  *
3676  * This is initially meant to be used by around CRTC update, when
3677  * vblank sensitive registers are updated and we need grab the lock
3678  * before it to avoid vblank evasion.
3679  */
intel_psr_lock(const struct intel_crtc_state * crtc_state)3680 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3681 {
3682 	struct intel_display *display = to_intel_display(crtc_state);
3683 	struct intel_encoder *encoder;
3684 
3685 	if (!crtc_state->has_psr)
3686 		return;
3687 
3688 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3689 					     crtc_state->uapi.encoder_mask) {
3690 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3691 
3692 		mutex_lock(&intel_dp->psr.lock);
3693 		break;
3694 	}
3695 }
3696 
3697 /**
3698  * intel_psr_unlock - release PSR lock
3699  * @crtc_state: the crtc state
3700  *
3701  * Release the PSR lock that was held during pipe update.
3702  */
intel_psr_unlock(const struct intel_crtc_state * crtc_state)3703 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3704 {
3705 	struct intel_display *display = to_intel_display(crtc_state);
3706 	struct intel_encoder *encoder;
3707 
3708 	if (!crtc_state->has_psr)
3709 		return;
3710 
3711 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3712 					     crtc_state->uapi.encoder_mask) {
3713 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3714 
3715 		mutex_unlock(&intel_dp->psr.lock);
3716 		break;
3717 	}
3718 }
3719 
3720 /* Wa_16025596647 */
intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp * intel_dp)3721 static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
3722 {
3723 	struct intel_display *display = to_intel_display(intel_dp);
3724 	bool dc5_dc6_blocked;
3725 
3726 	if (!intel_dp->psr.active)
3727 		return;
3728 
3729 	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
3730 
3731 	if (intel_dp->psr.sel_update_enabled)
3732 		psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
3733 					 psr_compute_idle_frames(intel_dp));
3734 	else
3735 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
3736 								       intel_dp->psr.pipe,
3737 								       dc5_dc6_blocked);
3738 }
3739 
psr_dc5_dc6_wa_work(struct work_struct * work)3740 static void psr_dc5_dc6_wa_work(struct work_struct *work)
3741 {
3742 	struct intel_display *display = container_of(work, typeof(*display),
3743 						     psr_dc5_dc6_wa_work);
3744 	struct intel_encoder *encoder;
3745 
3746 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3747 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3748 
3749 		mutex_lock(&intel_dp->psr.lock);
3750 
3751 		if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled)
3752 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3753 
3754 		mutex_unlock(&intel_dp->psr.lock);
3755 	}
3756 }
3757 
3758 /**
3759  * intel_psr_notify_dc5_dc6 - Notify PSR about enable/disable dc5/dc6
3760  * @display: intel atomic state
3761  *
3762  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to schedule
3763  * psr_dc5_dc6_wa_work used for applying/removing the workaround.
3764  */
intel_psr_notify_dc5_dc6(struct intel_display * display)3765 void intel_psr_notify_dc5_dc6(struct intel_display *display)
3766 {
3767 	if (DISPLAY_VER(display) != 20 &&
3768 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3769 		return;
3770 
3771 	schedule_work(&display->psr_dc5_dc6_wa_work);
3772 }
3773 
3774 /**
3775  * intel_psr_dc5_dc6_wa_init - Init work for underrun on idle PSR HW bug wa
3776  * @display: intel atomic state
3777  *
3778  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to init
3779  * psr_dc5_dc6_wa_work used for applying the workaround.
3780  */
intel_psr_dc5_dc6_wa_init(struct intel_display * display)3781 void intel_psr_dc5_dc6_wa_init(struct intel_display *display)
3782 {
3783 	if (DISPLAY_VER(display) != 20 &&
3784 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3785 		return;
3786 
3787 	INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work);
3788 }
3789 
3790 /**
3791  * intel_psr_notify_pipe_change - Notify PSR about enable/disable of a pipe
3792  * @state: intel atomic state
3793  * @crtc: intel crtc
3794  * @enable: enable/disable
3795  *
3796  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to apply
3797  * remove the workaround when pipe is getting enabled/disabled
3798  */
intel_psr_notify_pipe_change(struct intel_atomic_state * state,struct intel_crtc * crtc,bool enable)3799 void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
3800 				  struct intel_crtc *crtc, bool enable)
3801 {
3802 	struct intel_display *display = to_intel_display(state);
3803 	struct intel_encoder *encoder;
3804 
3805 	if (DISPLAY_VER(display) != 20 &&
3806 	    !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
3807 		return;
3808 
3809 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3810 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3811 		u8 active_non_psr_pipes;
3812 
3813 		mutex_lock(&intel_dp->psr.lock);
3814 
3815 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
3816 			goto unlock;
3817 
3818 		active_non_psr_pipes = intel_dp->psr.active_non_psr_pipes;
3819 
3820 		if (enable)
3821 			active_non_psr_pipes |= BIT(crtc->pipe);
3822 		else
3823 			active_non_psr_pipes &= ~BIT(crtc->pipe);
3824 
3825 		if (active_non_psr_pipes == intel_dp->psr.active_non_psr_pipes)
3826 			goto unlock;
3827 
3828 		if ((enable && intel_dp->psr.active_non_psr_pipes) ||
3829 		    (!enable && !intel_dp->psr.active_non_psr_pipes)) {
3830 			intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3831 			goto unlock;
3832 		}
3833 
3834 		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
3835 
3836 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3837 unlock:
3838 		mutex_unlock(&intel_dp->psr.lock);
3839 	}
3840 }
3841 
3842 /**
3843  * intel_psr_notify_vblank_enable_disable - Notify PSR about enable/disable of vblank
3844  * @display: intel display struct
3845  * @enable: enable/disable
3846  *
3847  * This is targeted for underrun on idle PSR HW bug (Wa_16025596647) to apply
3848  * remove the workaround when vblank is getting enabled/disabled
3849  */
intel_psr_notify_vblank_enable_disable(struct intel_display * display,bool enable)3850 void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
3851 					    bool enable)
3852 {
3853 	struct intel_encoder *encoder;
3854 
3855 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3856 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3857 
3858 		mutex_lock(&intel_dp->psr.lock);
3859 		if (intel_dp->psr.panel_replay_enabled) {
3860 			mutex_unlock(&intel_dp->psr.lock);
3861 			break;
3862 		}
3863 
3864 		if (intel_dp->psr.enabled)
3865 			intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
3866 
3867 		mutex_unlock(&intel_dp->psr.lock);
3868 		return;
3869 	}
3870 
3871 	/*
3872 	 * NOTE: intel_display_power_set_target_dc_state is used
3873 	 * only by PSR * code for DC3CO handling. DC3CO target
3874 	 * state is currently disabled in * PSR code. If DC3CO
3875 	 * is taken into use we need take that into account here
3876 	 * as well.
3877 	 */
3878 	intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE :
3879 						DC_STATE_EN_UPTO_DC6);
3880 }
3881 
3882 static void
psr_source_status(struct intel_dp * intel_dp,struct seq_file * m)3883 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3884 {
3885 	struct intel_display *display = to_intel_display(intel_dp);
3886 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3887 	const char *status = "unknown";
3888 	u32 val, status_val;
3889 
3890 	if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) &&
3891 	    (intel_dp->psr.sel_update_enabled || intel_dp->psr.panel_replay_enabled)) {
3892 		static const char * const live_status[] = {
3893 			"IDLE",
3894 			"CAPTURE",
3895 			"CAPTURE_FS",
3896 			"SLEEP",
3897 			"BUFON_FW",
3898 			"ML_UP",
3899 			"SU_STANDBY",
3900 			"FAST_SLEEP",
3901 			"DEEP_SLEEP",
3902 			"BUF_ON",
3903 			"TG_ON"
3904 		};
3905 		val = intel_de_read(display,
3906 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3907 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3908 		if (status_val < ARRAY_SIZE(live_status))
3909 			status = live_status[status_val];
3910 	} else {
3911 		static const char * const live_status[] = {
3912 			"IDLE",
3913 			"SRDONACK",
3914 			"SRDENT",
3915 			"BUFOFF",
3916 			"BUFON",
3917 			"AUXACK",
3918 			"SRDOFFACK",
3919 			"SRDENT_ON",
3920 		};
3921 		val = intel_de_read(display,
3922 				    psr_status_reg(display, cpu_transcoder));
3923 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3924 		if (status_val < ARRAY_SIZE(live_status))
3925 			status = live_status[status_val];
3926 	}
3927 
3928 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3929 }
3930 
intel_psr_sink_capability(struct intel_dp * intel_dp,struct seq_file * m)3931 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3932 				      struct seq_file *m)
3933 {
3934 	struct intel_psr *psr = &intel_dp->psr;
3935 
3936 	seq_printf(m, "Sink support: PSR = %s",
3937 		   str_yes_no(psr->sink_support));
3938 
3939 	if (psr->sink_support)
3940 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3941 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3942 		seq_printf(m, " (Early Transport)");
3943 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3944 	seq_printf(m, ", Panel Replay Selective Update = %s",
3945 		   str_yes_no(psr->sink_panel_replay_su_support));
3946 	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
3947 	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3948 		seq_printf(m, " (Early Transport)");
3949 	seq_printf(m, "\n");
3950 }
3951 
intel_psr_print_mode(struct intel_dp * intel_dp,struct seq_file * m)3952 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3953 				 struct seq_file *m)
3954 {
3955 	struct intel_psr *psr = &intel_dp->psr;
3956 	const char *status, *mode, *region_et;
3957 
3958 	if (psr->enabled)
3959 		status = " enabled";
3960 	else
3961 		status = "disabled";
3962 
3963 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
3964 		mode = "Panel Replay Selective Update";
3965 	else if (psr->panel_replay_enabled)
3966 		mode = "Panel Replay";
3967 	else if (psr->sel_update_enabled)
3968 		mode = "PSR2";
3969 	else if (psr->enabled)
3970 		mode = "PSR1";
3971 	else
3972 		mode = "";
3973 
3974 	if (psr->su_region_et_enabled)
3975 		region_et = " (Early Transport)";
3976 	else
3977 		region_et = "";
3978 
3979 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3980 }
3981 
intel_psr_status(struct seq_file * m,struct intel_dp * intel_dp)3982 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3983 {
3984 	struct intel_display *display = to_intel_display(intel_dp);
3985 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3986 	struct intel_psr *psr = &intel_dp->psr;
3987 	struct ref_tracker *wakeref;
3988 	bool enabled;
3989 	u32 val, psr2_ctl;
3990 
3991 	intel_psr_sink_capability(intel_dp, m);
3992 
3993 	if (!(psr->sink_support || psr->sink_panel_replay_support))
3994 		return 0;
3995 
3996 	wakeref = intel_display_rpm_get(display);
3997 	mutex_lock(&psr->lock);
3998 
3999 	intel_psr_print_mode(intel_dp, m);
4000 
4001 	if (!psr->enabled) {
4002 		seq_printf(m, "PSR sink not reliable: %s\n",
4003 			   str_yes_no(psr->sink_not_reliable));
4004 
4005 		goto unlock;
4006 	}
4007 
4008 	if (psr->panel_replay_enabled) {
4009 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
4010 
4011 		if (intel_dp_is_edp(intel_dp))
4012 			psr2_ctl = intel_de_read(display,
4013 						 EDP_PSR2_CTL(display,
4014 							      cpu_transcoder));
4015 
4016 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
4017 	} else if (psr->sel_update_enabled) {
4018 		val = intel_de_read(display,
4019 				    EDP_PSR2_CTL(display, cpu_transcoder));
4020 		enabled = val & EDP_PSR2_ENABLE;
4021 	} else {
4022 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
4023 		enabled = val & EDP_PSR_ENABLE;
4024 	}
4025 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
4026 		   str_enabled_disabled(enabled), val);
4027 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
4028 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
4029 			   psr2_ctl);
4030 	psr_source_status(intel_dp, m);
4031 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
4032 		   psr->busy_frontbuffer_bits);
4033 
4034 	/*
4035 	 * SKL+ Perf counter is reset to 0 everytime DC state is entered
4036 	 */
4037 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
4038 	seq_printf(m, "Performance counter: %u\n",
4039 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
4040 
4041 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
4042 		seq_printf(m, "Last attempted entry at: %lld\n",
4043 			   psr->last_entry_attempt);
4044 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
4045 	}
4046 
4047 	if (psr->sel_update_enabled) {
4048 		u32 su_frames_val[3];
4049 		int frame;
4050 
4051 		/*
4052 		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
4053 		 * (it returns zeros only) and it has been removed on Xe2_LPD.
4054 		 */
4055 		if (DISPLAY_VER(display) < 13) {
4056 			/*
4057 			 * Reading all 3 registers before hand to minimize crossing a
4058 			 * frame boundary between register reads
4059 			 */
4060 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
4061 				val = intel_de_read(display,
4062 						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
4063 				su_frames_val[frame / 3] = val;
4064 			}
4065 
4066 			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
4067 
4068 			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
4069 				u32 su_blocks;
4070 
4071 				su_blocks = su_frames_val[frame / 3] &
4072 					PSR2_SU_STATUS_MASK(frame);
4073 				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
4074 				seq_printf(m, "%d\t%d\n", frame, su_blocks);
4075 			}
4076 		}
4077 
4078 		seq_printf(m, "PSR2 selective fetch: %s\n",
4079 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
4080 	}
4081 
4082 unlock:
4083 	mutex_unlock(&psr->lock);
4084 	intel_display_rpm_put(display, wakeref);
4085 
4086 	return 0;
4087 }
4088 
i915_edp_psr_status_show(struct seq_file * m,void * data)4089 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
4090 {
4091 	struct intel_display *display = m->private;
4092 	struct intel_dp *intel_dp = NULL;
4093 	struct intel_encoder *encoder;
4094 
4095 	if (!HAS_PSR(display))
4096 		return -ENODEV;
4097 
4098 	/* Find the first EDP which supports PSR */
4099 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4100 		intel_dp = enc_to_intel_dp(encoder);
4101 		break;
4102 	}
4103 
4104 	if (!intel_dp)
4105 		return -ENODEV;
4106 
4107 	return intel_psr_status(m, intel_dp);
4108 }
4109 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
4110 
4111 static int
i915_edp_psr_debug_set(void * data,u64 val)4112 i915_edp_psr_debug_set(void *data, u64 val)
4113 {
4114 	struct intel_display *display = data;
4115 	struct intel_encoder *encoder;
4116 	int ret = -ENODEV;
4117 
4118 	if (!HAS_PSR(display))
4119 		return ret;
4120 
4121 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4122 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4123 
4124 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
4125 
4126 		// TODO: split to each transcoder's PSR debug state
4127 		with_intel_display_rpm(display)
4128 			ret = intel_psr_debug_set(intel_dp, val);
4129 	}
4130 
4131 	return ret;
4132 }
4133 
4134 static int
i915_edp_psr_debug_get(void * data,u64 * val)4135 i915_edp_psr_debug_get(void *data, u64 *val)
4136 {
4137 	struct intel_display *display = data;
4138 	struct intel_encoder *encoder;
4139 
4140 	if (!HAS_PSR(display))
4141 		return -ENODEV;
4142 
4143 	for_each_intel_encoder_with_psr(display->drm, encoder) {
4144 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4145 
4146 		// TODO: split to each transcoder's PSR debug state
4147 		*val = READ_ONCE(intel_dp->psr.debug);
4148 		return 0;
4149 	}
4150 
4151 	return -ENODEV;
4152 }
4153 
4154 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
4155 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
4156 			"%llu\n");
4157 
intel_psr_debugfs_register(struct intel_display * display)4158 void intel_psr_debugfs_register(struct intel_display *display)
4159 {
4160 	struct drm_minor *minor = display->drm->primary;
4161 
4162 	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
4163 			    display, &i915_edp_psr_debug_fops);
4164 
4165 	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
4166 			    display, &i915_edp_psr_status_fops);
4167 }
4168 
psr_mode_str(struct intel_dp * intel_dp)4169 static const char *psr_mode_str(struct intel_dp *intel_dp)
4170 {
4171 	if (intel_dp->psr.panel_replay_enabled)
4172 		return "PANEL-REPLAY";
4173 	else if (intel_dp->psr.enabled)
4174 		return "PSR";
4175 
4176 	return "unknown";
4177 }
4178 
i915_psr_sink_status_show(struct seq_file * m,void * data)4179 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
4180 {
4181 	struct intel_connector *connector = m->private;
4182 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4183 	static const char * const sink_status[] = {
4184 		"inactive",
4185 		"transition to active, capture and display",
4186 		"active, display from RFB",
4187 		"active, capture and display on sink device timings",
4188 		"transition to inactive, capture and display, timing re-sync",
4189 		"reserved",
4190 		"reserved",
4191 		"sink internal error",
4192 	};
4193 	const char *str;
4194 	int ret;
4195 	u8 status, error_status;
4196 
4197 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
4198 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
4199 		return -ENODEV;
4200 	}
4201 
4202 	if (connector->base.status != connector_status_connected)
4203 		return -ENODEV;
4204 
4205 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
4206 	if (ret)
4207 		return ret;
4208 
4209 	status &= DP_PSR_SINK_STATE_MASK;
4210 	if (status < ARRAY_SIZE(sink_status))
4211 		str = sink_status[status];
4212 	else
4213 		str = "unknown";
4214 
4215 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
4216 
4217 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
4218 
4219 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
4220 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
4221 			    DP_PSR_LINK_CRC_ERROR))
4222 		seq_puts(m, ":\n");
4223 	else
4224 		seq_puts(m, "\n");
4225 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
4226 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
4227 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
4228 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
4229 	if (error_status & DP_PSR_LINK_CRC_ERROR)
4230 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
4231 
4232 	return ret;
4233 }
4234 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
4235 
i915_psr_status_show(struct seq_file * m,void * data)4236 static int i915_psr_status_show(struct seq_file *m, void *data)
4237 {
4238 	struct intel_connector *connector = m->private;
4239 	struct intel_dp *intel_dp = intel_attached_dp(connector);
4240 
4241 	return intel_psr_status(m, intel_dp);
4242 }
4243 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
4244 
intel_psr_connector_debugfs_add(struct intel_connector * connector)4245 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
4246 {
4247 	struct intel_display *display = to_intel_display(connector);
4248 	struct dentry *root = connector->base.debugfs_entry;
4249 
4250 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
4251 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
4252 		return;
4253 
4254 	debugfs_create_file("i915_psr_sink_status", 0444, root,
4255 			    connector, &i915_psr_sink_status_fops);
4256 
4257 	if (HAS_PSR(display) || HAS_DP20(display))
4258 		debugfs_create_file("i915_psr_status", 0444, root,
4259 				    connector, &i915_psr_status_fops);
4260 }
4261 
intel_psr_needs_alpm(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)4262 bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_state *crtc_state)
4263 {
4264 	/*
4265 	 * eDP Panel Replay uses always ALPM
4266 	 * PSR2 uses ALPM but PSR1 doesn't
4267 	 */
4268 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
4269 					     crtc_state->has_panel_replay);
4270 }
4271 
intel_psr_needs_alpm_aux_less(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)4272 bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
4273 				   const struct intel_crtc_state *crtc_state)
4274 {
4275 	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
4276 }
4277