xref: /linux/drivers/gpu/drm/i915/display/intel_psr.c (revision 2c1ed907520c50326b8f604907a8478b27881a2e)
1 /*
2  * Copyright © 2014 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/debugfs.h>
25 
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29 
30 #include "i915_drv.h"
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_types.h"
40 #include "intel_dp.h"
41 #include "intel_dp_aux.h"
42 #include "intel_frontbuffer.h"
43 #include "intel_hdmi.h"
44 #include "intel_psr.h"
45 #include "intel_psr_regs.h"
46 #include "intel_snps_phy.h"
47 #include "skl_universal_plane.h"
48 
49 /**
50  * DOC: Panel Self Refresh (PSR/SRD)
51  *
52  * Since Haswell the Display controller supports Panel Self-Refresh on display
53  * panels which have a remote frame buffer (RFB) implemented according to the
54  * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
55  * standby states when the system is idle but the display is on, as it
56  * completely eliminates display refresh requests to DDR memory as long as
57  * the frame buffer for that display is unchanged.
58  *
59  * Panel Self Refresh must be supported by both Hardware (source) and
60  * Panel (sink).
61  *
62  * PSR saves power by caching the framebuffer in the panel RFB, which allows us
63  * to power down the link and memory controller. For DSI panels the same idea
64  * is called "manual mode".
65  *
66  * The implementation uses the hardware-based PSR support which automatically
67  * enters/exits self-refresh mode. The hardware takes care of sending the
68  * required DP aux message and could even retrain the link (that part isn't
69  * enabled yet though). The hardware also keeps track of any frontbuffer
70  * changes to know when to exit self-refresh mode again. Unfortunately that
71  * part doesn't work too well, hence why the i915 PSR support uses the
72  * software frontbuffer tracking to make sure it doesn't miss a screen
73  * update. For this integration intel_psr_invalidate() and intel_psr_flush()
74  * get called by the frontbuffer tracking code. Note that because of locking
75  * issues the self-refresh re-enable code is done from a work queue, which
76  * must be correctly synchronized/cancelled when shutting down the pipe.
77  *
78  * DC3CO (DC3 clock off)
79  *
80  * On top of PSR2, GEN12 adds an intermediate power-saving state that turns
81  * the clock off automatically during the PSR2 idle state.
82  * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
83  * entry/exit allows the HW to enter a low-power state even when page flipping
84  * periodically (for instance a 30fps video playback scenario).
85  *
86  * Every time a flip occurs PSR2 will get out of deep sleep state (if it was),
87  * DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run after 6
88  * frames. If no other flip occurs and that work function executes, DC3CO is
89  * disabled and PSR2 is configured to enter deep sleep again, restarting the
90  * cycle in case of another flip.
91  * Front buffer modifications do not trigger DC3CO activation on purpose as it
92  * would bring a lot of complexity and most modern systems will only
93  * use page flips.
94  */
95 
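/*
 * A rough sketch of the frontbuffer tracking hand-off described above
 * (illustrative only, not actual driver code; the real entry points and
 * their exact signatures live in intel_psr.h and intel_frontbuffer.c):
 *
 *	CPU/GPU rendering to a frontbuffer starts
 *		-> frontbuffer tracking calls intel_psr_invalidate()
 *		   (PSR exits and stays off while the buffer is dirty)
 *	rendering finishes / the frontbuffer is flushed
 *		-> frontbuffer tracking calls intel_psr_flush()
 *		   (self-refresh re-enable is scheduled from a work queue)
 */
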
96 /*
97  * Description of PSR mask bits:
98  *
99  * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
100  *
101  *  When unmasked (nearly) all display register writes (eg. even
102  *  SWF) trigger a PSR exit. Some registers are excluded from this
103  *  and they have a more specific mask (described below). On icl+
104  *  this bit no longer exists and is effectively always set.
105  *
106  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
107  *
108  *  When unmasked (nearly) all pipe/plane register writes
109  *  trigger a PSR exit. Some plane registers are excluded from this
110  *  and they have a more specific mask (described below).
111  *
112  * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
113  * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
114  * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
115  *
116  *  When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
117  *  SPR_SURF/CURBASE are not included in this and instead are
118  *  controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
119  *  EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
120  *
121  * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
122  * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
123  *
124  *  When unmasked PSR is blocked as long as the sprite
125  *  plane is enabled. skl+ with their universal planes no
126  *  longer have a mask bit like this, and no plane being
127  *  enabled blocks PSR.
128  *
129  * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
130  * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
131  *
132  *  When unmasked CURPOS writes trigger a PSR exit. On skl+
133  *  this doesn't exist but CURPOS is included in the
134  *  PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
135  *
136  * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
137  * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
138  *
139  *  When unmasked PSR is blocked as long as vblank and/or vsync
140  *  interrupt is unmasked in IMR *and* enabled in IER.
141  *
142  * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
143  * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
144  *
145  *  Selects whether PSR exit generates an extra vblank before
146  *  the first frame is transmitted. Also note the opposite polarity
147  *  of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
148  *  unmasked==do not generate the extra vblank).
149  *
150  *  With DC states enabled the extra vblank happens after link training,
151  *  with DC states disabled it happens immediately upon PSR exit trigger.
152  *  No idea as of now why there is a difference. HSW/BDW (which don't
153  *  even have DMC) always generate it after link training. Go figure.
154  *
155  *  Unfortunately CHICKEN_TRANS itself seems to be double buffered
156  *  and thus won't latch until the first vblank. So with DC states
157  *  enabled the register effectively uses the reset value during DC5
158  *  exit+PSR exit sequence, and thus the bit does nothing until
159  *  latched by the vblank that it was trying to prevent from being
160  *  generated in the first place. So we should probably call this
161  *  one a chicken/egg bit instead on skl+.
162  *
163  *  In standby mode (as opposed to link-off) this makes no difference
164  *  as the timing generator keeps running the whole time generating
165  *  normal periodic vblanks.
166  *
167  *  WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
168  *  and doing so makes the behaviour match the skl+ reset value.
169  *
170  * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
171  * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
172  *
173  *  On BDW without this bit set no vblanks whatsoever are
174  *  generated after PSR exit. On HSW this has no apparent effect.
175  *  WaPsrDPRSUnmaskVBlankInSRD says to set this.
176  *
177  * The rest of the bits are more self-explanatory and/or
178  * irrelevant for normal operation.
179  *
180  * Description of intel_crtc_state variables. has_psr, has_panel_replay and
181  * has_sel_update:
182  *
183  *  has_psr (alone):					PSR1
184  *  has_psr + has_sel_update:				PSR2
185  *  has_psr + has_panel_replay:				Panel Replay
186  *  has_psr + has_panel_replay + has_sel_update:	Panel Replay Selective Update
187  *
188  * Description of some intel_psr variables. enabled, panel_replay_enabled,
189  * sel_update_enabled
190  *
191  *  enabled (alone):						PSR1
192  *  enabled + sel_update_enabled:				PSR2
193  *  enabled + panel_replay_enabled:				Panel Replay
194  *  enabled + panel_replay_enabled + sel_update_enabled:	Panel Replay SU
195  */
196 
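/*
 * An illustrative (non-driver) sketch of how the flag combinations above
 * map to a mode, using only the crtc_state fields named in the table:
 *
 *	if (!crtc_state->has_psr)
 *		mode = "none";
 *	else if (crtc_state->has_panel_replay)
 *		mode = crtc_state->has_sel_update ? "Panel Replay SU" : "Panel Replay";
 *	else
 *		mode = crtc_state->has_sel_update ? "PSR2" : "PSR1";
 */
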
197 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
198 			   (intel_dp)->psr.source_support)
199 
200 bool intel_encoder_can_psr(struct intel_encoder *encoder)
201 {
202 	if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
203 		return CAN_PSR(enc_to_intel_dp(encoder)) ||
204 		       CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
205 	else
206 		return false;
207 }
208 
209 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
210 				  const struct intel_crtc_state *crtc_state)
211 {
212 	/*
213 	 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
214 	 * the output is enabled. For non-eDP outputs the main link is always
215 	 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
216 	 * for eDP.
217 	 *
218 	 * TODO:
219 	 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
220 	 *   the ALPM with main-link off mode is not enabled.
221 	 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
222 	 *   main-link off mode is added for it and this mode gets enabled.
223 	 */
224 	return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
225 	       intel_encoder_can_psr(encoder);
226 }
227 
228 static bool psr_global_enabled(struct intel_dp *intel_dp)
229 {
230 	struct intel_display *display = to_intel_display(intel_dp);
231 	struct intel_connector *connector = intel_dp->attached_connector;
232 
233 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
234 	case I915_PSR_DEBUG_DEFAULT:
235 		if (display->params.enable_psr == -1)
236 			return intel_dp_is_edp(intel_dp) ?
237 				connector->panel.vbt.psr.enable :
238 				true;
239 		return display->params.enable_psr;
240 	case I915_PSR_DEBUG_DISABLE:
241 		return false;
242 	default:
243 		return true;
244 	}
245 }
246 
247 static bool psr2_global_enabled(struct intel_dp *intel_dp)
248 {
249 	struct intel_display *display = to_intel_display(intel_dp);
250 
251 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
252 	case I915_PSR_DEBUG_DISABLE:
253 	case I915_PSR_DEBUG_FORCE_PSR1:
254 		return false;
255 	default:
256 		if (display->params.enable_psr == 1)
257 			return false;
258 		return true;
259 	}
260 }
261 
262 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
263 {
264 	struct intel_display *display = to_intel_display(intel_dp);
265 
266 	if (display->params.enable_psr != -1)
267 		return false;
268 
269 	return true;
270 }
271 
272 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
273 {
274 	struct intel_display *display = to_intel_display(intel_dp);
275 
276 	if ((display->params.enable_psr != -1) ||
277 	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
278 		return false;
279 	return true;
280 }
281 
282 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
283 {
284 	struct intel_display *display = to_intel_display(intel_dp);
285 
286 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
287 		EDP_PSR_ERROR(intel_dp->psr.transcoder);
288 }
289 
290 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
291 {
292 	struct intel_display *display = to_intel_display(intel_dp);
293 
294 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
295 		EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
296 }
297 
298 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
299 {
300 	struct intel_display *display = to_intel_display(intel_dp);
301 
302 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
303 		EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
304 }
305 
306 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
307 {
308 	struct intel_display *display = to_intel_display(intel_dp);
309 
310 	return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
311 		EDP_PSR_MASK(intel_dp->psr.transcoder);
312 }
313 
314 static i915_reg_t psr_ctl_reg(struct intel_display *display,
315 			      enum transcoder cpu_transcoder)
316 {
317 	if (DISPLAY_VER(display) >= 8)
318 		return EDP_PSR_CTL(display, cpu_transcoder);
319 	else
320 		return HSW_SRD_CTL;
321 }
322 
323 static i915_reg_t psr_debug_reg(struct intel_display *display,
324 				enum transcoder cpu_transcoder)
325 {
326 	if (DISPLAY_VER(display) >= 8)
327 		return EDP_PSR_DEBUG(display, cpu_transcoder);
328 	else
329 		return HSW_SRD_DEBUG;
330 }
331 
332 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
333 				   enum transcoder cpu_transcoder)
334 {
335 	if (DISPLAY_VER(display) >= 8)
336 		return EDP_PSR_PERF_CNT(display, cpu_transcoder);
337 	else
338 		return HSW_SRD_PERF_CNT;
339 }
340 
341 static i915_reg_t psr_status_reg(struct intel_display *display,
342 				 enum transcoder cpu_transcoder)
343 {
344 	if (DISPLAY_VER(display) >= 8)
345 		return EDP_PSR_STATUS(display, cpu_transcoder);
346 	else
347 		return HSW_SRD_STATUS;
348 }
349 
350 static i915_reg_t psr_imr_reg(struct intel_display *display,
351 			      enum transcoder cpu_transcoder)
352 {
353 	if (DISPLAY_VER(display) >= 12)
354 		return TRANS_PSR_IMR(display, cpu_transcoder);
355 	else
356 		return EDP_PSR_IMR;
357 }
358 
359 static i915_reg_t psr_iir_reg(struct intel_display *display,
360 			      enum transcoder cpu_transcoder)
361 {
362 	if (DISPLAY_VER(display) >= 12)
363 		return TRANS_PSR_IIR(display, cpu_transcoder);
364 	else
365 		return EDP_PSR_IIR;
366 }
367 
368 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
369 				  enum transcoder cpu_transcoder)
370 {
371 	if (DISPLAY_VER(display) >= 8)
372 		return EDP_PSR_AUX_CTL(display, cpu_transcoder);
373 	else
374 		return HSW_SRD_AUX_CTL;
375 }
376 
377 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
378 				   enum transcoder cpu_transcoder, int i)
379 {
380 	if (DISPLAY_VER(display) >= 8)
381 		return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
382 	else
383 		return HSW_SRD_AUX_DATA(i);
384 }
385 
386 static void psr_irq_control(struct intel_dp *intel_dp)
387 {
388 	struct intel_display *display = to_intel_display(intel_dp);
389 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
390 	u32 mask;
391 
392 	if (intel_dp->psr.panel_replay_enabled)
393 		return;
394 
395 	mask = psr_irq_psr_error_bit_get(intel_dp);
396 	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
397 		mask |= psr_irq_post_exit_bit_get(intel_dp) |
398 			psr_irq_pre_entry_bit_get(intel_dp);
399 
400 	intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
401 		     psr_irq_mask_get(intel_dp), ~mask);
402 }
403 
404 static void psr_event_print(struct intel_display *display,
405 			    u32 val, bool sel_update_enabled)
406 {
407 	drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
408 	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
409 		drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
410 	if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
411 		drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
412 	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
413 		drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
414 	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
415 		drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
416 	if (val & PSR_EVENT_GRAPHICS_RESET)
417 		drm_dbg_kms(display->drm, "\tGraphics reset\n");
418 	if (val & PSR_EVENT_PCH_INTERRUPT)
419 		drm_dbg_kms(display->drm, "\tPCH interrupt\n");
420 	if (val & PSR_EVENT_MEMORY_UP)
421 		drm_dbg_kms(display->drm, "\tMemory up\n");
422 	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
423 		drm_dbg_kms(display->drm, "\tFront buffer modification\n");
424 	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
425 		drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
426 	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
427 		drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
428 	if (val & PSR_EVENT_REGISTER_UPDATE)
429 		drm_dbg_kms(display->drm, "\tRegister updated\n");
430 	if (val & PSR_EVENT_HDCP_ENABLE)
431 		drm_dbg_kms(display->drm, "\tHDCP enabled\n");
432 	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
433 		drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
434 	if (val & PSR_EVENT_VBI_ENABLE)
435 		drm_dbg_kms(display->drm, "\tVBI enabled\n");
436 	if (val & PSR_EVENT_LPSP_MODE_EXIT)
437 		drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
438 	if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
439 		drm_dbg_kms(display->drm, "\tPSR disabled\n");
440 }
441 
442 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
443 {
444 	struct intel_display *display = to_intel_display(intel_dp);
445 	struct drm_i915_private *dev_priv = to_i915(display->drm);
446 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
447 	ktime_t time_ns =  ktime_get();
448 
449 	if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
450 		intel_dp->psr.last_entry_attempt = time_ns;
451 		drm_dbg_kms(display->drm,
452 			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
453 			    transcoder_name(cpu_transcoder));
454 	}
455 
456 	if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
457 		intel_dp->psr.last_exit = time_ns;
458 		drm_dbg_kms(display->drm,
459 			    "[transcoder %s] PSR exit completed\n",
460 			    transcoder_name(cpu_transcoder));
461 
462 		if (DISPLAY_VER(display) >= 9) {
463 			u32 val;
464 
465 			val = intel_de_rmw(dev_priv,
466 					   PSR_EVENT(dev_priv, cpu_transcoder),
467 					   0, 0);
468 
469 			psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
470 		}
471 	}
472 
473 	if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
474 		drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
475 			 transcoder_name(cpu_transcoder));
476 
477 		intel_dp->psr.irq_aux_error = true;
478 
479 		/*
480 		 * If this interrupt is not masked it will keep
481 		 * firing so fast that it prevents the scheduled
482 		 * work from running.
483 		 * Also, after a PSR error we don't want to arm PSR
484 		 * again, so we don't care about unmasking the
485 		 * interrupt or clearing irq_aux_error.
486 		 */
487 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
488 			     0, psr_irq_psr_error_bit_get(intel_dp));
489 
490 		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
491 	}
492 }
493 
494 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
495 {
496 	struct intel_display *display = to_intel_display(intel_dp);
497 	u8 val = 8; /* assume the worst if we can't read the value */
498 
499 	if (drm_dp_dpcd_readb(&intel_dp->aux,
500 			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
501 		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
502 	else
503 		drm_dbg_kms(display->drm,
504 			    "Unable to get sink synchronization latency, assuming 8 frames\n");
505 	return val;
506 }
507 
508 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
509 {
510 	u8 su_capability = 0;
511 
512 	if (intel_dp->psr.sink_panel_replay_su_support)
513 		drm_dp_dpcd_readb(&intel_dp->aux,
514 				  DP_PANEL_PANEL_REPLAY_CAPABILITY,
515 				  &su_capability);
516 	else
517 		su_capability = intel_dp->psr_dpcd[1];
518 
519 	return su_capability;
520 }
521 
522 static unsigned int
523 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
524 {
525 	return intel_dp->psr.sink_panel_replay_su_support ?
526 		DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
527 		DP_PSR2_SU_X_GRANULARITY;
528 }
529 
530 static unsigned int
531 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
532 {
533 	return intel_dp->psr.sink_panel_replay_su_support ?
534 		DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
535 		DP_PSR2_SU_Y_GRANULARITY;
536 }
537 
538 /*
539  * Note: Bits related to granularity are the same in the panel replay and
540  * psr registers. Rely on the PSR definitions for these "common" bits.
541  */
542 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
543 {
544 	struct intel_display *display = to_intel_display(intel_dp);
545 	ssize_t r;
546 	u16 w;
547 	u8 y;
548 
549 	/*
550 	 * TODO: Do we need to take into account a panel supporting both PSR
551 	 * and Panel Replay?
552 	 */
553 
554 	/*
555 	 * If the sink doesn't have specific granularity requirements, set legacy
556 	 * ones.
557 	 */
558 	if (!(intel_dp_get_su_capability(intel_dp) &
559 	      DP_PSR2_SU_GRANULARITY_REQUIRED)) {
560 		/* As PSR2 HW sends full lines, we do not care about x granularity */
561 		w = 4;
562 		y = 4;
563 		goto exit;
564 	}
565 
566 	r = drm_dp_dpcd_read(&intel_dp->aux,
567 			     intel_dp_get_su_x_granularity_offset(intel_dp),
568 			     &w, 2);
569 	if (r != 2)
570 		drm_dbg_kms(display->drm,
571 			    "Unable to read selective update x granularity\n");
572 	/*
573 	 * Spec says that if the value read is 0 the default granularity should
574 	 * be used instead.
575 	 */
576 	if (r != 2 || w == 0)
577 		w = 4;
578 
579 	r = drm_dp_dpcd_read(&intel_dp->aux,
580 			     intel_dp_get_su_y_granularity_offset(intel_dp),
581 			     &y, 1);
582 	if (r != 1) {
583 		drm_dbg_kms(display->drm,
584 			    "Unable to read selective update y granularity\n");
585 		y = 4;
586 	}
587 	if (y == 0)
588 		y = 1;
589 
590 exit:
591 	intel_dp->psr.su_w_granularity = w;
592 	intel_dp->psr.su_y_granularity = y;
593 }
594 
595 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
596 {
597 	struct intel_display *display = to_intel_display(intel_dp);
598 
599 	if (intel_dp_is_edp(intel_dp)) {
600 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
601 			drm_dbg_kms(display->drm,
602 				    "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
603 			return;
604 		}
605 
606 		if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
607 			drm_dbg_kms(display->drm,
608 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
609 			return;
610 		}
611 	}
612 
613 	intel_dp->psr.sink_panel_replay_support = true;
614 
615 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
616 		intel_dp->psr.sink_panel_replay_su_support = true;
617 
618 	drm_dbg_kms(display->drm,
619 		    "Panel replay %sis supported by panel\n",
620 		    intel_dp->psr.sink_panel_replay_su_support ?
621 		    "selective_update " : "");
622 }
623 
624 static void _psr_init_dpcd(struct intel_dp *intel_dp)
625 {
626 	struct intel_display *display = to_intel_display(intel_dp);
627 
628 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
629 		    intel_dp->psr_dpcd[0]);
630 
631 	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
632 		drm_dbg_kms(display->drm,
633 			    "PSR support not currently available for this panel\n");
634 		return;
635 	}
636 
637 	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
638 		drm_dbg_kms(display->drm,
639 			    "Panel lacks power state control, PSR cannot be enabled\n");
640 		return;
641 	}
642 
643 	intel_dp->psr.sink_support = true;
644 	intel_dp->psr.sink_sync_latency =
645 		intel_dp_get_sink_sync_latency(intel_dp);
646 
647 	if (DISPLAY_VER(display) >= 9 &&
648 	    intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
649 		bool y_req = intel_dp->psr_dpcd[1] &
650 			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
651 
652 		/*
653 		 * All panels that support PSR version 03h (PSR2 +
654 		 * Y-coordinate) can handle Y-coordinates in VSC, but we are
655 		 * only sure that it is going to be used when required by the
656 		 * panel. This way the panel is capable of doing selective
657 		 * updates without an aux frame sync.
658 		 *
659 		 * To support panels with PSR version 02h, or PSR version 03h
660 		 * without the Y-coordinate requirement, we would need to
661 		 * enable GTC first.
662 		 */
663 		intel_dp->psr.sink_psr2_support = y_req &&
664 			intel_alpm_aux_wake_supported(intel_dp);
665 		drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
666 			    intel_dp->psr.sink_psr2_support ? "" : "not ");
667 	}
668 }
669 
670 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
671 {
672 	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
673 			 sizeof(intel_dp->psr_dpcd));
674 	drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
675 			  &intel_dp->pr_dpcd);
676 
677 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
678 		_panel_replay_init_dpcd(intel_dp);
679 
680 	if (intel_dp->psr_dpcd[0])
681 		_psr_init_dpcd(intel_dp);
682 
683 	if (intel_dp->psr.sink_psr2_support ||
684 	    intel_dp->psr.sink_panel_replay_su_support)
685 		intel_dp_get_su_granularity(intel_dp);
686 }
687 
688 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
689 {
690 	struct intel_display *display = to_intel_display(intel_dp);
691 	struct drm_i915_private *dev_priv = to_i915(display->drm);
692 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
693 	u32 aux_clock_divider, aux_ctl;
694 	/* write DP_SET_POWER=D0 */
695 	static const u8 aux_msg[] = {
696 		[0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
697 		[1] = (DP_SET_POWER >> 8) & 0xff,
698 		[2] = DP_SET_POWER & 0xff,
699 		[3] = 1 - 1,
700 		[4] = DP_SET_POWER_D0,
701 	};
702 	int i;
703 
704 	BUILD_BUG_ON(sizeof(aux_msg) > 20);
705 	for (i = 0; i < sizeof(aux_msg); i += 4)
706 		intel_de_write(dev_priv,
707 			       psr_aux_data_reg(display, cpu_transcoder, i >> 2),
708 			       intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
709 
710 	aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
711 
712 	/* Start with bits set for DDI_AUX_CTL register */
713 	aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
714 					     aux_clock_divider);
715 
716 	/* Select only valid bits for SRD_AUX_CTL */
717 	aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
718 		EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
719 		EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
720 		EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
721 
722 	intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
723 		       aux_ctl);
724 }
725 
726 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
727 {
728 	struct intel_display *display = to_intel_display(intel_dp);
729 
730 	if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
731 	    intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
732 		return false;
733 
734 	return panel_replay ?
735 		intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
736 		intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
737 		psr2_su_region_et_global_enabled(intel_dp);
738 }
739 
740 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
741 				      const struct intel_crtc_state *crtc_state)
742 {
743 	u8 val = DP_PANEL_REPLAY_ENABLE |
744 		DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
745 		DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
746 		DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
747 		DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
748 	u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
749 
750 	if (crtc_state->has_sel_update)
751 		val |= DP_PANEL_REPLAY_SU_ENABLE;
752 
753 	if (crtc_state->enable_psr2_su_region_et)
754 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
755 
756 	if (crtc_state->req_psr2_sdp_prior_scanline)
757 		panel_replay_config2 |=
758 			DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
759 
760 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
761 
762 	drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
763 			   panel_replay_config2);
764 }
765 
766 static void _psr_enable_sink(struct intel_dp *intel_dp,
767 			     const struct intel_crtc_state *crtc_state)
768 {
769 	struct intel_display *display = to_intel_display(intel_dp);
770 	u8 val = 0;
771 
772 	if (crtc_state->has_sel_update) {
773 		val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
774 	} else {
775 		if (intel_dp->psr.link_standby)
776 			val |= DP_PSR_MAIN_LINK_ACTIVE;
777 
778 		if (DISPLAY_VER(display) >= 8)
779 			val |= DP_PSR_CRC_VERIFICATION;
780 	}
781 
782 	if (crtc_state->req_psr2_sdp_prior_scanline)
783 		val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
784 
785 	if (crtc_state->enable_psr2_su_region_et)
786 		val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
787 
788 	if (intel_dp->psr.entry_setup_frames > 0)
789 		val |= DP_PSR_FRAME_CAPTURE;
790 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
791 
792 	val |= DP_PSR_ENABLE;
793 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
794 }
795 
796 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
797 				       const struct intel_crtc_state *crtc_state)
798 {
799 	u8 val;
800 
801 	/*
802 	 * eDP Panel Replay always uses ALPM.
803 	 * PSR2 uses ALPM but PSR1 doesn't.
804 	 */
805 	if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
806 					   !crtc_state->has_sel_update))
807 		return;
808 
809 	val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
810 
811 	if (crtc_state->has_panel_replay)
812 		val |= DP_ALPM_MODE_AUX_LESS;
813 
814 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
815 }
816 
817 void intel_psr_enable_sink(struct intel_dp *intel_dp,
818 			   const struct intel_crtc_state *crtc_state)
819 {
820 	intel_psr_enable_sink_alpm(intel_dp, crtc_state);
821 
822 	crtc_state->has_panel_replay ?
823 		_panel_replay_enable_sink(intel_dp, crtc_state) :
824 		_psr_enable_sink(intel_dp, crtc_state);
825 
826 	if (intel_dp_is_edp(intel_dp))
827 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
828 }
829 
830 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
831 {
832 	struct intel_display *display = to_intel_display(intel_dp);
833 	struct intel_connector *connector = intel_dp->attached_connector;
834 	struct drm_i915_private *dev_priv = to_i915(display->drm);
835 	u32 val = 0;
836 
837 	if (DISPLAY_VER(display) >= 11)
838 		val |= EDP_PSR_TP4_TIME_0us;
839 
840 	if (display->params.psr_safest_params) {
841 		val |= EDP_PSR_TP1_TIME_2500us;
842 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
843 		goto check_tp3_sel;
844 	}
845 
846 	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
847 		val |= EDP_PSR_TP1_TIME_0us;
848 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
849 		val |= EDP_PSR_TP1_TIME_100us;
850 	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
851 		val |= EDP_PSR_TP1_TIME_500us;
852 	else
853 		val |= EDP_PSR_TP1_TIME_2500us;
854 
855 	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
856 		val |= EDP_PSR_TP2_TP3_TIME_0us;
857 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
858 		val |= EDP_PSR_TP2_TP3_TIME_100us;
859 	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
860 		val |= EDP_PSR_TP2_TP3_TIME_500us;
861 	else
862 		val |= EDP_PSR_TP2_TP3_TIME_2500us;
863 
864 	/*
865 	 * WA 0479: hsw,bdw
866 	 * "Do not skip both TP1 and TP2/TP3"
867 	 */
868 	if (DISPLAY_VER(dev_priv) < 9 &&
869 	    connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
870 	    connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
871 		val |= EDP_PSR_TP2_TP3_TIME_100us;
872 
873 check_tp3_sel:
874 	if (intel_dp_source_supports_tps3(display) &&
875 	    drm_dp_tps3_supported(intel_dp->dpcd))
876 		val |= EDP_PSR_TP_TP1_TP3;
877 	else
878 		val |= EDP_PSR_TP_TP1_TP2;
879 
880 	return val;
881 }
882 
883 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
884 {
885 	struct intel_display *display = to_intel_display(intel_dp);
886 	struct intel_connector *connector = intel_dp->attached_connector;
887 	int idle_frames;
888 
889 	/* Let's use 6 as the minimum to cover all known cases including the
890 	 * off-by-one issue that HW has in some cases.
891 	 */
892 	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
893 	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
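	/*
	 * Worked example (illustrative): a VBT asking for 2 idle frames on a
	 * sink reporting a sync latency of 8 frames yields max(6, 2) = 6 and
	 * then max(6, 8 + 1) = 9 idle frames.
	 */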
894 
895 	if (drm_WARN_ON(display->drm, idle_frames > 0xf))
896 		idle_frames = 0xf;
897 
898 	return idle_frames;
899 }
900 
901 static void hsw_activate_psr1(struct intel_dp *intel_dp)
902 {
903 	struct intel_display *display = to_intel_display(intel_dp);
904 	struct drm_i915_private *dev_priv = to_i915(display->drm);
905 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
906 	u32 max_sleep_time = 0x1f;
907 	u32 val = EDP_PSR_ENABLE;
908 
909 	val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
910 
911 	if (DISPLAY_VER(display) < 20)
912 		val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
913 
914 	if (IS_HASWELL(dev_priv))
915 		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
916 
917 	if (intel_dp->psr.link_standby)
918 		val |= EDP_PSR_LINK_STANDBY;
919 
920 	val |= intel_psr1_get_tp_time(intel_dp);
921 
922 	if (DISPLAY_VER(display) >= 8)
923 		val |= EDP_PSR_CRC_ENABLE;
924 
925 	if (DISPLAY_VER(display) >= 20)
926 		val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
927 
928 	intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
929 		     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
930 }
931 
932 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
933 {
934 	struct intel_display *display = to_intel_display(intel_dp);
935 	struct intel_connector *connector = intel_dp->attached_connector;
936 	u32 val = 0;
937 
938 	if (display->params.psr_safest_params)
939 		return EDP_PSR2_TP2_TIME_2500us;
940 
941 	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
942 	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
943 		val |= EDP_PSR2_TP2_TIME_50us;
944 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
945 		val |= EDP_PSR2_TP2_TIME_100us;
946 	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
947 		val |= EDP_PSR2_TP2_TIME_500us;
948 	else
949 		val |= EDP_PSR2_TP2_TIME_2500us;
950 
951 	return val;
952 }
953 
954 static int psr2_block_count_lines(struct intel_dp *intel_dp)
955 {
956 	return intel_dp->alpm_parameters.io_wake_lines < 9 &&
957 		intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
958 }
959 
960 static int psr2_block_count(struct intel_dp *intel_dp)
961 {
962 	return psr2_block_count_lines(intel_dp) / 4;
963 }
964 
965 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
966 {
967 	u8 frames_before_su_entry;
968 
969 	frames_before_su_entry = max_t(u8,
970 				       intel_dp->psr.sink_sync_latency + 1,
971 				       2);
972 
973 	/* Entry setup frames must be at least 1 less than frames before SU entry */
974 	if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
975 		frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
976 
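	/*
	 * Worked example (illustrative): sink_sync_latency = 1 gives
	 * max(1 + 1, 2) = 2 frames; if entry_setup_frames is also 2, the
	 * result is bumped to 2 + 1 = 3 frames before SU entry.
	 */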
977 	return frames_before_su_entry;
978 }
979 
980 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
981 {
982 	struct intel_display *display = to_intel_display(intel_dp);
983 	struct intel_psr *psr = &intel_dp->psr;
984 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
985 
986 	if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
987 		u32 val = psr->su_region_et_enabled ?
988 			LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
989 
990 		if (intel_dp->psr.req_psr2_sdp_prior_scanline)
991 			val |= EDP_PSR2_SU_SDP_SCANLINE;
992 
993 		intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
994 			       val);
995 	}
996 
997 	intel_de_rmw(display,
998 		     PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
999 		     0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1000 
1001 	intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1002 		     TRANS_DP2_PANEL_REPLAY_ENABLE);
1003 }
1004 
1005 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1006 {
1007 	struct intel_display *display = to_intel_display(intel_dp);
1008 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1009 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1010 	u32 val = EDP_PSR2_ENABLE;
1011 	u32 psr_val = 0;
1012 
1013 	val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
1014 
1015 	if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))
1016 		val |= EDP_SU_TRACK_ENABLE;
1017 
1018 	if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1019 		val |= EDP_Y_COORDINATE_ENABLE;
1020 
1021 	val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1022 
1023 	val |= intel_psr2_get_tp_time(intel_dp);
1024 
1025 	if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1026 		if (psr2_block_count(intel_dp) > 2)
1027 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1028 		else
1029 			val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1030 	}
1031 
1032 	/* Wa_22012278275:adl-p */
1033 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1034 		static const u8 map[] = {
1035 			2, /* 5 lines */
1036 			1, /* 6 lines */
1037 			0, /* 7 lines */
1038 			3, /* 8 lines */
1039 			6, /* 9 lines */
1040 			5, /* 10 lines */
1041 			4, /* 11 lines */
1042 			7, /* 12 lines */
1043 		};
1044 		/*
1045 		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1046 		 * comments below for more information
1047 		 */
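		/*
		 * Worked example (illustrative, assuming the minimum of 5
		 * lines suggested by the map comments): io_wake_lines = 7
		 * selects map[7 - 5] = 0, so TGL_EDP_PSR2_IO_BUFFER_WAKE()
		 * is handed 0 + 5 = 5, i.e. the encoding nominally meaning
		 * 5 lines.
		 */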
1048 		int tmp;
1049 
1050 		tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1051 			  TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1052 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1053 
1054 		tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1055 		val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1056 	} else if (DISPLAY_VER(display) >= 20) {
1057 		val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1058 	} else if (DISPLAY_VER(display) >= 12) {
1059 		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1060 		val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1061 	} else if (DISPLAY_VER(display) >= 9) {
1062 		val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1063 		val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1064 	}
1065 
1066 	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1067 		val |= EDP_PSR2_SU_SDP_SCANLINE;
1068 
1069 	if (DISPLAY_VER(display) >= 20)
1070 		psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1071 
1072 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
1073 		u32 tmp;
1074 
1075 		tmp = intel_de_read(display,
1076 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1077 		drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1078 	} else if (HAS_PSR2_SEL_FETCH(display)) {
1079 		intel_de_write(display,
1080 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1081 	}
1082 
1083 	if (intel_dp->psr.su_region_et_enabled)
1084 		val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1085 
1086 	/*
1087 	 * PSR2 HW incorrectly uses EDP_PSR_TP1_TP3_SEL and BSpec
1088 	 * recommends keeping this bit unset while PSR2 is enabled.
1089 	 */
1090 	intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1091 
1092 	intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1093 }
1094 
1095 static bool
1096 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1097 {
1098 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1099 
1100 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1101 		return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1102 	else if (DISPLAY_VER(display) >= 12)
1103 		return cpu_transcoder == TRANSCODER_A;
1104 	else if (DISPLAY_VER(display) >= 9)
1105 		return cpu_transcoder == TRANSCODER_EDP;
1106 	else
1107 		return false;
1108 }
1109 
1110 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1111 {
1112 	if (!crtc_state->hw.active)
1113 		return 0;
1114 
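	/* e.g. a 60 Hz mode gives DIV_ROUND_UP(1000 * 1000, 60) = 16667 us */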
1115 	return DIV_ROUND_UP(1000 * 1000,
1116 			    drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1117 }
1118 
1119 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1120 				     u32 idle_frames)
1121 {
1122 	struct intel_display *display = to_intel_display(intel_dp);
1123 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1124 
1125 	intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1126 		     EDP_PSR2_IDLE_FRAMES_MASK,
1127 		     EDP_PSR2_IDLE_FRAMES(idle_frames));
1128 }
1129 
1130 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1131 {
1132 	struct intel_display *display = to_intel_display(intel_dp);
1133 
1134 	psr2_program_idle_frames(intel_dp, 0);
1135 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1136 }
1137 
1138 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1139 {
1140 	struct intel_display *display = to_intel_display(intel_dp);
1141 
1142 	intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1143 	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1144 }
1145 
1146 static void tgl_dc3co_disable_work(struct work_struct *work)
1147 {
1148 	struct intel_dp *intel_dp =
1149 		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1150 
1151 	mutex_lock(&intel_dp->psr.lock);
1152 	/* If delayed work is pending, it is not idle */
1153 	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1154 		goto unlock;
1155 
1156 	tgl_psr2_disable_dc3co(intel_dp);
1157 unlock:
1158 	mutex_unlock(&intel_dp->psr.lock);
1159 }
1160 
1161 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1162 {
1163 	if (!intel_dp->psr.dc3co_exitline)
1164 		return;
1165 
1166 	cancel_delayed_work(&intel_dp->psr.dc3co_work);
1167 	/* Before PSR2 exit disallow dc3co */
1168 	tgl_psr2_disable_dc3co(intel_dp);
1169 }
1170 
1171 static bool
1172 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1173 			      struct intel_crtc_state *crtc_state)
1174 {
1175 	struct intel_display *display = to_intel_display(intel_dp);
1176 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1177 	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1178 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1179 	enum port port = dig_port->base.port;
1180 
1181 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1182 		return pipe <= PIPE_B && port <= PORT_B;
1183 	else
1184 		return pipe == PIPE_A && port == PORT_A;
1185 }
1186 
1187 static void
1188 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1189 				  struct intel_crtc_state *crtc_state)
1190 {
1191 	struct intel_display *display = to_intel_display(intel_dp);
1192 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1193 	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1194 	struct i915_power_domains *power_domains = &display->power.domains;
1195 	u32 exit_scanlines;
1196 
1197 	/*
1198 	 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1199 	 * disable DC3CO until the changed dc3co activating/deactivating sequence
1200 	 * is applied. B.Specs:49196
1201 	 */
1202 	return;
1203 
1204 	/*
1205 	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1206 	 * TODO: when the issue is addressed, this restriction should be removed.
1207 	 */
1208 	if (crtc_state->enable_psr2_sel_fetch)
1209 		return;
1210 
1211 	if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1212 		return;
1213 
1214 	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1215 		return;
1216 
1217 	/* Wa_16011303918:adl-p */
1218 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1219 		return;
1220 
1221 	/*
1222 	 * DC3CO Exit time 200us B.Spec 49196
1223 	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1224 	 */
1225 	exit_scanlines =
1226 		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
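	/*
	 * Worked example (illustrative): a 1920x1080@60 mode with a ~14.8 us
	 * line time needs ROUNDUP(200 / 14.8) + 1 = 15 exit scanlines, so
	 * dc3co_exitline = 1080 - 15 = 1065.
	 */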
1227 
1228 	if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1229 		return;
1230 
1231 	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1232 }
1233 
1234 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1235 					      struct intel_crtc_state *crtc_state)
1236 {
1237 	struct intel_display *display = to_intel_display(intel_dp);
1238 
1239 	if (!display->params.enable_psr2_sel_fetch &&
1240 	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1241 		drm_dbg_kms(display->drm,
1242 			    "PSR2 sel fetch not enabled, disabled by parameter\n");
1243 		return false;
1244 	}
1245 
1246 	if (crtc_state->uapi.async_flip) {
1247 		drm_dbg_kms(display->drm,
1248 			    "PSR2 sel fetch not enabled, async flip enabled\n");
1249 		return false;
1250 	}
1251 
1252 	return crtc_state->enable_psr2_sel_fetch = true;
1253 }
1254 
1255 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1256 				   struct intel_crtc_state *crtc_state)
1257 {
1258 	struct intel_display *display = to_intel_display(intel_dp);
1259 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1260 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1261 	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1262 	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1263 	u16 y_granularity = 0;
1264 
1265 	/* PSR2 HW only sends full lines so we only need to validate the width */
1266 	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1267 		return false;
1268 
1269 	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1270 		return false;
1271 
1272 	/* HW tracking is only aligned to 4 lines */
1273 	if (!crtc_state->enable_psr2_sel_fetch)
1274 		return intel_dp->psr.su_y_granularity == 4;
1275 
1276 	/*
1277 	 * adl_p and mtl platforms have 1 line granularity.
1278 	 * For other platforms with SW tracking we can adjust the y coordinates
1279 	 * to match sink requirement if multiple of 4.
1280 	 */
1281 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1282 		y_granularity = intel_dp->psr.su_y_granularity;
1283 	else if (intel_dp->psr.su_y_granularity <= 2)
1284 		y_granularity = 4;
1285 	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1286 		y_granularity = intel_dp->psr.su_y_granularity;
1287 
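	/*
	 * Worked example (illustrative): with selective fetch on a pre-adl_p
	 * platform, a sink reporting su_y_granularity = 2 is rounded up to
	 * y_granularity = 4; a 1080 line tall mode then divides evenly and
	 * the check passes (assuming DSC is off or its slice height also
	 * divides evenly).
	 */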
1288 	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1289 		return false;
1290 
1291 	if (crtc_state->dsc.compression_enable &&
1292 	    vdsc_cfg->slice_height % y_granularity)
1293 		return false;
1294 
1295 	crtc_state->su_y_granularity = y_granularity;
1296 	return true;
1297 }
1298 
1299 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1300 							struct intel_crtc_state *crtc_state)
1301 {
1302 	struct intel_display *display = to_intel_display(intel_dp);
1303 	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1304 	u32 hblank_total, hblank_ns, req_ns;
1305 
1306 	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1307 	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1308 
1309 	/* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1310 	req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
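	/*
	 * Worked example (illustrative): 4 lanes at HBR2 (port_clock of
	 * 540000 kHz, i.e. a 540 MHz symbol clock) gives
	 * req_ns = ((60 / 4) + 11) * 1000 / 540 ~= 48 ns; with 160 hblank
	 * pixels at a 300 MHz pixel clock, hblank_ns ~= 533 ns, so the
	 * margin is well above 100 ns and req_psr2_sdp_prior_scanline is
	 * not needed.
	 */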
1311 
1312 	if ((hblank_ns - req_ns) > 100)
1313 		return true;
1314 
1315 	/* Not supported <13 / Wa_22012279113:adl-p */
1316 	if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1317 		return false;
1318 
1319 	crtc_state->req_psr2_sdp_prior_scanline = true;
1320 	return true;
1321 }
1322 
1323 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1324 					const struct drm_display_mode *adjusted_mode)
1325 {
1326 	struct intel_display *display = to_intel_display(intel_dp);
1327 	int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1328 	int entry_setup_frames = 0;
1329 
1330 	if (psr_setup_time < 0) {
1331 		drm_dbg_kms(display->drm,
1332 			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1333 			    intel_dp->psr_dpcd[1]);
1334 		return -ETIME;
1335 	}
1336 
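	/*
	 * Example (illustrative): a sink advertising 330 us of PSR setup time
	 * needs 23 scanlines at a ~14.8 us line time; on a mode whose vblank
	 * leaves only 20 usable lines (crtc_vtotal - crtc_vdisplay - 1) this
	 * does not fit, so pre-LNL platforms reject PSR while LNL+ falls back
	 * to one entry setup frame.
	 */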
1337 	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1338 	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1339 		if (DISPLAY_VER(display) >= 20) {
1340 			/* setup entry frames can be up to 3 frames */
1341 			entry_setup_frames = 1;
1342 			drm_dbg_kms(display->drm,
1343 				    "PSR setup entry frames %d\n",
1344 				    entry_setup_frames);
1345 		} else {
1346 			drm_dbg_kms(display->drm,
1347 				    "PSR condition failed: PSR setup time (%d us) too long\n",
1348 				    psr_setup_time);
1349 			return -ETIME;
1350 		}
1351 	}
1352 
1353 	return entry_setup_frames;
1354 }
1355 
1356 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1357 				       const struct intel_crtc_state *crtc_state,
1358 				       bool aux_less)
1359 {
1360 	struct intel_display *display = to_intel_display(intel_dp);
1361 	int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1362 		crtc_state->hw.adjusted_mode.crtc_vblank_start;
1363 	int wake_lines;
1364 
1365 	if (aux_less)
1366 		wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1367 	else
1368 		wake_lines = DISPLAY_VER(display) < 20 ?
1369 			psr2_block_count_lines(intel_dp) :
1370 			intel_dp->alpm_parameters.io_wake_lines;
1371 
1372 	if (crtc_state->req_psr2_sdp_prior_scanline)
1373 		vblank -= 1;
1374 
1375 	/* Vblank >= PSR2_CTL Block Count Number maximum line count */
1376 	if (vblank < wake_lines)
1377 		return false;
1378 
1379 	return true;
1380 }
1381 
1382 static bool alpm_config_valid(struct intel_dp *intel_dp,
1383 			      const struct intel_crtc_state *crtc_state,
1384 			      bool aux_less)
1385 {
1386 	struct intel_display *display = to_intel_display(intel_dp);
1387 
1388 	if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1389 		drm_dbg_kms(display->drm,
1390 			    "PSR2/Panel Replay  not enabled, Unable to use long enough wake times\n");
1391 		return false;
1392 	}
1393 
1394 	if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1395 		drm_dbg_kms(display->drm,
1396 			    "PSR2/Panel Replay not enabled, too short vblank time\n");
1397 		return false;
1398 	}
1399 
1400 	return true;
1401 }
1402 
1403 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1404 				    struct intel_crtc_state *crtc_state)
1405 {
1406 	struct intel_display *display = to_intel_display(intel_dp);
1407 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1408 	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1409 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1410 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1411 
1412 	if (!intel_dp->psr.sink_psr2_support)
1413 		return false;
1414 
1415 	/* JSL and EHL only supports eDP 1.3 */
1416 	if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
1417 		drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1418 		return false;
1419 	}
1420 
1421 	/* Wa_16011181250 */
1422 	if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
1423 	    IS_DG2(dev_priv)) {
1424 		drm_dbg_kms(display->drm,
1425 			    "PSR2 is defeatured for this platform\n");
1426 		return false;
1427 	}
1428 
1429 	if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1430 		drm_dbg_kms(display->drm,
1431 			    "PSR2 not completely functional in this stepping\n");
1432 		return false;
1433 	}
1434 
1435 	if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1436 		drm_dbg_kms(display->drm,
1437 			    "PSR2 not supported in transcoder %s\n",
1438 			    transcoder_name(crtc_state->cpu_transcoder));
1439 		return false;
1440 	}
1441 
1442 	/*
1443 	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1444 	 * resolution requires DSC to be enabled, priority is given to DSC
1445 	 * over PSR2.
1446 	 */
1447 	if (crtc_state->dsc.compression_enable &&
1448 	    (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) {
1449 		drm_dbg_kms(display->drm,
1450 			    "PSR2 cannot be enabled since DSC is enabled\n");
1451 		return false;
1452 	}
1453 
1454 	if (DISPLAY_VER(display) >= 20) {
1455 		psr_max_h = crtc_hdisplay;
1456 		psr_max_v = crtc_vdisplay;
1457 		max_bpp = crtc_state->pipe_bpp;
1458 	} else if (IS_DISPLAY_VER(display, 12, 14)) {
1459 		psr_max_h = 5120;
1460 		psr_max_v = 3200;
1461 		max_bpp = 30;
1462 	} else if (IS_DISPLAY_VER(display, 10, 11)) {
1463 		psr_max_h = 4096;
1464 		psr_max_v = 2304;
1465 		max_bpp = 24;
1466 	} else if (DISPLAY_VER(display) == 9) {
1467 		psr_max_h = 3640;
1468 		psr_max_v = 2304;
1469 		max_bpp = 24;
1470 	}
1471 
1472 	if (crtc_state->pipe_bpp > max_bpp) {
1473 		drm_dbg_kms(display->drm,
1474 			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1475 			    crtc_state->pipe_bpp, max_bpp);
1476 		return false;
1477 	}
1478 
1479 	/* Wa_16011303918:adl-p */
1480 	if (crtc_state->vrr.enable &&
1481 	    IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1482 		drm_dbg_kms(display->drm,
1483 			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1484 		return false;
1485 	}
1486 
1487 	if (!alpm_config_valid(intel_dp, crtc_state, false))
1488 		return false;
1489 
1490 	if (!crtc_state->enable_psr2_sel_fetch &&
1491 	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1492 		drm_dbg_kms(display->drm,
1493 			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1494 			    crtc_hdisplay, crtc_vdisplay,
1495 			    psr_max_h, psr_max_v);
1496 		return false;
1497 	}
1498 
1499 	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1500 
1501 	return true;
1502 }
1503 
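/*
 * Decide whether selective update (PSR2 or Panel Replay SU) can be used for
 * this crtc state; on any failure selective fetch is disabled as well.
 */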
1504 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1505 					  struct intel_crtc_state *crtc_state)
1506 {
1507 	struct intel_display *display = to_intel_display(intel_dp);
1508 
1509 	if (HAS_PSR2_SEL_FETCH(display) &&
1510 	    !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1511 	    !HAS_PSR_HW_TRACKING(display)) {
1512 		drm_dbg_kms(display->drm,
1513 			    "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1514 		goto unsupported;
1515 	}
1516 
1517 	if (!psr2_global_enabled(intel_dp)) {
1518 		drm_dbg_kms(display->drm,
1519 			    "Selective update disabled by flag\n");
1520 		goto unsupported;
1521 	}
1522 
1523 	if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1524 		goto unsupported;
1525 
1526 	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1527 		drm_dbg_kms(display->drm,
1528 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1529 		goto unsupported;
1530 	}
1531 
1532 	if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1533 					     !intel_dp->psr.sink_panel_replay_su_support))
1534 		goto unsupported;
1535 
1536 	if (crtc_state->crc_enabled) {
1537 		drm_dbg_kms(display->drm,
1538 			    "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1539 		goto unsupported;
1540 	}
1541 
1542 	if (!psr2_granularity_check(intel_dp, crtc_state)) {
1543 		drm_dbg_kms(display->drm,
1544 			    "Selective update not enabled, SU granularity not compatible\n");
1545 		goto unsupported;
1546 	}
1547 
1548 	crtc_state->enable_psr2_su_region_et =
1549 		psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1550 
1551 	return true;
1552 
1553 unsupported:
1554 	crtc_state->enable_psr2_sel_fetch = false;
1555 	return false;
1556 }
1557 
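/* Check the PSR1-level constraints and record the entry setup frames. */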
1558 static bool _psr_compute_config(struct intel_dp *intel_dp,
1559 				struct intel_crtc_state *crtc_state)
1560 {
1561 	struct intel_display *display = to_intel_display(intel_dp);
1562 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1563 	int entry_setup_frames;
1564 
1565 	if (!CAN_PSR(intel_dp))
1566 		return false;
1567 
1568 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1569 
1570 	if (entry_setup_frames >= 0) {
1571 		intel_dp->psr.entry_setup_frames = entry_setup_frames;
1572 	} else {
1573 		drm_dbg_kms(display->drm,
1574 			    "PSR condition failed: PSR setup timing not met\n");
1575 		return false;
1576 	}
1577 
1578 	return true;
1579 }
1580 
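/*
 * Check whether Panel Replay can be used. For eDP additional constraints
 * apply: pipe A/B only, no 128b/132b link, no HDCP, valid AUX-less ALPM
 * config and no pipe CRC.
 */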
1581 static bool
1582 _panel_replay_compute_config(struct intel_dp *intel_dp,
1583 			     const struct intel_crtc_state *crtc_state,
1584 			     const struct drm_connector_state *conn_state)
1585 {
1586 	struct intel_display *display = to_intel_display(intel_dp);
1587 	struct intel_connector *connector =
1588 		to_intel_connector(conn_state->connector);
1589 	struct intel_hdcp *hdcp = &connector->hdcp;
1590 
1591 	if (!CAN_PANEL_REPLAY(intel_dp))
1592 		return false;
1593 
1594 	if (!panel_replay_global_enabled(intel_dp)) {
1595 		drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1596 		return false;
1597 	}
1598 
1599 	if (!intel_dp_is_edp(intel_dp))
1600 		return true;
1601 
1602 	/* Remaining checks are for eDP only */
1603 
1604 	if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1605 	    to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1606 		return false;
1607 
1608 	/* 128b/132b Panel Replay is not supported on eDP */
1609 	if (intel_dp_is_uhbr(crtc_state)) {
1610 		drm_dbg_kms(display->drm,
1611 			    "Panel Replay is not supported with 128b/132b\n");
1612 		return false;
1613 	}
1614 
1615 	/* HW will not allow Panel Replay on eDP when HDCP is enabled */
1616 	if (conn_state->content_protection ==
1617 	    DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1618 	    (conn_state->content_protection ==
1619 	     DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1620 	     DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1621 		drm_dbg_kms(display->drm,
1622 			    "Panel Replay is not supported with HDCP\n");
1623 		return false;
1624 	}
1625 
1626 	if (!alpm_config_valid(intel_dp, crtc_state, true))
1627 		return false;
1628 
1629 	if (crtc_state->crc_enabled) {
1630 		drm_dbg_kms(display->drm,
1631 			    "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1632 		return false;
1633 	}
1634 
1635 	return true;
1636 }
1637 
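/*
 * Wa_18037818876: on display version 20, PSR1 (no selective update) combined
 * with a non-zero number of entry setup frames hits a PSR FSM hang issue, so
 * PSR is kept disabled in that case (see intel_psr_compute_config()).
 */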
1638 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1639 					   struct intel_crtc_state *crtc_state)
1640 {
1641 	struct intel_display *display = to_intel_display(intel_dp);
1642 
1643 	return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1644 		!crtc_state->has_sel_update);
1645 }
1646 
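/**
 * intel_psr_compute_config - Compute PSR/Panel Replay configuration
 * @intel_dp: Intel DP
 * @crtc_state: CRTC state
 * @conn_state: connector state
 *
 * Evaluate the global enable flags and the crtc state and fill in
 * has_panel_replay, has_psr and has_sel_update in @crtc_state accordingly.
 */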
1647 void intel_psr_compute_config(struct intel_dp *intel_dp,
1648 			      struct intel_crtc_state *crtc_state,
1649 			      struct drm_connector_state *conn_state)
1650 {
1651 	struct intel_display *display = to_intel_display(intel_dp);
1652 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1653 
1654 	if (!psr_global_enabled(intel_dp)) {
1655 		drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1656 		return;
1657 	}
1658 
1659 	if (intel_dp->psr.sink_not_reliable) {
1660 		drm_dbg_kms(display->drm,
1661 			    "PSR sink implementation is not reliable\n");
1662 		return;
1663 	}
1664 
1665 	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1666 		drm_dbg_kms(display->drm,
1667 			    "PSR condition failed: Interlaced mode enabled\n");
1668 		return;
1669 	}
1670 
1671 	/*
1672 	 * FIXME figure out what is wrong with PSR+joiner and
1673 	 * fix it. Presumably something related to the fact that
1674 	 * PSR is a transcoder level feature.
1675 	 */
1676 	if (crtc_state->joiner_pipes) {
1677 		drm_dbg_kms(display->drm,
1678 			    "PSR disabled due to joiner\n");
1679 		return;
1680 	}
1681 
1682 	/*
1683 	 * Currently PSR/PR doesn't work reliably with VRR enabled.
1684 	 */
1685 	if (crtc_state->vrr.enable)
1686 		return;
1687 
1688 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1689 								    crtc_state,
1690 								    conn_state);
1691 
1692 	crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1693 		_psr_compute_config(intel_dp, crtc_state);
1694 
1695 	if (!crtc_state->has_psr)
1696 		return;
1697 
1698 	crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1699 
1700 	/* Wa_18037818876 */
1701 	if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1702 		crtc_state->has_psr = false;
1703 		drm_dbg_kms(display->drm,
1704 			    "PSR disabled to workaround PSR FSM hang issue\n");
1705 	}
1706 }
1707 
1708 void intel_psr_get_config(struct intel_encoder *encoder,
1709 			  struct intel_crtc_state *pipe_config)
1710 {
1711 	struct intel_display *display = to_intel_display(encoder);
1712 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1713 	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1714 	struct intel_dp *intel_dp;
1715 	u32 val;
1716 
1717 	if (!dig_port)
1718 		return;
1719 
1720 	intel_dp = &dig_port->dp;
1721 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1722 		return;
1723 
1724 	mutex_lock(&intel_dp->psr.lock);
1725 	if (!intel_dp->psr.enabled)
1726 		goto unlock;
1727 
1728 	if (intel_dp->psr.panel_replay_enabled) {
1729 		pipe_config->has_psr = pipe_config->has_panel_replay = true;
1730 	} else {
1731 		/*
1732 		 * Not possible to read EDP_PSR/PSR2_CTL registers as they get
1733 		 * enabled/disabled because of frontbuffer tracking and other reasons.
1734 		 */
1735 		pipe_config->has_psr = true;
1736 	}
1737 
1738 	pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1739 	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1740 
1741 	if (!intel_dp->psr.sel_update_enabled)
1742 		goto unlock;
1743 
1744 	if (HAS_PSR2_SEL_FETCH(display)) {
1745 		val = intel_de_read(display,
1746 				    PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1747 		if (val & PSR2_MAN_TRK_CTL_ENABLE)
1748 			pipe_config->enable_psr2_sel_fetch = true;
1749 	}
1750 
1751 	pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1752 
1753 	if (DISPLAY_VER(display) >= 12) {
1754 		val = intel_de_read(display,
1755 				    TRANS_EXITLINE(display, cpu_transcoder));
1756 		pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1757 	}
1758 unlock:
1759 	mutex_unlock(&intel_dp->psr.lock);
1760 }
1761 
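/*
 * Set the hardware enable bit for whichever flavour was negotiated: Panel
 * Replay, PSR2 or PSR1. Must be called with psr.lock held.
 */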
1762 static void intel_psr_activate(struct intel_dp *intel_dp)
1763 {
1764 	struct intel_display *display = to_intel_display(intel_dp);
1765 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1766 
1767 	drm_WARN_ON(display->drm,
1768 		    transcoder_has_psr2(display, cpu_transcoder) &&
1769 		    intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1770 
1771 	drm_WARN_ON(display->drm,
1772 		    intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1773 
1774 	drm_WARN_ON(display->drm, intel_dp->psr.active);
1775 
1776 	lockdep_assert_held(&intel_dp->psr.lock);
1777 
1778 	/* psr1, psr2 and panel-replay are mutually exclusive. */
1779 	if (intel_dp->psr.panel_replay_enabled)
1780 		dg2_activate_panel_replay(intel_dp);
1781 	else if (intel_dp->psr.sel_update_enabled)
1782 		hsw_activate_psr2(intel_dp);
1783 	else
1784 		hsw_activate_psr1(intel_dp);
1785 
1786 	intel_dp->psr.active = true;
1787 }
1788 
1789 /*
1790  * Wa_16013835468
1791  * Wa_14015648006
1792  */
1793 static void wm_optimization_wa(struct intel_dp *intel_dp,
1794 			       const struct intel_crtc_state *crtc_state)
1795 {
1796 	struct intel_display *display = to_intel_display(intel_dp);
1797 	enum pipe pipe = intel_dp->psr.pipe;
1798 	bool activate = false;
1799 
1800 	/* Wa_14015648006 */
1801 	if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1802 		activate = true;
1803 
1804 	/* Wa_16013835468 */
1805 	if (DISPLAY_VER(display) == 12 &&
1806 	    crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1807 	    crtc_state->hw.adjusted_mode.crtc_vdisplay)
1808 		activate = true;
1809 
1810 	if (activate)
1811 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1812 			     0, LATENCY_REPORTING_REMOVED(pipe));
1813 	else
1814 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1815 			     LATENCY_REPORTING_REMOVED(pipe), 0);
1816 }
1817 
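/*
 * Program the source (display controller) side of PSR: debug masks, IRQs,
 * DC3CO exit line, selective fetch chicken bits, ALPM and the related
 * workarounds.
 */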
1818 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1819 				    const struct intel_crtc_state *crtc_state)
1820 {
1821 	struct intel_display *display = to_intel_display(intel_dp);
1822 	struct drm_i915_private *dev_priv = to_i915(display->drm);
1823 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1824 	u32 mask = 0;
1825 
1826 	/*
1827 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1828 	 * SKL+ use hardcoded values for PSR AUX transactions.
1829 	 */
1830 	if (DISPLAY_VER(display) < 9)
1831 		hsw_psr_setup_aux(intel_dp);
1832 
1833 	/*
1834 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1835 	 * mask LPSP to avoid dependency on other drivers that might block
1836 	 * runtime_pm, besides preventing other hw tracking issues, now that
1837 	 * we can rely on frontbuffer tracking.
1838 	 *
1839 	 * From bspec prior to LunarLake:
1840 	 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1841 	 * panel replay mode.
1842 	 *
1843 	 * From bspec beyond LunarLake:
1844 	 * Panel Replay on DP: No bits are applicable
1845 	 * Panel Replay on eDP: All bits are applicable
1846 	 */
1847 	if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1848 		mask = EDP_PSR_DEBUG_MASK_HPD;
1849 
1850 	if (intel_dp_is_edp(intel_dp)) {
1851 		mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1852 
1853 		/*
1854 		 * For some unknown reason on HSW non-ULT (or at least on
1855 		 * Dell Latitude E6540) external displays start to flicker
1856 		 * when PSR is enabled on the eDP. SR/PC6 residency is much
1857 		 * higher than should be possible with an external display.
1858 		 * As a workaround leave LPSP unmasked to prevent PSR entry
1859 		 * when external displays are active.
1860 		 */
1861 		if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv))
1862 			mask |= EDP_PSR_DEBUG_MASK_LPSP;
1863 
1864 		if (DISPLAY_VER(display) < 20)
1865 			mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1866 
1867 		/*
1868 		 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1869 		 * registers in order to keep the CURSURFLIVE tricks working :(
1870 		 */
1871 		if (IS_DISPLAY_VER(display, 9, 10))
1872 			mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1873 
1874 		/* allow PSR with sprite enabled */
1875 		if (IS_HASWELL(dev_priv))
1876 			mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1877 	}
1878 
1879 	intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1880 
1881 	psr_irq_control(intel_dp);
1882 
1883 	/*
1884 	 * TODO: if future platforms support DC3CO in more than one
1885 	 * transcoder, EXITLINE will need to be unset when disabling PSR
1886 	 */
1887 	if (intel_dp->psr.dc3co_exitline)
1888 		intel_de_rmw(display,
1889 			     TRANS_EXITLINE(display, cpu_transcoder),
1890 			     EXITLINE_MASK,
1891 			     intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1892 
1893 	if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1894 		intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1895 			     intel_dp->psr.psr2_sel_fetch_enabled ?
1896 			     IGNORE_PSR2_HW_TRACKING : 0);
1897 
1898 	if (intel_dp_is_edp(intel_dp))
1899 		intel_alpm_configure(intel_dp, crtc_state);
1900 
1901 	/*
1902 	 * Wa_16013835468
1903 	 * Wa_14015648006
1904 	 */
1905 	wm_optimization_wa(intel_dp, crtc_state);
1906 
1907 	if (intel_dp->psr.sel_update_enabled) {
1908 		if (DISPLAY_VER(display) == 9)
1909 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1910 				     PSR2_VSC_ENABLE_PROG_HEADER |
1911 				     PSR2_ADD_VERTICAL_LINE_COUNT);
1912 
1913 		/*
1914 		 * Wa_16014451276:adlp,mtl[a0,b0]
1915 		 * All supported adlp panels have 1-based X granularity, this may
1916 		 * cause issues if non-supported panels are used.
1917 		 */
1918 		if (!intel_dp->psr.panel_replay_enabled &&
1919 		    (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1920 		     IS_ALDERLAKE_P(dev_priv)))
1921 			intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1922 				     0, ADLP_1_BASED_X_GRANULARITY);
1923 
1924 		/* Wa_16012604467:adlp,mtl[a0,b0] */
1925 		if (!intel_dp->psr.panel_replay_enabled &&
1926 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1927 			intel_de_rmw(display,
1928 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1929 				     0,
1930 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1931 		else if (IS_ALDERLAKE_P(dev_priv))
1932 			intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1933 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1934 	}
1935 }
1936 
1937 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1938 {
1939 	struct intel_display *display = to_intel_display(intel_dp);
1940 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1941 	u32 val;
1942 
1943 	if (intel_dp->psr.panel_replay_enabled)
1944 		goto no_err;
1945 
1946 	/*
1947 	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1948 	 * will still keep the error set even after the reset done in the
1949 	 * irq_preinstall and irq_uninstall hooks.
1950 	 * Enabling PSR in this situation causes the screen to freeze the
1951 	 * first time that PSR HW tries to activate, so let's keep PSR disabled
1952 	 * to avoid any rendering problems.
1953 	 */
1954 	val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1955 	val &= psr_irq_psr_error_bit_get(intel_dp);
1956 	if (val) {
1957 		intel_dp->psr.sink_not_reliable = true;
1958 		drm_dbg_kms(display->drm,
1959 			    "PSR interruption error set, not enabling PSR\n");
1960 		return false;
1961 	}
1962 
1963 no_err:
1964 	return true;
1965 }
1966 
1967 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1968 				    const struct intel_crtc_state *crtc_state)
1969 {
1970 	struct intel_display *display = to_intel_display(intel_dp);
1971 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1972 	u32 val;
1973 
1974 	drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1975 
1976 	intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1977 	intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1978 	intel_dp->psr.busy_frontbuffer_bits = 0;
1979 	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1980 	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1981 	/* DC5/DC6 requires at least 6 idle frames */
1982 	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1983 	intel_dp->psr.dc3co_exit_delay = val;
1984 	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1985 	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1986 	intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1987 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1988 	intel_dp->psr.req_psr2_sdp_prior_scanline =
1989 		crtc_state->req_psr2_sdp_prior_scanline;
1990 
1991 	if (!psr_interrupt_error_check(intel_dp))
1992 		return;
1993 
1994 	if (intel_dp->psr.panel_replay_enabled) {
1995 		drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
1996 	} else {
1997 		drm_dbg_kms(display->drm, "Enabling PSR%s\n",
1998 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
1999 
2000 		/*
2001 		 * Panel replay has to be enabled before link training: doing it
2002 		 * only for PSR here.
2003 		 */
2004 		intel_psr_enable_sink(intel_dp, crtc_state);
2005 	}
2006 
2007 	if (intel_dp_is_edp(intel_dp))
2008 		intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2009 
2010 	intel_psr_enable_source(intel_dp, crtc_state);
2011 	intel_dp->psr.enabled = true;
2012 	intel_dp->psr.paused = false;
2013 
2014 	/*
2015 	 * Link_ok is sticky and set here on PSR enable. We can assume link
2016 	 * training is complete as we never continue to PSR enable with
2017 	 * untrained link. Link_ok is kept as set until first short pulse
2018 	 * interrupt. This is targeted at working around panels that report a bad link
2019 	 * after PSR is enabled.
2020 	 */
2021 	intel_dp->psr.link_ok = true;
2022 
2023 	intel_psr_activate(intel_dp);
2024 }
2025 
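/*
 * Clear the source PSR/PSR2/Panel Replay enable bit. The sink side is left
 * untouched here; the full teardown is done in intel_psr_disable_locked().
 */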
2026 static void intel_psr_exit(struct intel_dp *intel_dp)
2027 {
2028 	struct intel_display *display = to_intel_display(intel_dp);
2029 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2030 	u32 val;
2031 
2032 	if (!intel_dp->psr.active) {
2033 		if (transcoder_has_psr2(display, cpu_transcoder)) {
2034 			val = intel_de_read(display,
2035 					    EDP_PSR2_CTL(display, cpu_transcoder));
2036 			drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2037 		}
2038 
2039 		val = intel_de_read(display,
2040 				    psr_ctl_reg(display, cpu_transcoder));
2041 		drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2042 
2043 		return;
2044 	}
2045 
2046 	if (intel_dp->psr.panel_replay_enabled) {
2047 		intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2048 			     TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2049 	} else if (intel_dp->psr.sel_update_enabled) {
2050 		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2051 
2052 		val = intel_de_rmw(display,
2053 				   EDP_PSR2_CTL(display, cpu_transcoder),
2054 				   EDP_PSR2_ENABLE, 0);
2055 
2056 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2057 	} else {
2058 		val = intel_de_rmw(display,
2059 				   psr_ctl_reg(display, cpu_transcoder),
2060 				   EDP_PSR_ENABLE, 0);
2061 
2062 		drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2063 	}
2064 	intel_dp->psr.active = false;
2065 }
2066 
2067 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2068 {
2069 	struct intel_display *display = to_intel_display(intel_dp);
2070 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2071 	i915_reg_t psr_status;
2072 	u32 psr_status_mask;
2073 
2074 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2075 					  intel_dp->psr.panel_replay_enabled)) {
2076 		psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2077 		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2078 	} else {
2079 		psr_status = psr_status_reg(display, cpu_transcoder);
2080 		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2081 	}
2082 
2083 	/* Wait till PSR is idle */
2084 	if (intel_de_wait_for_clear(display, psr_status,
2085 				    psr_status_mask, 2000))
2086 		drm_err(display->drm, "Timed out waiting for PSR idle state\n");
2087 }
2088 
2089 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2090 {
2091 	struct intel_display *display = to_intel_display(intel_dp);
2092 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2093 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2094 
2095 	lockdep_assert_held(&intel_dp->psr.lock);
2096 
2097 	if (!intel_dp->psr.enabled)
2098 		return;
2099 
2100 	if (intel_dp->psr.panel_replay_enabled)
2101 		drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2102 	else
2103 		drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2104 			    intel_dp->psr.sel_update_enabled ? "2" : "1");
2105 
2106 	intel_psr_exit(intel_dp);
2107 	intel_psr_wait_exit_locked(intel_dp);
2108 
2109 	/*
2110 	 * Wa_16013835468
2111 	 * Wa_14015648006
2112 	 */
2113 	if (DISPLAY_VER(display) >= 11)
2114 		intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2115 			     LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2116 
2117 	if (intel_dp->psr.sel_update_enabled) {
2118 		/* Wa_16012604467:adlp,mtl[a0,b0] */
2119 		if (!intel_dp->psr.panel_replay_enabled &&
2120 		    IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2121 			intel_de_rmw(display,
2122 				     MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2123 				     MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2124 		else if (IS_ALDERLAKE_P(dev_priv))
2125 			intel_de_rmw(display, CLKGATE_DIS_MISC,
2126 				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2127 	}
2128 
2129 	if (intel_dp_is_edp(intel_dp))
2130 		intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2131 
2132 	/* Panel Replay on eDP is always using ALPM aux less. */
2133 	if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2134 		intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2135 			     ALPM_CTL_ALPM_ENABLE |
2136 			     ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2137 
2138 		intel_de_rmw(display,
2139 			     PORT_ALPM_CTL(cpu_transcoder),
2140 			     PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2141 	}
2142 
2143 	/* Disable PSR on Sink */
2144 	if (!intel_dp->psr.panel_replay_enabled) {
2145 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2146 
2147 		if (intel_dp->psr.sel_update_enabled)
2148 			drm_dp_dpcd_writeb(&intel_dp->aux,
2149 					   DP_RECEIVER_ALPM_CONFIG, 0);
2150 	}
2151 
2152 	intel_dp->psr.enabled = false;
2153 	intel_dp->psr.panel_replay_enabled = false;
2154 	intel_dp->psr.sel_update_enabled = false;
2155 	intel_dp->psr.psr2_sel_fetch_enabled = false;
2156 	intel_dp->psr.su_region_et_enabled = false;
2157 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2158 }
2159 
2160 /**
2161  * intel_psr_disable - Disable PSR
2162  * @intel_dp: Intel DP
2163  * @old_crtc_state: old CRTC state
2164  *
2165  * This function needs to be called before disabling pipe.
2166  */
2167 void intel_psr_disable(struct intel_dp *intel_dp,
2168 		       const struct intel_crtc_state *old_crtc_state)
2169 {
2170 	struct intel_display *display = to_intel_display(intel_dp);
2171 
2172 	if (!old_crtc_state->has_psr)
2173 		return;
2174 
2175 	if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp)))
2176 		return;
2177 
2178 	mutex_lock(&intel_dp->psr.lock);
2179 
2180 	intel_psr_disable_locked(intel_dp);
2181 
2182 	intel_dp->psr.link_ok = false;
2183 
2184 	mutex_unlock(&intel_dp->psr.lock);
2185 	cancel_work_sync(&intel_dp->psr.work);
2186 	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2187 }
2188 
2189 /**
2190  * intel_psr_pause - Pause PSR
2191  * @intel_dp: Intel DP
2192  *
2193  * This function needs to be called after enabling PSR.
2194  */
2195 void intel_psr_pause(struct intel_dp *intel_dp)
2196 {
2197 	struct intel_display *display = to_intel_display(intel_dp);
2198 	struct intel_psr *psr = &intel_dp->psr;
2199 
2200 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2201 		return;
2202 
2203 	mutex_lock(&psr->lock);
2204 
2205 	if (!psr->enabled) {
2206 		mutex_unlock(&psr->lock);
2207 		return;
2208 	}
2209 
2210 	/* If we ever hit this, we will need to add refcount to pause/resume */
2211 	drm_WARN_ON(display->drm, psr->paused);
2212 
2213 	intel_psr_exit(intel_dp);
2214 	intel_psr_wait_exit_locked(intel_dp);
2215 	psr->paused = true;
2216 
2217 	mutex_unlock(&psr->lock);
2218 
2219 	cancel_work_sync(&psr->work);
2220 	cancel_delayed_work_sync(&psr->dc3co_work);
2221 }
2222 
2223 /**
2224  * intel_psr_resume - Resume PSR
2225  * @intel_dp: Intel DP
2226  *
2227  * This function needs to be called after pausing PSR.
2228  */
2229 void intel_psr_resume(struct intel_dp *intel_dp)
2230 {
2231 	struct intel_psr *psr = &intel_dp->psr;
2232 
2233 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2234 		return;
2235 
2236 	mutex_lock(&psr->lock);
2237 
2238 	if (!psr->paused)
2239 		goto unlock;
2240 
2241 	psr->paused = false;
2242 	intel_psr_activate(intel_dp);
2243 
2244 unlock:
2245 	mutex_unlock(&psr->lock);
2246 }
2247 
2248 /**
2249  * intel_psr_needs_block_dc_vblank - Check if block dc entry is needed
2250  * @crtc_state: CRTC status
2251  *
2252  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2253  * prevent it. Panel Replay switches the main link off on DC entry, which
2254  * means vblank interrupts are not fired; that is a problem if user-space is
2255  * polling for vblank events.
2256  */
2257 bool intel_psr_needs_block_dc_vblank(const struct intel_crtc_state *crtc_state)
2258 {
2259 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2260 	struct intel_encoder *encoder;
2261 
2262 	for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2263 		struct intel_dp *intel_dp;
2264 
2265 		if (!intel_encoder_is_dp(encoder))
2266 			continue;
2267 
2268 		intel_dp = enc_to_intel_dp(encoder);
2269 
2270 		if (intel_dp_is_edp(intel_dp) &&
2271 		    CAN_PANEL_REPLAY(intel_dp))
2272 			return true;
2273 	}
2274 
2275 	return false;
2276 }
2277 
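/*
 * The PSR2_MAN_TRK_CTL bit layout differs between ADL-P/display 14+ and
 * older platforms; these helpers return the right bits for the running
 * platform.
 */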
2278 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2279 {
2280 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2281 
2282 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 :
2283 		PSR2_MAN_TRK_CTL_ENABLE;
2284 }
2285 
2286 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2287 {
2288 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2289 
2290 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2291 	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2292 	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2293 }
2294 
2295 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2296 {
2297 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2298 
2299 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2300 	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2301 	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2302 }
2303 
2304 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2305 {
2306 	struct drm_i915_private *dev_priv = to_i915(display->drm);
2307 
2308 	return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2309 	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2310 	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2311 }
2312 
2313 static void psr_force_hw_tracking_exit(struct intel_dp *intel_dp)
2314 {
2315 	struct intel_display *display = to_intel_display(intel_dp);
2316 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2317 
2318 	if (intel_dp->psr.psr2_sel_fetch_enabled)
2319 		intel_de_write(display,
2320 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2321 			       man_trk_ctl_enable_bit_get(display) |
2322 			       man_trk_ctl_partial_frame_bit_get(display) |
2323 			       man_trk_ctl_single_full_frame_bit_get(display) |
2324 			       man_trk_ctl_continuos_full_frame(display));
2325 
2326 	/*
2327 	 * Display WA #0884: skl+
2328 	 * This documented WA for bxt can be safely applied
2329 	 * broadly so we can force HW tracking to exit PSR
2330 	 * instead of disabling and re-enabling.
2331 	 * The workaround tells us to write 0 to CUR_SURFLIVE_A,
2332 	 * but it makes more sense to write to the currently active
2333 	 * pipe.
2334 	 *
2335 	 * This workaround does not exist for platforms with display 10 or
2336 	 * newer, but testing proved that it works up to display 13; for
2337 	 * anything newer than that, testing will be needed.
2338 	 */
2339 	intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2340 }
2341 
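/*
 * Write the precomputed manual tracking value (and the early transport
 * source size) to the hardware, unless continuous full frame fetch is still
 * enabled.
 */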
2342 void intel_psr2_program_trans_man_trk_ctl(const struct intel_crtc_state *crtc_state)
2343 {
2344 	struct intel_display *display = to_intel_display(crtc_state);
2345 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2346 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2347 	struct intel_encoder *encoder;
2348 
2349 	if (!crtc_state->enable_psr2_sel_fetch)
2350 		return;
2351 
2352 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2353 					     crtc_state->uapi.encoder_mask) {
2354 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2355 
2356 		lockdep_assert_held(&intel_dp->psr.lock);
2357 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
2358 			return;
2359 		break;
2360 	}
2361 
2362 	intel_de_write(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2363 		       crtc_state->psr2_man_track_ctl);
2364 
2365 	if (!crtc_state->enable_psr2_su_region_et)
2366 		return;
2367 
2368 	intel_de_write(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2369 		       crtc_state->pipe_srcsz_early_tpt);
2370 }
2371 
2372 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2373 				  bool full_update)
2374 {
2375 	struct intel_display *display = to_intel_display(crtc_state);
2376 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2377 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2378 	u32 val = man_trk_ctl_enable_bit_get(display);
2379 
2380 	/* SF partial frame enable has to be set even on full update */
2381 	val |= man_trk_ctl_partial_frame_bit_get(display);
2382 
2383 	if (full_update) {
2384 		val |= man_trk_ctl_single_full_frame_bit_get(display);
2385 		val |= man_trk_ctl_continuos_full_frame(display);
2386 		goto exit;
2387 	}
2388 
2389 	if (crtc_state->psr2_su_area.y1 == -1)
2390 		goto exit;
2391 
2392 	if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) {
2393 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2394 		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2395 	} else {
2396 		drm_WARN_ON(crtc_state->uapi.crtc->dev,
2397 			    crtc_state->psr2_su_area.y1 % 4 ||
2398 			    crtc_state->psr2_su_area.y2 % 4);
2399 
2400 		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2401 			crtc_state->psr2_su_area.y1 / 4 + 1);
2402 		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2403 			crtc_state->psr2_su_area.y2 / 4 + 1);
2404 	}
2405 exit:
2406 	crtc_state->psr2_man_track_ctl = val;
2407 }
2408 
2409 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2410 					  bool full_update)
2411 {
2412 	int width, height;
2413 
2414 	if (!crtc_state->enable_psr2_su_region_et || full_update)
2415 		return 0;
2416 
2417 	width = drm_rect_width(&crtc_state->psr2_su_area);
2418 	height = drm_rect_height(&crtc_state->psr2_su_area);
2419 
2420 	return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2421 }
2422 
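/*
 * Merge a damage rectangle into the overall damaged area, clipped against
 * the pipe source rectangle; only the vertical extents are tracked.
 */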
2423 static void clip_area_update(struct drm_rect *overlap_damage_area,
2424 			     struct drm_rect *damage_area,
2425 			     struct drm_rect *pipe_src)
2426 {
2427 	if (!drm_rect_intersect(damage_area, pipe_src))
2428 		return;
2429 
2430 	if (overlap_damage_area->y1 == -1) {
2431 		overlap_damage_area->y1 = damage_area->y1;
2432 		overlap_damage_area->y2 = damage_area->y2;
2433 		return;
2434 	}
2435 
2436 	if (damage_area->y1 < overlap_damage_area->y1)
2437 		overlap_damage_area->y1 = damage_area->y1;
2438 
2439 	if (damage_area->y2 > overlap_damage_area->y2)
2440 		overlap_damage_area->y2 = damage_area->y2;
2441 }
2442 
2443 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2444 {
2445 	struct intel_display *display = to_intel_display(crtc_state);
2446 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2447 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2448 	u16 y_alignment;
2449 
2450 	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2451 	if (crtc_state->dsc.compression_enable &&
2452 	    (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14))
2453 		y_alignment = vdsc_cfg->slice_height;
2454 	else
2455 		y_alignment = crtc_state->su_y_granularity;
2456 
2457 	crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2458 	if (crtc_state->psr2_su_area.y2 % y_alignment)
2459 		crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2460 						y_alignment) + 1) * y_alignment;
2461 }
2462 
2463 /*
2464  * When early transport is in use we need to extend the SU area to
2465  * cover the cursor fully when the cursor is in the SU area.
2466  */
2467 static void
2468 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2469 				  struct intel_crtc *crtc,
2470 				  bool *cursor_in_su_area)
2471 {
2472 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2473 	struct intel_plane_state *new_plane_state;
2474 	struct intel_plane *plane;
2475 	int i;
2476 
2477 	if (!crtc_state->enable_psr2_su_region_et)
2478 		return;
2479 
2480 	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2481 		struct drm_rect inter;
2482 
2483 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2484 			continue;
2485 
2486 		if (plane->id != PLANE_CURSOR)
2487 			continue;
2488 
2489 		if (!new_plane_state->uapi.visible)
2490 			continue;
2491 
2492 		inter = crtc_state->psr2_su_area;
2493 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2494 			continue;
2495 
2496 		clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2497 				 &crtc_state->pipe_src);
2498 		*cursor_in_su_area = true;
2499 	}
2500 }
2501 
2502 /*
2503  * TODO: Not clear how to handle planes with negative position;
2504  * also, planes are not updated if they have a negative X
2505  * position, so for now do a full update in these cases.
2506  *
2507  * Plane scaling and rotation are not supported by selective fetch and both
2508  * properties can change without a modeset, so they need to be checked at every
2509  * atomic commit.
2510  */
2511 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2512 {
2513 	if (plane_state->uapi.dst.y1 < 0 ||
2514 	    plane_state->uapi.dst.x1 < 0 ||
2515 	    plane_state->scaler_id >= 0 ||
2516 	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2517 		return false;
2518 
2519 	return true;
2520 }
2521 
2522 /*
2523  * Check for pipe properties that are not supported by selective fetch.
2524  *
2525  * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2526  * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2527  * enabled and going to the full update path.
2528  */
2529 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2530 {
2531 	if (crtc_state->scaler_state.scaler_id >= 0)
2532 		return false;
2533 
2534 	return true;
2535 }
2536 
2537 /* Wa 14019834836 */
2538 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2539 {
2540 	struct intel_display *display = to_intel_display(crtc_state);
2541 	struct intel_encoder *encoder;
2542 	int hactive_limit;
2543 
2544 	if (crtc_state->psr2_su_area.y1 != 0 ||
2545 	    crtc_state->psr2_su_area.y2 != 0)
2546 		return;
2547 
2548 	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2549 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2550 	else
2551 		hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2552 
2553 	if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2554 		return;
2555 
2556 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2557 					     crtc_state->uapi.encoder_mask) {
2558 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2559 
2560 		if (!intel_dp_is_edp(intel_dp) &&
2561 		    intel_dp->psr.panel_replay_enabled &&
2562 		    intel_dp->psr.sel_update_enabled) {
2563 			crtc_state->psr2_su_area.y2++;
2564 			return;
2565 		}
2566 	}
2567 }
2568 
2569 static void
2570 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2571 {
2572 	struct intel_display *display = to_intel_display(crtc_state);
2573 	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2574 
2575 	/* Wa_14014971492 */
2576 	if (!crtc_state->has_panel_replay &&
2577 	    ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2578 	      IS_ALDERLAKE_P(i915) || IS_TIGERLAKE(i915))) &&
2579 	    crtc_state->splitter.enable)
2580 		crtc_state->psr2_su_area.y1 = 0;
2581 
2582 	/* Wa 14019834836 */
2583 	if (DISPLAY_VER(display) == 30)
2584 		intel_psr_apply_pr_link_on_su_wa(crtc_state);
2585 }
2586 
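/*
 * Compute the selective update area for the pipe and for each plane in the
 * atomic state, falling back to a full frame fetch when the area cannot be
 * calculated or a plane/pipe property is not supported by selective fetch.
 */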
2587 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2588 				struct intel_crtc *crtc)
2589 {
2590 	struct intel_display *display = to_intel_display(state);
2591 	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2592 	struct intel_plane_state *new_plane_state, *old_plane_state;
2593 	struct intel_plane *plane;
2594 	bool full_update = false, cursor_in_su_area = false;
2595 	int i, ret;
2596 
2597 	if (!crtc_state->enable_psr2_sel_fetch)
2598 		return 0;
2599 
2600 	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2601 		full_update = true;
2602 		goto skip_sel_fetch_set_loop;
2603 	}
2604 
2605 	crtc_state->psr2_su_area.x1 = 0;
2606 	crtc_state->psr2_su_area.y1 = -1;
2607 	crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2608 	crtc_state->psr2_su_area.y2 = -1;
2609 
2610 	/*
2611 	 * Calculate the minimal selective fetch area of each plane and the
2612 	 * pipe damaged area.
2613 	 * In the next loop the plane selective fetch area will actually be set
2614 	 * using the whole pipe damaged area.
2615 	 */
2616 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2617 					     new_plane_state, i) {
2618 		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2619 						      .x2 = INT_MAX };
2620 
2621 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2622 			continue;
2623 
2624 		if (!new_plane_state->uapi.visible &&
2625 		    !old_plane_state->uapi.visible)
2626 			continue;
2627 
2628 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2629 			full_update = true;
2630 			break;
2631 		}
2632 
2633 		/*
2634 		 * If visibility changed or the plane moved, mark the whole plane
2635 		 * area as damaged as it needs a complete redraw in the new and
2636 		 * old position.
2637 		 */
2638 		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2639 		    !drm_rect_equals(&new_plane_state->uapi.dst,
2640 				     &old_plane_state->uapi.dst)) {
2641 			if (old_plane_state->uapi.visible) {
2642 				damaged_area.y1 = old_plane_state->uapi.dst.y1;
2643 				damaged_area.y2 = old_plane_state->uapi.dst.y2;
2644 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2645 						 &crtc_state->pipe_src);
2646 			}
2647 
2648 			if (new_plane_state->uapi.visible) {
2649 				damaged_area.y1 = new_plane_state->uapi.dst.y1;
2650 				damaged_area.y2 = new_plane_state->uapi.dst.y2;
2651 				clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2652 						 &crtc_state->pipe_src);
2653 			}
2654 			continue;
2655 		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2656 			/* If alpha changed mark the whole plane area as damaged */
2657 			damaged_area.y1 = new_plane_state->uapi.dst.y1;
2658 			damaged_area.y2 = new_plane_state->uapi.dst.y2;
2659 			clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2660 					 &crtc_state->pipe_src);
2661 			continue;
2662 		}
2663 
2664 		src = drm_plane_state_src(&new_plane_state->uapi);
2665 		drm_rect_fp_to_int(&src, &src);
2666 
2667 		if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2668 						     &new_plane_state->uapi, &damaged_area))
2669 			continue;
2670 
2671 		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2672 		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2673 		damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2674 		damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2675 
2676 		clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2677 	}
2678 
2679 	/*
2680 	 * TODO: For now we are just using full update in case
2681 	 * selective fetch area calculation fails. To optimize this we
2682 	 * should identify cases where this happens and fix the area
2683 	 * calculation for those.
2684 	 */
2685 	if (crtc_state->psr2_su_area.y1 == -1) {
2686 		drm_info_once(display->drm,
2687 			      "Selective fetch area calculation failed in pipe %c\n",
2688 			      pipe_name(crtc->pipe));
2689 		full_update = true;
2690 	}
2691 
2692 	if (full_update)
2693 		goto skip_sel_fetch_set_loop;
2694 
2695 	intel_psr_apply_su_area_workarounds(crtc_state);
2696 
2697 	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2698 	if (ret)
2699 		return ret;
2700 
2701 	/*
2702 	 * Adjust the SU area to cover the cursor fully as necessary (early
2703 	 * transport). This needs to be done after
2704 	 * drm_atomic_add_affected_planes to ensure a visible cursor is added
2705 	 * into the affected planes even when the cursor is not updated by itself.
2706 	 */
2707 	intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2708 
2709 	intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2710 
2711 	/*
2712 	 * Now that we have the pipe damaged area, check if it intersects with
2713 	 * every plane; if it does, set the plane selective fetch area.
2714 	 */
2715 	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2716 					     new_plane_state, i) {
2717 		struct drm_rect *sel_fetch_area, inter;
2718 		struct intel_plane *linked = new_plane_state->planar_linked_plane;
2719 
2720 		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2721 		    !new_plane_state->uapi.visible)
2722 			continue;
2723 
2724 		inter = crtc_state->psr2_su_area;
2725 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2726 		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2727 			sel_fetch_area->y1 = -1;
2728 			sel_fetch_area->y2 = -1;
2729 			/*
2730 			 * if plane sel fetch was previously enabled ->
2731 			 * disable it
2732 			 */
2733 			if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2734 				crtc_state->update_planes |= BIT(plane->id);
2735 
2736 			continue;
2737 		}
2738 
2739 		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2740 			full_update = true;
2741 			break;
2742 		}
2743 
2744 		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2745 		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2746 		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2747 		crtc_state->update_planes |= BIT(plane->id);
2748 
2749 		/*
2750 		 * Sel_fetch_area is calculated for UV plane. Use
2751 		 * same area for Y plane as well.
2752 		 */
2753 		if (linked) {
2754 			struct intel_plane_state *linked_new_plane_state;
2755 			struct drm_rect *linked_sel_fetch_area;
2756 
2757 			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2758 			if (IS_ERR(linked_new_plane_state))
2759 				return PTR_ERR(linked_new_plane_state);
2760 
2761 			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2762 			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2763 			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2764 			crtc_state->update_planes |= BIT(linked->id);
2765 		}
2766 	}
2767 
2768 skip_sel_fetch_set_loop:
2769 	psr2_man_trk_ctl_calc(crtc_state, full_update);
2770 	crtc_state->pipe_srcsz_early_tpt =
2771 		psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2772 	return 0;
2773 }
2774 
2775 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2776 				struct intel_crtc *crtc)
2777 {
2778 	struct intel_display *display = to_intel_display(state);
2779 	struct drm_i915_private *i915 = to_i915(state->base.dev);
2780 	const struct intel_crtc_state *old_crtc_state =
2781 		intel_atomic_get_old_crtc_state(state, crtc);
2782 	const struct intel_crtc_state *new_crtc_state =
2783 		intel_atomic_get_new_crtc_state(state, crtc);
2784 	struct intel_encoder *encoder;
2785 
2786 	if (!HAS_PSR(display))
2787 		return;
2788 
2789 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2790 					     old_crtc_state->uapi.encoder_mask) {
2791 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2792 		struct intel_psr *psr = &intel_dp->psr;
2793 		bool needs_to_disable = false;
2794 
2795 		mutex_lock(&psr->lock);
2796 
2797 		/*
2798 		 * Reasons to disable:
2799 		 * - PSR disabled in new state
2800 		 * - All planes will go inactive
2801 		 * - Changing between PSR versions
2802 		 * - Region Early Transport changing
2803 		 * - Display WA #1136: skl, bxt
2804 		 */
2805 		needs_to_disable |= intel_crtc_needs_modeset(new_crtc_state);
2806 		needs_to_disable |= !new_crtc_state->has_psr;
2807 		needs_to_disable |= !new_crtc_state->active_planes;
2808 		needs_to_disable |= new_crtc_state->has_sel_update != psr->sel_update_enabled;
2809 		needs_to_disable |= new_crtc_state->enable_psr2_su_region_et !=
2810 			psr->su_region_et_enabled;
2811 		needs_to_disable |= DISPLAY_VER(i915) < 11 &&
2812 			new_crtc_state->wm_level_disabled;
2813 
2814 		if (psr->enabled && needs_to_disable)
2815 			intel_psr_disable_locked(intel_dp);
2816 		else if (psr->enabled && new_crtc_state->wm_level_disabled)
2817 			/* Wa_14015648006 */
2818 			wm_optimization_wa(intel_dp, new_crtc_state);
2819 
2820 		mutex_unlock(&psr->lock);
2821 	}
2822 }
2823 
2824 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2825 				 struct intel_crtc *crtc)
2826 {
2827 	struct intel_display *display = to_intel_display(state);
2828 	const struct intel_crtc_state *crtc_state =
2829 		intel_atomic_get_new_crtc_state(state, crtc);
2830 	struct intel_encoder *encoder;
2831 
2832 	if (!crtc_state->has_psr)
2833 		return;
2834 
2835 	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2836 					     crtc_state->uapi.encoder_mask) {
2837 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2838 		struct intel_psr *psr = &intel_dp->psr;
2839 		bool keep_disabled = false;
2840 
2841 		mutex_lock(&psr->lock);
2842 
2843 		drm_WARN_ON(display->drm,
2844 			    psr->enabled && !crtc_state->active_planes);
2845 
2846 		keep_disabled |= psr->sink_not_reliable;
2847 		keep_disabled |= !crtc_state->active_planes;
2848 
2849 		/* Display WA #1136: skl, bxt */
2850 		keep_disabled |= DISPLAY_VER(display) < 11 &&
2851 			crtc_state->wm_level_disabled;
2852 
2853 		if (!psr->enabled && !keep_disabled)
2854 			intel_psr_enable_locked(intel_dp, crtc_state);
2855 		else if (psr->enabled && !crtc_state->wm_level_disabled)
2856 			/* Wa_14015648006 */
2857 			wm_optimization_wa(intel_dp, crtc_state);
2858 
2859 		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2860 		if (crtc_state->crc_enabled && psr->enabled)
2861 			psr_force_hw_tracking_exit(intel_dp);
2862 
2863 		/*
2864 		 * Clear possible busy bits in case we have
2865 		 * invalidate -> flip -> flush sequence.
2866 		 */
2867 		intel_dp->psr.busy_frontbuffer_bits = 0;
2868 
2869 		mutex_unlock(&psr->lock);
2870 	}
2871 }
2872 
2873 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2874 {
2875 	struct intel_display *display = to_intel_display(intel_dp);
2876 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2877 
2878 	/*
2879 	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
2880 	 * As all higher states have bit 4 of the PSR2 state set we can just wait for
2881 	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2882 	 */
2883 	return intel_de_wait_for_clear(display,
2884 				       EDP_PSR2_STATUS(display, cpu_transcoder),
2885 				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2886 }
2887 
2888 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2889 {
2890 	struct intel_display *display = to_intel_display(intel_dp);
2891 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2892 
2893 	/*
2894 	 * From bspec: Panel Self Refresh (BDW+)
2895 	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2896 	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2897 	 * defensive enough to cover everything.
2898 	 */
2899 	return intel_de_wait_for_clear(display,
2900 				       psr_status_reg(display, cpu_transcoder),
2901 				       EDP_PSR_STATUS_STATE_MASK, 50);
2902 }
2903 
2904 /**
2905  * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
2906  * @new_crtc_state: new CRTC state
2907  *
2908  * This function is expected to be called from pipe_update_start() where it is
2909  * not expected to race with PSR enable or disable.
2910  */
2911 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2912 {
2913 	struct intel_display *display = to_intel_display(new_crtc_state);
2914 	struct intel_encoder *encoder;
2915 
2916 	if (!new_crtc_state->has_psr)
2917 		return;
2918 
2919 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2920 					     new_crtc_state->uapi.encoder_mask) {
2921 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2922 		int ret;
2923 
2924 		lockdep_assert_held(&intel_dp->psr.lock);
2925 
2926 		if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
2927 			continue;
2928 
2929 		if (intel_dp->psr.sel_update_enabled)
2930 			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2931 		else
2932 			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2933 
2934 		if (ret)
2935 			drm_err(display->drm,
2936 				"PSR wait timed out, atomic update may fail\n");
2937 	}
2938 }
2939 
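/*
 * Wait, with psr.lock temporarily dropped, for the PSR status to go idle and
 * report whether PSR is still enabled once the lock is re-acquired.
 */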
2940 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2941 {
2942 	struct intel_display *display = to_intel_display(intel_dp);
2943 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2944 	i915_reg_t reg;
2945 	u32 mask;
2946 	int err;
2947 
2948 	if (!intel_dp->psr.enabled)
2949 		return false;
2950 
2951 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2952 					  intel_dp->psr.panel_replay_enabled)) {
2953 		reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2954 		mask = EDP_PSR2_STATUS_STATE_MASK;
2955 	} else {
2956 		reg = psr_status_reg(display, cpu_transcoder);
2957 		mask = EDP_PSR_STATUS_STATE_MASK;
2958 	}
2959 
2960 	mutex_unlock(&intel_dp->psr.lock);
2961 
2962 	err = intel_de_wait_for_clear(display, reg, mask, 50);
2963 	if (err)
2964 		drm_err(display->drm,
2965 			"Timed out waiting for PSR Idle for re-enable\n");
2966 
2967 	/* After the unlocked wait, verify that PSR is still wanted! */
2968 	mutex_lock(&intel_dp->psr.lock);
2969 	return err == 0 && intel_dp->psr.enabled;
2970 }
2971 
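/*
 * Force an atomic commit on every eDP connector's crtc by marking its mode
 * as changed, so that updated PSR debug settings take effect.
 */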
2972 static int intel_psr_fastset_force(struct intel_display *display)
2973 {
2974 	struct drm_connector_list_iter conn_iter;
2975 	struct drm_modeset_acquire_ctx ctx;
2976 	struct drm_atomic_state *state;
2977 	struct drm_connector *conn;
2978 	int err = 0;
2979 
2980 	state = drm_atomic_state_alloc(display->drm);
2981 	if (!state)
2982 		return -ENOMEM;
2983 
2984 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
2985 
2986 	state->acquire_ctx = &ctx;
2987 	to_intel_atomic_state(state)->internal = true;
2988 
2989 retry:
2990 	drm_connector_list_iter_begin(display->drm, &conn_iter);
2991 	drm_for_each_connector_iter(conn, &conn_iter) {
2992 		struct drm_connector_state *conn_state;
2993 		struct drm_crtc_state *crtc_state;
2994 
2995 		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
2996 			continue;
2997 
2998 		conn_state = drm_atomic_get_connector_state(state, conn);
2999 		if (IS_ERR(conn_state)) {
3000 			err = PTR_ERR(conn_state);
3001 			break;
3002 		}
3003 
3004 		if (!conn_state->crtc)
3005 			continue;
3006 
3007 		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3008 		if (IS_ERR(crtc_state)) {
3009 			err = PTR_ERR(crtc_state);
3010 			break;
3011 		}
3012 
3013 		/* Mark mode as changed to trigger a pipe->update() */
3014 		crtc_state->mode_changed = true;
3015 	}
3016 	drm_connector_list_iter_end(&conn_iter);
3017 
3018 	if (err == 0)
3019 		err = drm_atomic_commit(state);
3020 
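	/*
	 * Standard DRM modeset-lock retry dance: on contention (-EDEADLK)
	 * drop the gathered state, back off, and redo the connector walk
	 * from the retry label above.
	 */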
3021 	if (err == -EDEADLK) {
3022 		drm_atomic_state_clear(state);
3023 		err = drm_modeset_backoff(&ctx);
3024 		if (!err)
3025 			goto retry;
3026 	}
3027 
3028 	drm_modeset_drop_locks(&ctx);
3029 	drm_modeset_acquire_fini(&ctx);
3030 	drm_atomic_state_put(state);
3031 
3032 	return err;
3033 }
3034 
3035 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3036 {
3037 	struct intel_display *display = to_intel_display(intel_dp);
3038 	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3039 	const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3040 					I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3041 	u32 old_mode, old_disable_bits;
3042 	int ret;
3043 
3044 	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3045 		    I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3046 		    I915_PSR_DEBUG_MODE_MASK) ||
3047 	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3048 		drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3049 		return -EINVAL;
3050 	}
3051 
3052 	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3053 	if (ret)
3054 		return ret;
3055 
3056 	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3057 	old_disable_bits = intel_dp->psr.debug &
3058 		(I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3059 		 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3060 
3061 	intel_dp->psr.debug = val;
3062 
3063 	/*
3064 	 * Do it right away if it's already enabled, otherwise it will be done
3065 	 * when enabling the source.
3066 	 */
3067 	if (intel_dp->psr.enabled)
3068 		psr_irq_control(intel_dp);
3069 
3070 	mutex_unlock(&intel_dp->psr.lock);
3071 
3072 	if (old_mode != mode || old_disable_bits != disable_bits)
3073 		ret = intel_psr_fastset_force(display);
3074 
3075 	return ret;
3076 }
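
/*
 * Illustrative debugfs usage (exact bit values live in the I915_PSR_DEBUG_*
 * definitions; 0x1 is assumed here to be I915_PSR_DEBUG_DISABLE):
 *
 *	# force-disable PSR at runtime
 *	echo 0x1 > <debugfs>/i915_edp_psr_debug
 *
 * A mode or disable-bit change triggers intel_psr_fastset_force() above,
 * which forces an atomic commit so the new debug value takes effect
 * immediately.
 */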
3077 
3078 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3079 {
3080 	struct intel_psr *psr = &intel_dp->psr;
3081 
3082 	intel_psr_disable_locked(intel_dp);
3083 	psr->sink_not_reliable = true;
3084 	/* let's make sure that the sink is awake */
3085 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3086 }
3087 
3088 static void intel_psr_work(struct work_struct *work)
3089 {
3090 	struct intel_dp *intel_dp =
3091 		container_of(work, typeof(*intel_dp), psr.work);
3092 
3093 	mutex_lock(&intel_dp->psr.lock);
3094 
3095 	if (!intel_dp->psr.enabled)
3096 		goto unlock;
3097 
3098 	if (READ_ONCE(intel_dp->psr.irq_aux_error))
3099 		intel_psr_handle_irq(intel_dp);
3100 
3101 	/*
3102 	 * We have to make sure PSR is ready for re-enable,
3103 	 * otherwise it stays disabled until the next full enable/disable cycle.
3104 	 * PSR might take some time to get fully disabled
3105 	 * and be ready for re-enable again.
3106 	 */
3107 	if (!__psr_wait_for_idle_locked(intel_dp))
3108 		goto unlock;
3109 
3110 	/*
3111 	 * The delayed work can race with an invalidate hence we need to
3112 	 * recheck. Since psr_flush first clears this and then reschedules we
3113 	 * won't ever miss a flush when bailing out here.
3114 	 */
3115 	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3116 		goto unlock;
3117 
3118 	intel_psr_activate(intel_dp);
3119 unlock:
3120 	mutex_unlock(&intel_dp->psr.lock);
3121 }
3122 
3123 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3124 {
3125 	struct intel_display *display = to_intel_display(intel_dp);
3126 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3127 
3128 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3129 		u32 val;
3130 
3131 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3132 			/* Send one update, otherwise lag is observed on screen */
3133 			intel_de_write(display,
3134 				       CURSURFLIVE(display, intel_dp->psr.pipe),
3135 				       0);
3136 			return;
3137 		}
3138 
3139 		val = man_trk_ctl_enable_bit_get(display) |
3140 		      man_trk_ctl_partial_frame_bit_get(display) |
3141 		      man_trk_ctl_continuos_full_frame(display);
3142 		intel_de_write(display,
3143 			       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3144 			       val);
3145 		intel_de_write(display,
3146 			       CURSURFLIVE(display, intel_dp->psr.pipe), 0);
3147 		intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3148 	} else {
3149 		intel_psr_exit(intel_dp);
3150 	}
3151 }
3152 
3153 /**
3154  * intel_psr_invalidate - Invalidate PSR
3155  * @display: display device
3156  * @frontbuffer_bits: frontbuffer plane tracking bits
3157  * @origin: which operation caused the invalidate
3158  *
3159  * Since the hardware frontbuffer tracking has gaps we need to integrate
3160  * with the software frontbuffer tracking. This function gets called every
3161  * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3162  * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3163  *
3164  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3165  */
3166 void intel_psr_invalidate(struct intel_display *display,
3167 			  unsigned frontbuffer_bits, enum fb_op_origin origin)
3168 {
3169 	struct intel_encoder *encoder;
3170 
3171 	if (origin == ORIGIN_FLIP)
3172 		return;
3173 
3174 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3175 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3176 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3177 
3178 		mutex_lock(&intel_dp->psr.lock);
3179 		if (!intel_dp->psr.enabled) {
3180 			mutex_unlock(&intel_dp->psr.lock);
3181 			continue;
3182 		}
3183 
3184 		pipe_frontbuffer_bits &=
3185 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3186 		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3187 
3188 		if (pipe_frontbuffer_bits)
3189 			_psr_invalidate_handle(intel_dp);
3190 
3191 		mutex_unlock(&intel_dp->psr.lock);
3192 	}
3193 }
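
/*
 * Note that flips are deliberately not handled here (ORIGIN_FLIP returns
 * early above); for flips intel_psr_flush() only kicks DC3CO via
 * tgl_dc3co_flush_locked(), the rest is handled through the atomic commit
 * path.
 */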
3194 /*
3195  * When we completely rely on PSR2 S/W tracking in the future,
3196  * intel_psr_flush() will also invalidate and flush the PSR for the
3197  * ORIGIN_FLIP event, therefore tgl_dc3co_flush_locked() will need to be
3198  * changed accordingly.
3199  */
3200 static void
3201 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3202 		       enum fb_op_origin origin)
3203 {
3204 	struct intel_display *display = to_intel_display(intel_dp);
3205 	struct drm_i915_private *i915 = to_i915(display->drm);
3206 
3207 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3208 	    !intel_dp->psr.active)
3209 		return;
3210 
3211 	/*
3212 	 * Every frontbuffer flush/flip event modifies the delay of the delayed
3213 	 * work; when the delayed work finally runs it means the display has been idle.
3214 	 */
3215 	if (!(frontbuffer_bits &
3216 	    INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3217 		return;
3218 
3219 	tgl_psr2_enable_dc3co(intel_dp);
3220 	mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3221 			 intel_dp->psr.dc3co_exit_delay);
3222 }
3223 
3224 static void _psr_flush_handle(struct intel_dp *intel_dp)
3225 {
3226 	struct intel_display *display = to_intel_display(intel_dp);
3227 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3228 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3229 
3230 	if (intel_dp->psr.psr2_sel_fetch_enabled) {
3231 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3232 			/* can we turn CFF off? */
3233 			if (intel_dp->psr.busy_frontbuffer_bits == 0) {
3234 				u32 val = man_trk_ctl_enable_bit_get(display) |
3235 					man_trk_ctl_partial_frame_bit_get(display) |
3236 					man_trk_ctl_single_full_frame_bit_get(display) |
3237 					man_trk_ctl_continuos_full_frame(display);
3238 
3239 				/*
3240 				 * Set psr2_sel_fetch_cff_enabled to false to allow selective
3241 				 * updates. Still keep the CFF bit enabled as we don't have a proper
3242 				 * SU configuration in case an update is sent for any reason after
3243 				 * the SFF bit gets cleared by the HW on the next vblank.
3244 				 */
3245 				intel_de_write(display,
3246 					       PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3247 					       val);
3248 				intel_de_write(display,
3249 					       CURSURFLIVE(display, intel_dp->psr.pipe),
3250 					       0);
3251 				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3252 			}
3253 		} else {
3254 			/*
3255 			 * continuous full frame is disabled, only a single full
3256 			 * frame is required
3257 			 */
3258 			psr_force_hw_tracking_exit(intel_dp);
3259 		}
3260 	} else {
3261 		psr_force_hw_tracking_exit(intel_dp);
3262 
3263 		if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3264 			queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3265 	}
3266 }
3267 
3268 /**
3269  * intel_psr_flush - Flush PSR
3270  * @display: display device
3271  * @frontbuffer_bits: frontbuffer plane tracking bits
3272  * @origin: which operation caused the flush
3273  *
3274  * Since the hardware frontbuffer tracking has gaps we need to integrate
3275  * with the software frontbuffer tracking. This function gets called every
3276  * time frontbuffer rendering has completed and flushed out to memory. PSR
3277  * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3278  *
3279  * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3280  */
3281 void intel_psr_flush(struct intel_display *display,
3282 		     unsigned frontbuffer_bits, enum fb_op_origin origin)
3283 {
3284 	struct intel_encoder *encoder;
3285 
3286 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3287 		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3288 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3289 
3290 		mutex_lock(&intel_dp->psr.lock);
3291 		if (!intel_dp->psr.enabled) {
3292 			mutex_unlock(&intel_dp->psr.lock);
3293 			continue;
3294 		}
3295 
3296 		pipe_frontbuffer_bits &=
3297 			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3298 		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3299 
3300 		/*
3301 		 * If the PSR is paused by an explicit intel_psr_paused() call,
3302 		 * we have to ensure that the PSR is not activated until
3303 		 * intel_psr_resume() is called.
3304 		 */
3305 		if (intel_dp->psr.paused)
3306 			goto unlock;
3307 
3308 		if (origin == ORIGIN_FLIP ||
3309 		    (origin == ORIGIN_CURSOR_UPDATE &&
3310 		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
3311 			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3312 			goto unlock;
3313 		}
3314 
3315 		if (pipe_frontbuffer_bits == 0)
3316 			goto unlock;
3317 
3318 		/* By definition flush = invalidate + flush */
3319 		_psr_flush_handle(intel_dp);
3320 unlock:
3321 		mutex_unlock(&intel_dp->psr.lock);
3322 	}
3323 }
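
/*
 * Illustrative pairing (simplified sketch): a frontbuffer write is expected
 * to be bracketed by the two tracking hooks, e.g.
 *
 *	intel_psr_invalidate(display, bits, ORIGIN_CPU);
 *	... CPU rendering into the frontbuffer ...
 *	intel_psr_flush(display, bits, ORIGIN_CPU);
 *
 * so PSR is kept off while the buffer is dirty and re-armed afterwards.
 */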
3324 
3325 /**
3326  * intel_psr_init - Init basic PSR work and mutex.
3327  * @intel_dp: Intel DP
3328  *
3329  * This function is called after the connector has been initialized
3330  * (connector initialization handles the connector capabilities) and it
3331  * initializes the basic PSR state for each DP encoder.
3332  */
3333 void intel_psr_init(struct intel_dp *intel_dp)
3334 {
3335 	struct intel_display *display = to_intel_display(intel_dp);
3336 	struct intel_connector *connector = intel_dp->attached_connector;
3337 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3338 
3339 	if (!(HAS_PSR(display) || HAS_DP20(display)))
3340 		return;
3341 
3342 	/*
3343 	 * HSW spec explicitly says PSR is tied to port A.
3344 	 * BDW+ platforms have an instance of the PSR registers per transcoder,
3345 	 * but on BDW, GEN9 and GEN11 the HW team has not validated any transcoder
3346 	 * other than the eDP one.
3347 	 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3348 	 * so let's keep it hardcoded to PORT_A for those platforms.
3349 	 * GEN12, however, supports an instance of the PSR registers per transcoder.
3350 	 */
3351 	if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3352 		drm_dbg_kms(display->drm,
3353 			    "PSR condition failed: Port not supported\n");
3354 		return;
3355 	}
3356 
3357 	if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3358 	    DISPLAY_VER(display) >= 20)
3359 		intel_dp->psr.source_panel_replay_support = true;
3360 
3361 	if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3362 		intel_dp->psr.source_support = true;
3363 
3364 	/* Set link_standby vs. link_off defaults */
3365 	if (DISPLAY_VER(display) < 12)
3366 		/* For platforms up to TGL let's respect the VBT again */
3367 		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3368 
3369 	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3370 	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3371 	mutex_init(&intel_dp->psr.lock);
3372 }
3373 
3374 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3375 					   u8 *status, u8 *error_status)
3376 {
3377 	struct drm_dp_aux *aux = &intel_dp->aux;
3378 	int ret;
3379 	unsigned int offset;
3380 
3381 	offset = intel_dp->psr.panel_replay_enabled ?
3382 		 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3383 
3384 	ret = drm_dp_dpcd_readb(aux, offset, status);
3385 	if (ret != 1)
3386 		return ret;
3387 
3388 	offset = intel_dp->psr.panel_replay_enabled ?
3389 		 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3390 
3391 	ret = drm_dp_dpcd_readb(aux, offset, error_status);
3392 	if (ret != 1)
3393 		return ret;
3394 
3395 	*status = *status & DP_PSR_SINK_STATE_MASK;
3396 
3397 	return 0;
3398 }
3399 
3400 static void psr_alpm_check(struct intel_dp *intel_dp)
3401 {
3402 	struct intel_display *display = to_intel_display(intel_dp);
3403 	struct drm_dp_aux *aux = &intel_dp->aux;
3404 	struct intel_psr *psr = &intel_dp->psr;
3405 	u8 val;
3406 	int r;
3407 
3408 	if (!psr->sel_update_enabled)
3409 		return;
3410 
3411 	r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3412 	if (r != 1) {
3413 		drm_err(display->drm, "Error reading ALPM status\n");
3414 		return;
3415 	}
3416 
3417 	if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3418 		intel_psr_disable_locked(intel_dp);
3419 		psr->sink_not_reliable = true;
3420 		drm_dbg_kms(display->drm,
3421 			    "ALPM lock timeout error, disabling PSR\n");
3422 
3423 		/* Clearing error */
3424 		drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3425 	}
3426 }
3427 
3428 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3429 {
3430 	struct intel_display *display = to_intel_display(intel_dp);
3431 	struct intel_psr *psr = &intel_dp->psr;
3432 	u8 val;
3433 	int r;
3434 
3435 	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3436 	if (r != 1) {
3437 		drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3438 		return;
3439 	}
3440 
3441 	if (val & DP_PSR_CAPS_CHANGE) {
3442 		intel_psr_disable_locked(intel_dp);
3443 		psr->sink_not_reliable = true;
3444 		drm_dbg_kms(display->drm,
3445 			    "Sink PSR capability changed, disabling PSR\n");
3446 
3447 		/* Clearing it */
3448 		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3449 	}
3450 }
3451 
3452 /*
3453  * On common bits:
3454  * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3455  * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3456  * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3457  * this function is relying on PSR definitions
3458  */
3459 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3460 {
3461 	struct intel_display *display = to_intel_display(intel_dp);
3462 	struct intel_psr *psr = &intel_dp->psr;
3463 	u8 status, error_status;
3464 	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3465 			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3466 			  DP_PSR_LINK_CRC_ERROR;
3467 
3468 	if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3469 		return;
3470 
3471 	mutex_lock(&psr->lock);
3472 
3473 	psr->link_ok = false;
3474 
3475 	if (!psr->enabled)
3476 		goto exit;
3477 
3478 	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3479 		drm_err(display->drm,
3480 			"Error reading PSR status or error status\n");
3481 		goto exit;
3482 	}
3483 
3484 	if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3485 	    (error_status & errors)) {
3486 		intel_psr_disable_locked(intel_dp);
3487 		psr->sink_not_reliable = true;
3488 	}
3489 
3490 	if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3491 	    !error_status)
3492 		drm_dbg_kms(display->drm,
3493 			    "PSR sink internal error, disabling PSR\n");
3494 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3495 		drm_dbg_kms(display->drm,
3496 			    "PSR RFB storage error, disabling PSR\n");
3497 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3498 		drm_dbg_kms(display->drm,
3499 			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
3500 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3501 		drm_dbg_kms(display->drm,
3502 			    "PSR Link CRC error, disabling PSR\n");
3503 
3504 	if (error_status & ~errors)
3505 		drm_err(display->drm,
3506 			"PSR_ERROR_STATUS unhandled errors %x\n",
3507 			error_status & ~errors);
3508 	/* clear status register */
3509 	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3510 
3511 	if (!psr->panel_replay_enabled) {
3512 		psr_alpm_check(intel_dp);
3513 		psr_capability_changed_check(intel_dp);
3514 	}
3515 
3516 exit:
3517 	mutex_unlock(&psr->lock);
3518 }
3519 
3520 bool intel_psr_enabled(struct intel_dp *intel_dp)
3521 {
3522 	bool ret;
3523 
3524 	if (!CAN_PSR(intel_dp))
3525 		return false;
3526 
3527 	mutex_lock(&intel_dp->psr.lock);
3528 	ret = intel_dp->psr.enabled;
3529 	mutex_unlock(&intel_dp->psr.lock);
3530 
3531 	return ret;
3532 }
3533 
3534 /**
3535  * intel_psr_link_ok - return psr->link_ok
3536  * @intel_dp: struct intel_dp
3537  *
3538  * We are seeing unexpected link re-trainings with some panels. This is caused
3539  * by the panel reporting a bad link status after PSR is enabled. Code checking
3540  * link status can call this to decide whether it can ignore a bad link status
3541  * reported by the panel, i.e. if the panel reports a bad link but
3542  * intel_psr_link_ok() says the link is ok, the caller should rely on the latter.
3543  *
3544  * Return value of link_ok
3545  */
3546 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3547 {
3548 	bool ret;
3549 
3550 	if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3551 	    !intel_dp_is_edp(intel_dp))
3552 		return false;
3553 
3554 	mutex_lock(&intel_dp->psr.lock);
3555 	ret = intel_dp->psr.link_ok;
3556 	mutex_unlock(&intel_dp->psr.lock);
3557 
3558 	return ret;
3559 }
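
/*
 * Illustrative check (simplified, assumed caller shape): link status handling
 * can do something along the lines of
 *
 *	if (!drm_dp_channel_eq_ok(link_status, lane_count) &&
 *	    !intel_psr_link_ok(intel_dp))
 *		... schedule link retraining ...
 *
 * i.e. a bad status reported by the panel is ignored as long as PSR still
 * considers the link ok.
 */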
3560 
3561 /**
3562  * intel_psr_lock - grab PSR lock
3563  * @crtc_state: the crtc state
3564  *
3565  * This is initially meant to be used around the CRTC update, when
3566  * vblank sensitive registers are updated and we need to grab the lock
3567  * before it to avoid vblank evasion.
3568  */
3569 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3570 {
3571 	struct intel_display *display = to_intel_display(crtc_state);
3572 	struct intel_encoder *encoder;
3573 
3574 	if (!crtc_state->has_psr)
3575 		return;
3576 
3577 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3578 					     crtc_state->uapi.encoder_mask) {
3579 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3580 
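		/*
		 * Only one PSR/Panel Replay capable encoder is expected in the
		 * crtc's encoder mask, hence the immediate break after taking
		 * its lock.
		 */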
3581 		mutex_lock(&intel_dp->psr.lock);
3582 		break;
3583 	}
3584 }
3585 
3586 /**
3587  * intel_psr_unlock - release PSR lock
3588  * @crtc_state: the crtc state
3589  *
3590  * Release the PSR lock that was held during pipe update.
3591  */
3592 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3593 {
3594 	struct intel_display *display = to_intel_display(crtc_state);
3595 	struct intel_encoder *encoder;
3596 
3597 	if (!crtc_state->has_psr)
3598 		return;
3599 
3600 	for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3601 					     crtc_state->uapi.encoder_mask) {
3602 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3603 
3604 		mutex_unlock(&intel_dp->psr.lock);
3605 		break;
3606 	}
3607 }
3608 
3609 static void
3610 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3611 {
3612 	struct intel_display *display = to_intel_display(intel_dp);
3613 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3614 	const char *status = "unknown";
3615 	u32 val, status_val;
3616 
3617 	if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3618 					  intel_dp->psr.panel_replay_enabled)) {
3619 		static const char * const live_status[] = {
3620 			"IDLE",
3621 			"CAPTURE",
3622 			"CAPTURE_FS",
3623 			"SLEEP",
3624 			"BUFON_FW",
3625 			"ML_UP",
3626 			"SU_STANDBY",
3627 			"FAST_SLEEP",
3628 			"DEEP_SLEEP",
3629 			"BUF_ON",
3630 			"TG_ON"
3631 		};
3632 		val = intel_de_read(display,
3633 				    EDP_PSR2_STATUS(display, cpu_transcoder));
3634 		status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3635 		if (status_val < ARRAY_SIZE(live_status))
3636 			status = live_status[status_val];
3637 	} else {
3638 		static const char * const live_status[] = {
3639 			"IDLE",
3640 			"SRDONACK",
3641 			"SRDENT",
3642 			"BUFOFF",
3643 			"BUFON",
3644 			"AUXACK",
3645 			"SRDOFFACK",
3646 			"SRDENT_ON",
3647 		};
3648 		val = intel_de_read(display,
3649 				    psr_status_reg(display, cpu_transcoder));
3650 		status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3651 		if (status_val < ARRAY_SIZE(live_status))
3652 			status = live_status[status_val];
3653 	}
3654 
3655 	seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3656 }
3657 
3658 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3659 				      struct seq_file *m)
3660 {
3661 	struct intel_psr *psr = &intel_dp->psr;
3662 
3663 	seq_printf(m, "Sink support: PSR = %s",
3664 		   str_yes_no(psr->sink_support));
3665 
3666 	if (psr->sink_support)
3667 		seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3668 	if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3669 		seq_printf(m, " (Early Transport)");
3670 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3671 	seq_printf(m, ", Panel Replay Selective Update = %s",
3672 		   str_yes_no(psr->sink_panel_replay_su_support));
3673 	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3674 		seq_printf(m, " (Early Transport)");
3675 	seq_printf(m, "\n");
3676 }
3677 
3678 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3679 				 struct seq_file *m)
3680 {
3681 	struct intel_psr *psr = &intel_dp->psr;
3682 	const char *status, *mode, *region_et;
3683 
3684 	if (psr->enabled)
3685 		status = " enabled";
3686 	else
3687 		status = "disabled";
3688 
3689 	if (psr->panel_replay_enabled && psr->sel_update_enabled)
3690 		mode = "Panel Replay Selective Update";
3691 	else if (psr->panel_replay_enabled)
3692 		mode = "Panel Replay";
3693 	else if (psr->sel_update_enabled)
3694 		mode = "PSR2";
3695 	else if (psr->enabled)
3696 		mode = "PSR1";
3697 	else
3698 		mode = "";
3699 
3700 	if (psr->su_region_et_enabled)
3701 		region_et = " (Early Transport)";
3702 	else
3703 		region_et = "";
3704 
3705 	seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3706 }
3707 
3708 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3709 {
3710 	struct intel_display *display = to_intel_display(intel_dp);
3711 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3712 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3713 	struct intel_psr *psr = &intel_dp->psr;
3714 	intel_wakeref_t wakeref;
3715 	bool enabled;
3716 	u32 val, psr2_ctl;
3717 
3718 	intel_psr_sink_capability(intel_dp, m);
3719 
3720 	if (!(psr->sink_support || psr->sink_panel_replay_support))
3721 		return 0;
3722 
3723 	wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3724 	mutex_lock(&psr->lock);
3725 
3726 	intel_psr_print_mode(intel_dp, m);
3727 
3728 	if (!psr->enabled) {
3729 		seq_printf(m, "PSR sink not reliable: %s\n",
3730 			   str_yes_no(psr->sink_not_reliable));
3731 
3732 		goto unlock;
3733 	}
3734 
3735 	if (psr->panel_replay_enabled) {
3736 		val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3737 
3738 		if (intel_dp_is_edp(intel_dp))
3739 			psr2_ctl = intel_de_read(display,
3740 						 EDP_PSR2_CTL(display,
3741 							      cpu_transcoder));
3742 
3743 		enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3744 	} else if (psr->sel_update_enabled) {
3745 		val = intel_de_read(display,
3746 				    EDP_PSR2_CTL(display, cpu_transcoder));
3747 		enabled = val & EDP_PSR2_ENABLE;
3748 	} else {
3749 		val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3750 		enabled = val & EDP_PSR_ENABLE;
3751 	}
3752 	seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3753 		   str_enabled_disabled(enabled), val);
3754 	if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3755 		seq_printf(m, "PSR2_CTL: 0x%08x\n",
3756 			   psr2_ctl);
3757 	psr_source_status(intel_dp, m);
3758 	seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3759 		   psr->busy_frontbuffer_bits);
3760 
3761 	/*
3762 	 * SKL+ Perf counter is reset to 0 every time a DC state is entered
3763 	 */
3764 	val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3765 	seq_printf(m, "Performance counter: %u\n",
3766 		   REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3767 
3768 	if (psr->debug & I915_PSR_DEBUG_IRQ) {
3769 		seq_printf(m, "Last attempted entry at: %lld\n",
3770 			   psr->last_entry_attempt);
3771 		seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3772 	}
3773 
3774 	if (psr->sel_update_enabled) {
3775 		u32 su_frames_val[3];
3776 		int frame;
3777 
3778 		/*
3779 		 * Read all 3 registers beforehand to minimize the chance of
3780 		 * crossing a frame boundary between register reads
3781 		 */
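		/*
		 * Each PSR2_SU_STATUS register packs the SU block counts of
		 * three consecutive frames, hence the frame / 3 indexing here
		 * and the per-frame mask/shift when printing below.
		 */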
3782 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3783 			val = intel_de_read(display,
3784 					    PSR2_SU_STATUS(display, cpu_transcoder, frame));
3785 			su_frames_val[frame / 3] = val;
3786 		}
3787 
3788 		seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3789 
3790 		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3791 			u32 su_blocks;
3792 
3793 			su_blocks = su_frames_val[frame / 3] &
3794 				    PSR2_SU_STATUS_MASK(frame);
3795 			su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3796 			seq_printf(m, "%d\t%d\n", frame, su_blocks);
3797 		}
3798 
3799 		seq_printf(m, "PSR2 selective fetch: %s\n",
3800 			   str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3801 	}
3802 
3803 unlock:
3804 	mutex_unlock(&psr->lock);
3805 	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3806 
3807 	return 0;
3808 }
3809 
3810 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3811 {
3812 	struct intel_display *display = m->private;
3813 	struct intel_dp *intel_dp = NULL;
3814 	struct intel_encoder *encoder;
3815 
3816 	if (!HAS_PSR(display))
3817 		return -ENODEV;
3818 
3819 	/* Find the first eDP which supports PSR */
3820 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3821 		intel_dp = enc_to_intel_dp(encoder);
3822 		break;
3823 	}
3824 
3825 	if (!intel_dp)
3826 		return -ENODEV;
3827 
3828 	return intel_psr_status(m, intel_dp);
3829 }
3830 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3831 
3832 static int
3833 i915_edp_psr_debug_set(void *data, u64 val)
3834 {
3835 	struct intel_display *display = data;
3836 	struct drm_i915_private *dev_priv = to_i915(display->drm);
3837 	struct intel_encoder *encoder;
3838 	intel_wakeref_t wakeref;
3839 	int ret = -ENODEV;
3840 
3841 	if (!HAS_PSR(display))
3842 		return ret;
3843 
3844 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3845 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3846 
3847 		drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3848 
3849 		wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3850 
3851 		// TODO: split to each transcoder's PSR debug state
3852 		ret = intel_psr_debug_set(intel_dp, val);
3853 
3854 		intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3855 	}
3856 
3857 	return ret;
3858 }
3859 
3860 static int
3861 i915_edp_psr_debug_get(void *data, u64 *val)
3862 {
3863 	struct intel_display *display = data;
3864 	struct intel_encoder *encoder;
3865 
3866 	if (!HAS_PSR(display))
3867 		return -ENODEV;
3868 
3869 	for_each_intel_encoder_with_psr(display->drm, encoder) {
3870 		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3871 
3872 		// TODO: split to each transcoder's PSR debug state
3873 		*val = READ_ONCE(intel_dp->psr.debug);
3874 		return 0;
3875 	}
3876 
3877 	return -ENODEV;
3878 }
3879 
3880 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3881 			i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3882 			"%llu\n");
3883 
3884 void intel_psr_debugfs_register(struct intel_display *display)
3885 {
3886 	struct drm_minor *minor = display->drm->primary;
3887 
3888 	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3889 			    display, &i915_edp_psr_debug_fops);
3890 
3891 	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3892 			    display, &i915_edp_psr_status_fops);
3893 }
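
/*
 * Resulting debugfs layout (illustrative; the "0" component is the DRM minor
 * number and varies per device):
 *
 *	/sys/kernel/debug/dri/0/i915_edp_psr_debug    (rw, see intel_psr_debug_set())
 *	/sys/kernel/debug/dri/0/i915_edp_psr_status   (ro)
 */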
3894 
3895 static const char *psr_mode_str(struct intel_dp *intel_dp)
3896 {
3897 	if (intel_dp->psr.panel_replay_enabled)
3898 		return "PANEL-REPLAY";
3899 	else if (intel_dp->psr.enabled)
3900 		return "PSR";
3901 
3902 	return "unknown";
3903 }
3904 
3905 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3906 {
3907 	struct intel_connector *connector = m->private;
3908 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3909 	static const char * const sink_status[] = {
3910 		"inactive",
3911 		"transition to active, capture and display",
3912 		"active, display from RFB",
3913 		"active, capture and display on sink device timings",
3914 		"transition to inactive, capture and display, timing re-sync",
3915 		"reserved",
3916 		"reserved",
3917 		"sink internal error",
3918 	};
3919 	const char *str;
3920 	int ret;
3921 	u8 status, error_status;
3922 
3923 	if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3924 		seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3925 		return -ENODEV;
3926 	}
3927 
3928 	if (connector->base.status != connector_status_connected)
3929 		return -ENODEV;
3930 
3931 	ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3932 	if (ret)
3933 		return ret;
3934 
3935 	status &= DP_PSR_SINK_STATE_MASK;
3936 	if (status < ARRAY_SIZE(sink_status))
3937 		str = sink_status[status];
3938 	else
3939 		str = "unknown";
3940 
3941 	seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3942 
3943 	seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3944 
3945 	if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3946 			    DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3947 			    DP_PSR_LINK_CRC_ERROR))
3948 		seq_puts(m, ":\n");
3949 	else
3950 		seq_puts(m, "\n");
3951 	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3952 		seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3953 	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3954 		seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3955 	if (error_status & DP_PSR_LINK_CRC_ERROR)
3956 		seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3957 
3958 	return ret;
3959 }
3960 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3961 
3962 static int i915_psr_status_show(struct seq_file *m, void *data)
3963 {
3964 	struct intel_connector *connector = m->private;
3965 	struct intel_dp *intel_dp = intel_attached_dp(connector);
3966 
3967 	return intel_psr_status(m, intel_dp);
3968 }
3969 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3970 
3971 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3972 {
3973 	struct intel_display *display = to_intel_display(connector);
3974 	struct dentry *root = connector->base.debugfs_entry;
3975 
3976 	if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3977 	    connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
3978 		return;
3979 
3980 	debugfs_create_file("i915_psr_sink_status", 0444, root,
3981 			    connector, &i915_psr_sink_status_fops);
3982 
3983 	if (HAS_PSR(display) || HAS_DP20(display))
3984 		debugfs_create_file("i915_psr_status", 0444, root,
3985 				    connector, &i915_psr_status_fops);
3986 }
3987