Lines Matching full:display
59 * Since Haswell, the display controller supports Panel Self-Refresh on display
61 * spec in eDP 1.3. The PSR feature allows the display to go to lower standby states
62 * when the system is idle but the display is on, as it eliminates display refresh
64 * display is unchanged.
108 * When unmasked, (nearly) all display register writes (e.g. even
263 struct intel_display *display = to_intel_display(intel_dp); in panel_replay_global_enabled() local
266 display->params.enable_panel_replay; in panel_replay_global_enabled()
271 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_psr_error_bit_get() local
273 return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR : in psr_irq_psr_error_bit_get()
279 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_post_exit_bit_get() local
281 return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT : in psr_irq_post_exit_bit_get()
287 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_pre_entry_bit_get() local
289 return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY : in psr_irq_pre_entry_bit_get()
295 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_mask_get() local
297 return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK : in psr_irq_mask_get()
301 static i915_reg_t psr_ctl_reg(struct intel_display *display, in psr_ctl_reg() argument
304 if (DISPLAY_VER(display) >= 8) in psr_ctl_reg()
305 return EDP_PSR_CTL(display, cpu_transcoder); in psr_ctl_reg()
310 static i915_reg_t psr_debug_reg(struct intel_display *display, in psr_debug_reg() argument
313 if (DISPLAY_VER(display) >= 8) in psr_debug_reg()
314 return EDP_PSR_DEBUG(display, cpu_transcoder); in psr_debug_reg()
319 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display, in psr_perf_cnt_reg() argument
322 if (DISPLAY_VER(display) >= 8) in psr_perf_cnt_reg()
323 return EDP_PSR_PERF_CNT(display, cpu_transcoder); in psr_perf_cnt_reg()
328 static i915_reg_t psr_status_reg(struct intel_display *display, in psr_status_reg() argument
331 if (DISPLAY_VER(display) >= 8) in psr_status_reg()
332 return EDP_PSR_STATUS(display, cpu_transcoder); in psr_status_reg()
337 static i915_reg_t psr_imr_reg(struct intel_display *display, in psr_imr_reg() argument
340 if (DISPLAY_VER(display) >= 12) in psr_imr_reg()
341 return TRANS_PSR_IMR(display, cpu_transcoder); in psr_imr_reg()
346 static i915_reg_t psr_iir_reg(struct intel_display *display, in psr_iir_reg() argument
349 if (DISPLAY_VER(display) >= 12) in psr_iir_reg()
350 return TRANS_PSR_IIR(display, cpu_transcoder); in psr_iir_reg()
355 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display, in psr_aux_ctl_reg() argument
358 if (DISPLAY_VER(display) >= 8) in psr_aux_ctl_reg()
359 return EDP_PSR_AUX_CTL(display, cpu_transcoder); in psr_aux_ctl_reg()
364 static i915_reg_t psr_aux_data_reg(struct intel_display *display, in psr_aux_data_reg() argument
367 if (DISPLAY_VER(display) >= 8) in psr_aux_data_reg()
368 return EDP_PSR_AUX_DATA(display, cpu_transcoder, i); in psr_aux_data_reg()
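Note: the psr_*_reg() helpers matched above all share the same version-gated register lookup: on display version 8+ (12+ for the IMR/IIR pair) they return the per-transcoder register, and the else branches, which do not contain the word "display" and therefore do not appear in this listing, presumably fall back to the legacy Haswell-era single-instance registers. Below is a minimal standalone sketch of that dispatch pattern, using hypothetical register offsets and a mock version query in place of the real DISPLAY_VER()/EDP_PSR_CTL() macros; it illustrates the shape of the helpers, not the kernel code itself.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the driver's register cookie and version query;
 * the offsets below are made up for illustration only. */
typedef struct { uint32_t offset; } fake_reg_t;

#define FAKE_HSW_SRD_CTL             ((fake_reg_t){ 0x1000 })
#define FAKE_EDP_PSR_CTL(transcoder) ((fake_reg_t){ 0x2000 + (transcoder) * 0x100 })

static int fake_display_ver(void)
{
	return 12;	/* pretend we are on a display version >= 8 platform */
}

/* Same shape as the psr_*_reg() helpers: pick the register by display version. */
static fake_reg_t fake_psr_ctl_reg(int cpu_transcoder)
{
	if (fake_display_ver() >= 8)
		return FAKE_EDP_PSR_CTL(cpu_transcoder);

	return FAKE_HSW_SRD_CTL;
}

int main(void)
{
	printf("PSR control register offset: 0x%x\n", fake_psr_ctl_reg(0).offset);
	return 0;
}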
375 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_control() local
387 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder), in psr_irq_control()
391 static void psr_event_print(struct intel_display *display, in psr_event_print() argument
394 drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val); in psr_event_print()
396 drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n"); in psr_event_print()
398 drm_dbg_kms(display->drm, "\tPSR2 disabled\n"); in psr_event_print()
400 drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n"); in psr_event_print()
402 drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n"); in psr_event_print()
404 drm_dbg_kms(display->drm, "\tGraphics reset\n"); in psr_event_print()
406 drm_dbg_kms(display->drm, "\tPCH interrupt\n"); in psr_event_print()
408 drm_dbg_kms(display->drm, "\tMemory up\n"); in psr_event_print()
410 drm_dbg_kms(display->drm, "\tFront buffer modification\n"); in psr_event_print()
412 drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n"); in psr_event_print()
414 drm_dbg_kms(display->drm, "\tPIPE registers updated\n"); in psr_event_print()
416 drm_dbg_kms(display->drm, "\tRegister updated\n"); in psr_event_print()
418 drm_dbg_kms(display->drm, "\tHDCP enabled\n"); in psr_event_print()
420 drm_dbg_kms(display->drm, "\tKVMR session enabled\n"); in psr_event_print()
422 drm_dbg_kms(display->drm, "\tVBI enabled\n"); in psr_event_print()
424 drm_dbg_kms(display->drm, "\tLPSP mode exited\n"); in psr_event_print()
426 drm_dbg_kms(display->drm, "\tPSR disabled\n"); in psr_event_print()
431 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_irq_handler() local
437 drm_dbg_kms(display->drm, in intel_psr_irq_handler()
444 drm_dbg_kms(display->drm, in intel_psr_irq_handler()
448 if (DISPLAY_VER(display) >= 9) { in intel_psr_irq_handler()
451 val = intel_de_rmw(display, in intel_psr_irq_handler()
452 PSR_EVENT(display, cpu_transcoder), in intel_psr_irq_handler()
455 psr_event_print(display, val, intel_dp->psr.sel_update_enabled); in intel_psr_irq_handler()
460 drm_warn(display->drm, "[transcoder %s] PSR aux error\n", in intel_psr_irq_handler()
473 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder), in intel_psr_irq_handler()
476 queue_work(display->wq.unordered, &intel_dp->psr.work); in intel_psr_irq_handler()
482 struct intel_display *display = to_intel_display(intel_dp); in intel_dp_get_sink_sync_latency() local
489 drm_dbg_kms(display->drm, in intel_dp_get_sink_sync_latency()
532 struct intel_display *display = to_intel_display(intel_dp); in intel_dp_get_su_granularity() local
558 drm_dbg_kms(display->drm, in intel_dp_get_su_granularity()
571 drm_dbg_kms(display->drm, in intel_dp_get_su_granularity()
585 struct intel_display *display = to_intel_display(intel_dp); in _panel_replay_init_dpcd() local
603 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
610 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
622 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
630 struct intel_display *display = to_intel_display(intel_dp); in _psr_init_dpcd() local
641 drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n", in _psr_init_dpcd()
645 drm_dbg_kms(display->drm, in _psr_init_dpcd()
651 drm_dbg_kms(display->drm, in _psr_init_dpcd()
660 if (DISPLAY_VER(display) >= 9 && in _psr_init_dpcd()
678 drm_dbg_kms(display->drm, "PSR2 %ssupported\n", in _psr_init_dpcd()
696 struct intel_display *display = to_intel_display(intel_dp); in hsw_psr_setup_aux() local
711 intel_de_write(display, in hsw_psr_setup_aux()
712 psr_aux_data_reg(display, cpu_transcoder, i >> 2), in hsw_psr_setup_aux()
727 intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder), in hsw_psr_setup_aux()
733 struct intel_display *display = to_intel_display(intel_dp); in psr2_su_region_et_valid() local
735 if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) || in psr2_su_region_et_valid()
774 struct intel_display *display = to_intel_display(intel_dp); in _psr_enable_sink() local
783 if (DISPLAY_VER(display) >= 8) in _psr_enable_sink()
823 struct intel_display *display = to_intel_display(intel_dp); in intel_psr1_get_tp_time() local
827 if (DISPLAY_VER(display) >= 11) in intel_psr1_get_tp_time()
830 if (display->params.psr_safest_params) { in intel_psr1_get_tp_time()
858 if (DISPLAY_VER(display) < 9 && in intel_psr1_get_tp_time()
864 if (intel_dp_source_supports_tps3(display) && in intel_psr1_get_tp_time()
875 struct intel_display *display = to_intel_display(intel_dp); in psr_compute_idle_frames() local
885 if (drm_WARN_ON(display->drm, idle_frames > 0xf)) in psr_compute_idle_frames()
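Note: the drm_WARN_ON(..., idle_frames > 0xf) match above indicates that psr_compute_idle_frames() produces a value that must fit a 4-bit idle-frames register field. Below is a standalone sketch of that clamp-and-pack step, with a hypothetical field macro standing in for the real idle-frames bits; the actual computation (from sink sync latency and VBT data) is not visible in this listing.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical 4-bit idle-frames field; the shift/mask are illustrative only. */
#define FAKE_PSR_IDLE_FRAMES_SHIFT	0
#define FAKE_PSR_IDLE_FRAMES_MASK	(0xfu << FAKE_PSR_IDLE_FRAMES_SHIFT)
#define FAKE_PSR_IDLE_FRAMES(x)		(((x) << FAKE_PSR_IDLE_FRAMES_SHIFT) & \
					 FAKE_PSR_IDLE_FRAMES_MASK)

static uint32_t pack_idle_frames(unsigned int idle_frames)
{
	/* Mirror the listed warning: the value has to fit in 4 bits. */
	if (idle_frames > 0xf) {
		fprintf(stderr, "idle_frames %u does not fit the 4-bit field, clamping\n",
			idle_frames);
		idle_frames = 0xf;
	}

	return FAKE_PSR_IDLE_FRAMES(idle_frames);
}

int main(void)
{
	printf("idle-frames control bits: 0x%x\n", pack_idle_frames(6));
	return 0;
}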
893 struct intel_display *display = to_intel_display(intel_dp); in is_dc5_dc6_blocked() local
894 u32 current_dc_state = intel_display_power_get_current_dc_state(display); in is_dc5_dc6_blocked()
895 struct intel_crtc *crtc = intel_crtc_for_pipe(display, intel_dp->psr.pipe); in is_dc5_dc6_blocked()
906 struct intel_display *display = to_intel_display(intel_dp); in hsw_activate_psr1() local
913 if (DISPLAY_VER(display) < 20) in hsw_activate_psr1()
916 if (display->platform.haswell) in hsw_activate_psr1()
924 if (DISPLAY_VER(display) >= 8) in hsw_activate_psr1()
927 if (DISPLAY_VER(display) >= 20) in hsw_activate_psr1()
930 intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder), in hsw_activate_psr1()
934 if ((DISPLAY_VER(display) == 20 || in hsw_activate_psr1()
935 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in hsw_activate_psr1()
937 intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display, in hsw_activate_psr1()
944 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_get_tp_time() local
948 if (display->params.psr_safest_params) in intel_psr2_get_tp_time()
992 struct intel_display *display = to_intel_display(intel_dp); in dg2_activate_panel_replay() local
1003 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), in dg2_activate_panel_replay()
1007 intel_de_rmw(display, in dg2_activate_panel_replay()
1008 PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder), in dg2_activate_panel_replay()
1011 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0, in dg2_activate_panel_replay()
1017 struct intel_display *display = to_intel_display(intel_dp); in hsw_activate_psr2() local
1024 if ((DISPLAY_VER(display) == 20 || in hsw_activate_psr2()
1025 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in hsw_activate_psr2()
1032 if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p) in hsw_activate_psr2()
1035 if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13) in hsw_activate_psr2()
1042 if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) { in hsw_activate_psr2()
1050 if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) { in hsw_activate_psr2()
1073 } else if (DISPLAY_VER(display) >= 20) { in hsw_activate_psr2()
1075 } else if (DISPLAY_VER(display) >= 12) { in hsw_activate_psr2()
1078 } else if (DISPLAY_VER(display) >= 9) { in hsw_activate_psr2()
1086 if (DISPLAY_VER(display) >= 20) in hsw_activate_psr2()
1092 tmp = intel_de_read(display, in hsw_activate_psr2()
1093 PSR2_MAN_TRK_CTL(display, cpu_transcoder)); in hsw_activate_psr2()
1094 drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE)); in hsw_activate_psr2()
1095 } else if (HAS_PSR2_SEL_FETCH(display)) { in hsw_activate_psr2()
1096 intel_de_write(display, in hsw_activate_psr2()
1097 PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0); in hsw_activate_psr2()
1107 intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val); in hsw_activate_psr2()
1109 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val); in hsw_activate_psr2()
1113 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder) in transcoder_has_psr2() argument
1115 if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) in transcoder_has_psr2()
1117 else if (DISPLAY_VER(display) >= 12) in transcoder_has_psr2()
1119 else if (DISPLAY_VER(display) >= 9) in transcoder_has_psr2()
1137 struct intel_display *display = to_intel_display(intel_dp); in psr2_program_idle_frames() local
1140 intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder), in psr2_program_idle_frames()
1147 struct intel_display *display = to_intel_display(intel_dp); in tgl_psr2_enable_dc3co() local
1150 intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO); in tgl_psr2_enable_dc3co()
1155 struct intel_display *display = to_intel_display(intel_dp); in tgl_psr2_disable_dc3co() local
1157 intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6); in tgl_psr2_disable_dc3co()
1190 struct intel_display *display = to_intel_display(intel_dp); in dc3co_is_pipe_port_compatible() local
1195 if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) in dc3co_is_pipe_port_compatible()
1205 struct intel_display *display = to_intel_display(intel_dp); in tgl_dc3co_exitline_compute_config() local
1207 struct i915_power_domains *power_domains = &display->power.domains; in tgl_dc3co_exitline_compute_config()
1231 if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) in tgl_dc3co_exitline_compute_config()
1241 if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay)) in tgl_dc3co_exitline_compute_config()
1250 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_sel_fetch_config_valid() local
1252 if (!display->params.enable_psr2_sel_fetch && in intel_psr2_sel_fetch_config_valid()
1254 drm_dbg_kms(display->drm, in intel_psr2_sel_fetch_config_valid()
1260 drm_dbg_kms(display->drm, in intel_psr2_sel_fetch_config_valid()
1271 struct intel_display *display = to_intel_display(intel_dp); in psr2_granularity_check() local
1293 if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) in psr2_granularity_check()
1314 struct intel_display *display = to_intel_display(intel_dp); in _compute_psr2_sdp_prior_scanline_indication() local
1328 if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b) in _compute_psr2_sdp_prior_scanline_indication()
1338 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_entry_setup_frames() local
1343 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1351 if (DISPLAY_VER(display) >= 20) { in intel_psr_entry_setup_frames()
1354 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1358 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1372 struct intel_display *display = to_intel_display(intel_dp); in wake_lines_fit_into_vblank() local
1380 wake_lines = DISPLAY_VER(display) < 20 ? in wake_lines_fit_into_vblank()
1398 struct intel_display *display = to_intel_display(intel_dp); in alpm_config_valid() local
1401 drm_dbg_kms(display->drm, in alpm_config_valid()
1407 drm_dbg_kms(display->drm, in alpm_config_valid()
1418 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_config_valid() local
1423 if (!intel_dp->psr.sink_psr2_support || display->params.enable_psr == 1) in intel_psr2_config_valid()
1427 if (display->platform.jasperlake || display->platform.elkhartlake) { in intel_psr2_config_valid()
1428 drm_dbg_kms(display->drm, "PSR2 not supported by phy\n"); in intel_psr2_config_valid()
1433 if (display->platform.rocketlake || display->platform.alderlake_s || in intel_psr2_config_valid()
1434 display->platform.dg2) { in intel_psr2_config_valid()
1435 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1440 if (display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) { in intel_psr2_config_valid()
1441 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1446 if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) { in intel_psr2_config_valid()
1447 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1459 (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)) { in intel_psr2_config_valid()
1460 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1465 if (DISPLAY_VER(display) >= 20) { in intel_psr2_config_valid()
1469 } else if (IS_DISPLAY_VER(display, 12, 14)) { in intel_psr2_config_valid()
1473 } else if (IS_DISPLAY_VER(display, 10, 11)) { in intel_psr2_config_valid()
1477 } else if (DISPLAY_VER(display) == 9) { in intel_psr2_config_valid()
1484 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1492 display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) { in intel_psr2_config_valid()
1493 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1503 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1518 struct intel_display *display = to_intel_display(intel_dp); in intel_sel_update_config_valid() local
1520 if (HAS_PSR2_SEL_FETCH(display) && in intel_sel_update_config_valid()
1522 !HAS_PSR_HW_TRACKING(display)) { in intel_sel_update_config_valid()
1523 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1529 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1538 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1543 if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 || in intel_sel_update_config_valid()
1548 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1554 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1572 struct intel_display *display = to_intel_display(intel_dp); in _psr_compute_config() local
1576 if (!CAN_PSR(intel_dp) || !display->params.enable_psr) in _psr_compute_config()
1590 drm_dbg_kms(display->drm, in _psr_compute_config()
1603 struct intel_display *display = to_intel_display(intel_dp); in _panel_replay_compute_config() local
1612 drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n"); in _panel_replay_compute_config()
1617 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1633 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1644 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1658 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_needs_wa_18037818876() local
1660 return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 && in intel_psr_needs_wa_18037818876()
1668 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_compute_config() local
1675 drm_dbg_kms(display->drm, "PSR disabled by flag\n"); in intel_psr_compute_config()
1680 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1686 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1697 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1717 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1722 if (DISPLAY_VER(display) != 20 && in intel_psr_compute_config()
1723 !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) in intel_psr_compute_config()
1731 for_each_intel_crtc(display->drm, crtc) in intel_psr_compute_config()
1743 struct intel_display *display = to_intel_display(encoder); in intel_psr_get_config() local
1776 if (HAS_PSR2_SEL_FETCH(display)) { in intel_psr_get_config()
1777 val = intel_de_read(display, in intel_psr_get_config()
1778 PSR2_MAN_TRK_CTL(display, cpu_transcoder)); in intel_psr_get_config()
1785 if (DISPLAY_VER(display) >= 12) { in intel_psr_get_config()
1786 val = intel_de_read(display, in intel_psr_get_config()
1787 TRANS_EXITLINE(display, cpu_transcoder)); in intel_psr_get_config()
1796 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_activate() local
1799 drm_WARN_ON(display->drm, in intel_psr_activate()
1800 transcoder_has_psr2(display, cpu_transcoder) && in intel_psr_activate()
1801 intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE); in intel_psr_activate()
1803 drm_WARN_ON(display->drm, in intel_psr_activate()
1804 intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE); in intel_psr_activate()
1806 drm_WARN_ON(display->drm, intel_dp->psr.active); in intel_psr_activate()
1808 drm_WARN_ON(display->drm, !intel_dp->psr.enabled); in intel_psr_activate()
1830 struct intel_display *display = to_intel_display(intel_dp); in wm_optimization_wa() local
1835 if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled) in wm_optimization_wa()
1839 if (DISPLAY_VER(display) == 12 && in wm_optimization_wa()
1845 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in wm_optimization_wa()
1848 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in wm_optimization_wa()
1855 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_enable_source() local
1863 if (DISPLAY_VER(display) < 9) in intel_psr_enable_source()
1880 if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp)) in intel_psr_enable_source()
1890 * higher than should be possible with an external display. in intel_psr_enable_source()
1894 if (DISPLAY_VER(display) >= 8 || display->platform.haswell_ult) in intel_psr_enable_source()
1897 if (DISPLAY_VER(display) < 20) in intel_psr_enable_source()
1904 if (IS_DISPLAY_VER(display, 9, 10)) in intel_psr_enable_source()
1908 if (display->platform.haswell) in intel_psr_enable_source()
1912 intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask); in intel_psr_enable_source()
1921 intel_de_rmw(display, in intel_psr_enable_source()
1922 TRANS_EXITLINE(display, cpu_transcoder), in intel_psr_enable_source()
1926 if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display)) in intel_psr_enable_source()
1927 intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING, in intel_psr_enable_source()
1938 if (DISPLAY_VER(display) == 9) in intel_psr_enable_source()
1939 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0, in intel_psr_enable_source()
1949 (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) || in intel_psr_enable_source()
1950 display->platform.alderlake_p)) in intel_psr_enable_source()
1951 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), in intel_psr_enable_source()
1956 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0)) in intel_psr_enable_source()
1957 intel_de_rmw(display, in intel_psr_enable_source()
1958 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder), in intel_psr_enable_source()
1961 else if (display->platform.alderlake_p) in intel_psr_enable_source()
1962 intel_de_rmw(display, CLKGATE_DIS_MISC, 0, in intel_psr_enable_source()
1967 if ((DISPLAY_VER(display) == 20 || in intel_psr_enable_source()
1968 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in intel_psr_enable_source()
1970 intel_dmc_block_pkgc(display, intel_dp->psr.pipe, true); in intel_psr_enable_source()
1977 struct intel_display *display = to_intel_display(intel_dp); in psr_interrupt_error_check() local
1992 val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder)); in psr_interrupt_error_check()
1996 drm_dbg_kms(display->drm, in psr_interrupt_error_check()
2008 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_enable_locked() local
2012 drm_WARN_ON(display->drm, intel_dp->psr.enabled); in intel_psr_enable_locked()
2035 drm_dbg_kms(display->drm, "Enabling Panel Replay\n"); in intel_psr_enable_locked()
2037 drm_dbg_kms(display->drm, "Enabling PSR%s\n", in intel_psr_enable_locked()
2076 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_exit() local
2081 if (transcoder_has_psr2(display, cpu_transcoder)) { in intel_psr_exit()
2082 val = intel_de_read(display, in intel_psr_exit()
2083 EDP_PSR2_CTL(display, cpu_transcoder)); in intel_psr_exit()
2084 drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE); in intel_psr_exit()
2087 val = intel_de_read(display, in intel_psr_exit()
2088 psr_ctl_reg(display, cpu_transcoder)); in intel_psr_exit()
2089 drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE); in intel_psr_exit()
2095 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), in intel_psr_exit()
2100 val = intel_de_rmw(display, in intel_psr_exit()
2101 EDP_PSR2_CTL(display, cpu_transcoder), in intel_psr_exit()
2104 drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE)); in intel_psr_exit()
2106 if ((DISPLAY_VER(display) == 20 || in intel_psr_exit()
2107 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in intel_psr_exit()
2109 intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display, in intel_psr_exit()
2113 val = intel_de_rmw(display, in intel_psr_exit()
2114 psr_ctl_reg(display, cpu_transcoder), in intel_psr_exit()
2117 drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE)); in intel_psr_exit()
2124 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_wait_exit_locked() local
2131 psr_status = EDP_PSR2_STATUS(display, cpu_transcoder); in intel_psr_wait_exit_locked()
2134 psr_status = psr_status_reg(display, cpu_transcoder); in intel_psr_wait_exit_locked()
2139 if (intel_de_wait_for_clear(display, psr_status, in intel_psr_wait_exit_locked()
2141 drm_err(display->drm, "Timed out waiting PSR idle state\n"); in intel_psr_wait_exit_locked()
2146 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_disable_locked() local
2155 drm_dbg_kms(display->drm, "Disabling Panel Replay\n"); in intel_psr_disable_locked()
2157 drm_dbg_kms(display->drm, "Disabling PSR%s\n", in intel_psr_disable_locked()
2167 if (DISPLAY_VER(display) >= 11) in intel_psr_disable_locked()
2168 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in intel_psr_disable_locked()
2174 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0)) in intel_psr_disable_locked()
2175 intel_de_rmw(display, in intel_psr_disable_locked()
2176 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder), in intel_psr_disable_locked()
2178 else if (display->platform.alderlake_p) in intel_psr_disable_locked()
2179 intel_de_rmw(display, CLKGATE_DIS_MISC, in intel_psr_disable_locked()
2199 if ((DISPLAY_VER(display) == 20 || in intel_psr_disable_locked()
2200 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in intel_psr_disable_locked()
2202 intel_dmc_block_pkgc(display, intel_dp->psr.pipe, false); in intel_psr_disable_locked()
2224 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_disable() local
2229 if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) && in intel_psr_disable()
2283 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_resume() local
2295 drm_warn(display->drm, "Unbalanced PSR pause/resume!\n"); in intel_psr_resume()
2320 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_needs_vblank_notification() local
2337 if ((DISPLAY_VER(display) == 20 || in intel_psr_needs_vblank_notification()
2338 IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) && in intel_psr_needs_vblank_notification()
2360 struct intel_display *display = to_intel_display(crtc); in intel_psr_trigger_frame_change_event() local
2363 intel_de_write_dsb(display, dsb, in intel_psr_trigger_frame_change_event()
2364 CURSURFLIVE(display, crtc->pipe), 0); in intel_psr_trigger_frame_change_event()
2375 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_min_vblank_delay() local
2381 if (intel_vrr_possible(crtc_state) && IS_DISPLAY_VER(display, 13, 14)) in intel_psr_min_vblank_delay()
2385 if (DISPLAY_VER(display) < 20) in intel_psr_min_vblank_delay()
2403 if (DISPLAY_VER(display) >= 30 && (crtc_state->has_panel_replay || in intel_psr_min_vblank_delay()
2406 else if (DISPLAY_VER(display) < 30 && (crtc_state->has_sel_update || in intel_psr_min_vblank_delay()
2414 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display) in man_trk_ctl_enable_bit_get() argument
2416 return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? 0 : in man_trk_ctl_enable_bit_get()
2420 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display) in man_trk_ctl_single_full_frame_bit_get() argument
2422 return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_single_full_frame_bit_get()
2427 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display) in man_trk_ctl_partial_frame_bit_get() argument
2429 return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_partial_frame_bit_get()
2434 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display) in man_trk_ctl_continuos_full_frame() argument
2436 return display->platform.alderlake_p || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_continuos_full_frame()
2443 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_force_update() local
2446 * Display WA #0884: skl+ in intel_psr_force_update()
2454 * This workaround does not exist for platforms with display version 10 or newer, in intel_psr_force_update()
2455 * but testing proved that it works up to display version 13; for newer in intel_psr_force_update()
2458 intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0); in intel_psr_force_update()
2464 struct intel_display *display = to_intel_display(crtc_state); in intel_psr2_program_trans_man_trk_ctl() local
2472 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr2_program_trans_man_trk_ctl()
2479 if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled) in intel_psr2_program_trans_man_trk_ctl()
2484 intel_de_write_dsb(display, dsb, in intel_psr2_program_trans_man_trk_ctl()
2485 PSR2_MAN_TRK_CTL(display, cpu_transcoder), in intel_psr2_program_trans_man_trk_ctl()
2491 intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), in intel_psr2_program_trans_man_trk_ctl()
2498 struct intel_display *display = to_intel_display(crtc_state); in psr2_man_trk_ctl_calc() local
2499 u32 val = man_trk_ctl_enable_bit_get(display); in psr2_man_trk_ctl_calc()
2502 val |= man_trk_ctl_partial_frame_bit_get(display); in psr2_man_trk_ctl_calc()
2505 val |= man_trk_ctl_continuos_full_frame(display); in psr2_man_trk_ctl_calc()
2512 if (display->platform.alderlake_p || DISPLAY_VER(display) >= 14) { in psr2_man_trk_ctl_calc()
2565 struct intel_display *display = to_intel_display(crtc_state); in intel_psr2_sel_fetch_pipe_alignment() local
2571 (display->platform.alderlake_p || DISPLAY_VER(display) >= 14)) in intel_psr2_sel_fetch_pipe_alignment()
2659 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_apply_pr_link_on_su_wa() local
2675 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_apply_pr_link_on_su_wa()
2691 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_apply_su_area_workarounds() local
2695 ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) || in intel_psr_apply_su_area_workarounds()
2696 display->platform.alderlake_p || display->platform.tigerlake)) && in intel_psr_apply_su_area_workarounds()
2701 if (DISPLAY_VER(display) == 30) in intel_psr_apply_su_area_workarounds()
2708 struct intel_display *display = to_intel_display(state); in intel_psr2_sel_fetch_update() local
2804 drm_info_once(display->drm, in intel_psr2_sel_fetch_update()
2893 void intel_psr2_panic_force_full_update(struct intel_display *display, in intel_psr2_panic_force_full_update() argument
2898 u32 val = man_trk_ctl_enable_bit_get(display); in intel_psr2_panic_force_full_update()
2901 val |= man_trk_ctl_partial_frame_bit_get(display); in intel_psr2_panic_force_full_update()
2902 val |= man_trk_ctl_continuos_full_frame(display); in intel_psr2_panic_force_full_update()
2905 intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val); in intel_psr2_panic_force_full_update()
2910 intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0); in intel_psr2_panic_force_full_update()
2916 struct intel_display *display = to_intel_display(state); in intel_psr_pre_plane_update() local
2923 if (!HAS_PSR(display)) in intel_psr_pre_plane_update()
2940 * - Display WA #1136: skl, bxt in intel_psr_pre_plane_update()
2948 (DISPLAY_VER(display) < 11 && new_crtc_state->wm_level_disabled)) in intel_psr_pre_plane_update()
2962 struct intel_display *display = to_intel_display(state); in intel_psr_post_plane_update() local
2978 drm_WARN_ON(display->drm, in intel_psr_post_plane_update()
2984 /* Display WA #1136: skl, bxt */ in intel_psr_post_plane_update()
2985 keep_disabled |= DISPLAY_VER(display) < 11 && in intel_psr_post_plane_update()
3020 struct intel_display *display = to_intel_display(new_crtc_state); in _psr2_ready_for_pipe_update_locked() local
3029 intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder), in _psr2_ready_for_pipe_update_locked()
3035 return intel_de_wait_for_clear(display, in _psr2_ready_for_pipe_update_locked()
3036 EDP_PSR2_STATUS(display, cpu_transcoder), in _psr2_ready_for_pipe_update_locked()
3045 struct intel_display *display = to_intel_display(new_crtc_state); in _psr1_ready_for_pipe_update_locked() local
3049 intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder), in _psr1_ready_for_pipe_update_locked()
3055 return intel_de_wait_for_clear(display, in _psr1_ready_for_pipe_update_locked()
3056 psr_status_reg(display, cpu_transcoder), in _psr1_ready_for_pipe_update_locked()
3070 struct intel_display *display = to_intel_display(new_crtc_state); in intel_psr_wait_for_idle_locked() local
3076 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_wait_for_idle_locked()
3094 drm_err(display->drm, in intel_psr_wait_for_idle_locked()
3113 struct intel_display *display = to_intel_display(intel_dp); in __psr_wait_for_idle_locked() local
3124 reg = EDP_PSR2_STATUS(display, cpu_transcoder); in __psr_wait_for_idle_locked()
3127 reg = psr_status_reg(display, cpu_transcoder); in __psr_wait_for_idle_locked()
3133 err = intel_de_wait_for_clear(display, reg, mask, 50); in __psr_wait_for_idle_locked()
3135 drm_err(display->drm, in __psr_wait_for_idle_locked()
3143 static int intel_psr_fastset_force(struct intel_display *display) in intel_psr_fastset_force() argument
3151 state = drm_atomic_state_alloc(display->drm); in intel_psr_fastset_force()
3161 drm_connector_list_iter_begin(display->drm, &conn_iter); in intel_psr_fastset_force()
3208 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_debug_set() local
3219 drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val); in intel_psr_debug_set()
3244 ret = intel_psr_fastset_force(display); in intel_psr_debug_set()
3301 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_configure_full_frame_update() local
3307 if (DISPLAY_VER(display) >= 20) in intel_psr_configure_full_frame_update()
3308 intel_de_write(display, LNL_SFF_CTL(cpu_transcoder), in intel_psr_configure_full_frame_update()
3311 intel_de_write(display, in intel_psr_configure_full_frame_update()
3312 PSR2_MAN_TRK_CTL(display, cpu_transcoder), in intel_psr_configure_full_frame_update()
3313 man_trk_ctl_enable_bit_get(display) | in intel_psr_configure_full_frame_update()
3314 man_trk_ctl_partial_frame_bit_get(display) | in intel_psr_configure_full_frame_update()
3315 man_trk_ctl_single_full_frame_bit_get(display) | in intel_psr_configure_full_frame_update()
3316 man_trk_ctl_continuos_full_frame(display)); in intel_psr_configure_full_frame_update()
3321 struct intel_display *display = to_intel_display(intel_dp); in _psr_invalidate_handle() local
3323 if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) { in _psr_invalidate_handle()
3337 * @display: display device
3348 void intel_psr_invalidate(struct intel_display *display, in intel_psr_invalidate() argument
3356 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_invalidate()
3386 struct intel_display *display = to_intel_display(intel_dp); in tgl_dc3co_flush_locked() local
3394 * when the delayed work does run, that means the display has been idle. in tgl_dc3co_flush_locked()
3401 mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work, in tgl_dc3co_flush_locked()
3407 struct intel_display *display = to_intel_display(intel_dp); in _psr_flush_handle() local
3409 if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) { in _psr_flush_handle()
3442 queue_work(display->wq.unordered, &intel_dp->psr.work); in _psr_flush_handle()
3447 * @display: display device
3458 void intel_psr_flush(struct intel_display *display, in intel_psr_flush() argument
3463 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_flush()
3512 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_init() local
3516 if (!(HAS_PSR(display) || HAS_DP20(display))) in intel_psr_init()
3528 if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) { in intel_psr_init()
3529 drm_dbg_kms(display->drm, in intel_psr_init()
3534 if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) || in intel_psr_init()
3535 DISPLAY_VER(display) >= 20) in intel_psr_init()
3538 if (HAS_PSR(display) && intel_dp_is_edp(intel_dp)) in intel_psr_init()
3542 if (DISPLAY_VER(display) < 12) in intel_psr_init()
3592 struct intel_display *display = to_intel_display(intel_dp); in psr_capability_changed_check() local
3599 drm_err(display->drm, "Error reading DP_PSR_ESI\n"); in psr_capability_changed_check()
3606 drm_dbg_kms(display->drm, in psr_capability_changed_check()
3623 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_short_pulse() local
3641 drm_err(display->drm, in intel_psr_short_pulse()
3654 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3657 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3660 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3663 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3667 drm_err(display->drm, in intel_psr_short_pulse()
3733 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_lock() local
3739 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_lock()
3756 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_unlock() local
3762 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_unlock()
3774 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_apply_underrun_on_idle_wa_locked() local
3786 intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display, in intel_psr_apply_underrun_on_idle_wa_locked()
3793 struct intel_display *display = container_of(work, typeof(*display), in psr_dc5_dc6_wa_work() local
3797 for_each_intel_encoder_with_psr(display->drm, encoder) { in psr_dc5_dc6_wa_work()
3812 * @display: intel display struct
3817 void intel_psr_notify_dc5_dc6(struct intel_display *display) in intel_psr_notify_dc5_dc6() argument
3819 if (DISPLAY_VER(display) != 20 && in intel_psr_notify_dc5_dc6()
3820 !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) in intel_psr_notify_dc5_dc6()
3823 schedule_work(&display->psr_dc5_dc6_wa_work); in intel_psr_notify_dc5_dc6()
3828 * @display: intel display struct
3833 void intel_psr_dc5_dc6_wa_init(struct intel_display *display) in intel_psr_dc5_dc6_wa_init() argument
3835 if (DISPLAY_VER(display) != 20 && in intel_psr_dc5_dc6_wa_init()
3836 !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) in intel_psr_dc5_dc6_wa_init()
3839 INIT_WORK(&display->psr_dc5_dc6_wa_work, psr_dc5_dc6_wa_work); in intel_psr_dc5_dc6_wa_init()
3854 struct intel_display *display = to_intel_display(state); in intel_psr_notify_pipe_change() local
3857 if (DISPLAY_VER(display) != 20 && in intel_psr_notify_pipe_change()
3858 !IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) in intel_psr_notify_pipe_change()
3861 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_notify_pipe_change()
3897 * @display: intel display struct
3903 void intel_psr_notify_vblank_enable_disable(struct intel_display *display, in intel_psr_notify_vblank_enable_disable() argument
3908 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_notify_vblank_enable_disable()
3931 intel_display_power_set_target_dc_state(display, enable ? DC_STATE_DISABLE : in intel_psr_notify_vblank_enable_disable()
3938 struct intel_display *display = to_intel_display(intel_dp); in psr_source_status() local
3943 if ((intel_dp_is_edp(intel_dp) || DISPLAY_VER(display) >= 30) && in psr_source_status()
3958 val = intel_de_read(display, in psr_source_status()
3959 EDP_PSR2_STATUS(display, cpu_transcoder)); in psr_source_status()
3974 val = intel_de_read(display, in psr_source_status()
3975 psr_status_reg(display, cpu_transcoder)); in psr_source_status()
4037 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_status() local
4049 wakeref = intel_display_rpm_get(display); in intel_psr_status()
4062 val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder)); in intel_psr_status()
4065 psr2_ctl = intel_de_read(display, in intel_psr_status()
4066 EDP_PSR2_CTL(display, in intel_psr_status()
4071 val = intel_de_read(display, in intel_psr_status()
4072 EDP_PSR2_CTL(display, cpu_transcoder)); in intel_psr_status()
4075 val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)); in intel_psr_status()
4090 val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder)); in intel_psr_status()
4108 if (DISPLAY_VER(display) < 13) { in intel_psr_status()
4114 val = intel_de_read(display, in intel_psr_status()
4115 PSR2_SU_STATUS(display, cpu_transcoder, frame)); in intel_psr_status()
4137 intel_display_rpm_put(display, wakeref); in intel_psr_status()
4144 struct intel_display *display = m->private; in i915_edp_psr_status_show() local
4148 if (!HAS_PSR(display)) in i915_edp_psr_status_show()
4152 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_status_show()
4167 struct intel_display *display = data; in i915_edp_psr_debug_set() local
4171 if (!HAS_PSR(display)) in i915_edp_psr_debug_set()
4174 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_debug_set()
4177 drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val); in i915_edp_psr_debug_set()
4180 with_intel_display_rpm(display) in i915_edp_psr_debug_set()
4190 struct intel_display *display = data; in i915_edp_psr_debug_get() local
4193 if (!HAS_PSR(display)) in i915_edp_psr_debug_get()
4196 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_debug_get()
4211 void intel_psr_debugfs_register(struct intel_display *display) in intel_psr_debugfs_register() argument
4213 struct dentry *debugfs_root = display->drm->debugfs_root; in intel_psr_debugfs_register()
4216 display, &i915_edp_psr_debug_fops); in intel_psr_debugfs_register()
4219 display, &i915_edp_psr_status_fops); in intel_psr_debugfs_register()
4238 "transition to active, capture and display", in i915_psr_sink_status_show()
4239 "active, display from RFB", in i915_psr_sink_status_show()
4240 "active, capture and display on sink device timings", in i915_psr_sink_status_show()
4241 "transition to inactive, capture and display, timing re-sync", in i915_psr_sink_status_show()
4300 struct intel_display *display = to_intel_display(connector); in intel_psr_connector_debugfs_add() local
4310 if (HAS_PSR(display) || HAS_DP20(display)) in intel_psr_connector_debugfs_add()