Lines Matching refs:display

39 struct intel_display *display = to_intel_display(encoder);
42 if (display->platform.pantherlake && phy == PHY_A)
45 if ((display->platform.lunarlake || display->platform.meteorlake) && phy < PHY_C)
69 * by display and lane 1 is owned by USB.
76 assert_dc_off(struct intel_display *display)
80 enabled = intel_display_power_is_enabled(display, POWER_DOMAIN_DC_OFF);
81 drm_WARN_ON(display->drm, !enabled);
86 struct intel_display *display = to_intel_display(encoder);
90 intel_de_rmw(display,
91 XELPDP_PORT_MSGBUS_TIMER(display, encoder->port, lane),
107 struct intel_display *display = to_intel_display(encoder);
112 wakeref = intel_display_power_get(display, POWER_DOMAIN_DC_OFF);
120 struct intel_display *display = to_intel_display(encoder);
124 intel_display_power_put(display, POWER_DOMAIN_DC_OFF, wakeref);
130 struct intel_display *display = to_intel_display(encoder);
132 intel_de_rmw(display,
133 XELPDP_PORT_P2M_MSGBUS_STATUS(display, encoder->port, lane),
139 struct intel_display *display = to_intel_display(encoder);
143 intel_de_write(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
146 if (intel_de_wait_for_clear(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
149 drm_err_once(display->drm,
161 struct intel_display *display = to_intel_display(encoder);
165 if (intel_de_wait_custom(display,
166 XELPDP_PORT_P2M_MSGBUS_STATUS(display, port, lane),
171 drm_dbg_kms(display->drm,
175 if (!(intel_de_read(display, XELPDP_PORT_MSGBUS_TIMER(display, port, lane)) &
177 drm_dbg_kms(display->drm,
186 drm_dbg_kms(display->drm,
195 drm_dbg_kms(display->drm,
209 struct intel_display *display = to_intel_display(encoder);
215 if (intel_de_wait_for_clear(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
218 drm_dbg_kms(display->drm,
224 intel_de_write(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
240 if (DISPLAY_VER(display) < 30)
249 struct intel_display *display = to_intel_display(encoder);
253 assert_dc_off(display);
263 drm_err_once(display->drm,
281 struct intel_display *display = to_intel_display(encoder);
287 if (intel_de_wait_for_clear(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
290 drm_dbg_kms(display->drm,
296 intel_de_write(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
303 if (intel_de_wait_for_clear(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
306 drm_dbg_kms(display->drm,
316 } else if ((intel_de_read(display, XELPDP_PORT_P2M_MSGBUS_STATUS(display, port, lane)) &
318 drm_dbg_kms(display->drm,
331 if (DISPLAY_VER(display) < 30)
340 struct intel_display *display = to_intel_display(encoder);
344 assert_dc_off(display);
354 drm_err_once(display->drm,
370 struct intel_display *display = to_intel_display(encoder);
372 assert_dc_off(display);
384 struct intel_display *display = to_intel_display(encoder);
387 assert_dc_off(display);
451 struct intel_display *display = to_intel_display(encoder);
466 if (drm_WARN_ON_ONCE(display->drm, !trans)) {
2028 struct intel_display *display = to_intel_display(encoder);
2031 if (intel_panel_use_ssc(display)) {
2042 struct intel_display *display = to_intel_display(encoder);
2048 drm_WARN_ON(display->drm, ARRAY_SIZE(pll_state->c10.pll) < 9);
2128 static void intel_c10_pll_program(struct intel_display *display,
2156 static void intel_c10pll_dump_hw_state(struct intel_display *display,
2165 drm_dbg_kms(display->drm, "c10pll_hw_state: fracen: %s, ",
2172 drm_dbg_kms(display->drm, "quot: %u, rem: %u, den: %u,\n",
2179 drm_dbg_kms(display->drm,
2182 drm_dbg_kms(display->drm, "c10pll_rawhw_state:");
2183 drm_dbg_kms(display->drm, "tx: 0x%x, cmn: 0x%x\n", hw_state->tx,
2188 drm_dbg_kms(display->drm,
2211 struct intel_display *display = to_intel_display(crtc_state);
2216 if (DISPLAY_VER(display) >= 20) {
2219 } else if (display->platform.battlemage) {
2221 } else if (display->platform.meteorlake_u ||
2306 struct intel_display *display = to_intel_display(crtc_state);
2310 if (DISPLAY_RUNTIME_INFO(display)->edp_typec_support)
2312 if (DISPLAY_VERx100(display) == 1401)
2316 if (DISPLAY_VER(display) >= 30)
2318 else if (DISPLAY_VERx100(display) == 1401)
2422 struct intel_display *display = to_intel_display(encoder);
2437 PHY_C20_B_TX_CNTX_CFG(display, i));
2441 PHY_C20_A_TX_CNTX_CFG(display, i));
2449 PHY_C20_B_CMN_CNTX_CFG(display, i));
2453 PHY_C20_A_CMN_CNTX_CFG(display, i));
2462 PHY_C20_B_MPLLB_CNTX_CFG(display, i));
2466 PHY_C20_A_MPLLB_CNTX_CFG(display, i));
2474 PHY_C20_B_MPLLA_CNTX_CFG(display, i));
2478 PHY_C20_A_MPLLA_CNTX_CFG(display, i));
2487 static void intel_c20pll_dump_hw_state(struct intel_display *display,
2492 drm_dbg_kms(display->drm, "c20pll_hw_state:\n");
2493 drm_dbg_kms(display->drm,
2496 drm_dbg_kms(display->drm,
2502 drm_dbg_kms(display->drm, "mpllb[%d] = 0x%.4x\n", i,
2506 drm_dbg_kms(display->drm, "mplla[%d] = 0x%.4x\n", i,
2511 void intel_cx0pll_dump_hw_state(struct intel_display *display,
2515 intel_c10pll_dump_hw_state(display, &hw_state->c10);
2517 intel_c20pll_dump_hw_state(display, &hw_state->c20);
2617 static void intel_c20_pll_program(struct intel_display *display,
2645 PHY_C20_A_TX_CNTX_CFG(display, i),
2649 PHY_C20_B_TX_CNTX_CFG(display, i),
2657 PHY_C20_A_CMN_CNTX_CFG(display, i),
2661 PHY_C20_B_CMN_CNTX_CFG(display, i),
2670 PHY_C20_A_MPLLB_CNTX_CFG(display, i),
2674 PHY_C20_B_MPLLB_CNTX_CFG(display, i),
2681 PHY_C20_A_MPLLA_CNTX_CFG(display, i),
2685 PHY_C20_B_MPLLA_CNTX_CFG(display, i),
2753 struct intel_display *display = to_intel_display(encoder);
2756 intel_de_rmw(display, XELPDP_PORT_BUF_CTL1(display, encoder->port),
2766 val |= XELPDP_DDI_CLOCK_SELECT_PREP(display, XELPDP_DDI_CLOCK_SELECT_DIV18CLK);
2768 val |= XELPDP_DDI_CLOCK_SELECT_PREP(display, XELPDP_DDI_CLOCK_SELECT_MAXPCLK);
2777 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
2779 XELPDP_DDI_CLOCK_SELECT_MASK(display) | XELPDP_SSC_ENABLE_PLLA |
2808 struct intel_display *display = to_intel_display(encoder);
2811 i915_reg_t buf_ctl2_reg = XELPDP_PORT_BUF_CTL2(display, port);
2814 intel_de_rmw(display, buf_ctl2_reg,
2820 if (intel_de_wait_for_clear(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
2823 drm_dbg_kms(display->drm,
2829 intel_de_rmw(display, buf_ctl2_reg,
2834 if (intel_de_wait_custom(display, buf_ctl2_reg,
2837 drm_warn(display->drm,
2844 struct intel_display *display = to_intel_display(encoder);
2847 intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
2850 intel_de_rmw(display, XELPDP_PORT_BUF_CTL3(display, port),
2882 struct intel_display *display = to_intel_display(encoder);
2895 if (intel_de_wait_custom(display, XELPDP_PORT_BUF_CTL1(display, port),
2899 drm_warn(display->drm,
2903 intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port), lane_pipe_reset,
2906 if (intel_de_wait_custom(display, XELPDP_PORT_BUF_CTL2(display, port),
2909 drm_warn(display->drm,
2913 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
2917 if (intel_de_wait_custom(display, XELPDP_PORT_CLOCK_CTL(display, port),
2921 drm_warn(display->drm,
2929 intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port), lane_pipe_reset, 0);
2931 if (intel_de_wait_for_clear(display, XELPDP_PORT_BUF_CTL2(display, port),
2934 drm_warn(display->drm,
3009 struct intel_display *display = to_intel_display(encoder);
3041 intel_c10_pll_program(display, encoder, &pll_state->c10);
3043 intel_c20_pll_program(display, encoder, &pll_state->c20, is_dp, port_clock);
3060 intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), port_clock);
3066 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3071 if (intel_de_wait_custom(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3075 drm_warn(display->drm, "Port %c PLL not locked after %dus.\n",
3097 struct intel_display *display = to_intel_display(encoder);
3100 val = intel_de_read(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port));
3102 clock = XELPDP_DDI_CLOCK_SELECT_GET(display, val);
3104 drm_WARN_ON(display->drm, !(val & XELPDP_FORWARD_CLOCK_UNGATE));
3105 drm_WARN_ON(display->drm, !(val & XELPDP_TBT_CLOCK_REQUEST));
3106 drm_WARN_ON(display->drm, !(val & XELPDP_TBT_CLOCK_ACK));
3127 static int intel_mtl_tbt_clock_select(struct intel_display *display,
3140 if (DISPLAY_VER(display) < 30) {
3141 drm_WARN(display->drm, 1, "UHBR10 not supported for the platform\n");
3146 if (DISPLAY_VER(display) < 30) {
3147 drm_WARN(display->drm, 1, "UHBR20 not supported for the platform\n");
3160 struct intel_display *display = to_intel_display(encoder);
3170 mask = XELPDP_DDI_CLOCK_SELECT_MASK(display);
3171 val |= XELPDP_DDI_CLOCK_SELECT_PREP(display,
3172 intel_mtl_tbt_clock_select(display, crtc_state->port_clock));
3177 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3181 val = intel_de_read(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port));
3192 intel_de_write(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port), val);
3195 if (intel_de_wait_custom(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3199 drm_warn(display->drm,
3212 intel_de_write(display, DDI_CLK_VALFREQ(encoder->port),
3229 struct intel_display *display = to_intel_display(encoder);
3234 if ((display->platform.battlemage && encoder->port == PORT_A) ||
3235 (DISPLAY_VER(display) >= 30 && encoder->type == INTEL_OUTPUT_EDP))
3243 struct intel_display *display = to_intel_display(encoder);
3260 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3265 intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), 0);
3270 if (intel_de_wait_custom(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3274 drm_warn(display->drm,
3284 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3285 XELPDP_DDI_CLOCK_SELECT_MASK(display), 0);
3286 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3294 struct intel_display *display = to_intel_display(encoder);
3298 return intel_de_read(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port)) &
3304 struct intel_display *display = to_intel_display(encoder);
3315 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3319 if (intel_de_wait_custom(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3321 drm_warn(display->drm,
3333 intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
3334 XELPDP_DDI_CLOCK_SELECT_MASK(display) |
3338 intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), 0);
3355 struct intel_display *display = to_intel_display(encoder);
3362 val = intel_de_read(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port));
3363 clock = XELPDP_DDI_CLOCK_SELECT_GET(display, val);
3377 struct intel_display *display = to_intel_display(state);
3384 INTEL_DISPLAY_STATE_WARN(display, mpllb_hw_state->pll[i] != expected,
3390 INTEL_DISPLAY_STATE_WARN(display, mpllb_hw_state->tx != mpllb_sw_state->tx,
3395 INTEL_DISPLAY_STATE_WARN(display, mpllb_hw_state->cmn != mpllb_sw_state->cmn,
3484 struct intel_display *display = to_intel_display(state);
3491 INTEL_DISPLAY_STATE_WARN(display, mpll_hw_state->clock != clock,
3496 INTEL_DISPLAY_STATE_WARN(display, sw_use_mpllb != hw_use_mpllb,
3503 INTEL_DISPLAY_STATE_WARN(display, mpll_hw_state->mpllb[i] != mpll_sw_state->mpllb[i],
3510 INTEL_DISPLAY_STATE_WARN(display, mpll_hw_state->mplla[i] != mpll_sw_state->mplla[i],
3518 INTEL_DISPLAY_STATE_WARN(display, mpll_hw_state->tx[i] != mpll_sw_state->tx[i],
3525 INTEL_DISPLAY_STATE_WARN(display, mpll_hw_state->cmn[i] != mpll_sw_state->cmn[i],
3535 struct intel_display *display = to_intel_display(state);
3541 if (DISPLAY_VER(display) < 14)
3566 * The dedicated display PHYs reset to a power state that blocks S0ix, increasing idle
3577 void intel_cx0_pll_power_save_wa(struct intel_display *display)
3581 if (DISPLAY_VER(display) != 30)
3584 for_each_intel_encoder(display->drm, encoder) {
3601 drm_WARN_ON(display->drm,
3606 drm_dbg_kms(display->drm,
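
The matches above repeat one access pattern throughout the file: derive the display from the encoder with to_intel_display(), hold the DC_OFF power domain while touching the PHY message bus, poll a port register with a timeout, and log if the handshake fails. Below is a minimal sketch of that pattern, not a function from the driver: cx0_msgbus_access_sketch is a hypothetical helper, and XELPDP_PORT_M2P_TRANSACTION_PENDING / XELPDP_MSGBUS_TIMEOUT_SLOW are bit/timeout names assumed from intel_cx0_phy_regs.h rather than taken from the matches themselves.

/*
 * Sketch only; in the driver this would rely on intel_de.h,
 * intel_display_power.h and intel_cx0_phy_regs.h being included.
 */
static void cx0_msgbus_access_sketch(struct intel_encoder *encoder, int lane)
{
	struct intel_display *display = to_intel_display(encoder);
	enum port port = encoder->port;
	intel_wakeref_t wakeref;

	/* Keep DC states disabled for the duration of the message-bus access. */
	wakeref = intel_display_power_get(display, POWER_DOMAIN_DC_OFF);

	/*
	 * Wait for any previous M2P transaction to drain before using the
	 * lane; the real driver additionally resets the bus on timeout.
	 */
	if (intel_de_wait_for_clear(display,
				    XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
				    XELPDP_PORT_M2P_TRANSACTION_PENDING,
				    XELPDP_MSGBUS_TIMEOUT_SLOW))
		drm_dbg_kms(display->drm,
			    "PORT %c lane %d: previous transaction still pending\n",
			    port_name(port), lane);

	/* ... the actual message-bus read/write would go here ... */

	intel_display_power_put(display, POWER_DOMAIN_DC_OFF, wakeref);
}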