Lines matching refs: i915
37 static void skl_sagv_disable(struct drm_i915_private *i915);
55 u8 intel_enabled_dbuf_slices_mask(struct drm_i915_private *i915) in intel_enabled_dbuf_slices_mask() argument
60 for_each_dbuf_slice(i915, slice) { in intel_enabled_dbuf_slices_mask()
61 if (intel_de_read(i915, DBUF_CTL_S(slice)) & DBUF_POWER_STATE) in intel_enabled_dbuf_slices_mask()
72 static bool skl_needs_memory_bw_wa(struct drm_i915_private *i915) in skl_needs_memory_bw_wa() argument
74 return DISPLAY_VER(i915) == 9; in skl_needs_memory_bw_wa()
78 intel_has_sagv(struct drm_i915_private *i915) in intel_has_sagv() argument
80 struct intel_display *display = &i915->display; in intel_has_sagv()
86 intel_sagv_block_time(struct drm_i915_private *i915) in intel_sagv_block_time() argument
88 struct intel_display *display = &i915->display; in intel_sagv_block_time()
100 ret = snb_pcode_read(&i915->uncore, in intel_sagv_block_time()
118 static void intel_sagv_init(struct drm_i915_private *i915) in intel_sagv_init() argument
120 struct intel_display *display = &i915->display; in intel_sagv_init()
130 skl_sagv_disable(i915); in intel_sagv_init()
134 display->sagv.block_time_us = intel_sagv_block_time(i915); in intel_sagv_init()
137 str_yes_no(intel_has_sagv(i915)), display->sagv.block_time_us); in intel_sagv_init()
145 if (!intel_has_sagv(i915)) in intel_sagv_init()
160 static void skl_sagv_enable(struct drm_i915_private *i915) in skl_sagv_enable() argument
164 if (!intel_has_sagv(i915)) in skl_sagv_enable()
167 if (i915->display.sagv.status == I915_SAGV_ENABLED) in skl_sagv_enable()
170 drm_dbg_kms(&i915->drm, "Enabling SAGV\n"); in skl_sagv_enable()
171 ret = snb_pcode_write(&i915->uncore, GEN9_PCODE_SAGV_CONTROL, in skl_sagv_enable()
180 if (IS_SKYLAKE(i915) && ret == -ENXIO) { in skl_sagv_enable()
181 drm_dbg(&i915->drm, "No SAGV found on system, ignoring\n"); in skl_sagv_enable()
182 i915->display.sagv.status = I915_SAGV_NOT_CONTROLLED; in skl_sagv_enable()
185 drm_err(&i915->drm, "Failed to enable SAGV\n"); in skl_sagv_enable()
189 i915->display.sagv.status = I915_SAGV_ENABLED; in skl_sagv_enable()
192 static void skl_sagv_disable(struct drm_i915_private *i915) in skl_sagv_disable() argument
196 if (!intel_has_sagv(i915)) in skl_sagv_disable()
199 if (i915->display.sagv.status == I915_SAGV_DISABLED) in skl_sagv_disable()
202 drm_dbg_kms(&i915->drm, "Disabling SAGV\n"); in skl_sagv_disable()
204 ret = skl_pcode_request(&i915->uncore, GEN9_PCODE_SAGV_CONTROL, in skl_sagv_disable()
212 if (IS_SKYLAKE(i915) && ret == -ENXIO) { in skl_sagv_disable()
213 drm_dbg(&i915->drm, "No SAGV found on system, ignoring\n"); in skl_sagv_disable()
214 i915->display.sagv.status = I915_SAGV_NOT_CONTROLLED; in skl_sagv_disable()
217 drm_err(&i915->drm, "Failed to disable SAGV (%d)\n", ret); in skl_sagv_disable()
221 i915->display.sagv.status = I915_SAGV_DISABLED; in skl_sagv_disable()
226 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_sagv_pre_plane_update() local
233 if (!intel_can_enable_sagv(i915, new_bw_state)) in skl_sagv_pre_plane_update()
234 skl_sagv_disable(i915); in skl_sagv_pre_plane_update()
239 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_sagv_post_plane_update() local
246 if (intel_can_enable_sagv(i915, new_bw_state)) in skl_sagv_post_plane_update()
247 skl_sagv_enable(i915); in skl_sagv_post_plane_update()
252 struct drm_i915_private *i915 = to_i915(state->base.dev); in icl_sagv_pre_plane_update() local
270 drm_dbg_kms(&i915->drm, "Restricting QGV points: 0x%x -> 0x%x\n", in icl_sagv_pre_plane_update()
279 icl_pcode_restrict_qgv_points(i915, new_mask); in icl_sagv_pre_plane_update()
284 struct drm_i915_private *i915 = to_i915(state->base.dev); in icl_sagv_post_plane_update() local
302 drm_dbg_kms(&i915->drm, "Relaxing QGV points: 0x%x -> 0x%x\n", in icl_sagv_post_plane_update()
311 icl_pcode_restrict_qgv_points(i915, new_mask); in icl_sagv_post_plane_update()
316 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_sagv_pre_plane_update() local
325 if (!intel_has_sagv(i915)) in intel_sagv_pre_plane_update()
328 if (DISPLAY_VER(i915) >= 11) in intel_sagv_pre_plane_update()
336 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_sagv_post_plane_update() local
345 if (!intel_has_sagv(i915)) in intel_sagv_post_plane_update()
348 if (DISPLAY_VER(i915) >= 11) in intel_sagv_post_plane_update()
357 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_can_enable_sagv() local
361 if (!intel_has_sagv(i915)) in skl_crtc_can_enable_sagv()
380 for (level = i915->display.wm.num_levels - 1; in skl_crtc_can_enable_sagv()
429 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in intel_crtc_can_enable_sagv() local
431 if (!i915->display.params.enable_sagv) in intel_crtc_can_enable_sagv()
434 if (DISPLAY_VER(i915) >= 12) in intel_crtc_can_enable_sagv()
440 bool intel_can_enable_sagv(struct drm_i915_private *i915, in intel_can_enable_sagv() argument
443 if (DISPLAY_VER(i915) < 11 && in intel_can_enable_sagv()
453 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_compute_sagv_mask() local
489 DISPLAY_VER(i915) >= 12 && in intel_compute_sagv_mask()
510 if (intel_can_enable_sagv(i915, new_bw_state) != in intel_compute_sagv_mask()
511 intel_can_enable_sagv(i915, old_bw_state)) { in intel_compute_sagv_mask()
533 static int intel_dbuf_slice_size(struct drm_i915_private *i915) in intel_dbuf_slice_size() argument
535 return DISPLAY_INFO(i915)->dbuf.size / in intel_dbuf_slice_size()
536 hweight8(DISPLAY_INFO(i915)->dbuf.slice_mask); in intel_dbuf_slice_size()
540 skl_ddb_entry_for_slices(struct drm_i915_private *i915, u8 slice_mask, in skl_ddb_entry_for_slices() argument
543 int slice_size = intel_dbuf_slice_size(i915); in skl_ddb_entry_for_slices()
555 WARN_ON(ddb->end > DISPLAY_INFO(i915)->dbuf.size); in skl_ddb_entry_for_slices()
558 static unsigned int mbus_ddb_offset(struct drm_i915_private *i915, u8 slice_mask) in mbus_ddb_offset() argument
567 skl_ddb_entry_for_slices(i915, slice_mask, &ddb); in mbus_ddb_offset()
572 u32 skl_ddb_dbuf_slice_mask(struct drm_i915_private *i915, in skl_ddb_dbuf_slice_mask() argument
575 int slice_size = intel_dbuf_slice_size(i915); in skl_ddb_dbuf_slice_mask()
621 struct drm_i915_private *i915 = in intel_crtc_dbuf_weights() local
629 for_each_pipe(i915, pipe) { in intel_crtc_dbuf_weights()
655 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_allocate_ddb() local
677 skl_ddb_entry_for_slices(i915, dbuf_slice_mask, &ddb_slices); in skl_crtc_allocate_ddb()
678 mbus_offset = mbus_ddb_offset(i915, dbuf_slice_mask); in skl_crtc_allocate_ddb()
712 drm_dbg_kms(&i915->drm, in skl_crtc_allocate_ddb()
737 static unsigned int skl_wm_latency(struct drm_i915_private *i915, int level, in skl_wm_latency() argument
740 unsigned int latency = i915->display.wm.skl_latency[level]; in skl_wm_latency()
749 if ((IS_KABYLAKE(i915) || IS_COFFEELAKE(i915) || IS_COMETLAKE(i915)) && in skl_wm_latency()
750 skl_watermark_ipc_enabled(i915)) in skl_wm_latency()
753 if (skl_needs_memory_bw_wa(i915) && wp && wp->x_tiled) in skl_wm_latency()
764 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_cursor_allocation() local
775 drm_WARN_ON(&i915->drm, ret); in skl_cursor_allocation()
777 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_cursor_allocation()
778 unsigned int latency = skl_wm_latency(i915, level, &wp); in skl_cursor_allocation()
800 skl_ddb_get_hw_plane_state(struct drm_i915_private *i915, in skl_ddb_get_hw_plane_state() argument
807 struct intel_display *display = &i915->display; in skl_ddb_get_hw_plane_state()
839 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_pipe_ddb_get_hw_state() local
846 wakeref = intel_display_power_get_if_enabled(i915, power_domain); in skl_pipe_ddb_get_hw_state()
851 skl_ddb_get_hw_plane_state(i915, pipe, in skl_pipe_ddb_get_hw_state()
858 intel_display_power_put(i915, power_domain, wakeref); in skl_pipe_ddb_get_hw_state()
1369 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_compute_dbuf_slices() local
1372 if (IS_DG2(i915)) in skl_compute_dbuf_slices()
1374 else if (DISPLAY_VER(i915) >= 13) in skl_compute_dbuf_slices()
1376 else if (DISPLAY_VER(i915) == 12) in skl_compute_dbuf_slices()
1378 else if (DISPLAY_VER(i915) == 11) in skl_compute_dbuf_slices()
1419 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_total_relative_data_rate() local
1429 if (DISPLAY_VER(i915) < 11) in skl_total_relative_data_rate()
1491 static bool skl_need_wm_copy_wa(struct drm_i915_private *i915, int level, in skl_need_wm_copy_wa() argument
1545 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_allocate_plane_ddb() local
1587 for (level = i915->display.wm.num_levels - 1; level >= 0; level--) { in skl_crtc_allocate_plane_ddb()
1598 drm_WARN_ON(&i915->drm, in skl_crtc_allocate_plane_ddb()
1617 drm_dbg_kms(&i915->drm, in skl_crtc_allocate_plane_ddb()
1619 drm_dbg_kms(&i915->drm, "minimum required %d/%d\n", in skl_crtc_allocate_plane_ddb()
1647 if (DISPLAY_VER(i915) < 11 && in skl_crtc_allocate_plane_ddb()
1663 drm_WARN_ON(&i915->drm, iter.size != 0 || iter.data_rate != 0); in skl_crtc_allocate_plane_ddb()
1671 for (level++; level < i915->display.wm.num_levels; level++) { in skl_crtc_allocate_plane_ddb()
1680 if (DISPLAY_VER(i915) < 11 && in skl_crtc_allocate_plane_ddb()
1688 if (skl_need_wm_copy_wa(i915, level, wm)) { in skl_crtc_allocate_plane_ddb()
1710 if (DISPLAY_VER(i915) < 11 && in skl_crtc_allocate_plane_ddb()
1736 skl_wm_method1(const struct drm_i915_private *i915, u32 pixel_rate, in skl_wm_method1() argument
1748 if (DISPLAY_VER(i915) >= 10) in skl_wm_method1()
1774 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in intel_get_linetime_us() local
1784 if (drm_WARN_ON(&i915->drm, pixel_rate == 0)) in intel_get_linetime_us()
1802 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_compute_wm_params() local
1808 drm_dbg_kms(&i915->drm, in skl_compute_wm_params()
1826 if (DISPLAY_VER(i915) >= 11 && in skl_compute_wm_params()
1851 if (skl_needs_memory_bw_wa(i915)) in skl_compute_wm_params()
1862 else if (DISPLAY_VER(i915) >= 10) in skl_compute_wm_params()
1871 if (!wp->x_tiled || DISPLAY_VER(i915) >= 10) in skl_compute_wm_params()
1908 static bool skl_wm_has_lines(struct drm_i915_private *i915, int level) in skl_wm_has_lines() argument
1910 if (DISPLAY_VER(i915) >= 10) in skl_wm_has_lines()
1917 static int skl_wm_max_lines(struct drm_i915_private *i915) in skl_wm_max_lines() argument
1919 if (DISPLAY_VER(i915) >= 13) in skl_wm_max_lines()
1940 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_compute_plane_wm() local
1952 method1 = skl_wm_method1(i915, wp->plane_pixel_rate, in skl_compute_plane_wm()
1967 if (DISPLAY_VER(i915) == 9) in skl_compute_plane_wm()
1977 if (DISPLAY_VER(i915) < 30) in skl_compute_plane_wm()
1996 if (skl_wm_has_lines(i915, level)) in skl_compute_plane_wm()
2002 if (DISPLAY_VER(i915) == 9) { in skl_compute_plane_wm()
2027 if (DISPLAY_VER(i915) >= 11) { in skl_compute_plane_wm()
2044 if (!skl_wm_has_lines(i915, level)) in skl_compute_plane_wm()
2047 if (lines > skl_wm_max_lines(i915)) { in skl_compute_plane_wm()
2066 if (DISPLAY_VER(i915) < 12 && i915->display.sagv.block_time_us) in skl_compute_plane_wm()
2067 result->can_sagv = latency >= i915->display.sagv.block_time_us; in skl_compute_plane_wm()
2076 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_compute_wm_levels() local
2080 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_compute_wm_levels()
2082 unsigned int latency = skl_wm_latency(i915, level, wm_params); in skl_compute_wm_levels()
2096 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in tgl_compute_sagv_wm() local
2101 if (i915->display.sagv.block_time_us) in tgl_compute_sagv_wm()
2102 latency = i915->display.sagv.block_time_us + in tgl_compute_sagv_wm()
2103 skl_wm_latency(i915, 0, wm_params); in tgl_compute_sagv_wm()
2110 static void skl_compute_transition_wm(struct drm_i915_private *i915, in skl_compute_transition_wm() argument
2119 if (!skl_watermark_ipc_enabled(i915)) in skl_compute_transition_wm()
2126 if (DISPLAY_VER(i915) == 9) in skl_compute_transition_wm()
2129 if (DISPLAY_VER(i915) >= 11) in skl_compute_transition_wm()
2135 if (DISPLAY_VER(i915) == 10) in skl_compute_transition_wm()
2178 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_build_plane_wm_single() local
2190 skl_compute_transition_wm(i915, &wm->trans_wm, in skl_build_plane_wm_single()
2193 if (DISPLAY_VER(i915) >= 12) { in skl_build_plane_wm_single()
2196 skl_compute_transition_wm(i915, &wm->sagv.trans_wm, in skl_build_plane_wm_single()
2257 struct drm_i915_private *i915 = to_i915(plane->base.dev); in icl_build_plane_wm() local
2271 drm_WARN_ON(&i915->drm, in icl_build_plane_wm()
2273 drm_WARN_ON(&i915->drm, !fb->format->is_yuv || in icl_build_plane_wm()
2329 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_max_wm_level_for_vblank() local
2332 for (level = i915->display.wm.num_levels - 1; level >= 0; level--) { in skl_max_wm_level_for_vblank()
2336 latency = skl_wm_latency(i915, level, NULL); in skl_max_wm_level_for_vblank()
2354 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_wm_check_vblank() local
2370 crtc_state->wm_level_disabled = level < i915->display.wm.num_levels - 1; in skl_wm_check_vblank()
2372 for (level++; level < i915->display.wm.num_levels; level++) { in skl_wm_check_vblank()
2388 if (DISPLAY_VER(i915) >= 12 && in skl_wm_check_vblank()
2389 i915->display.sagv.block_time_us && in skl_wm_check_vblank()
2391 i915->display.sagv.block_time_us)) { in skl_wm_check_vblank()
2409 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_build_pipe_wm() local
2425 if (DISPLAY_VER(i915) >= 11) in skl_build_pipe_wm()
2448 static bool skl_plane_wm_equals(struct drm_i915_private *i915, in skl_plane_wm_equals() argument
2452 struct intel_display *display = &i915->display; in skl_plane_wm_equals()
2507 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_ddb_add_affected_planes() local
2514 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_ddb_add_affected_planes()
2525 drm_dbg_kms(&i915->drm, "[PLANE:%d:%s] Can't change DDB during async flip\n", in skl_ddb_add_affected_planes()
2544 struct drm_i915_private *i915 = to_i915(dbuf_state->base.state->base.dev); in intel_dbuf_enabled_slices() local
2554 for_each_pipe(i915, pipe) in intel_dbuf_enabled_slices()
2564 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_compute_ddb() local
2603 for_each_intel_crtc(&i915->drm, crtc) { in skl_compute_ddb()
2626 drm_dbg_kms(&i915->drm, in skl_compute_ddb()
2630 DISPLAY_INFO(i915)->dbuf.slice_mask, in skl_compute_ddb()
2648 for_each_intel_crtc(&i915->drm, crtc) { in skl_compute_ddb()
2675 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_print_wm_changes() local
2692 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_print_wm_changes()
2702 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2709 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_print_wm_changes()
2716 if (skl_plane_wm_equals(i915, old_wm, new_wm)) in skl_print_wm_changes()
2719 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2738 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2765 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2784 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2862 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_wm_add_affected_planes() local
2869 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_wm_add_affected_planes()
2888 drm_dbg_kms(&i915->drm, "[PLANE:%d:%s] Can't change watermarks during async flip\n", in skl_wm_add_affected_planes()
2919 struct drm_i915_private *i915 = to_i915(display->drm); in intel_program_dpkgc_latency() local
2945 latency = skl_watermark_max_latency(i915, 1); in intel_program_dpkgc_latency()
3066 static void skl_wm_get_hw_state(struct drm_i915_private *i915) in skl_wm_get_hw_state() argument
3068 struct intel_display *display = &i915->display; in skl_wm_get_hw_state()
3070 to_intel_dbuf_state(i915->display.dbuf.obj.state); in skl_wm_get_hw_state()
3107 skl_ddb_get_hw_plane_state(i915, crtc->pipe, in skl_wm_get_hw_state()
3123 mbus_offset = mbus_ddb_offset(i915, slices); in skl_wm_get_hw_state()
3129 skl_ddb_dbuf_slice_mask(i915, &crtc_state->wm.skl.ddb); in skl_wm_get_hw_state()
3142 bool skl_watermark_ipc_enabled(struct drm_i915_private *i915) in skl_watermark_ipc_enabled() argument
3144 return i915->display.wm.ipc_enabled; in skl_watermark_ipc_enabled()
3147 void skl_watermark_ipc_update(struct drm_i915_private *i915) in skl_watermark_ipc_update() argument
3149 if (!HAS_IPC(i915)) in skl_watermark_ipc_update()
3152 intel_de_rmw(i915, DISP_ARB_CTL2, DISP_IPC_ENABLE, in skl_watermark_ipc_update()
3153 skl_watermark_ipc_enabled(i915) ? DISP_IPC_ENABLE : 0); in skl_watermark_ipc_update()
3156 static bool skl_watermark_ipc_can_enable(struct drm_i915_private *i915) in skl_watermark_ipc_can_enable() argument
3159 if (IS_SKYLAKE(i915)) in skl_watermark_ipc_can_enable()
3163 if (IS_KABYLAKE(i915) || in skl_watermark_ipc_can_enable()
3164 IS_COFFEELAKE(i915) || in skl_watermark_ipc_can_enable()
3165 IS_COMETLAKE(i915)) in skl_watermark_ipc_can_enable()
3166 return i915->dram_info.symmetric_memory; in skl_watermark_ipc_can_enable()
3171 void skl_watermark_ipc_init(struct drm_i915_private *i915) in skl_watermark_ipc_init() argument
3173 if (!HAS_IPC(i915)) in skl_watermark_ipc_init()
3176 i915->display.wm.ipc_enabled = skl_watermark_ipc_can_enable(i915); in skl_watermark_ipc_init()
3178 skl_watermark_ipc_update(i915); in skl_watermark_ipc_init()
3182 adjust_wm_latency(struct drm_i915_private *i915, in adjust_wm_latency() argument
3185 bool wm_lv_0_adjust_needed = i915->dram_info.wm_lv_0_adjust_needed; in adjust_wm_latency()
3225 static void mtl_read_wm_latency(struct drm_i915_private *i915, u16 wm[]) in mtl_read_wm_latency() argument
3227 int num_levels = i915->display.wm.num_levels; in mtl_read_wm_latency()
3230 val = intel_de_read(i915, MTL_LATENCY_LP0_LP1); in mtl_read_wm_latency()
3234 val = intel_de_read(i915, MTL_LATENCY_LP2_LP3); in mtl_read_wm_latency()
3238 val = intel_de_read(i915, MTL_LATENCY_LP4_LP5); in mtl_read_wm_latency()
3242 adjust_wm_latency(i915, wm, num_levels, 6); in mtl_read_wm_latency()
3245 static void skl_read_wm_latency(struct drm_i915_private *i915, u16 wm[]) in skl_read_wm_latency() argument
3247 int num_levels = i915->display.wm.num_levels; in skl_read_wm_latency()
3248 int read_latency = DISPLAY_VER(i915) >= 12 ? 3 : 2; in skl_read_wm_latency()
3249 int mult = IS_DG2(i915) ? 2 : 1; in skl_read_wm_latency()
3255 ret = snb_pcode_read(&i915->uncore, GEN9_PCODE_READ_MEM_LATENCY, &val, NULL); in skl_read_wm_latency()
3257 drm_err(&i915->drm, "SKL Mailbox read error = %d\n", ret); in skl_read_wm_latency()
3268 ret = snb_pcode_read(&i915->uncore, GEN9_PCODE_READ_MEM_LATENCY, &val, NULL); in skl_read_wm_latency()
3270 drm_err(&i915->drm, "SKL Mailbox read error = %d\n", ret); in skl_read_wm_latency()
3279 adjust_wm_latency(i915, wm, num_levels, read_latency); in skl_read_wm_latency()
3282 static void skl_setup_wm_latency(struct drm_i915_private *i915) in skl_setup_wm_latency() argument
3284 struct intel_display *display = &i915->display; in skl_setup_wm_latency()
3292 mtl_read_wm_latency(i915, display->wm.skl_latency); in skl_setup_wm_latency()
3294 skl_read_wm_latency(i915, display->wm.skl_latency); in skl_setup_wm_latency()
3296 intel_print_wm_latency(i915, "Gen9 Plane", display->wm.skl_latency); in skl_setup_wm_latency()
3324 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_atomic_get_dbuf_state() local
3327 dbuf_state = intel_atomic_get_global_obj_state(state, &i915->display.dbuf.obj); in intel_atomic_get_dbuf_state()
3334 int intel_dbuf_init(struct drm_i915_private *i915) in intel_dbuf_init() argument
3336 struct intel_display *display = &i915->display; in intel_dbuf_init()
3371 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in pipe_mbus_dbox_ctl() local
3374 if (DISPLAY_VER(i915) >= 14) in pipe_mbus_dbox_ctl()
3377 if (DISPLAY_VER(i915) >= 12) { in pipe_mbus_dbox_ctl()
3383 if (DISPLAY_VER(i915) >= 14) in pipe_mbus_dbox_ctl()
3386 else if (IS_ALDERLAKE_P(i915)) in pipe_mbus_dbox_ctl()
3393 if (DISPLAY_VER(i915) >= 14) { in pipe_mbus_dbox_ctl()
3395 } else if (IS_ALDERLAKE_P(i915)) { in pipe_mbus_dbox_ctl()
3398 } else if (DISPLAY_VER(i915) >= 12) { in pipe_mbus_dbox_ctl()
3406 if (DISPLAY_VERx100(i915) == 1400) { in pipe_mbus_dbox_ctl()
3416 static void pipe_mbus_dbox_ctl_update(struct drm_i915_private *i915, in pipe_mbus_dbox_ctl_update() argument
3421 for_each_intel_crtc_in_pipe_mask(&i915->drm, crtc, dbuf_state->active_pipes) in pipe_mbus_dbox_ctl_update()
3422 intel_de_write(i915, PIPE_MBUS_DBOX_CTL(crtc->pipe), in pipe_mbus_dbox_ctl_update()
3428 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_mbus_dbox_update() local
3431 if (DISPLAY_VER(i915) < 11) in intel_mbus_dbox_update()
3441 pipe_mbus_dbox_ctl_update(i915, new_dbuf_state); in intel_mbus_dbox_update()
3458 void intel_dbuf_mdclk_cdclk_ratio_update(struct drm_i915_private *i915, in intel_dbuf_mdclk_cdclk_ratio_update() argument
3461 struct intel_display *display = &i915->display; in intel_dbuf_mdclk_cdclk_ratio_update()
3485 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_mdclk_min_tracker_update() local
3500 intel_dbuf_mdclk_cdclk_ratio_update(i915, mdclk_cdclk_ratio, in intel_dbuf_mdclk_min_tracker_update()
3508 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_mbus_joined_pipe() local
3513 drm_WARN_ON(&i915->drm, !dbuf_state->joined_mbus); in intel_mbus_joined_pipe()
3514 drm_WARN_ON(&i915->drm, !is_power_of_2(dbuf_state->active_pipes)); in intel_mbus_joined_pipe()
3525 static void mbus_ctl_join_update(struct drm_i915_private *i915, in mbus_ctl_join_update() argument
3541 intel_de_rmw(i915, MBUS_CTL, in mbus_ctl_join_update()
3549 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_mbus_join_update() local
3555 drm_dbg_kms(&i915->drm, "Changing mbus joined: %s -> %s (pipe: %c)\n", in intel_dbuf_mbus_join_update()
3560 mbus_ctl_join_update(i915, new_dbuf_state, pipe); in intel_dbuf_mbus_join_update()
3621 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_pre_plane_update() local
3639 gen9_dbuf_slices_update(i915, new_slices); in intel_dbuf_pre_plane_update()
3644 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_post_plane_update() local
3662 gen9_dbuf_slices_update(i915, new_slices); in intel_dbuf_post_plane_update()
3665 static void skl_mbus_sanitize(struct drm_i915_private *i915) in skl_mbus_sanitize() argument
3667 struct intel_display *display = &i915->display; in skl_mbus_sanitize()
3682 intel_dbuf_mdclk_cdclk_ratio_update(i915, in skl_mbus_sanitize()
3685 pipe_mbus_dbox_ctl_update(i915, dbuf_state); in skl_mbus_sanitize()
3686 mbus_ctl_join_update(i915, dbuf_state, INVALID_PIPE); in skl_mbus_sanitize()
3689 static bool skl_dbuf_is_misconfigured(struct drm_i915_private *i915) in skl_dbuf_is_misconfigured() argument
3692 to_intel_dbuf_state(i915->display.dbuf.obj.state); in skl_dbuf_is_misconfigured()
3696 for_each_intel_crtc(&i915->drm, crtc) { in skl_dbuf_is_misconfigured()
3703 for_each_intel_crtc(&i915->drm, crtc) { in skl_dbuf_is_misconfigured()
3721 static void skl_dbuf_sanitize(struct drm_i915_private *i915) in skl_dbuf_sanitize() argument
3736 if (!skl_dbuf_is_misconfigured(i915)) in skl_dbuf_sanitize()
3739 drm_dbg_kms(&i915->drm, "BIOS has misprogrammed the DBUF, disabling all planes\n"); in skl_dbuf_sanitize()
3741 for_each_intel_crtc(&i915->drm, crtc) { in skl_dbuf_sanitize()
3751 drm_WARN_ON(&i915->drm, crtc_state->active_planes != 0); in skl_dbuf_sanitize()
3757 static void skl_wm_get_hw_state_and_sanitize(struct drm_i915_private *i915) in skl_wm_get_hw_state_and_sanitize() argument
3759 skl_wm_get_hw_state(i915); in skl_wm_get_hw_state_and_sanitize()
3761 skl_mbus_sanitize(i915); in skl_wm_get_hw_state_and_sanitize()
3762 skl_dbuf_sanitize(i915); in skl_wm_get_hw_state_and_sanitize()
3769 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_wm_state_verify() local
3784 if (DISPLAY_VER(i915) < 9 || !new_crtc_state->hw.active) in intel_wm_state_verify()
3795 hw_enabled_slices = intel_enabled_dbuf_slices_mask(i915); in intel_wm_state_verify()
3797 if (DISPLAY_VER(i915) >= 11 && in intel_wm_state_verify()
3798 hw_enabled_slices != i915->display.dbuf.enabled_slices) in intel_wm_state_verify()
3799 drm_err(&i915->drm, in intel_wm_state_verify()
3801 i915->display.dbuf.enabled_slices, in intel_wm_state_verify()
3804 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in intel_wm_state_verify()
3809 for (level = 0; level < i915->display.wm.num_levels; level++) { in intel_wm_state_verify()
3816 drm_err(&i915->drm, in intel_wm_state_verify()
3831 drm_err(&i915->drm, in intel_wm_state_verify()
3847 drm_err(&i915->drm, in intel_wm_state_verify()
3863 drm_err(&i915->drm, in intel_wm_state_verify()
3879 drm_err(&i915->drm, in intel_wm_state_verify()
3895 void skl_wm_init(struct drm_i915_private *i915) in skl_wm_init() argument
3897 intel_sagv_init(i915); in skl_wm_init()
3899 skl_setup_wm_latency(i915); in skl_wm_init()
3901 i915->display.funcs.wm = &skl_wm_funcs; in skl_wm_init()
3906 struct drm_i915_private *i915 = m->private; in skl_watermark_ipc_status_show() local
3909 str_yes_no(skl_watermark_ipc_enabled(i915))); in skl_watermark_ipc_status_show()
3915 struct drm_i915_private *i915 = inode->i_private; in skl_watermark_ipc_status_open() local
3917 return single_open(file, skl_watermark_ipc_status_show, i915); in skl_watermark_ipc_status_open()
3925 struct drm_i915_private *i915 = m->private; in skl_watermark_ipc_status_write() local
3934 with_intel_runtime_pm(&i915->runtime_pm, wakeref) { in skl_watermark_ipc_status_write()
3935 if (!skl_watermark_ipc_enabled(i915) && enable) in skl_watermark_ipc_status_write()
3936 drm_info(&i915->drm, in skl_watermark_ipc_status_write()
3938 i915->display.wm.ipc_enabled = enable; in skl_watermark_ipc_status_write()
3939 skl_watermark_ipc_update(i915); in skl_watermark_ipc_status_write()
3956 struct drm_i915_private *i915 = m->private; in intel_sagv_status_show() local
3964 seq_printf(m, "SAGV available: %s\n", str_yes_no(intel_has_sagv(i915))); in intel_sagv_status_show()
3966 str_enabled_disabled(i915->display.params.enable_sagv)); in intel_sagv_status_show()
3967 seq_printf(m, "SAGV status: %s\n", sagv_status[i915->display.sagv.status]); in intel_sagv_status_show()
3968 seq_printf(m, "SAGV block time: %d usec\n", i915->display.sagv.block_time_us); in intel_sagv_status_show()
3975 void skl_watermark_debugfs_register(struct drm_i915_private *i915) in skl_watermark_debugfs_register() argument
3977 struct intel_display *display = &i915->display; in skl_watermark_debugfs_register()
3981 debugfs_create_file("i915_ipc_status", 0644, minor->debugfs_root, i915, in skl_watermark_debugfs_register()
3985 debugfs_create_file("i915_sagv_status", 0444, minor->debugfs_root, i915, in skl_watermark_debugfs_register()
3989 unsigned int skl_watermark_max_latency(struct drm_i915_private *i915, int initial_wm_level) in skl_watermark_max_latency() argument
3993 for (level = i915->display.wm.num_levels - 1; level >= initial_wm_level; level--) { in skl_watermark_max_latency()
3994 unsigned int latency = skl_wm_latency(i915, level, NULL); in skl_watermark_max_latency()