Lines matching refs: dev_priv (i915 watermark code; the functions shown match drivers/gpu/drm/i915/display/i9xx_wm.c)
106 static void chv_set_memory_dvfs(struct drm_i915_private *dev_priv, bool enable)
110 vlv_punit_get(dev_priv);
112 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
119 vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);
121 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
123 drm_err(&dev_priv->drm,
126 vlv_punit_put(dev_priv);
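The matches above cover nearly all of chv_set_memory_dvfs(): a Punit read-modify-write bracketed by vlv_punit_get()/put(), then a poll for the ack. A hedged reconstruction; the FORCE_DDR_* bit names do not appear in the matches and are assumptions:

static void chv_set_memory_dvfs(struct drm_i915_private *dev_priv, bool enable)
{
        u32 val;

        vlv_punit_get(dev_priv);

        val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
        if (enable)
                val &= ~FORCE_DDR_HIGH_FREQ;    /* assumed bit name */
        else
                val |= FORCE_DDR_HIGH_FREQ;
        val |= FORCE_DDR_FREQ_REQ_ACK;          /* assumed bit name */
        vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);

        /* the Punit acks the request by clearing the ack bit */
        if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
                      FORCE_DDR_FREQ_REQ_ACK) == 0, 3))
                drm_err(&dev_priv->drm,
                        "timed out waiting for Punit DDR DVFS request\n");

        vlv_punit_put(dev_priv);
}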
129 static void chv_set_memory_pm5(struct drm_i915_private *dev_priv, bool enable)
133 vlv_punit_get(dev_priv);
135 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
140 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
142 vlv_punit_put(dev_priv);
148 static bool _intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
150 struct intel_display *display = &dev_priv->display;
154 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
155 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF_VLV) & FW_CSPWRDWNEN;
156 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF_VLV, enable ? FW_CSPWRDWNEN : 0);
157 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF_VLV);
158 } else if (IS_G4X(dev_priv) || IS_I965GM(dev_priv)) {
159 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
160 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, enable ? FW_BLC_SELF_EN : 0);
161 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF);
162 } else if (IS_PINEVIEW(dev_priv)) {
163 val = intel_uncore_read(&dev_priv->uncore, DSPFW3(dev_priv));
169 intel_uncore_write(&dev_priv->uncore, DSPFW3(dev_priv), val);
170 intel_uncore_posting_read(&dev_priv->uncore, DSPFW3(dev_priv));
171 } else if (IS_I945G(dev_priv) || IS_I945GM(dev_priv)) {
172 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
175 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, val);
176 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF);
177 } else if (IS_I915GM(dev_priv)) {
183 was_enabled = intel_uncore_read(&dev_priv->uncore, INSTPM) & INSTPM_SELF_EN;
186 intel_uncore_write(&dev_priv->uncore, INSTPM, val);
187 intel_uncore_posting_read(&dev_priv->uncore, INSTPM);
194 drm_dbg_kms(&dev_priv->drm, "memory self-refresh is %s (was %s)\n",
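The format string at 194 implies two string arguments; a plausible completion of the function tail, using the kernel's str_enabled_disabled() helper (an assumption here, as is the return value):

        drm_dbg_kms(&dev_priv->drm, "memory self-refresh is %s (was %s)\n",
                    str_enabled_disabled(enable),
                    str_enabled_disabled(was_enabled));

        return was_enabled;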
203 * @dev_priv: i915 device
238 bool intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
242 mutex_lock(&dev_priv->display.wm.wm_mutex);
243 ret = _intel_set_memory_cxsr(dev_priv, enable);
244 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
245 dev_priv->display.wm.vlv.cxsr = enable;
246 else if (IS_G4X(dev_priv))
247 dev_priv->display.wm.g4x.cxsr = enable;
248 mutex_unlock(&dev_priv->display.wm.wm_mutex);
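Aside from the declaration of ret and the final return (both assumed here), the wrapper at 238-248 is fully visible in the matches: it serializes against the rest of the watermark code via wm_mutex and mirrors the new CxSR state into the vlv/g4x software tracking:

bool intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
{
        bool ret;

        mutex_lock(&dev_priv->display.wm.wm_mutex);
        ret = _intel_set_memory_cxsr(dev_priv, enable);
        if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
                dev_priv->display.wm.vlv.cxsr = enable;
        else if (IS_G4X(dev_priv))
                dev_priv->display.wm.g4x.cxsr = enable;
        mutex_unlock(&dev_priv->display.wm.wm_mutex);

        return ret;
}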
275 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
283 dsparb = intel_uncore_read(&dev_priv->uncore,
284 DSPARB(dev_priv));
285 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
290 dsparb = intel_uncore_read(&dev_priv->uncore,
291 DSPARB(dev_priv));
292 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
297 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
298 dsparb3 = intel_uncore_read(&dev_priv->uncore, DSPARB3);
313 static int i9xx_get_fifo_size(struct drm_i915_private *dev_priv,
316 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB(dev_priv));
323 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
329 static int i830_get_fifo_size(struct drm_i915_private *dev_priv,
332 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB(dev_priv));
340 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
346 static int i845_get_fifo_size(struct drm_i915_private *dev_priv,
349 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB(dev_priv));
355 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
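Each of the three FIFO-size helpers reads DSPARB and decodes a per-plane split before the debug print. A hedged sketch of the i9xx variant; the second parameter type, the 0x7f masks, and DSPARB_CSTART_SHIFT are assumptions about the register layout:

static int i9xx_get_fifo_size(struct drm_i915_private *dev_priv,
                              enum i9xx_plane_id i9xx_plane)
{
        u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB(dev_priv));
        int size;

        size = dsparb & 0x7f;                   /* plane A: low split */
        if (i9xx_plane == PLANE_B)              /* plane B: up to cursor start */
                size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size;

        drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
                    dsparb, plane_name(i9xx_plane), size);

        return size;
}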
629 static struct intel_crtc *single_enabled_crtc(struct drm_i915_private *dev_priv)
633 for_each_intel_crtc(&dev_priv->drm, crtc) {
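Several legacy paths below (pnv, i965, i9xx, i845) can only program watermarks for a single active pipe, hence this helper. Its body is a simple scan; the intel_crtc_active() predicate is an assumption:

static struct intel_crtc *single_enabled_crtc(struct drm_i915_private *dev_priv)
{
        struct intel_crtc *crtc, *enabled = NULL;

        for_each_intel_crtc(&dev_priv->drm, crtc) {
                if (intel_crtc_active(crtc)) {
                        if (enabled)
                                return NULL;    /* more than one active pipe */
                        enabled = crtc;
                }
        }

        return enabled;
}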
644 static void pnv_update_wm(struct drm_i915_private *dev_priv)
651 latency = pnv_get_cxsr_latency(dev_priv);
653 drm_dbg_kms(&dev_priv->drm, "Unknown FSB/MEM, disabling CxSR\n");
654 intel_set_memory_cxsr(dev_priv, false);
658 crtc = single_enabled_crtc(dev_priv);
666 wm = intel_calculate_wm(dev_priv, pixel_rate,
670 reg = intel_uncore_read(&dev_priv->uncore, DSPFW1(dev_priv));
673 intel_uncore_write(&dev_priv->uncore, DSPFW1(dev_priv), reg);
674 drm_dbg_kms(&dev_priv->drm, "DSPFW1 register is %x\n", reg);
677 wm = intel_calculate_wm(dev_priv, pixel_rate,
681 intel_uncore_rmw(&dev_priv->uncore, DSPFW3(dev_priv),
686 wm = intel_calculate_wm(dev_priv, pixel_rate,
690 intel_uncore_rmw(&dev_priv->uncore, DSPFW3(dev_priv),
694 wm = intel_calculate_wm(dev_priv, pixel_rate,
698 reg = intel_uncore_read(&dev_priv->uncore, DSPFW3(dev_priv));
701 intel_uncore_write(&dev_priv->uncore, DSPFW3(dev_priv), reg);
702 drm_dbg_kms(&dev_priv->drm, "DSPFW3 register is %x\n", reg);
704 intel_set_memory_cxsr(dev_priv, true);
706 intel_set_memory_cxsr(dev_priv, false);
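Between the DSPFW1 read at 670 and the write at 673 the unmatched lines update the self-refresh watermark field. A sketch in the style of the FW_WM register helpers used elsewhere in this file; DSPFW_SR_MASK and FW_WM() are assumptions:

                reg = intel_uncore_read(&dev_priv->uncore, DSPFW1(dev_priv));
                reg &= ~DSPFW_SR_MASK;          /* assumed field mask */
                reg |= FW_WM(wm, SR);           /* assumed encode helper */
                intel_uncore_write(&dev_priv->uncore, DSPFW1(dev_priv), reg);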
797 static void g4x_write_wm_values(struct drm_i915_private *dev_priv,
800 struct intel_display *display = &dev_priv->display;
803 for_each_pipe(dev_priv, pipe)
806 intel_uncore_write(&dev_priv->uncore, DSPFW1(dev_priv),
811 intel_uncore_write(&dev_priv->uncore, DSPFW2(dev_priv),
818 intel_uncore_write(&dev_priv->uncore, DSPFW3(dev_priv),
824 intel_uncore_posting_read(&dev_priv->uncore, DSPFW1(dev_priv));
830 static void vlv_write_wm_values(struct drm_i915_private *dev_priv,
833 struct intel_display *display = &dev_priv->display;
836 for_each_pipe(dev_priv, pipe) {
839 intel_uncore_write(&dev_priv->uncore, VLV_DDL(pipe),
851 intel_uncore_write(&dev_priv->uncore, DSPHOWM, 0);
852 intel_uncore_write(&dev_priv->uncore, DSPHOWM1, 0);
853 intel_uncore_write(&dev_priv->uncore, DSPFW4, 0);
854 intel_uncore_write(&dev_priv->uncore, DSPFW5, 0);
855 intel_uncore_write(&dev_priv->uncore, DSPFW6, 0);
857 intel_uncore_write(&dev_priv->uncore, DSPFW1(dev_priv),
862 intel_uncore_write(&dev_priv->uncore, DSPFW2(dev_priv),
866 intel_uncore_write(&dev_priv->uncore, DSPFW3(dev_priv),
869 if (IS_CHERRYVIEW(dev_priv)) {
870 intel_uncore_write(&dev_priv->uncore, DSPFW7_CHV,
873 intel_uncore_write(&dev_priv->uncore, DSPFW8_CHV,
876 intel_uncore_write(&dev_priv->uncore, DSPFW9_CHV,
879 intel_uncore_write(&dev_priv->uncore, DSPHOWM,
891 intel_uncore_write(&dev_priv->uncore, DSPFW7,
894 intel_uncore_write(&dev_priv->uncore, DSPHOWM,
904 intel_uncore_posting_read(&dev_priv->uncore, DSPFW1(dev_priv));
909 static void g4x_setup_wm_latency(struct drm_i915_private *dev_priv)
912 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_NORMAL] = 5;
913 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_SR] = 12;
914 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_HPLL] = 35;
916 dev_priv->display.wm.num_levels = G4X_WM_LEVEL_HPLL + 1;
966 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
969 unsigned int latency = dev_priv->display.wm.pri_latency[level] * 10;
1020 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1023 for (; level < dev_priv->display.wm.num_levels; level++) {
1036 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1042 for (; level < dev_priv->display.wm.num_levels; level++) {
1060 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1072 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
1112 drm_dbg_kms(&dev_priv->drm,
1120 drm_dbg_kms(&dev_priv->drm,
1140 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1142 if (level >= dev_priv->display.wm.num_levels)
1284 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1314 drm_WARN_ON(&dev_priv->drm, intermediate->wm.plane[plane_id] >
1332 drm_WARN_ON(&dev_priv->drm,
1338 drm_WARN_ON(&dev_priv->drm,
1345 drm_WARN_ON(&dev_priv->drm,
1348 drm_WARN_ON(&dev_priv->drm,
1379 static void g4x_merge_wm(struct drm_i915_private *dev_priv,
1389 for_each_intel_crtc(&dev_priv->drm, crtc) {
1411 for_each_intel_crtc(&dev_priv->drm, crtc) {
1423 static void g4x_program_watermarks(struct drm_i915_private *dev_priv)
1425 struct g4x_wm_values *old_wm = &dev_priv->display.wm.g4x;
1428 g4x_merge_wm(dev_priv, &new_wm);
1434 _intel_set_memory_cxsr(dev_priv, false);
1436 g4x_write_wm_values(dev_priv, &new_wm);
1439 _intel_set_memory_cxsr(dev_priv, true);
1447 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1451 mutex_lock(&dev_priv->display.wm.wm_mutex);
1453 g4x_program_watermarks(dev_priv);
1454 mutex_unlock(&dev_priv->display.wm.wm_mutex);
1460 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1467 mutex_lock(&dev_priv->display.wm.wm_mutex);
1469 g4x_program_watermarks(dev_priv);
1470 mutex_unlock(&dev_priv->display.wm.wm_mutex);
1489 static void vlv_setup_wm_latency(struct drm_i915_private *dev_priv)
1492 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_PM2] = 3;
1494 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_PM2 + 1;
1496 if (IS_CHERRYVIEW(dev_priv)) {
1497 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_PM5] = 12;
1498 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_DDR_DVFS] = 33;
1500 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_DDR_DVFS + 1;
1509 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
1514 if (dev_priv->display.wm.pri_latency[level] == 0)
1535 dev_priv->display.wm.pri_latency[level] * 10);
1550 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1619 drm_WARN_ON(&dev_priv->drm, active_planes != 0 && fifo_left != 0);
1623 drm_WARN_ON(&dev_priv->drm, fifo_left != fifo_size);
1634 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1636 for (; level < dev_priv->display.wm.num_levels; level++) {
1662 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1665 for (; level < dev_priv->display.wm.num_levels; level++) {
1679 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1689 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
1706 drm_dbg_kms(&dev_priv->drm,
1738 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1748 wm_state->num_levels = dev_priv->display.wm.num_levels;
1758 const int sr_fifo_size = INTEL_NUM_PIPES(dev_priv) * 512 - 1;
1858 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1859 struct intel_uncore *uncore = &dev_priv->uncore;
1874 drm_WARN_ON(&dev_priv->drm, fifo_state->plane[PLANE_CURSOR] != 63);
1875 drm_WARN_ON(&dev_priv->drm, fifo_size != 511);
1892 dsparb = intel_uncore_read_fw(uncore, DSPARB(dev_priv));
1905 intel_uncore_write_fw(uncore, DSPARB(dev_priv), dsparb);
1909 dsparb = intel_uncore_read_fw(uncore, DSPARB(dev_priv));
1922 intel_uncore_write_fw(uncore, DSPARB(dev_priv), dsparb);
1946 intel_uncore_posting_read_fw(uncore, DSPARB(dev_priv));
2021 static void vlv_merge_wm(struct drm_i915_private *dev_priv,
2027 wm->level = dev_priv->display.wm.num_levels - 1;
2030 for_each_intel_crtc(&dev_priv->drm, crtc) {
2049 for_each_intel_crtc(&dev_priv->drm, crtc) {
2064 static void vlv_program_watermarks(struct drm_i915_private *dev_priv)
2066 struct vlv_wm_values *old_wm = &dev_priv->display.wm.vlv;
2069 vlv_merge_wm(dev_priv, &new_wm);
2075 chv_set_memory_dvfs(dev_priv, false);
2078 chv_set_memory_pm5(dev_priv, false);
2081 _intel_set_memory_cxsr(dev_priv, false);
2083 vlv_write_wm_values(dev_priv, &new_wm);
2086 _intel_set_memory_cxsr(dev_priv, true);
2089 chv_set_memory_pm5(dev_priv, true);
2092 chv_set_memory_dvfs(dev_priv, true);
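The ordering across 2064-2092 is the point: deeper memory power states (DDR DVFS, PM5, CxSR) are dropped before the new watermarks are written and re-enabled only afterwards, so the hardware never runs a deep power state against stale watermarks. A hedged reconstruction; the is_enabling()/is_disabling() helpers do not appear in the matches and are assumptions:

static void vlv_program_watermarks(struct drm_i915_private *dev_priv)
{
        struct vlv_wm_values *old_wm = &dev_priv->display.wm.vlv;
        struct vlv_wm_values new_wm = {};

        vlv_merge_wm(dev_priv, &new_wm);

        if (memcmp(old_wm, &new_wm, sizeof(new_wm)) == 0)
                return;

        if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS))
                chv_set_memory_dvfs(dev_priv, false);

        if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5))
                chv_set_memory_pm5(dev_priv, false);

        if (is_disabling(old_wm->cxsr, new_wm.cxsr, true))
                _intel_set_memory_cxsr(dev_priv, false);

        vlv_write_wm_values(dev_priv, &new_wm);

        if (is_enabling(old_wm->cxsr, new_wm.cxsr, true))
                _intel_set_memory_cxsr(dev_priv, true);

        if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5))
                chv_set_memory_pm5(dev_priv, true);

        if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS))
                chv_set_memory_dvfs(dev_priv, true);

        *old_wm = new_wm;
}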
2100 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2104 mutex_lock(&dev_priv->display.wm.wm_mutex);
2106 vlv_program_watermarks(dev_priv);
2107 mutex_unlock(&dev_priv->display.wm.wm_mutex);
2113 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2120 mutex_lock(&dev_priv->display.wm.wm_mutex);
2122 vlv_program_watermarks(dev_priv);
2123 mutex_unlock(&dev_priv->display.wm.wm_mutex);
2126 static void i965_update_wm(struct drm_i915_private *dev_priv)
2134 crtc = single_enabled_crtc(dev_priv);
2155 drm_dbg_kms(&dev_priv->drm,
2170 drm_dbg_kms(&dev_priv->drm,
2178 intel_set_memory_cxsr(dev_priv, false);
2181 drm_dbg_kms(&dev_priv->drm,
2186 intel_uncore_write(&dev_priv->uncore, DSPFW1(dev_priv),
2191 intel_uncore_write(&dev_priv->uncore, DSPFW2(dev_priv),
2195 intel_uncore_write(&dev_priv->uncore, DSPFW3(dev_priv),
2199 intel_set_memory_cxsr(dev_priv, true);
2219 static void i9xx_update_wm(struct drm_i915_private *dev_priv)
2229 if (IS_I945GM(dev_priv))
2231 else if (DISPLAY_VER(dev_priv) != 2)
2236 if (DISPLAY_VER(dev_priv) == 2)
2237 fifo_size = i830_get_fifo_size(dev_priv, PLANE_A);
2239 fifo_size = i9xx_get_fifo_size(dev_priv, PLANE_A);
2240 crtc = intel_crtc_for_plane(dev_priv, PLANE_A);
2246 if (DISPLAY_VER(dev_priv) == 2)
2251 planea_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2260 if (DISPLAY_VER(dev_priv) == 2)
2263 if (DISPLAY_VER(dev_priv) == 2)
2264 fifo_size = i830_get_fifo_size(dev_priv, PLANE_B);
2266 fifo_size = i9xx_get_fifo_size(dev_priv, PLANE_B);
2267 crtc = intel_crtc_for_plane(dev_priv, PLANE_B);
2273 if (DISPLAY_VER(dev_priv) == 2)
2278 planeb_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2287 drm_dbg_kms(&dev_priv->drm,
2290 crtc = single_enabled_crtc(dev_priv);
2291 if (IS_I915GM(dev_priv) && crtc) {
2307 intel_set_memory_cxsr(dev_priv, false);
2310 if (HAS_FW_BLC(dev_priv) && crtc) {
2323 if (IS_I915GM(dev_priv) || IS_I945GM(dev_priv))
2331 drm_dbg_kms(&dev_priv->drm,
2337 if (IS_I945G(dev_priv) || IS_I945GM(dev_priv))
2338 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF,
2341 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, srwm & 0x3f);
2344 drm_dbg_kms(&dev_priv->drm,
2355 intel_uncore_write(&dev_priv->uncore, FW_BLC, fwater_lo);
2356 intel_uncore_write(&dev_priv->uncore, FW_BLC2, fwater_hi);
2359 intel_set_memory_cxsr(dev_priv, true);
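The FW_BLC/FW_BLC2 writes at 2355-2356 take packed watermark values; the unmatched packing plausibly looks like this (the field positions and the request-length bits are assumptions):

        fwater_lo = ((planeb_wm & 0x3f) << 16) | (planea_wm & 0x3f);
        fwater_hi = cwm & 0x1f;

        /* set request length to 8 cachelines per fifo pool (assumed) */
        fwater_lo |= (1 << 24) | (1 << 8);
        fwater_hi |= (1 << 8);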
2362 static void i845_update_wm(struct drm_i915_private *dev_priv)
2368 crtc = single_enabled_crtc(dev_priv);
2372 planea_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2374 i845_get_fifo_size(dev_priv, PLANE_A),
2376 fwater_lo = intel_uncore_read(&dev_priv->uncore, FW_BLC) & ~0xfff;
2379 drm_dbg_kms(&dev_priv->drm,
2382 intel_uncore_write(&dev_priv->uncore, FW_BLC, fwater_lo);
2537 ilk_display_fifo_size(const struct drm_i915_private *dev_priv)
2539 if (DISPLAY_VER(dev_priv) >= 8)
2541 else if (DISPLAY_VER(dev_priv) >= 7)
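The version checks at 2539/2541 pick the total display FIFO size that the LP watermark maximums below are carved from. Reconstructed with the conventional sizes for these generations (the constants are assumptions):

static unsigned int
ilk_display_fifo_size(const struct drm_i915_private *dev_priv)
{
        if (DISPLAY_VER(dev_priv) >= 8)
                return 3072;    /* BDW+ */
        else if (DISPLAY_VER(dev_priv) >= 7)
                return 768;     /* IVB/HSW */
        else
                return 512;     /* ILK/SNB */
}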
2548 ilk_plane_wm_reg_max(const struct drm_i915_private *dev_priv,
2551 if (DISPLAY_VER(dev_priv) >= 8)
2554 else if (DISPLAY_VER(dev_priv) >= 7)
2566 ilk_cursor_wm_reg_max(const struct drm_i915_private *dev_priv, int level)
2568 if (DISPLAY_VER(dev_priv) >= 7)
2574 static unsigned int ilk_fbc_wm_reg_max(const struct drm_i915_private *dev_priv)
2576 if (DISPLAY_VER(dev_priv) >= 8)
2583 static unsigned int ilk_plane_wm_max(const struct drm_i915_private *dev_priv,
2589 unsigned int fifo_size = ilk_display_fifo_size(dev_priv);
2597 fifo_size /= INTEL_NUM_PIPES(dev_priv);
2604 if (DISPLAY_VER(dev_priv) < 7)
2620 return min(fifo_size, ilk_plane_wm_reg_max(dev_priv, level, is_sprite));
2624 static unsigned int ilk_cursor_wm_max(const struct drm_i915_private *dev_priv,
2633 return ilk_cursor_wm_reg_max(dev_priv, level);
2636 static void ilk_compute_wm_maximums(const struct drm_i915_private *dev_priv,
2642 max->pri = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, false);
2643 max->spr = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, true);
2644 max->cur = ilk_cursor_wm_max(dev_priv, level, config);
2645 max->fbc = ilk_fbc_wm_reg_max(dev_priv);
2648 static void ilk_compute_wm_reg_maximums(const struct drm_i915_private *dev_priv,
2652 max->pri = ilk_plane_wm_reg_max(dev_priv, level, false);
2653 max->spr = ilk_plane_wm_reg_max(dev_priv, level, true);
2654 max->cur = ilk_cursor_wm_reg_max(dev_priv, level);
2655 max->fbc = ilk_fbc_wm_reg_max(dev_priv);
2703 static void ilk_compute_wm_level(const struct drm_i915_private *dev_priv,
2712 u16 pri_latency = dev_priv->display.wm.pri_latency[level];
2713 u16 spr_latency = dev_priv->display.wm.spr_latency[level];
2714 u16 cur_latency = dev_priv->display.wm.cur_latency[level];
2783 static void intel_fixup_spr_wm_latency(struct drm_i915_private *dev_priv,
2787 if (DISPLAY_VER(dev_priv) == 5)
2791 static void intel_fixup_cur_wm_latency(struct drm_i915_private *dev_priv,
2795 if (DISPLAY_VER(dev_priv) == 5)
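Both fixup helpers special-case display version 5 (ILK), where the sprite and cursor LP0 latencies differ from the primary value read from hardware. Sketch of the sprite variant; the 1300 ns value is an assumption:

static void intel_fixup_spr_wm_latency(struct drm_i915_private *dev_priv,
                                       u16 wm[5])
{
        /* ILK sprite LP0 latency is 1300 ns (assumed) */
        if (DISPLAY_VER(dev_priv) == 5)
                wm[0] = 13;
}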
2799 static bool ilk_increase_wm_latency(struct drm_i915_private *dev_priv,
2808 for (level = 1; level < dev_priv->display.wm.num_levels; level++)
2814 static void snb_wm_latency_quirk(struct drm_i915_private *dev_priv)
2822 changed = ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.pri_latency, 12);
2823 changed |= ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.spr_latency, 12);
2824 changed |= ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.cur_latency, 12);
2829 drm_dbg_kms(&dev_priv->drm,
2831 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2832 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2833 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
2836 static void snb_wm_lp3_irq_quirk(struct drm_i915_private *dev_priv)
2849 if (dev_priv->display.wm.pri_latency[3] == 0 &&
2850 dev_priv->display.wm.spr_latency[3] == 0 &&
2851 dev_priv->display.wm.cur_latency[3] == 0)
2854 dev_priv->display.wm.pri_latency[3] = 0;
2855 dev_priv->display.wm.spr_latency[3] = 0;
2856 dev_priv->display.wm.cur_latency[3] = 0;
2858 drm_dbg_kms(&dev_priv->drm,
2860 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2861 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2862 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
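Lines 2849-2856 only make sense with an early return between the check and the assignments: if the LP3 latencies already read as zero there is nothing to quirk, otherwise they are zeroed so the LP3 level is never used (reportedly LP3 can cause lost vblank interrupts on some SNB machines). Sketch:

        if (dev_priv->display.wm.pri_latency[3] == 0 &&
            dev_priv->display.wm.spr_latency[3] == 0 &&
            dev_priv->display.wm.cur_latency[3] == 0)
                return;

        dev_priv->display.wm.pri_latency[3] = 0;
        dev_priv->display.wm.spr_latency[3] = 0;
        dev_priv->display.wm.cur_latency[3] = 0;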
2865 static void ilk_setup_wm_latency(struct drm_i915_private *dev_priv)
2867 if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2868 hsw_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2869 else if (DISPLAY_VER(dev_priv) >= 6)
2870 snb_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2872 ilk_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2874 memcpy(dev_priv->display.wm.spr_latency, dev_priv->display.wm.pri_latency,
2875 sizeof(dev_priv->display.wm.pri_latency));
2876 memcpy(dev_priv->display.wm.cur_latency, dev_priv->display.wm.pri_latency,
2877 sizeof(dev_priv->display.wm.pri_latency));
2879 intel_fixup_spr_wm_latency(dev_priv, dev_priv->display.wm.spr_latency);
2880 intel_fixup_cur_wm_latency(dev_priv, dev_priv->display.wm.cur_latency);
2882 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2883 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2884 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
2886 if (DISPLAY_VER(dev_priv) == 6) {
2887 snb_wm_latency_quirk(dev_priv);
2888 snb_wm_lp3_irq_quirk(dev_priv);
2892 static bool ilk_validate_pipe_wm(struct drm_i915_private *dev_priv,
2904 ilk_compute_wm_maximums(dev_priv, 0, &config, INTEL_DDB_PART_1_2, &max);
2907 if (!ilk_validate_wm_level(dev_priv, 0, &max, &pipe_wm->wm[0])) {
2908 drm_dbg_kms(&dev_priv->drm, "LP0 watermark invalid\n");
2919 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2946 usable_level = dev_priv->display.wm.num_levels - 1;
2949 if (DISPLAY_VER(dev_priv) < 7 && pipe_wm->sprites_enabled)
2957 ilk_compute_wm_level(dev_priv, crtc, 0, crtc_state,
2960 if (!ilk_validate_pipe_wm(dev_priv, pipe_wm))
2963 ilk_compute_wm_reg_maximums(dev_priv, 1, &max);
2968 ilk_compute_wm_level(dev_priv, crtc, level, crtc_state,
2976 if (!ilk_validate_wm_level(dev_priv, level, &max, wm)) {
2993 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3018 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
3039 if (!ilk_validate_pipe_wm(dev_priv, intermediate))
3071 static void ilk_merge_wm_level(struct drm_i915_private *dev_priv,
3079 for_each_intel_crtc(&dev_priv->drm, crtc) {
3104 static void ilk_wm_merge(struct drm_i915_private *dev_priv,
3109 int level, num_levels = dev_priv->display.wm.num_levels;
3113 if ((DISPLAY_VER(dev_priv) < 7 || IS_IVYBRIDGE(dev_priv)) &&
3118 merged->fbc_wm_enabled = DISPLAY_VER(dev_priv) >= 6;
3124 ilk_merge_wm_level(dev_priv, level, wm);
3128 else if (!ilk_validate_wm_level(dev_priv, level, max, wm))
3144 if (DISPLAY_VER(dev_priv) == 5 && HAS_FBC(dev_priv) &&
3145 dev_priv->display.params.enable_fbc && !merged->fbc_wm_enabled) {
3161 static unsigned int ilk_wm_lp_latency(struct drm_i915_private *dev_priv,
3164 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
3167 return dev_priv->display.wm.pri_latency[level];
3170 static void ilk_compute_wm_results(struct drm_i915_private *dev_priv,
3194 WM_LP_LATENCY(ilk_wm_lp_latency(dev_priv, level)) |
3201 if (DISPLAY_VER(dev_priv) >= 8)
3212 if (DISPLAY_VER(dev_priv) < 7 && r->spr_val) {
3213 drm_WARN_ON(&dev_priv->drm, wm_lp != 1);
3219 for_each_intel_crtc(&dev_priv->drm, crtc) {
3224 if (drm_WARN_ON(&dev_priv->drm, !r->enable))
3239 ilk_find_best_result(struct drm_i915_private *dev_priv,
3245 for (level = 1; level < dev_priv->display.wm.num_levels; level++) {
3271 static unsigned int ilk_compute_wm_dirty(struct drm_i915_private *dev_priv,
3279 for_each_pipe(dev_priv, pipe) {
3317 static bool _ilk_disable_lp_wm(struct drm_i915_private *dev_priv,
3320 struct ilk_wm_values *previous = &dev_priv->display.wm.hw;
3325 intel_uncore_write(&dev_priv->uncore, WM3_LP_ILK, previous->wm_lp[2]);
3330 intel_uncore_write(&dev_priv->uncore, WM2_LP_ILK, previous->wm_lp[1]);
3335 intel_uncore_write(&dev_priv->uncore, WM1_LP_ILK, previous->wm_lp[0]);
3351 static void ilk_write_wm_values(struct drm_i915_private *dev_priv,
3354 struct ilk_wm_values *previous = &dev_priv->display.wm.hw;
3357 dirty = ilk_compute_wm_dirty(dev_priv, previous, results);
3361 _ilk_disable_lp_wm(dev_priv, dirty);
3364 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_A), results->wm_pipe[0]);
3366 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_B), results->wm_pipe[1]);
3368 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_C), results->wm_pipe[2]);
3371 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
3372 intel_uncore_rmw(&dev_priv->uncore, WM_MISC, WM_MISC_DATA_PARTITION_5_6,
3376 intel_uncore_rmw(&dev_priv->uncore, DISP_ARB_CTL2, DISP_DATA_PARTITION_5_6,
3382 intel_uncore_rmw(&dev_priv->uncore, DISP_ARB_CTL, DISP_FBC_WM_DIS,
3387 intel_uncore_write(&dev_priv->uncore, WM1S_LP_ILK, results->wm_lp_spr[0]);
3389 if (DISPLAY_VER(dev_priv) >= 7) {
3391 intel_uncore_write(&dev_priv->uncore, WM2S_LP_IVB, results->wm_lp_spr[1]);
3393 intel_uncore_write(&dev_priv->uncore, WM3S_LP_IVB, results->wm_lp_spr[2]);
3397 intel_uncore_write(&dev_priv->uncore, WM1_LP_ILK, results->wm_lp[0]);
3399 intel_uncore_write(&dev_priv->uncore, WM2_LP_ILK, results->wm_lp[1]);
3401 intel_uncore_write(&dev_priv->uncore, WM3_LP_ILK, results->wm_lp[2]);
3403 dev_priv->display.wm.hw = *results;
3406 bool ilk_disable_cxsr(struct drm_i915_private *dev_priv)
3408 return _ilk_disable_lp_wm(dev_priv, WM_DIRTY_LP_ALL);
3411 static void ilk_compute_wm_config(struct drm_i915_private *dev_priv,
3417 for_each_intel_crtc(&dev_priv->drm, crtc) {
3429 static void ilk_program_watermarks(struct drm_i915_private *dev_priv)
3437 ilk_compute_wm_config(dev_priv, &config);
3439 ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_1_2, &max);
3440 ilk_wm_merge(dev_priv, &config, &max, &lp_wm_1_2);
3443 if (DISPLAY_VER(dev_priv) >= 7 &&
3445 ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_5_6, &max);
3446 ilk_wm_merge(dev_priv, &config, &max, &lp_wm_5_6);
3448 best_lp_wm = ilk_find_best_result(dev_priv, &lp_wm_1_2, &lp_wm_5_6);
3456 ilk_compute_wm_results(dev_priv, best_lp_wm, partitioning, &results);
3458 ilk_write_wm_values(dev_priv, &results);
3464 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3468 mutex_lock(&dev_priv->display.wm.wm_mutex);
3470 ilk_program_watermarks(dev_priv);
3471 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3477 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3484 mutex_lock(&dev_priv->display.wm.wm_mutex);
3486 ilk_program_watermarks(dev_priv);
3487 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3493 struct drm_i915_private *dev_priv = to_i915(dev);
3494 struct ilk_wm_values *hw = &dev_priv->display.wm.hw;
3499 hw->wm_pipe[pipe] = intel_uncore_read(&dev_priv->uncore, WM0_PIPE_ILK(pipe));
3526 for (level = 0; level < dev_priv->display.wm.num_levels; level++)
3575 void ilk_wm_sanitize(struct drm_i915_private *dev_priv)
3586 if (!dev_priv->display.funcs.wm->optimize_watermarks)
3589 if (drm_WARN_ON(&dev_priv->drm, DISPLAY_VER(dev_priv) >= 9))
3592 state = drm_atomic_state_alloc(&dev_priv->drm);
3593 if (drm_WARN_ON(&dev_priv->drm, !state))
3609 if (!HAS_GMCH(dev_priv))
3616 ret = intel_atomic_check(&dev_priv->drm, state);
3646 drm_WARN(&dev_priv->drm, ret,
3660 static void g4x_read_wm_values(struct drm_i915_private *dev_priv,
3665 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW1(dev_priv));
3671 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW2(dev_priv));
3679 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW3(dev_priv));
3686 static void vlv_read_wm_values(struct drm_i915_private *dev_priv,
3692 for_each_pipe(dev_priv, pipe) {
3693 tmp = intel_uncore_read(&dev_priv->uncore, VLV_DDL(pipe));
3705 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW1(dev_priv));
3711 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW2(dev_priv));
3716 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW3(dev_priv));
3719 if (IS_CHERRYVIEW(dev_priv)) {
3720 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW7_CHV);
3724 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW8_CHV);
3728 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW9_CHV);
3732 tmp = intel_uncore_read(&dev_priv->uncore, DSPHOWM);
3744 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW7);
3748 tmp = intel_uncore_read(&dev_priv->uncore, DSPHOWM);
3762 static void g4x_wm_get_hw_state(struct drm_i915_private *dev_priv)
3764 struct g4x_wm_values *wm = &dev_priv->display.wm.g4x;
3767 g4x_read_wm_values(dev_priv, wm);
3769 wm->cxsr = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
3771 for_each_intel_crtc(&dev_priv->drm, crtc) {
3836 drm_dbg_kms(&dev_priv->drm,
3844 drm_dbg_kms(&dev_priv->drm,
3847 drm_dbg_kms(&dev_priv->drm,
3850 drm_dbg_kms(&dev_priv->drm, "Initial SR=%s HPLL=%s FBC=%s\n",
3855 static void g4x_wm_sanitize(struct drm_i915_private *dev_priv)
3857 struct intel_display *display = &dev_priv->display;
3861 mutex_lock(&dev_priv->display.wm.wm_mutex);
3863 for_each_intel_plane(&dev_priv->drm, plane) {
3876 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
3887 for_each_intel_crtc(&dev_priv->drm, crtc) {
3893 drm_WARN_ON(&dev_priv->drm, ret);
3900 g4x_program_watermarks(dev_priv);
3902 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3905 static void vlv_wm_get_hw_state(struct drm_i915_private *dev_priv)
3907 struct vlv_wm_values *wm = &dev_priv->display.wm.vlv;
3911 vlv_read_wm_values(dev_priv, wm);
3913 wm->cxsr = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF_VLV) & FW_CSPWRDWNEN;
3916 if (IS_CHERRYVIEW(dev_priv)) {
3917 vlv_punit_get(dev_priv);
3919 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
3932 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
3934 vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);
3936 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
3938 drm_dbg_kms(&dev_priv->drm,
3941 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_PM5 + 1;
3943 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
3948 vlv_punit_put(dev_priv);
3951 for_each_intel_crtc(&dev_priv->drm, crtc) {
3991 drm_dbg_kms(&dev_priv->drm,
4000 drm_dbg_kms(&dev_priv->drm,
4005 static void vlv_wm_sanitize(struct drm_i915_private *dev_priv)
4007 struct intel_display *display = &dev_priv->display;
4011 mutex_lock(&dev_priv->display.wm.wm_mutex);
4013 for_each_intel_plane(&dev_priv->drm, plane) {
4026 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
4034 for_each_intel_crtc(&dev_priv->drm, crtc) {
4040 drm_WARN_ON(&dev_priv->drm, ret);
4047 vlv_program_watermarks(dev_priv);
4049 mutex_unlock(&dev_priv->display.wm.wm_mutex);
4056 static void ilk_init_lp_watermarks(struct drm_i915_private *dev_priv)
4058 intel_uncore_rmw(&dev_priv->uncore, WM3_LP_ILK, WM_LP_ENABLE, 0);
4059 intel_uncore_rmw(&dev_priv->uncore, WM2_LP_ILK, WM_LP_ENABLE, 0);
4060 intel_uncore_rmw(&dev_priv->uncore, WM1_LP_ILK, WM_LP_ENABLE, 0);
4068 static void ilk_wm_get_hw_state(struct drm_i915_private *dev_priv)
4070 struct ilk_wm_values *hw = &dev_priv->display.wm.hw;
4073 ilk_init_lp_watermarks(dev_priv);
4075 for_each_intel_crtc(&dev_priv->drm, crtc)
4078 hw->wm_lp[0] = intel_uncore_read(&dev_priv->uncore, WM1_LP_ILK);
4079 hw->wm_lp[1] = intel_uncore_read(&dev_priv->uncore, WM2_LP_ILK);
4080 hw->wm_lp[2] = intel_uncore_read(&dev_priv->uncore, WM3_LP_ILK);
4082 hw->wm_lp_spr[0] = intel_uncore_read(&dev_priv->uncore, WM1S_LP_ILK);
4083 if (DISPLAY_VER(dev_priv) >= 7) {
4084 hw->wm_lp_spr[1] = intel_uncore_read(&dev_priv->uncore, WM2S_LP_IVB);
4085 hw->wm_lp_spr[2] = intel_uncore_read(&dev_priv->uncore, WM3S_LP_IVB);
4088 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
4089 hw->partitioning = (intel_uncore_read(&dev_priv->uncore, WM_MISC) &
4092 else if (IS_IVYBRIDGE(dev_priv))
4093 hw->partitioning = (intel_uncore_read(&dev_priv->uncore, DISP_ARB_CTL2) &
4098 !(intel_uncore_read(&dev_priv->uncore, DISP_ARB_CTL) & DISP_FBC_WM_DIS);
4148 void i9xx_wm_init(struct drm_i915_private *dev_priv)
4151 if (HAS_PCH_SPLIT(dev_priv)) {
4152 ilk_setup_wm_latency(dev_priv);
4153 dev_priv->display.funcs.wm = &ilk_wm_funcs;
4154 } else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
4155 vlv_setup_wm_latency(dev_priv);
4156 dev_priv->display.funcs.wm = &vlv_wm_funcs;
4157 } else if (IS_G4X(dev_priv)) {
4158 g4x_setup_wm_latency(dev_priv);
4159 dev_priv->display.funcs.wm = &g4x_wm_funcs;
4160 } else if (IS_PINEVIEW(dev_priv)) {
4161 if (!pnv_get_cxsr_latency(dev_priv)) {
4162 drm_info(&dev_priv->drm, "Unknown FSB/MEM, disabling CxSR\n");
4164 intel_set_memory_cxsr(dev_priv, false);
4165 dev_priv->display.funcs.wm = &nop_funcs;
4167 dev_priv->display.funcs.wm = &pnv_wm_funcs;
4169 } else if (DISPLAY_VER(dev_priv) == 4) {
4170 dev_priv->display.funcs.wm = &i965_wm_funcs;
4171 } else if (DISPLAY_VER(dev_priv) == 3) {
4172 dev_priv->display.funcs.wm = &i9xx_wm_funcs;
4173 } else if (DISPLAY_VER(dev_priv) == 2) {
4174 if (INTEL_NUM_PIPES(dev_priv) == 1)
4175 dev_priv->display.funcs.wm = &i845_wm_funcs;
4177 dev_priv->display.funcs.wm = &i9xx_wm_funcs;
4179 drm_err(&dev_priv->drm,
4181 dev_priv->display.funcs.wm = &nop_funcs;
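i9xx_wm_init() is pure dispatch: each branch installs a platform intel_wm_funcs table, with nop_funcs as the fallback when CxSR latency data is missing or the platform is unrecognized. Assumed shape of two of the tables (the member set beyond update_wm is not shown in the matches):

static const struct intel_wm_funcs pnv_wm_funcs = {
        .update_wm = pnv_update_wm,
};

static const struct intel_wm_funcs nop_funcs = {
};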