xref: /linux/drivers/gpu/drm/i915/display/intel_pch_refclk.c (revision 25489a4f556414445d342951615178368ee45cde)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2021 Intel Corporation
 */

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_panel.h"
#include "intel_pch_refclk.h"
#include "intel_sbi.h"

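/*
 * Pulse the FDI mPHY reset over the IOSF sideband: assert the reset bit in
 * SOUTH_CHICKEN2, wait for the status bit to reflect it, then de-assert and
 * wait again. Timeouts are only logged; the caller proceeds regardless.
 */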
static void lpt_fdi_reset_mphy(struct intel_display *display)
{
	intel_de_rmw(display, SOUTH_CHICKEN2, 0, FDI_MPHY_IOSFSB_RESET_CTL);

	if (wait_for_us(intel_de_read(display, SOUTH_CHICKEN2) &
			FDI_MPHY_IOSFSB_RESET_STATUS, 100))
		drm_err(display->drm, "FDI mPHY reset assert timeout\n");

	intel_de_rmw(display, SOUTH_CHICKEN2, FDI_MPHY_IOSFSB_RESET_CTL, 0);

	if (wait_for_us((intel_de_read(display, SOUTH_CHICKEN2) &
			 FDI_MPHY_IOSFSB_RESET_STATUS) == 0, 100))
		drm_err(display->drm, "FDI mPHY reset de-assert timeout\n");
}

/* WaMPhyProgramming:hsw */
static void lpt_fdi_program_mphy(struct intel_display *display)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	u32 tmp;

	lpt_fdi_reset_mphy(display);

	tmp = intel_sbi_read(dev_priv, 0x8008, SBI_MPHY);
	tmp &= ~(0xFF << 24);
	tmp |= (0x12 << 24);
	intel_sbi_write(dev_priv, 0x8008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2008, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2108, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2108, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x206C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x206C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x216C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x216C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2080, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2080, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2180, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2180, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x208C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x208C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x218C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x218C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2098, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2098, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2198, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2198, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x20C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x21C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x20EC, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x21EC, tmp, SBI_MPHY);
}

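/*
 * Stop the iCLKIP output: gate the pixel clock and set the disable bit in
 * SBI_SSCCTL6 over the ICLK sideband.
 */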
void lpt_disable_iclkip(struct intel_display *display)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	u32 temp;

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_GATE);

	intel_sbi_lock(dev_priv);

	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	temp |= SBI_SSCCTL_DISABLE;
	intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(dev_priv);
}

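/*
 * Divider settings for the iCLKIP programmable clock, all derived from the
 * iCLK virtual root frequency: integer divider (divsel), phase increment
 * (phaseinc) out of iclk_pi_range steps per divider unit, auxiliary /2
 * divider (auxdiv), and the combined desired_divisor they encode.
 */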
struct iclkip_params {
	u32 iclk_virtual_root_freq;
	u32 iclk_pi_range;
	u32 divsel, phaseinc, auxdiv, phasedir, desired_divisor;
};

static void iclkip_params_init(struct iclkip_params *p)
{
	memset(p, 0, sizeof(*p));

	p->iclk_virtual_root_freq = 172800 * 1000;
	p->iclk_pi_range = 64;
}

static int lpt_iclkip_freq(struct iclkip_params *p)
{
	return DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
				 p->desired_divisor << p->auxdiv);
}

static void lpt_compute_iclkip(struct iclkip_params *p, int clock)
{
	iclkip_params_init(p);

	/*
	 * The iCLK virtual clock root frequency is in MHz, but the
	 * adjusted_mode->crtc_clock is in KHz. To get the divisors we
	 * must divide one by the other, so the virtual clock is kept
	 * in KHz as well to preserve precision.
	 */
	for (p->auxdiv = 0; p->auxdiv < 2; p->auxdiv++) {
		p->desired_divisor = DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
						       clock << p->auxdiv);
		p->divsel = (p->desired_divisor / p->iclk_pi_range) - 2;
		p->phaseinc = p->desired_divisor % p->iclk_pi_range;

		/*
		 * Near 20MHz is a corner case which is
		 * out of range for the 7-bit divisor
		 */
		if (p->divsel <= 0x7f)
			break;
	}
}

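/*
 * Return the iCLKIP frequency (in KHz) that will actually be generated for
 * the requested crtc_clock, i.e. the requested clock rounded to what the
 * divider can represent. For example, a 108000 KHz clock yields
 * desired_divisor = 172800000 / 108000 = 1600 (divsel = 23, phaseinc = 0,
 * auxdiv = 0), which reproduces exactly 108000 KHz.
 */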
int lpt_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct iclkip_params p;

	lpt_compute_iclkip(&p, crtc_state->hw.adjusted_mode.crtc_clock);

	return lpt_iclkip_freq(&p);
}

/* Program iCLKIP clock to the desired frequency */
void lpt_program_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	int clock = crtc_state->hw.adjusted_mode.crtc_clock;
	struct iclkip_params p;
	u32 temp;

	lpt_disable_iclkip(display);

	lpt_compute_iclkip(&p, clock);
	drm_WARN_ON(display->drm, lpt_iclkip_freq(&p) != clock);

	/* This should not happen with any sane values */
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIVSEL(p.divsel) &
		    ~SBI_SSCDIVINTPHASE_DIVSEL_MASK);
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIR(p.phasedir) &
		    ~SBI_SSCDIVINTPHASE_INCVAL_MASK);

	drm_dbg_kms(display->drm,
		    "iCLKIP clock: found settings for %dKHz refresh rate: auxdiv=%x, divsel=%x, phasedir=%x, phaseinc=%x\n",
		    clock, p.auxdiv, p.divsel, p.phasedir, p.phaseinc);

	intel_sbi_lock(dev_priv);

	/* Program SSCDIVINTPHASE6 */
	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	temp &= ~SBI_SSCDIVINTPHASE_DIVSEL_MASK;
	temp |= SBI_SSCDIVINTPHASE_DIVSEL(p.divsel);
	temp &= ~SBI_SSCDIVINTPHASE_INCVAL_MASK;
	temp |= SBI_SSCDIVINTPHASE_INCVAL(p.phaseinc);
	temp |= SBI_SSCDIVINTPHASE_DIR(p.phasedir);
	temp |= SBI_SSCDIVINTPHASE_PROPAGATE;
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE6, temp, SBI_ICLK);

	/* Program SSCAUXDIV */
	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	temp &= ~SBI_SSCAUXDIV_FINALDIV2SEL(1);
	temp |= SBI_SSCAUXDIV_FINALDIV2SEL(p.auxdiv);
	intel_sbi_write(dev_priv, SBI_SSCAUXDIV6, temp, SBI_ICLK);

	/* Enable modulator and associated divider */
	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	temp &= ~SBI_SSCCTL_DISABLE;
	intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(dev_priv);

	/* Wait for initialization time */
	udelay(24);

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_UNGATE);
}

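/*
 * Read the current iCLKIP frequency back from the hardware: returns 0 if
 * the pixel clock is gated or the SSC modulator is disabled, otherwise
 * reconstructs desired_divisor from the divsel/phaseinc/auxdiv fields read
 * over the sideband and converts it back to a frequency in KHz.
 */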
int lpt_get_iclkip(struct intel_display *display)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	struct iclkip_params p;
	u32 temp;

	if ((intel_de_read(display, PIXCLK_GATE) & PIXCLK_GATE_UNGATE) == 0)
		return 0;

	iclkip_params_init(&p);

	intel_sbi_lock(dev_priv);

	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	if (temp & SBI_SSCCTL_DISABLE) {
		intel_sbi_unlock(dev_priv);
		return 0;
	}

	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	p.divsel = (temp & SBI_SSCDIVINTPHASE_DIVSEL_MASK) >>
		SBI_SSCDIVINTPHASE_DIVSEL_SHIFT;
	p.phaseinc = (temp & SBI_SSCDIVINTPHASE_INCVAL_MASK) >>
		SBI_SSCDIVINTPHASE_INCVAL_SHIFT;

	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	p.auxdiv = (temp & SBI_SSCAUXDIV_FINALDIV2SEL_MASK) >>
		SBI_SSCAUXDIV_FINALDIV2SEL_SHIFT;

	intel_sbi_unlock(dev_priv);

	p.desired_divisor = (p.divsel + 2) * p.iclk_pi_range + p.phaseinc;

	return lpt_iclkip_freq(&p);
}

/* Implements 3 different sequences from BSpec chapter "Display iCLK
 * Programming" based on the parameters passed:
 * - Sequence to enable CLKOUT_DP
 * - Sequence to enable CLKOUT_DP without spread
 * - Sequence to enable CLKOUT_DP for FDI usage and configure PCH FDI I/O
 */
static void lpt_enable_clkout_dp(struct intel_display *display,
				 bool with_spread, bool with_fdi)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	u32 reg, tmp;

	if (drm_WARN(display->drm, with_fdi && !with_spread,
		     "FDI requires downspread\n"))
		with_spread = true;
	if (drm_WARN(display->drm, HAS_PCH_LPT_LP(display) &&
		     with_fdi, "LP PCH doesn't have FDI\n"))
		with_fdi = false;

	intel_sbi_lock(dev_priv);

	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	tmp &= ~SBI_SSCCTL_DISABLE;
	tmp |= SBI_SSCCTL_PATHALT;
	intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

	udelay(24);

	if (with_spread) {
		tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
		tmp &= ~SBI_SSCCTL_PATHALT;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

		if (with_fdi)
			lpt_fdi_program_mphy(display);
	}

	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp |= SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	intel_sbi_unlock(dev_priv);
}

/* Sequence to disable CLKOUT_DP */
void lpt_disable_clkout_dp(struct intel_display *display)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	u32 reg, tmp;

	intel_sbi_lock(dev_priv);

	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp &= ~SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	if (!(tmp & SBI_SSCCTL_DISABLE)) {
		if (!(tmp & SBI_SSCCTL_PATHALT)) {
			tmp |= SBI_SSCCTL_PATHALT;
			intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
			udelay(32);
		}
		tmp |= SBI_SSCCTL_DISABLE;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
	}

	intel_sbi_unlock(dev_priv);
}

#define BEND_IDX(steps) ((50 + (steps)) / 5)

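/*
 * SBI_SSCDIVINTPHASE low word for each supported bend setting. The table is
 * indexed by BEND_IDX(steps), which maps steps in -50..+50 (multiples of 5)
 * to 0..20, e.g. BEND_IDX(-50) = 0, BEND_IDX(0) = 10, BEND_IDX(50) = 20.
 */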
static const u16 sscdivintphase[] = {
	[BEND_IDX( 50)] = 0x3B23,
	[BEND_IDX( 45)] = 0x3B23,
	[BEND_IDX( 40)] = 0x3C23,
	[BEND_IDX( 35)] = 0x3C23,
	[BEND_IDX( 30)] = 0x3D23,
	[BEND_IDX( 25)] = 0x3D23,
	[BEND_IDX( 20)] = 0x3E23,
	[BEND_IDX( 15)] = 0x3E23,
	[BEND_IDX( 10)] = 0x3F23,
	[BEND_IDX(  5)] = 0x3F23,
	[BEND_IDX(  0)] = 0x0025,
	[BEND_IDX( -5)] = 0x0025,
	[BEND_IDX(-10)] = 0x0125,
	[BEND_IDX(-15)] = 0x0125,
	[BEND_IDX(-20)] = 0x0225,
	[BEND_IDX(-25)] = 0x0225,
	[BEND_IDX(-30)] = 0x0325,
	[BEND_IDX(-35)] = 0x0325,
	[BEND_IDX(-40)] = 0x0425,
	[BEND_IDX(-45)] = 0x0425,
	[BEND_IDX(-50)] = 0x0525,
};

/*
 * Bend CLKOUT_DP
 * steps -50 to 50 inclusive, in steps of 5
 * < 0 slow down the clock, > 0 speed up the clock, 0 == no bend (135MHz)
 * change in clock period = -(steps / 10) * 5.787 ps
 */
static void lpt_bend_clkout_dp(struct intel_display *display, int steps)
{
	struct drm_i915_private *dev_priv = to_i915(display->drm);
	u32 tmp;
	int idx = BEND_IDX(steps);

	if (drm_WARN_ON(display->drm, steps % 5 != 0))
		return;

	if (drm_WARN_ON(display->drm, idx >= ARRAY_SIZE(sscdivintphase)))
		return;

	intel_sbi_lock(dev_priv);

	if (steps % 10 != 0)
		tmp = 0xAAAAAAAB;
	else
		tmp = 0x00000000;
	intel_sbi_write(dev_priv, SBI_SSCDITHPHASE, tmp, SBI_ICLK);

	tmp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE, SBI_ICLK);
	tmp &= 0xffff0000;
	tmp |= sscdivintphase[idx];
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE, tmp, SBI_ICLK);

	intel_sbi_unlock(dev_priv);
}

#undef BEND_IDX

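/*
 * Check whether the SPLL is enabled and sourcing its reference from the PCH
 * SSC clock, either via the muxed SSC reference with CPU-side SSC fused off,
 * or via the dedicated PCH SSC reference select on Broadwell.
 */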
static bool spll_uses_pch_ssc(struct intel_display *display)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, SPLL_CTL);

	if ((ctl & SPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & SPLL_REF_MASK) == SPLL_REF_MUXED_SSC &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	if (display->platform.broadwell &&
	    (ctl & SPLL_REF_MASK) == SPLL_REF_PCH_SSC_BDW)
		return true;

	return false;
}

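/*
 * Same check for a WRPLL: enabled and referencing the PCH SSC clock, either
 * directly or via the muxed SSC reference (BDW/HSW ULT) with CPU-side SSC
 * fused off.
 */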
static bool wrpll_uses_pch_ssc(struct intel_display *display, enum intel_dpll_id id)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, WRPLL_CTL(id));

	if ((ctl & WRPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & WRPLL_REF_MASK) == WRPLL_REF_PCH_SSC)
		return true;

	if ((display->platform.broadwell || display->platform.haswell_ult) &&
	    (ctl & WRPLL_REF_MASK) == WRPLL_REF_MUXED_SSC_BDW &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	return false;
}

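/*
 * LPT/WPT: record which PLLs the BIOS left running on the PCH SSC reference,
 * and only reconfigure CLKOUT_DP (with spread and FDI mPHY programming if an
 * analog output needs FDI, or disabled entirely otherwise) once nothing
 * depends on it.
 */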
static void lpt_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	bool has_fdi = false;

	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_ANALOG:
			has_fdi = true;
			break;
		default:
			break;
		}
	}

	/*
	 * The BIOS may have decided to use the PCH SSC
	 * reference so we must not disable it until the
	 * relevant PLLs have stopped relying on it. We'll
	 * just leave the PCH SSC reference enabled in case
	 * any active PLL is using it. It will get disabled
	 * after runtime suspend if we don't have FDI.
	 *
	 * TODO: Move the whole reference clock handling
	 * to the modeset sequence proper so that we can
	 * actually enable/disable/reconfigure these things
	 * safely. To do that we need to introduce a real
	 * clock hierarchy. That would also allow us to do
	 * clock bending finally.
	 */
	display->dpll.pch_ssc_use = 0;

	if (spll_uses_pch_ssc(display)) {
		drm_dbg_kms(display->drm, "SPLL using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_SPLL);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL1)) {
		drm_dbg_kms(display->drm, "WRPLL1 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL1);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL2)) {
		drm_dbg_kms(display->drm, "WRPLL2 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL2);
	}

	if (display->dpll.pch_ssc_use)
		return;

	if (has_fdi) {
		lpt_bend_clkout_dp(display, 0);
		lpt_enable_clkout_dp(display, true, true);
	} else {
		lpt_disable_clkout_dp(display);
	}
}

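/*
 * IBX/CPT: compute the desired PCH_DREF_CONTROL value from the connected
 * outputs (LVDS/eDP panel, CK505 non-spread source, SSC usage by any DPLL)
 * and then step the hardware towards it, enabling/disabling each reference
 * source in the documented order with the required delays.
 */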
static void ilk_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	struct intel_shared_dpll *pll;
	int i;
	u32 val, final;
	bool has_lvds = false;
	bool has_cpu_edp = false;
	bool has_panel = false;
	bool has_ck505 = false;
	bool can_ssc = false;
	bool using_ssc_source = false;

	/* We need to take the global config into account */
	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_LVDS:
			has_panel = true;
			has_lvds = true;
			break;
		case INTEL_OUTPUT_EDP:
			has_panel = true;
			if (encoder->port == PORT_A)
				has_cpu_edp = true;
			break;
		default:
			break;
		}
	}

	if (HAS_PCH_IBX(display)) {
		has_ck505 = display->vbt.display_clock_mode;
		can_ssc = has_ck505;
	} else {
		has_ck505 = false;
		can_ssc = true;
	}

	/* Check if any DPLLs are using the SSC source */
	for_each_shared_dpll(display, pll, i) {
		u32 temp;

		temp = intel_de_read(display, PCH_DPLL(pll->info->id));

		if (!(temp & DPLL_VCO_ENABLE))
			continue;

		if ((temp & PLL_REF_INPUT_MASK) ==
		    PLLB_REF_INPUT_SPREADSPECTRUMIN) {
			using_ssc_source = true;
			break;
		}
	}

	drm_dbg_kms(display->drm,
		    "has_panel %d has_lvds %d has_ck505 %d using_ssc_source %d\n",
		    has_panel, has_lvds, has_ck505, using_ssc_source);

	/*
	 * Ironlake: try to set up the display reference clock before
	 * enabling the DPLLs. This is only under the driver's control
	 * after PCH B stepping; earlier steppings should ignore this
	 * setting.
	 */
	val = intel_de_read(display, PCH_DREF_CONTROL);

	/* As we must carefully and slowly disable/enable each source in turn,
	 * compute the final state we want first and check if we need to
	 * make any changes at all.
	 */
	final = val;
	final &= ~DREF_NONSPREAD_SOURCE_MASK;
	if (has_ck505)
		final |= DREF_NONSPREAD_CK505_ENABLE;
	else
		final |= DREF_NONSPREAD_SOURCE_ENABLE;

	final &= ~DREF_SSC_SOURCE_MASK;
	final &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
	final &= ~DREF_SSC1_ENABLE;

	if (has_panel) {
		final |= DREF_SSC_SOURCE_ENABLE;

		if (intel_panel_use_ssc(display) && can_ssc)
			final |= DREF_SSC1_ENABLE;

		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc)
				final |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			else
				final |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else {
			final |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}
	} else if (using_ssc_source) {
		final |= DREF_SSC_SOURCE_ENABLE;
		final |= DREF_SSC1_ENABLE;
	}

	if (final == val)
		return;

	/* Always enable nonspread source */
	val &= ~DREF_NONSPREAD_SOURCE_MASK;

	if (has_ck505)
		val |= DREF_NONSPREAD_CK505_ENABLE;
	else
		val |= DREF_NONSPREAD_SOURCE_ENABLE;

	if (has_panel) {
		val &= ~DREF_SSC_SOURCE_MASK;
		val |= DREF_SSC_SOURCE_ENABLE;

		/* SSC must be turned on before enabling the CPU output */
		if (intel_panel_use_ssc(display) && can_ssc) {
			drm_dbg_kms(display->drm, "Using SSC on panel\n");
			val |= DREF_SSC1_ENABLE;
		} else {
			val &= ~DREF_SSC1_ENABLE;
		}

		/* Get SSC going before enabling the outputs */
		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Enable CPU source on CPU attached eDP */
		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc) {
				drm_dbg_kms(display->drm,
					    "Using SSC on eDP\n");
				val |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			} else {
				val |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
			}
		} else {
			val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);
	} else {
		drm_dbg_kms(display->drm, "Disabling CPU source output\n");

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Turn off CPU output */
		val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		if (!using_ssc_source) {
			drm_dbg_kms(display->drm, "Disabling SSC source\n");

			/* Turn off the SSC source */
			val &= ~DREF_SSC_SOURCE_MASK;
			val |= DREF_SSC_SOURCE_DISABLE;

			/* Turn off SSC1 */
			val &= ~DREF_SSC1_ENABLE;

			intel_de_write(display, PCH_DREF_CONTROL, val);
			intel_de_posting_read(display, PCH_DREF_CONTROL);
			udelay(200);
		}
	}

	drm_WARN_ON(display->drm, val != final);
}

/*
 * Initialize reference clocks when the driver loads
 */
void intel_init_pch_refclk(struct intel_display *display)
{
	if (HAS_PCH_IBX(display) || HAS_PCH_CPT(display))
		ilk_init_pch_refclk(display);
	else if (HAS_PCH_LPT(display))
		lpt_init_pch_refclk(display);
}