xref: /linux/drivers/gpu/drm/i915/display/intel_pch_refclk.c (revision 07fdad3a93756b872da7b53647715c48d0f4a2d0)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2021 Intel Corporation
 */

#include <drm/drm_print.h>

#include "i915_reg.h"
#include "i915_utils.h"
#include "intel_de.h"
#include "intel_display_regs.h"
#include "intel_display_types.h"
#include "intel_panel.h"
#include "intel_pch_refclk.h"
#include "intel_sbi.h"
#include "intel_sbi_regs.h"

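/*
 * Pulse the FDI mPHY IOSFSB reset through the SOUTH_CHICKEN2 chicken
 * register: assert the reset, wait for the status bit to follow, then
 * de-assert it and wait for the status bit to clear again.
 */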
static void lpt_fdi_reset_mphy(struct intel_display *display)
{
	int ret;

	intel_de_rmw(display, SOUTH_CHICKEN2, 0, FDI_MPHY_IOSFSB_RESET_CTL);

	ret = intel_de_wait_custom(display, SOUTH_CHICKEN2,
				   FDI_MPHY_IOSFSB_RESET_STATUS, FDI_MPHY_IOSFSB_RESET_STATUS,
				   100, 0, NULL);
	if (ret)
		drm_err(display->drm, "FDI mPHY reset assert timeout\n");

	intel_de_rmw(display, SOUTH_CHICKEN2, FDI_MPHY_IOSFSB_RESET_CTL, 0);

	ret = intel_de_wait_custom(display, SOUTH_CHICKEN2,
				   FDI_MPHY_IOSFSB_RESET_STATUS, 0,
				   100, 0, NULL);
	if (ret)
		drm_err(display->drm, "FDI mPHY reset de-assert timeout\n");
}

/* WaMPhyProgramming:hsw */
static void lpt_fdi_program_mphy(struct intel_display *display)
{
	u32 tmp;

	lpt_fdi_reset_mphy(display);

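	/*
	 * The SBI offsets and values below are taken as-is from the
	 * WaMPhyProgramming:hsw workaround; the paired 0x20xx/0x21xx writes
	 * appear to program the same field in two mPHY lane instances.
	 */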
	tmp = intel_sbi_read(display, 0x8008, SBI_MPHY);
	tmp &= ~(0xFF << 24);
	tmp |= (0x12 << 24);
	intel_sbi_write(display, 0x8008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2008, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(display, 0x2008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2108, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(display, 0x2108, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x206C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(display, 0x206C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x216C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(display, 0x216C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2080, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(display, 0x2080, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2180, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(display, 0x2180, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x208C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(display, 0x208C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x218C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(display, 0x218C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2098, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(display, 0x2098, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2198, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(display, 0x2198, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x20C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(display, 0x20C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x21C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(display, 0x21C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x20EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(display, 0x20EC, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x21EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(display, 0x21EC, tmp, SBI_MPHY);
}

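/*
 * Gate the pixel clock and stop the iCLKIP modulator by setting
 * SBI_SSCCTL_DISABLE in SBI_SSCCTL6.
 */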
void lpt_disable_iclkip(struct intel_display *display)
{
	u32 temp;

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_GATE);

	intel_sbi_lock(display);

	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	temp |= SBI_SSCCTL_DISABLE;
	intel_sbi_write(display, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(display);
}

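/*
 * iCLKIP divider parameters. The resulting output frequency is
 *
 *   freq = iclk_virtual_root_freq / (desired_divisor << auxdiv)
 *
 * with desired_divisor = (divsel + 2) * iclk_pi_range + phaseinc,
 * as used by lpt_iclkip_freq() and lpt_get_iclkip() below.
 */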
struct iclkip_params {
	u32 iclk_virtual_root_freq;
	u32 iclk_pi_range;
	u32 divsel, phaseinc, auxdiv, phasedir, desired_divisor;
};

static void iclkip_params_init(struct iclkip_params *p)
{
	memset(p, 0, sizeof(*p));

	p->iclk_virtual_root_freq = 172800 * 1000;
	p->iclk_pi_range = 64;
}

static int lpt_iclkip_freq(struct iclkip_params *p)
{
	return DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
				 p->desired_divisor << p->auxdiv);
}

static void lpt_compute_iclkip(struct iclkip_params *p, int clock)
{
	iclkip_params_init(p);

	/* The iCLK virtual clock root frequency is in MHz,
	 * but the adjusted_mode->crtc_clock is in kHz. To get the
	 * divisors, it is necessary to divide one by the other, so we
	 * convert the virtual clock to kHz here to keep the
	 * division precise.
	 */
	for (p->auxdiv = 0; p->auxdiv < 2; p->auxdiv++) {
		p->desired_divisor = DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
						       clock << p->auxdiv);
		p->divsel = (p->desired_divisor / p->iclk_pi_range) - 2;
		p->phaseinc = p->desired_divisor % p->iclk_pi_range;

		/*
		 * Near 20MHz is a corner case which is
		 * out of range for the 7-bit divisor
		 */
		if (p->divsel <= 0x7f)
			break;
	}
}

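/*
 * Return the iCLKIP frequency that would actually be generated for the
 * given crtc clock (same kHz units as adjusted_mode.crtc_clock), so
 * callers can check whether the requested clock is exactly achievable.
 */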
int lpt_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct iclkip_params p;

	lpt_compute_iclkip(&p, crtc_state->hw.adjusted_mode.crtc_clock);

	return lpt_iclkip_freq(&p);
}

/* Program iCLKIP clock to the desired frequency */
void lpt_program_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int clock = crtc_state->hw.adjusted_mode.crtc_clock;
	struct iclkip_params p;
	u32 temp;

	lpt_disable_iclkip(display);

	lpt_compute_iclkip(&p, clock);
	drm_WARN_ON(display->drm, lpt_iclkip_freq(&p) != clock);

	/* This should not happen with any sane values */
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIVSEL(p.divsel) &
		    ~SBI_SSCDIVINTPHASE_DIVSEL_MASK);
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIR(p.phasedir) &
		    ~SBI_SSCDIVINTPHASE_INCVAL_MASK);

	drm_dbg_kms(display->drm,
		    "iCLKIP clock: found settings for %dKHz refresh rate: auxdiv=%x, divsel=%x, phasedir=%x, phaseinc=%x\n",
		    clock, p.auxdiv, p.divsel, p.phasedir, p.phaseinc);

	intel_sbi_lock(display);

	/* Program SSCDIVINTPHASE6 */
	temp = intel_sbi_read(display, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	temp &= ~SBI_SSCDIVINTPHASE_DIVSEL_MASK;
	temp |= SBI_SSCDIVINTPHASE_DIVSEL(p.divsel);
	temp &= ~SBI_SSCDIVINTPHASE_INCVAL_MASK;
	temp |= SBI_SSCDIVINTPHASE_INCVAL(p.phaseinc);
	temp |= SBI_SSCDIVINTPHASE_DIR(p.phasedir);
	temp |= SBI_SSCDIVINTPHASE_PROPAGATE;
	intel_sbi_write(display, SBI_SSCDIVINTPHASE6, temp, SBI_ICLK);

	/* Program SSCAUXDIV */
	temp = intel_sbi_read(display, SBI_SSCAUXDIV6, SBI_ICLK);
	temp &= ~SBI_SSCAUXDIV_FINALDIV2SEL(1);
	temp |= SBI_SSCAUXDIV_FINALDIV2SEL(p.auxdiv);
	intel_sbi_write(display, SBI_SSCAUXDIV6, temp, SBI_ICLK);

	/* Enable modulator and associated divider */
	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	temp &= ~SBI_SSCCTL_DISABLE;
	intel_sbi_write(display, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(display);

	/* Wait for initialization time */
	udelay(24);

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_UNGATE);
}

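/*
 * Read the current iCLKIP frequency back from the SBI registers;
 * returns 0 if the pixel clock is gated or the modulator is disabled.
 */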
int lpt_get_iclkip(struct intel_display *display)
{
	struct iclkip_params p;
	u32 temp;

	if ((intel_de_read(display, PIXCLK_GATE) & PIXCLK_GATE_UNGATE) == 0)
		return 0;

	iclkip_params_init(&p);

	intel_sbi_lock(display);

	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	if (temp & SBI_SSCCTL_DISABLE) {
		intel_sbi_unlock(display);
		return 0;
	}

	temp = intel_sbi_read(display, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	p.divsel = (temp & SBI_SSCDIVINTPHASE_DIVSEL_MASK) >>
		SBI_SSCDIVINTPHASE_DIVSEL_SHIFT;
	p.phaseinc = (temp & SBI_SSCDIVINTPHASE_INCVAL_MASK) >>
		SBI_SSCDIVINTPHASE_INCVAL_SHIFT;

	temp = intel_sbi_read(display, SBI_SSCAUXDIV6, SBI_ICLK);
	p.auxdiv = (temp & SBI_SSCAUXDIV_FINALDIV2SEL_MASK) >>
		SBI_SSCAUXDIV_FINALDIV2SEL_SHIFT;

	intel_sbi_unlock(display);

	p.desired_divisor = (p.divsel + 2) * p.iclk_pi_range + p.phaseinc;

	return lpt_iclkip_freq(&p);
}

/* Implements 3 different sequences from BSpec chapter "Display iCLK
 * Programming" based on the parameters passed:
 * - Sequence to enable CLKOUT_DP
 * - Sequence to enable CLKOUT_DP without spread
 * - Sequence to enable CLKOUT_DP for FDI usage and configure PCH FDI I/O
 */
static void lpt_enable_clkout_dp(struct intel_display *display,
				 bool with_spread, bool with_fdi)
{
	u32 reg, tmp;

	if (drm_WARN(display->drm, with_fdi && !with_spread,
		     "FDI requires downspread\n"))
		with_spread = true;
	if (drm_WARN(display->drm, HAS_PCH_LPT_LP(display) &&
		     with_fdi, "LP PCH doesn't have FDI\n"))
		with_fdi = false;

	intel_sbi_lock(display);

	tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
	tmp &= ~SBI_SSCCTL_DISABLE;
	tmp |= SBI_SSCCTL_PATHALT;
	intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);

	udelay(24);

	if (with_spread) {
		tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
		tmp &= ~SBI_SSCCTL_PATHALT;
		intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);

		if (with_fdi)
			lpt_fdi_program_mphy(display);
	}

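	/*
	 * Enable the CLKOUT_DP buffer; LPT-LP exposes this bit through a
	 * different SBI register (SBI_GEN0) than the other LPT variants
	 * (SBI_DBUFF0).
	 */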
	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(display, reg, SBI_ICLK);
	tmp |= SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(display, reg, tmp, SBI_ICLK);

	intel_sbi_unlock(display);
}

/* Sequence to disable CLKOUT_DP */
void lpt_disable_clkout_dp(struct intel_display *display)
{
	u32 reg, tmp;

	intel_sbi_lock(display);

	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(display, reg, SBI_ICLK);
	tmp &= ~SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(display, reg, tmp, SBI_ICLK);

	tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
	if (!(tmp & SBI_SSCCTL_DISABLE)) {
		if (!(tmp & SBI_SSCCTL_PATHALT)) {
			tmp |= SBI_SSCCTL_PATHALT;
			intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);
			udelay(32);
		}
		tmp |= SBI_SSCCTL_DISABLE;
		intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);
	}

	intel_sbi_unlock(display);
}

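/*
 * Map a bend amount (-50..+50, in steps of 5) to an index into the
 * sscdivintphase[] table below.
 */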
#define BEND_IDX(steps) ((50 + (steps)) / 5)

static const u16 sscdivintphase[] = {
	[BEND_IDX( 50)] = 0x3B23,
	[BEND_IDX( 45)] = 0x3B23,
	[BEND_IDX( 40)] = 0x3C23,
	[BEND_IDX( 35)] = 0x3C23,
	[BEND_IDX( 30)] = 0x3D23,
	[BEND_IDX( 25)] = 0x3D23,
	[BEND_IDX( 20)] = 0x3E23,
	[BEND_IDX( 15)] = 0x3E23,
	[BEND_IDX( 10)] = 0x3F23,
	[BEND_IDX(  5)] = 0x3F23,
	[BEND_IDX(  0)] = 0x0025,
	[BEND_IDX( -5)] = 0x0025,
	[BEND_IDX(-10)] = 0x0125,
	[BEND_IDX(-15)] = 0x0125,
	[BEND_IDX(-20)] = 0x0225,
	[BEND_IDX(-25)] = 0x0225,
	[BEND_IDX(-30)] = 0x0325,
	[BEND_IDX(-35)] = 0x0325,
	[BEND_IDX(-40)] = 0x0425,
	[BEND_IDX(-45)] = 0x0425,
	[BEND_IDX(-50)] = 0x0525,
};

/*
 * Bend CLKOUT_DP
 * steps -50 to 50 inclusive, in steps of 5
 * < 0 slow down the clock, > 0 speed up the clock, 0 == no bend (135MHz)
 * change in clock period = -(steps / 10) * 5.787 ps
 */
static void lpt_bend_clkout_dp(struct intel_display *display, int steps)
{
	u32 tmp;
	int idx = BEND_IDX(steps);

	if (drm_WARN_ON(display->drm, steps % 5 != 0))
		return;

	if (drm_WARN_ON(display->drm, idx >= ARRAY_SIZE(sscdivintphase)))
		return;

	intel_sbi_lock(display);

	if (steps % 10 != 0)
		tmp = 0xAAAAAAAB;
	else
		tmp = 0x00000000;
	intel_sbi_write(display, SBI_SSCDITHPHASE, tmp, SBI_ICLK);

	tmp = intel_sbi_read(display, SBI_SSCDIVINTPHASE, SBI_ICLK);
	tmp &= 0xffff0000;
	tmp |= sscdivintphase[idx];
	intel_sbi_write(display, SBI_SSCDIVINTPHASE, tmp, SBI_ICLK);

	intel_sbi_unlock(display);
}

#undef BEND_IDX

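/*
 * Readout helpers: report whether the SPLL/WRPLLs are currently enabled
 * with the PCH SSC reference selected, either directly or via the
 * reference mux when the CPU-internal SSC is fused off
 * (HSW_CPU_SSC_ENABLE clear in FUSE_STRAP).
 */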
static bool spll_uses_pch_ssc(struct intel_display *display)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, SPLL_CTL);

	if ((ctl & SPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & SPLL_REF_MASK) == SPLL_REF_MUXED_SSC &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	if (display->platform.broadwell &&
	    (ctl & SPLL_REF_MASK) == SPLL_REF_PCH_SSC_BDW)
		return true;

	return false;
}

static bool wrpll_uses_pch_ssc(struct intel_display *display, enum intel_dpll_id id)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, WRPLL_CTL(id));

	if ((ctl & WRPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & WRPLL_REF_MASK) == WRPLL_REF_PCH_SSC)
		return true;

	if ((display->platform.broadwell || display->platform.haswell_ult) &&
	    (ctl & WRPLL_REF_MASK) == WRPLL_REF_MUXED_SSC_BDW &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	return false;
}

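/*
 * LPT reference clock init: leave things untouched while any PLL is still
 * running off the PCH SSC reference; otherwise enable CLKOUT_DP (with
 * spread spectrum and FDI mPHY programming when an analog/FDI output is
 * present) or disable it entirely.
 */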
static void lpt_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	bool has_fdi = false;

	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_ANALOG:
			has_fdi = true;
			break;
		default:
			break;
		}
	}

	/*
	 * The BIOS may have decided to use the PCH SSC
	 * reference so we must not disable it until the
	 * relevant PLLs have stopped relying on it. We'll
	 * just leave the PCH SSC reference enabled in case
	 * any active PLL is using it. It will get disabled
	 * after runtime suspend if we don't have FDI.
	 *
	 * TODO: Move the whole reference clock handling
	 * to the modeset sequence proper so that we can
	 * actually enable/disable/reconfigure these things
	 * safely. To do that we need to introduce a real
	 * clock hierarchy. That would also allow us to do
	 * clock bending finally.
	 */
	display->dpll.pch_ssc_use = 0;

	if (spll_uses_pch_ssc(display)) {
		drm_dbg_kms(display->drm, "SPLL using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_SPLL);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL1)) {
		drm_dbg_kms(display->drm, "WRPLL1 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL1);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL2)) {
		drm_dbg_kms(display->drm, "WRPLL2 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL2);
	}

	if (display->dpll.pch_ssc_use)
		return;

	if (has_fdi) {
		lpt_bend_clkout_dp(display, 0);
		lpt_enable_clkout_dp(display, true, true);
	} else {
		lpt_disable_clkout_dp(display);
	}
}

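/*
 * Program PCH_DREF_CONTROL for IBX/CPT: select the non-spread reference
 * (CK505 or the internal source), and enable the SSC source and the CPU
 * eDP reference output as needed, switching one source at a time with
 * 200 us delays between steps.
 */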
static void ilk_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	struct intel_dpll *pll;
	int i;
	u32 val, final;
	bool has_lvds = false;
	bool has_cpu_edp = false;
	bool has_panel = false;
	bool has_ck505 = false;
	bool can_ssc = false;
	bool using_ssc_source = false;

	/* We need to take the global config into account */
	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_LVDS:
			has_panel = true;
			has_lvds = true;
			break;
		case INTEL_OUTPUT_EDP:
			has_panel = true;
			if (encoder->port == PORT_A)
				has_cpu_edp = true;
			break;
		default:
			break;
		}
	}

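	/*
	 * On IBX the VBT display clock mode bit indicates an external CK505
	 * clock chip, which is also what makes SSC usable there; on CPT the
	 * driver never uses CK505 and SSC is always available.
	 */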
	if (HAS_PCH_IBX(display)) {
		has_ck505 = display->vbt.display_clock_mode;
		can_ssc = has_ck505;
	} else {
		has_ck505 = false;
		can_ssc = true;
	}

	/* Check if any DPLLs are using the SSC source */
	for_each_dpll(display, pll, i) {
		u32 temp;

		temp = intel_de_read(display, PCH_DPLL(pll->info->id));

		if (!(temp & DPLL_VCO_ENABLE))
			continue;

		if ((temp & PLL_REF_INPUT_MASK) ==
		    PLLB_REF_INPUT_SPREADSPECTRUMIN) {
			using_ssc_source = true;
			break;
		}
	}

	drm_dbg_kms(display->drm,
		    "has_panel %d has_lvds %d has_ck505 %d using_ssc_source %d\n",
		    has_panel, has_lvds, has_ck505, using_ssc_source);

	/* Ironlake: try to set up the display reference clock before
	 * enabling the DPLLs. This is only under driver control after
	 * the PCH B stepping; earlier steppings should ignore this
	 * setting.
	 */
	val = intel_de_read(display, PCH_DREF_CONTROL);

	/* As we must carefully and slowly disable/enable each source in turn,
	 * compute the final state we want first and check if we need to
	 * make any changes at all.
	 */
	final = val;
	final &= ~DREF_NONSPREAD_SOURCE_MASK;
	if (has_ck505)
		final |= DREF_NONSPREAD_CK505_ENABLE;
	else
		final |= DREF_NONSPREAD_SOURCE_ENABLE;

	final &= ~DREF_SSC_SOURCE_MASK;
	final &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
	final &= ~DREF_SSC1_ENABLE;

	if (has_panel) {
		final |= DREF_SSC_SOURCE_ENABLE;

		if (intel_panel_use_ssc(display) && can_ssc)
			final |= DREF_SSC1_ENABLE;

		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc)
				final |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			else
				final |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else {
			final |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}
	} else if (using_ssc_source) {
		final |= DREF_SSC_SOURCE_ENABLE;
		final |= DREF_SSC1_ENABLE;
	}

	if (final == val)
		return;

	/* Always enable nonspread source */
	val &= ~DREF_NONSPREAD_SOURCE_MASK;

	if (has_ck505)
		val |= DREF_NONSPREAD_CK505_ENABLE;
	else
		val |= DREF_NONSPREAD_SOURCE_ENABLE;

	if (has_panel) {
		val &= ~DREF_SSC_SOURCE_MASK;
		val |= DREF_SSC_SOURCE_ENABLE;

		/* SSC must be turned on before enabling the CPU output */
		if (intel_panel_use_ssc(display) && can_ssc) {
			drm_dbg_kms(display->drm, "Using SSC on panel\n");
			val |= DREF_SSC1_ENABLE;
		} else {
			val &= ~DREF_SSC1_ENABLE;
		}

		/* Get SSC going before enabling the outputs */
		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Enable CPU source on CPU attached eDP */
		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc) {
				drm_dbg_kms(display->drm,
					    "Using SSC on eDP\n");
				val |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			} else {
				val |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
			}
		} else {
			val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);
	} else {
		drm_dbg_kms(display->drm, "Disabling CPU source output\n");

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Turn off CPU output */
		val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		if (!using_ssc_source) {
			drm_dbg_kms(display->drm, "Disabling SSC source\n");

			/* Turn off the SSC source */
			val &= ~DREF_SSC_SOURCE_MASK;
			val |= DREF_SSC_SOURCE_DISABLE;

			/* Turn off SSC1 */
			val &= ~DREF_SSC1_ENABLE;

			intel_de_write(display, PCH_DREF_CONTROL, val);
			intel_de_posting_read(display, PCH_DREF_CONTROL);
			udelay(200);
		}
	}

	drm_WARN_ON(display->drm, val != final);
}

/*
 * Initialize reference clocks when the driver loads
 */
void intel_init_pch_refclk(struct intel_display *display)
{
	if (HAS_PCH_IBX(display) || HAS_PCH_CPT(display))
		ilk_init_pch_refclk(display);
	else if (HAS_PCH_LPT(display))
		lpt_init_pch_refclk(display);
}