// SPDX-License-Identifier: MIT
/*
 * Copyright © 2021 Intel Corporation
 */

#include <drm/drm_print.h>

#include "i915_reg.h"
#include "i915_utils.h"
#include "intel_de.h"
#include "intel_display_regs.h"
#include "intel_display_types.h"
#include "intel_panel.h"
#include "intel_pch_refclk.h"
#include "intel_sbi.h"
#include "intel_sbi_regs.h"

static void lpt_fdi_reset_mphy(struct intel_display *display)
{
	intel_de_rmw(display, SOUTH_CHICKEN2, 0, FDI_MPHY_IOSFSB_RESET_CTL);

	if (wait_for_us(intel_de_read(display, SOUTH_CHICKEN2) &
			FDI_MPHY_IOSFSB_RESET_STATUS, 100))
		drm_err(display->drm, "FDI mPHY reset assert timeout\n");

	intel_de_rmw(display, SOUTH_CHICKEN2, FDI_MPHY_IOSFSB_RESET_CTL, 0);

	if (wait_for_us((intel_de_read(display, SOUTH_CHICKEN2) &
			 FDI_MPHY_IOSFSB_RESET_STATUS) == 0, 100))
		drm_err(display->drm, "FDI mPHY reset de-assert timeout\n");
}

/* WaMPhyProgramming:hsw */
static void lpt_fdi_program_mphy(struct intel_display *display)
{
	u32 tmp;

	lpt_fdi_reset_mphy(display);

	tmp = intel_sbi_read(display, 0x8008, SBI_MPHY);
	tmp &= ~(0xFF << 24);
	tmp |= (0x12 << 24);
	intel_sbi_write(display, 0x8008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2008, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(display, 0x2008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2108, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(display, 0x2108, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x206C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(display, 0x206C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x216C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(display, 0x216C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2080, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(display, 0x2080, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2180, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(display, 0x2180, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x208C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(display, 0x208C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x218C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(display, 0x218C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2098, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(display, 0x2098, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x2198, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(display, 0x2198, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x20C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(display, 0x20C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x21C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(display, 0x21C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x20EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(display, 0x20EC, tmp, SBI_MPHY);

	tmp = intel_sbi_read(display, 0x21EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(display, 0x21EC, tmp, SBI_MPHY);
}

void lpt_disable_iclkip(struct intel_display *display)
{
	u32 temp;

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_GATE);

	intel_sbi_lock(display);

	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	temp |= SBI_SSCCTL_DISABLE;
	intel_sbi_write(display, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(display);
}

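/*
 * Parameters for the iCLKIP programming below: iclk_virtual_root_freq is the
 * iCLK virtual root clock expressed in kHz and iclk_pi_range the number of
 * phase increments per integer divider step; divsel, phaseinc, phasedir and
 * auxdiv are the raw fields written to SBI_SSCDIVINTPHASE6/SBI_SSCAUXDIV6,
 * while desired_divisor is the combined divider they encode (see the worked
 * example following lpt_compute_iclkip() below).
 */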
struct iclkip_params {
	u32 iclk_virtual_root_freq;
	u32 iclk_pi_range;
	u32 divsel, phaseinc, auxdiv, phasedir, desired_divisor;
};

static void iclkip_params_init(struct iclkip_params *p)
{
	memset(p, 0, sizeof(*p));

	p->iclk_virtual_root_freq = 172800 * 1000;
	p->iclk_pi_range = 64;
}

static int lpt_iclkip_freq(struct iclkip_params *p)
{
	return DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
				 p->desired_divisor << p->auxdiv);
}

static void lpt_compute_iclkip(struct iclkip_params *p, int clock)
{
	iclkip_params_init(p);

	/* The iCLK virtual clock root frequency is in MHz, but the
	 * adjusted_mode->crtc_clock is in kHz. To compute the divisors
	 * one value must be divided by the other, so the virtual clock
	 * root frequency is expressed in kHz here to preserve precision.
	 */
	for (p->auxdiv = 0; p->auxdiv < 2; p->auxdiv++) {
		p->desired_divisor = DIV_ROUND_CLOSEST(p->iclk_virtual_root_freq,
						       clock << p->auxdiv);
		p->divsel = (p->desired_divisor / p->iclk_pi_range) - 2;
		p->phaseinc = p->desired_divisor % p->iclk_pi_range;

		/*
		 * Near 20MHz is a corner case which is
		 * out of range for the 7-bit divisor
		 */
		if (p->divsel <= 0x7f)
			break;
	}
}
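
/*
 * Worked example (illustrative only, not from BSpec): for a 108000 kHz pixel
 * clock and auxdiv == 0, desired_divisor == DIV_ROUND_CLOSEST(172800000,
 * 108000) == 1600, giving divsel == 1600 / 64 - 2 == 23 and phaseinc ==
 * 1600 % 64 == 0. Cross-check: lpt_iclkip_freq() then returns
 * 172800000 / (1600 << 0) == 108000 kHz, matching the requested clock.
 */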

int lpt_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct iclkip_params p;

	lpt_compute_iclkip(&p, crtc_state->hw.adjusted_mode.crtc_clock);

	return lpt_iclkip_freq(&p);
}

/* Program iCLKIP clock to the desired frequency */
void lpt_program_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int clock = crtc_state->hw.adjusted_mode.crtc_clock;
	struct iclkip_params p;
	u32 temp;

	lpt_disable_iclkip(display);

	lpt_compute_iclkip(&p, clock);
	drm_WARN_ON(display->drm, lpt_iclkip_freq(&p) != clock);

	/* This should not happen with any sane values */
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIVSEL(p.divsel) &
		    ~SBI_SSCDIVINTPHASE_DIVSEL_MASK);
	drm_WARN_ON(display->drm, SBI_SSCDIVINTPHASE_DIR(p.phasedir) &
		    ~SBI_SSCDIVINTPHASE_INCVAL_MASK);

	drm_dbg_kms(display->drm,
		    "iCLKIP clock: found settings for %dKHz refresh rate: auxdiv=%x, divsel=%x, phasedir=%x, phaseinc=%x\n",
		    clock, p.auxdiv, p.divsel, p.phasedir, p.phaseinc);

	intel_sbi_lock(display);

	/* Program SSCDIVINTPHASE6 */
	temp = intel_sbi_read(display, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	temp &= ~SBI_SSCDIVINTPHASE_DIVSEL_MASK;
	temp |= SBI_SSCDIVINTPHASE_DIVSEL(p.divsel);
	temp &= ~SBI_SSCDIVINTPHASE_INCVAL_MASK;
	temp |= SBI_SSCDIVINTPHASE_INCVAL(p.phaseinc);
	temp |= SBI_SSCDIVINTPHASE_DIR(p.phasedir);
	temp |= SBI_SSCDIVINTPHASE_PROPAGATE;
	intel_sbi_write(display, SBI_SSCDIVINTPHASE6, temp, SBI_ICLK);

	/* Program SSCAUXDIV */
	temp = intel_sbi_read(display, SBI_SSCAUXDIV6, SBI_ICLK);
	temp &= ~SBI_SSCAUXDIV_FINALDIV2SEL(1);
	temp |= SBI_SSCAUXDIV_FINALDIV2SEL(p.auxdiv);
	intel_sbi_write(display, SBI_SSCAUXDIV6, temp, SBI_ICLK);

	/* Enable modulator and associated divider */
	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	temp &= ~SBI_SSCCTL_DISABLE;
	intel_sbi_write(display, SBI_SSCCTL6, temp, SBI_ICLK);

	intel_sbi_unlock(display);

	/* Wait for initialization time */
	udelay(24);

	intel_de_write(display, PIXCLK_GATE, PIXCLK_GATE_UNGATE);
}

int lpt_get_iclkip(struct intel_display *display)
{
	struct iclkip_params p;
	u32 temp;

	if ((intel_de_read(display, PIXCLK_GATE) & PIXCLK_GATE_UNGATE) == 0)
		return 0;

	iclkip_params_init(&p);

	intel_sbi_lock(display);

	temp = intel_sbi_read(display, SBI_SSCCTL6, SBI_ICLK);
	if (temp & SBI_SSCCTL_DISABLE) {
		intel_sbi_unlock(display);
		return 0;
	}

	temp = intel_sbi_read(display, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	p.divsel = (temp & SBI_SSCDIVINTPHASE_DIVSEL_MASK) >>
		SBI_SSCDIVINTPHASE_DIVSEL_SHIFT;
	p.phaseinc = (temp & SBI_SSCDIVINTPHASE_INCVAL_MASK) >>
		SBI_SSCDIVINTPHASE_INCVAL_SHIFT;

	temp = intel_sbi_read(display, SBI_SSCAUXDIV6, SBI_ICLK);
	p.auxdiv = (temp & SBI_SSCAUXDIV_FINALDIV2SEL_MASK) >>
		SBI_SSCAUXDIV_FINALDIV2SEL_SHIFT;

	intel_sbi_unlock(display);

	p.desired_divisor = (p.divsel + 2) * p.iclk_pi_range + p.phaseinc;

	return lpt_iclkip_freq(&p);
}
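
/*
 * Readback sanity check (illustrative only): continuing the 108000 kHz
 * example above, reading back divsel == 23, phaseinc == 0 and auxdiv == 0
 * reconstructs desired_divisor == (23 + 2) * 64 + 0 == 1600, so
 * lpt_get_iclkip() reports 172800000 / 1600 == 108000 kHz again.
 */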

/* Implements 3 different sequences from BSpec chapter "Display iCLK
 * Programming" based on the parameters passed:
 * - Sequence to enable CLKOUT_DP
 * - Sequence to enable CLKOUT_DP without spread
 * - Sequence to enable CLKOUT_DP for FDI usage and configure PCH FDI I/O
 */
static void lpt_enable_clkout_dp(struct intel_display *display,
				 bool with_spread, bool with_fdi)
{
	u32 reg, tmp;

	if (drm_WARN(display->drm, with_fdi && !with_spread,
		     "FDI requires downspread\n"))
		with_spread = true;
	if (drm_WARN(display->drm, HAS_PCH_LPT_LP(display) &&
		     with_fdi, "LP PCH doesn't have FDI\n"))
		with_fdi = false;

	intel_sbi_lock(display);

	tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
	tmp &= ~SBI_SSCCTL_DISABLE;
	tmp |= SBI_SSCCTL_PATHALT;
	intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);

	udelay(24);

	if (with_spread) {
		tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
		tmp &= ~SBI_SSCCTL_PATHALT;
		intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);

		if (with_fdi)
			lpt_fdi_program_mphy(display);
	}

	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(display, reg, SBI_ICLK);
	tmp |= SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(display, reg, tmp, SBI_ICLK);

	intel_sbi_unlock(display);
}

/* Sequence to disable CLKOUT_DP */
void lpt_disable_clkout_dp(struct intel_display *display)
{
	u32 reg, tmp;

	intel_sbi_lock(display);

	reg = HAS_PCH_LPT_LP(display) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(display, reg, SBI_ICLK);
	tmp &= ~SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(display, reg, tmp, SBI_ICLK);

	tmp = intel_sbi_read(display, SBI_SSCCTL, SBI_ICLK);
	if (!(tmp & SBI_SSCCTL_DISABLE)) {
		if (!(tmp & SBI_SSCCTL_PATHALT)) {
			tmp |= SBI_SSCCTL_PATHALT;
			intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);
			udelay(32);
		}
		tmp |= SBI_SSCCTL_DISABLE;
		intel_sbi_write(display, SBI_SSCCTL, tmp, SBI_ICLK);
	}

	intel_sbi_unlock(display);
}

#define BEND_IDX(steps) ((50 + (steps)) / 5)

static const u16 sscdivintphase[] = {
	[BEND_IDX( 50)] = 0x3B23,
	[BEND_IDX( 45)] = 0x3B23,
	[BEND_IDX( 40)] = 0x3C23,
	[BEND_IDX( 35)] = 0x3C23,
	[BEND_IDX( 30)] = 0x3D23,
	[BEND_IDX( 25)] = 0x3D23,
	[BEND_IDX( 20)] = 0x3E23,
	[BEND_IDX( 15)] = 0x3E23,
	[BEND_IDX( 10)] = 0x3F23,
	[BEND_IDX(  5)] = 0x3F23,
	[BEND_IDX(  0)] = 0x0025,
	[BEND_IDX( -5)] = 0x0025,
	[BEND_IDX(-10)] = 0x0125,
	[BEND_IDX(-15)] = 0x0125,
	[BEND_IDX(-20)] = 0x0225,
	[BEND_IDX(-25)] = 0x0225,
	[BEND_IDX(-30)] = 0x0325,
	[BEND_IDX(-35)] = 0x0325,
	[BEND_IDX(-40)] = 0x0425,
	[BEND_IDX(-45)] = 0x0425,
	[BEND_IDX(-50)] = 0x0525,
};

/*
 * Bend CLKOUT_DP
 * steps -50 to 50 inclusive, in steps of 5
 * < 0 slow down the clock, > 0 speed up the clock, 0 == no bend (135MHz)
 * change in clock period = -(steps / 10) * 5.787 ps
 */
static void lpt_bend_clkout_dp(struct intel_display *display, int steps)
{
	u32 tmp;
	int idx = BEND_IDX(steps);

	if (drm_WARN_ON(display->drm, steps % 5 != 0))
		return;

	if (drm_WARN_ON(display->drm, idx >= ARRAY_SIZE(sscdivintphase)))
		return;

	intel_sbi_lock(display);

	if (steps % 10 != 0)
		tmp = 0xAAAAAAAB;
	else
		tmp = 0x00000000;
	intel_sbi_write(display, SBI_SSCDITHPHASE, tmp, SBI_ICLK);

	tmp = intel_sbi_read(display, SBI_SSCDIVINTPHASE, SBI_ICLK);
	tmp &= 0xffff0000;
	tmp |= sscdivintphase[idx];
	intel_sbi_write(display, SBI_SSCDIVINTPHASE, tmp, SBI_ICLK);

	intel_sbi_unlock(display);
}
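
/*
 * Worked example (illustrative only): lpt_bend_clkout_dp(display, -20) gives
 * idx == BEND_IDX(-20) == 6, so sscdivintphase[6] == 0x0225 is written and,
 * since -20 % 10 == 0, SBI_SSCDITHPHASE is cleared. Per the formula above the
 * clock period grows by -(-20 / 10) * 5.787 ps == 11.574 ps, i.e. the clock
 * is slowed down slightly.
 */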

#undef BEND_IDX

static bool spll_uses_pch_ssc(struct intel_display *display)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, SPLL_CTL);

	if ((ctl & SPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & SPLL_REF_MASK) == SPLL_REF_MUXED_SSC &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	if (display->platform.broadwell &&
	    (ctl & SPLL_REF_MASK) == SPLL_REF_PCH_SSC_BDW)
		return true;

	return false;
}

static bool wrpll_uses_pch_ssc(struct intel_display *display, enum intel_dpll_id id)
{
	u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
	u32 ctl = intel_de_read(display, WRPLL_CTL(id));

	if ((ctl & WRPLL_PLL_ENABLE) == 0)
		return false;

	if ((ctl & WRPLL_REF_MASK) == WRPLL_REF_PCH_SSC)
		return true;

	if ((display->platform.broadwell || display->platform.haswell_ult) &&
	    (ctl & WRPLL_REF_MASK) == WRPLL_REF_MUXED_SSC_BDW &&
	    (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
		return true;

	return false;
}

static void lpt_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	bool has_fdi = false;

	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_ANALOG:
			has_fdi = true;
			break;
		default:
			break;
		}
	}

	/*
	 * The BIOS may have decided to use the PCH SSC
	 * reference so we must not disable it until the
	 * relevant PLLs have stopped relying on it. We'll
	 * just leave the PCH SSC reference enabled in case
	 * any active PLL is using it. It will get disabled
	 * after runtime suspend if we don't have FDI.
	 *
	 * TODO: Move the whole reference clock handling
	 * to the modeset sequence proper so that we can
	 * actually enable/disable/reconfigure these things
	 * safely. To do that we need to introduce a real
	 * clock hierarchy. That would also allow us to do
	 * clock bending finally.
	 */
	display->dpll.pch_ssc_use = 0;

	if (spll_uses_pch_ssc(display)) {
		drm_dbg_kms(display->drm, "SPLL using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_SPLL);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL1)) {
		drm_dbg_kms(display->drm, "WRPLL1 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL1);
	}

	if (wrpll_uses_pch_ssc(display, DPLL_ID_WRPLL2)) {
		drm_dbg_kms(display->drm, "WRPLL2 using PCH SSC\n");
		display->dpll.pch_ssc_use |= BIT(DPLL_ID_WRPLL2);
	}

	if (display->dpll.pch_ssc_use)
		return;

	if (has_fdi) {
		lpt_bend_clkout_dp(display, 0);
		lpt_enable_clkout_dp(display, true, true);
	} else {
		lpt_disable_clkout_dp(display);
	}
}

static void ilk_init_pch_refclk(struct intel_display *display)
{
	struct intel_encoder *encoder;
	struct intel_dpll *pll;
	int i;
	u32 val, final;
	bool has_lvds = false;
	bool has_cpu_edp = false;
	bool has_panel = false;
	bool has_ck505 = false;
	bool can_ssc = false;
	bool using_ssc_source = false;

	/* We need to take the global config into account */
	for_each_intel_encoder(display->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_LVDS:
			has_panel = true;
			has_lvds = true;
			break;
		case INTEL_OUTPUT_EDP:
			has_panel = true;
			if (encoder->port == PORT_A)
				has_cpu_edp = true;
			break;
		default:
			break;
		}
	}

	if (HAS_PCH_IBX(display)) {
		has_ck505 = display->vbt.display_clock_mode;
		can_ssc = has_ck505;
	} else {
		has_ck505 = false;
		can_ssc = true;
	}

	/* Check if any DPLLs are using the SSC source */
	for_each_dpll(display, pll, i) {
		u32 temp;

		temp = intel_de_read(display, PCH_DPLL(pll->info->id));

		if (!(temp & DPLL_VCO_ENABLE))
			continue;

		if ((temp & PLL_REF_INPUT_MASK) ==
		    PLLB_REF_INPUT_SPREADSPECTRUMIN) {
			using_ssc_source = true;
			break;
		}
	}

	drm_dbg_kms(display->drm,
		    "has_panel %d has_lvds %d has_ck505 %d using_ssc_source %d\n",
		    has_panel, has_lvds, has_ck505, using_ssc_source);

	/* Ironlake: try to set up the display reference clock before
	 * enabling the DPLLs. This is only under the driver's control
	 * after the PCH B stepping; earlier chipset steppings should
	 * ignore this setting.
	 */
	val = intel_de_read(display, PCH_DREF_CONTROL);

	/* As we must carefully and slowly disable/enable each source in turn,
	 * compute the final state we want first and check if we need to
	 * make any changes at all.
	 */
	final = val;
	final &= ~DREF_NONSPREAD_SOURCE_MASK;
	if (has_ck505)
		final |= DREF_NONSPREAD_CK505_ENABLE;
	else
		final |= DREF_NONSPREAD_SOURCE_ENABLE;

	final &= ~DREF_SSC_SOURCE_MASK;
	final &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
	final &= ~DREF_SSC1_ENABLE;

	if (has_panel) {
		final |= DREF_SSC_SOURCE_ENABLE;

		if (intel_panel_use_ssc(display) && can_ssc)
			final |= DREF_SSC1_ENABLE;

		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc)
				final |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			else
				final |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else {
			final |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}
	} else if (using_ssc_source) {
		final |= DREF_SSC_SOURCE_ENABLE;
		final |= DREF_SSC1_ENABLE;
	}

	if (final == val)
		return;

	/* Always enable nonspread source */
	val &= ~DREF_NONSPREAD_SOURCE_MASK;

	if (has_ck505)
		val |= DREF_NONSPREAD_CK505_ENABLE;
	else
		val |= DREF_NONSPREAD_SOURCE_ENABLE;

	if (has_panel) {
		val &= ~DREF_SSC_SOURCE_MASK;
		val |= DREF_SSC_SOURCE_ENABLE;

		/* SSC must be turned on before enabling the CPU output */
		if (intel_panel_use_ssc(display) && can_ssc) {
			drm_dbg_kms(display->drm, "Using SSC on panel\n");
			val |= DREF_SSC1_ENABLE;
		} else {
			val &= ~DREF_SSC1_ENABLE;
		}

		/* Get SSC going before enabling the outputs */
		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Enable CPU source on CPU attached eDP */
		if (has_cpu_edp) {
			if (intel_panel_use_ssc(display) && can_ssc) {
				drm_dbg_kms(display->drm,
					    "Using SSC on eDP\n");
				val |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			} else {
				val |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
			}
		} else {
			val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
		}

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);
	} else {
		drm_dbg_kms(display->drm, "Disabling CPU source output\n");

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Turn off CPU output */
		val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		intel_de_write(display, PCH_DREF_CONTROL, val);
		intel_de_posting_read(display, PCH_DREF_CONTROL);
		udelay(200);

		if (!using_ssc_source) {
			drm_dbg_kms(display->drm, "Disabling SSC source\n");

			/* Turn off the SSC source */
			val &= ~DREF_SSC_SOURCE_MASK;
			val |= DREF_SSC_SOURCE_DISABLE;

			/* Turn off SSC1 */
			val &= ~DREF_SSC1_ENABLE;

			intel_de_write(display, PCH_DREF_CONTROL, val);
			intel_de_posting_read(display, PCH_DREF_CONTROL);
			udelay(200);
		}
	}

	drm_WARN_ON(display->drm, val != final);
}

/*
 * Initialize reference clocks when the driver loads
 */
void intel_init_pch_refclk(struct intel_display *display)
{
	if (HAS_PCH_IBX(display) || HAS_PCH_CPT(display))
		ilk_init_pch_refclk(display);
	else if (HAS_PCH_LPT(display))
		lpt_init_pch_refclk(display);
}