xref: /linux/drivers/gpu/drm/i915/display/intel_dpll_mgr.c (revision 53597deca0e38c30e6cd4ba2114fa42d2bcd85bb)
1 /*
2  * Copyright © 2006-2016 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include <linux/math.h>
25 #include <linux/string_helpers.h>
26 
27 #include <drm/drm_print.h>
28 
29 #include "bxt_dpio_phy_regs.h"
30 #include "intel_cx0_phy.h"
31 #include "intel_de.h"
32 #include "intel_display_regs.h"
33 #include "intel_display_types.h"
34 #include "intel_display_utils.h"
35 #include "intel_dkl_phy.h"
36 #include "intel_dkl_phy_regs.h"
37 #include "intel_dpio_phy.h"
38 #include "intel_dpll.h"
39 #include "intel_dpll_mgr.h"
40 #include "intel_hti.h"
41 #include "intel_lt_phy.h"
42 #include "intel_mg_phy_regs.h"
43 #include "intel_pch_refclk.h"
44 #include "intel_step.h"
45 #include "intel_tc.h"
46 
47 /**
48  * DOC: Display PLLs
49  *
50  * Display PLLs used for driving outputs vary by platform. While some have
51  * per-pipe or per-encoder dedicated PLLs, others allow the use of any PLL
52  * from a pool. In the latter scenario, it is possible that multiple pipes
53  * share a PLL if their configurations match.
54  *
55  * This file provides an abstraction over display PLLs. The function
56  * intel_dpll_init() initializes the PLLs for the given platform.  The
57  * users of a PLL are tracked and that tracking is integrated with the atomic
 * modeset interface. During an atomic operation, required PLLs can be reserved
59  * for a given CRTC and encoder configuration by calling
60  * intel_dpll_reserve() and previously reserved PLLs can be released
61  * with intel_dpll_release().
62  * Changes to the users are first staged in the atomic state, and then made
63  * effective by calling intel_dpll_swap_state() during the atomic
64  * commit phase.
65  */
66 
67 /* platform specific hooks for managing DPLLs */
68 struct intel_dpll_funcs {
69 	/*
70 	 * Hook for enabling the pll, called from intel_enable_dpll() if
71 	 * the pll is not already enabled.
72 	 */
73 	void (*enable)(struct intel_display *display,
74 		       struct intel_dpll *pll,
75 		       const struct intel_dpll_hw_state *dpll_hw_state);
76 
77 	/*
78 	 * Hook for disabling the pll, called from intel_disable_dpll()
79 	 * only when it is safe to disable the pll, i.e., there are no more
80 	 * tracked users for it.
81 	 */
82 	void (*disable)(struct intel_display *display,
83 			struct intel_dpll *pll);
84 
85 	/*
86 	 * Hook for reading the values currently programmed to the DPLL
87 	 * registers. This is used for initial hw state readout and state
88 	 * verification after a mode set.
89 	 */
90 	bool (*get_hw_state)(struct intel_display *display,
91 			     struct intel_dpll *pll,
92 			     struct intel_dpll_hw_state *dpll_hw_state);
93 
94 	/*
95 	 * Hook for calculating the pll's output frequency based on its passed
96 	 * in state.
97 	 */
98 	int (*get_freq)(struct intel_display *i915,
99 			const struct intel_dpll *pll,
100 			const struct intel_dpll_hw_state *dpll_hw_state);
101 };
102 
/*
 * Platform specific vtable for managing the platform's pool of DPLLs,
 * together with the table describing those PLLs.
 */
struct intel_dpll_mgr {
	/* table of PLLs available on this platform (sentinel-terminated) */
	const struct dpll_info *dpll_info;

	/* compute the PLL state needed for @crtc driven by @encoder */
	int (*compute_dplls)(struct intel_atomic_state *state,
			     struct intel_crtc *crtc,
			     struct intel_encoder *encoder);
	/* reserve the PLL(s) for @crtc in the atomic @state */
	int (*get_dplls)(struct intel_atomic_state *state,
			 struct intel_crtc *crtc,
			 struct intel_encoder *encoder);
	/* release the PLL(s) previously reserved for @crtc */
	void (*put_dplls)(struct intel_atomic_state *state,
			  struct intel_crtc *crtc);
	/* optional: switch which reserved PLL @crtc actively uses */
	void (*update_active_dpll)(struct intel_atomic_state *state,
				   struct intel_crtc *crtc,
				   struct intel_encoder *encoder);
	/* optional: refresh the cached reference clock frequencies */
	void (*update_ref_clks)(struct intel_display *display);
	/* dump @dpll_hw_state for debugging */
	void (*dump_hw_state)(struct drm_printer *p,
			      const struct intel_dpll_hw_state *dpll_hw_state);
	/* compare two hw states for equivalence */
	bool (*compare_hw_state)(const struct intel_dpll_hw_state *a,
				 const struct intel_dpll_hw_state *b);
};
123 
124 static void
125 intel_atomic_duplicate_dpll_state(struct intel_display *display,
126 				  struct intel_dpll_state *dpll_state)
127 {
128 	struct intel_dpll *pll;
129 	int i;
130 
131 	/* Copy dpll state */
132 	for_each_dpll(display, pll, i)
133 		dpll_state[pll->index] = pll->state;
134 }
135 
136 static struct intel_dpll_state *
137 intel_atomic_get_dpll_state(struct drm_atomic_state *s)
138 {
139 	struct intel_atomic_state *state = to_intel_atomic_state(s);
140 	struct intel_display *display = to_intel_display(state);
141 
142 	drm_WARN_ON(s->dev, !drm_modeset_is_locked(&s->dev->mode_config.connection_mutex));
143 
144 	if (!state->dpll_set) {
145 		state->dpll_set = true;
146 
147 		intel_atomic_duplicate_dpll_state(display,
148 						  state->dpll_state);
149 	}
150 
151 	return state->dpll_state;
152 }
153 
154 /**
155  * intel_get_dpll_by_id - get a DPLL given its id
156  * @display: intel_display device instance
157  * @id: pll id
158  *
159  * Returns:
160  * A pointer to the DPLL with @id
161  */
162 struct intel_dpll *
163 intel_get_dpll_by_id(struct intel_display *display,
164 		     enum intel_dpll_id id)
165 {
166 	struct intel_dpll *pll;
167 	int i;
168 
169 	for_each_dpll(display, pll, i) {
170 		if (pll->info->id == id)
171 			return pll;
172 	}
173 
174 	MISSING_CASE(id);
175 	return NULL;
176 }
177 
/* For ILK+ */
void assert_dpll(struct intel_display *display,
		 struct intel_dpll *pll,
		 bool state)
{
	bool cur_state;
	struct intel_dpll_hw_state hw_state;

	/* Asserting on a NULL PLL is a caller bug; warn and bail out. */
	if (drm_WARN(display->drm, !pll,
		     "asserting DPLL %s with no DPLL\n", str_on_off(state)))
		return;

	/* Compare the actual hw enable state against the expected one. */
	cur_state = intel_dpll_get_hw_state(display, pll, &hw_state);
	INTEL_DISPLAY_STATE_WARN(display, cur_state != state,
				 "%s assertion failure (expected %s, current %s)\n",
				 pll->info->name, str_on_off(state),
				 str_on_off(cur_state));
}
196 
/* Map an ICL+ MG/TC PLL id to the TC port it belongs to (MGPLL1 -> TC1, ...). */
static enum tc_port icl_pll_id_to_tc_port(enum intel_dpll_id id)
{
	return TC_PORT_1 + id - DPLL_ID_ICL_MGPLL1;
}
201 
/* Map a TC port to its MG/TC PLL id; inverse of icl_pll_id_to_tc_port(). */
enum intel_dpll_id icl_tc_port_to_pll_id(enum tc_port tc_port)
{
	return tc_port - TC_PORT_1 + DPLL_ID_ICL_MGPLL1;
}
206 
207 enum intel_dpll_id mtl_port_to_pll_id(struct intel_display *display, enum port port)
208 {
209 	if (port >= PORT_TC1)
210 		return icl_tc_port_to_pll_id(intel_port_to_tc(display, port));
211 
212 	switch (port) {
213 	case PORT_A:
214 		return DPLL_ID_ICL_DPLL0;
215 	case PORT_B:
216 		return DPLL_ID_ICL_DPLL1;
217 	default:
218 		MISSING_CASE(port);
219 		return DPLL_ID_ICL_DPLL0;
220 	}
221 }
222 
223 static i915_reg_t
224 intel_combo_pll_enable_reg(struct intel_display *display,
225 			   struct intel_dpll *pll)
226 {
227 	if (display->platform.dg1)
228 		return DG1_DPLL_ENABLE(pll->info->id);
229 	else if ((display->platform.jasperlake || display->platform.elkhartlake) &&
230 		 (pll->info->id == DPLL_ID_EHL_DPLL4))
231 		return MG_PLL_ENABLE(0);
232 
233 	return ICL_DPLL_ENABLE(pll->info->id);
234 }
235 
236 static i915_reg_t
237 intel_tc_pll_enable_reg(struct intel_display *display,
238 			struct intel_dpll *pll)
239 {
240 	const enum intel_dpll_id id = pll->info->id;
241 	enum tc_port tc_port = icl_pll_id_to_tc_port(id);
242 
243 	if (display->platform.alderlake_p)
244 		return ADLP_PORTTC_PLL_ENABLE(tc_port);
245 
246 	return MG_PLL_ENABLE(tc_port);
247 }
248 
/* Enable the PLL hardware; grab its power domain (if any) first. */
static void _intel_enable_shared_dpll(struct intel_display *display,
				      struct intel_dpll *pll)
{
	if (pll->info->power_domain)
		pll->wakeref = intel_display_power_get(display, pll->info->power_domain);

	pll->info->funcs->enable(display, pll, &pll->state.hw_state);
	pll->on = true;
}
258 
/* Disable the PLL hardware; drop its power domain reference afterwards. */
static void _intel_disable_shared_dpll(struct intel_display *display,
				       struct intel_dpll *pll)
{
	pll->info->funcs->disable(display, pll);
	pll->on = false;

	if (pll->info->power_domain)
		intel_display_power_put(display, pll->info->power_domain, pll->wakeref);
}
268 
269 /**
270  * intel_dpll_enable - enable a CRTC's DPLL
271  * @crtc_state: CRTC, and its state, which has a DPLL
272  *
273  * Enable DPLL used by @crtc.
274  */
275 void intel_dpll_enable(const struct intel_crtc_state *crtc_state)
276 {
277 	struct intel_display *display = to_intel_display(crtc_state);
278 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
279 	struct intel_dpll *pll = crtc_state->intel_dpll;
280 	unsigned int pipe_mask = intel_crtc_joined_pipe_mask(crtc_state);
281 	unsigned int old_mask;
282 
283 	if (drm_WARN_ON(display->drm, !pll))
284 		return;
285 
286 	mutex_lock(&display->dpll.lock);
287 	old_mask = pll->active_mask;
288 
289 	if (drm_WARN_ON(display->drm, !(pll->state.pipe_mask & pipe_mask)) ||
290 	    drm_WARN_ON(display->drm, pll->active_mask & pipe_mask))
291 		goto out;
292 
293 	pll->active_mask |= pipe_mask;
294 
295 	drm_dbg_kms(display->drm,
296 		    "enable %s (active 0x%x, on? %d) for [CRTC:%d:%s]\n",
297 		    pll->info->name, pll->active_mask, pll->on,
298 		    crtc->base.base.id, crtc->base.name);
299 
300 	if (old_mask) {
301 		drm_WARN_ON(display->drm, !pll->on);
302 		assert_dpll_enabled(display, pll);
303 		goto out;
304 	}
305 	drm_WARN_ON(display->drm, pll->on);
306 
307 	drm_dbg_kms(display->drm, "enabling %s\n", pll->info->name);
308 
309 	_intel_enable_shared_dpll(display, pll);
310 
311 out:
312 	mutex_unlock(&display->dpll.lock);
313 }
314 
315 /**
316  * intel_dpll_disable - disable a CRTC's shared DPLL
317  * @crtc_state: CRTC, and its state, which has a shared DPLL
318  *
319  * Disable DPLL used by @crtc.
320  */
321 void intel_dpll_disable(const struct intel_crtc_state *crtc_state)
322 {
323 	struct intel_display *display = to_intel_display(crtc_state);
324 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
325 	struct intel_dpll *pll = crtc_state->intel_dpll;
326 	unsigned int pipe_mask = intel_crtc_joined_pipe_mask(crtc_state);
327 
328 	/* PCH only available on ILK+ */
329 	if (DISPLAY_VER(display) < 5)
330 		return;
331 
332 	if (pll == NULL)
333 		return;
334 
335 	mutex_lock(&display->dpll.lock);
336 	if (drm_WARN(display->drm, !(pll->active_mask & pipe_mask),
337 		     "%s not used by [CRTC:%d:%s]\n", pll->info->name,
338 		     crtc->base.base.id, crtc->base.name))
339 		goto out;
340 
341 	drm_dbg_kms(display->drm,
342 		    "disable %s (active 0x%x, on? %d) for [CRTC:%d:%s]\n",
343 		    pll->info->name, pll->active_mask, pll->on,
344 		    crtc->base.base.id, crtc->base.name);
345 
346 	assert_dpll_enabled(display, pll);
347 	drm_WARN_ON(display->drm, !pll->on);
348 
349 	pll->active_mask &= ~pipe_mask;
350 	if (pll->active_mask)
351 		goto out;
352 
353 	drm_dbg_kms(display->drm, "disabling %s\n", pll->info->name);
354 
355 	_intel_disable_shared_dpll(display, pll);
356 
357 out:
358 	mutex_unlock(&display->dpll.lock);
359 }
360 
361 static unsigned long
362 intel_dpll_mask_all(struct intel_display *display)
363 {
364 	struct intel_dpll *pll;
365 	unsigned long dpll_mask = 0;
366 	int i;
367 
368 	for_each_dpll(display, pll, i) {
369 		drm_WARN_ON(display->drm, dpll_mask & BIT(pll->info->id));
370 
371 		dpll_mask |= BIT(pll->info->id);
372 	}
373 
374 	return dpll_mask;
375 }
376 
377 static struct intel_dpll *
378 intel_find_dpll(struct intel_atomic_state *state,
379 		const struct intel_crtc *crtc,
380 		const struct intel_dpll_hw_state *dpll_hw_state,
381 		unsigned long dpll_mask)
382 {
383 	struct intel_display *display = to_intel_display(crtc);
384 	unsigned long dpll_mask_all = intel_dpll_mask_all(display);
385 	struct intel_dpll_state *dpll_state;
386 	struct intel_dpll *unused_pll = NULL;
387 	enum intel_dpll_id id;
388 
389 	dpll_state = intel_atomic_get_dpll_state(&state->base);
390 
391 	drm_WARN_ON(display->drm, dpll_mask & ~dpll_mask_all);
392 
393 	for_each_set_bit(id, &dpll_mask, fls(dpll_mask_all)) {
394 		struct intel_dpll *pll;
395 
396 		pll = intel_get_dpll_by_id(display, id);
397 		if (!pll)
398 			continue;
399 
400 		/* Only want to check enabled timings first */
401 		if (dpll_state[pll->index].pipe_mask == 0) {
402 			if (!unused_pll)
403 				unused_pll = pll;
404 			continue;
405 		}
406 
407 		if (memcmp(dpll_hw_state,
408 			   &dpll_state[pll->index].hw_state,
409 			   sizeof(*dpll_hw_state)) == 0) {
410 			drm_dbg_kms(display->drm,
411 				    "[CRTC:%d:%s] sharing existing %s (pipe mask 0x%x, active 0x%x)\n",
412 				    crtc->base.base.id, crtc->base.name,
413 				    pll->info->name,
414 				    dpll_state[pll->index].pipe_mask,
415 				    pll->active_mask);
416 			return pll;
417 		}
418 	}
419 
420 	/* Ok no matching timings, maybe there's a free one? */
421 	if (unused_pll) {
422 		drm_dbg_kms(display->drm, "[CRTC:%d:%s] allocated %s\n",
423 			    crtc->base.base.id, crtc->base.name,
424 			    unused_pll->info->name);
425 		return unused_pll;
426 	}
427 
428 	return NULL;
429 }
430 
431 /**
432  * intel_dpll_crtc_get - Get a DPLL reference for a CRTC
433  * @crtc: CRTC on which behalf the reference is taken
434  * @pll: DPLL for which the reference is taken
435  * @dpll_state: the DPLL atomic state in which the reference is tracked
436  *
437  * Take a reference for @pll tracking the use of it by @crtc.
438  */
439 static void
440 intel_dpll_crtc_get(const struct intel_crtc *crtc,
441 		    const struct intel_dpll *pll,
442 		    struct intel_dpll_state *dpll_state)
443 {
444 	struct intel_display *display = to_intel_display(crtc);
445 
446 	drm_WARN_ON(display->drm, (dpll_state->pipe_mask & BIT(crtc->pipe)) != 0);
447 
448 	dpll_state->pipe_mask |= BIT(crtc->pipe);
449 
450 	drm_dbg_kms(display->drm, "[CRTC:%d:%s] reserving %s\n",
451 		    crtc->base.base.id, crtc->base.name, pll->info->name);
452 }
453 
454 static void
455 intel_reference_dpll(struct intel_atomic_state *state,
456 		     const struct intel_crtc *crtc,
457 		     const struct intel_dpll *pll,
458 		     const struct intel_dpll_hw_state *dpll_hw_state)
459 {
460 	struct intel_dpll_state *dpll_state;
461 
462 	dpll_state = intel_atomic_get_dpll_state(&state->base);
463 
464 	if (dpll_state[pll->index].pipe_mask == 0)
465 		dpll_state[pll->index].hw_state = *dpll_hw_state;
466 
467 	intel_dpll_crtc_get(crtc, pll, &dpll_state[pll->index]);
468 }
469 
470 /**
471  * intel_dpll_crtc_put - Drop a DPLL reference for a CRTC
472  * @crtc: CRTC on which behalf the reference is dropped
473  * @pll: DPLL for which the reference is dropped
474  * @dpll_state: the DPLL atomic state in which the reference is tracked
475  *
476  * Drop a reference for @pll tracking the end of use of it by @crtc.
477  */
478 void
479 intel_dpll_crtc_put(const struct intel_crtc *crtc,
480 		    const struct intel_dpll *pll,
481 		    struct intel_dpll_state *dpll_state)
482 {
483 	struct intel_display *display = to_intel_display(crtc);
484 
485 	drm_WARN_ON(display->drm, (dpll_state->pipe_mask & BIT(crtc->pipe)) == 0);
486 
487 	dpll_state->pipe_mask &= ~BIT(crtc->pipe);
488 
489 	drm_dbg_kms(display->drm, "[CRTC:%d:%s] releasing %s\n",
490 		    crtc->base.base.id, crtc->base.name, pll->info->name);
491 }
492 
493 static void intel_unreference_dpll(struct intel_atomic_state *state,
494 				   const struct intel_crtc *crtc,
495 				   const struct intel_dpll *pll)
496 {
497 	struct intel_dpll_state *dpll_state;
498 
499 	dpll_state = intel_atomic_get_dpll_state(&state->base);
500 
501 	intel_dpll_crtc_put(crtc, pll, &dpll_state[pll->index]);
502 }
503 
504 static void intel_put_dpll(struct intel_atomic_state *state,
505 			   struct intel_crtc *crtc)
506 {
507 	const struct intel_crtc_state *old_crtc_state =
508 		intel_atomic_get_old_crtc_state(state, crtc);
509 	struct intel_crtc_state *new_crtc_state =
510 		intel_atomic_get_new_crtc_state(state, crtc);
511 
512 	new_crtc_state->intel_dpll = NULL;
513 
514 	if (!old_crtc_state->intel_dpll)
515 		return;
516 
517 	intel_unreference_dpll(state, crtc, old_crtc_state->intel_dpll);
518 }
519 
520 /**
521  * intel_dpll_swap_state - make atomic DPLL configuration effective
522  * @state: atomic state
523  *
524  * This is the dpll version of drm_atomic_helper_swap_state() since the
525  * helper does not handle driver-specific global state.
526  *
527  * For consistency with atomic helpers this function does a complete swap,
528  * i.e. it also puts the current state into @state, even though there is no
529  * need for that at this moment.
530  */
531 void intel_dpll_swap_state(struct intel_atomic_state *state)
532 {
533 	struct intel_display *display = to_intel_display(state);
534 	struct intel_dpll_state *dpll_state = state->dpll_state;
535 	struct intel_dpll *pll;
536 	int i;
537 
538 	if (!state->dpll_set)
539 		return;
540 
541 	for_each_dpll(display, pll, i)
542 		swap(pll->state, dpll_state[pll->index]);
543 }
544 
/*
 * Read the current PCH DPLL register state into @dpll_hw_state.
 *
 * Returns true if the PLL is enabled (DPLL_VCO_ENABLE set); returns false
 * without touching the hardware if the display core power domain is off.
 */
static bool ibx_pch_dpll_get_hw_state(struct intel_display *display,
				      struct intel_dpll *pll,
				      struct intel_dpll_hw_state *dpll_hw_state)
{
	struct i9xx_dpll_hw_state *hw_state = &dpll_hw_state->i9xx;
	const enum intel_dpll_id id = pll->info->id;
	struct ref_tracker *wakeref;
	u32 val;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, PCH_DPLL(id));
	hw_state->dpll = val;
	hw_state->fp0 = intel_de_read(display, PCH_FP0(id));
	hw_state->fp1 = intel_de_read(display, PCH_FP1(id));

	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return val & DPLL_VCO_ENABLE;
}
568 
569 static void ibx_assert_pch_refclk_enabled(struct intel_display *display)
570 {
571 	u32 val;
572 	bool enabled;
573 
574 	val = intel_de_read(display, PCH_DREF_CONTROL);
575 	enabled = !!(val & (DREF_SSC_SOURCE_MASK | DREF_NONSPREAD_SOURCE_MASK |
576 			    DREF_SUPERSPREAD_SOURCE_MASK));
577 	INTEL_DISPLAY_STATE_WARN(display, !enabled,
578 				 "PCH refclk assertion failure, should be active but is disabled\n");
579 }
580 
/* Program and enable a PCH DPLL from the precomputed @dpll_hw_state. */
static void ibx_pch_dpll_enable(struct intel_display *display,
				struct intel_dpll *pll,
				const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct i9xx_dpll_hw_state *hw_state = &dpll_hw_state->i9xx;
	const enum intel_dpll_id id = pll->info->id;

	/* PCH refclock must be enabled first */
	ibx_assert_pch_refclk_enabled(display);

	/* Program the feedback dividers before enabling the PLL. */
	intel_de_write(display, PCH_FP0(id), hw_state->fp0);
	intel_de_write(display, PCH_FP1(id), hw_state->fp1);

	intel_de_write(display, PCH_DPLL(id), hw_state->dpll);

	/* Wait for the clocks to stabilize. */
	intel_de_posting_read(display, PCH_DPLL(id));
	udelay(150);

	/* The pixel multiplier can only be updated once the
	 * DPLL is enabled and the clocks are stable.
	 *
	 * So write it again.
	 */
	intel_de_write(display, PCH_DPLL(id), hw_state->dpll);
	intel_de_posting_read(display, PCH_DPLL(id));
	udelay(200);
}
609 
/* Disable a PCH DPLL by clearing its control register. */
static void ibx_pch_dpll_disable(struct intel_display *display,
				 struct intel_dpll *pll)
{
	const enum intel_dpll_id id = pll->info->id;

	intel_de_write(display, PCH_DPLL(id), 0);
	intel_de_posting_read(display, PCH_DPLL(id));
	udelay(200);
}
619 
/* No additional PLL state to compute for PCH DPLLs; always succeeds. */
static int ibx_compute_dpll(struct intel_atomic_state *state,
			    struct intel_crtc *crtc,
			    struct intel_encoder *encoder)
{
	return 0;
}
626 
627 static int ibx_get_dpll(struct intel_atomic_state *state,
628 			struct intel_crtc *crtc,
629 			struct intel_encoder *encoder)
630 {
631 	struct intel_display *display = to_intel_display(state);
632 	struct intel_crtc_state *crtc_state =
633 		intel_atomic_get_new_crtc_state(state, crtc);
634 	struct intel_dpll *pll;
635 	enum intel_dpll_id id;
636 
637 	if (HAS_PCH_IBX(display)) {
638 		/* Ironlake PCH has a fixed PLL->PCH pipe mapping. */
639 		id = (enum intel_dpll_id) crtc->pipe;
640 		pll = intel_get_dpll_by_id(display, id);
641 
642 		drm_dbg_kms(display->drm,
643 			    "[CRTC:%d:%s] using pre-allocated %s\n",
644 			    crtc->base.base.id, crtc->base.name,
645 			    pll->info->name);
646 	} else {
647 		pll = intel_find_dpll(state, crtc,
648 				      &crtc_state->dpll_hw_state,
649 				      BIT(DPLL_ID_PCH_PLL_B) |
650 				      BIT(DPLL_ID_PCH_PLL_A));
651 	}
652 
653 	if (!pll)
654 		return -EINVAL;
655 
656 	/* reference the pll */
657 	intel_reference_dpll(state, crtc,
658 			     pll, &crtc_state->dpll_hw_state);
659 
660 	crtc_state->intel_dpll = pll;
661 
662 	return 0;
663 }
664 
/* Dump the i9xx/PCH DPLL hw state for debugging. */
static void ibx_dump_hw_state(struct drm_printer *p,
			      const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct i9xx_dpll_hw_state *hw_state = &dpll_hw_state->i9xx;

	drm_printf(p, "dpll_hw_state: dpll: 0x%x, dpll_md: 0x%x, "
		   "fp0: 0x%x, fp1: 0x%x\n",
		   hw_state->dpll,
		   hw_state->dpll_md,
		   hw_state->fp0,
		   hw_state->fp1);
}
677 
678 static bool ibx_compare_hw_state(const struct intel_dpll_hw_state *_a,
679 				 const struct intel_dpll_hw_state *_b)
680 {
681 	const struct i9xx_dpll_hw_state *a = &_a->i9xx;
682 	const struct i9xx_dpll_hw_state *b = &_b->i9xx;
683 
684 	return a->dpll == b->dpll &&
685 		a->dpll_md == b->dpll_md &&
686 		a->fp0 == b->fp0 &&
687 		a->fp1 == b->fp1;
688 }
689 
/* Hooks for the PCH DPLLs. */
static const struct intel_dpll_funcs ibx_pch_dpll_funcs = {
	.enable = ibx_pch_dpll_enable,
	.disable = ibx_pch_dpll_disable,
	.get_hw_state = ibx_pch_dpll_get_hw_state,
};

/* The two PCH DPLLs; table is sentinel-terminated. */
static const struct dpll_info pch_plls[] = {
	{ .name = "PCH DPLL A", .funcs = &ibx_pch_dpll_funcs, .id = DPLL_ID_PCH_PLL_A, },
	{ .name = "PCH DPLL B", .funcs = &ibx_pch_dpll_funcs, .id = DPLL_ID_PCH_PLL_B, },
	{}
};

/* PLL manager for PCH platforms. */
static const struct intel_dpll_mgr pch_pll_mgr = {
	.dpll_info = pch_plls,
	.compute_dplls = ibx_compute_dpll,
	.get_dplls = ibx_get_dpll,
	.put_dplls = intel_put_dpll,
	.dump_hw_state = ibx_dump_hw_state,
	.compare_hw_state = ibx_compare_hw_state,
};
710 
/* Program and enable a WRPLL from the precomputed control value. */
static void hsw_ddi_wrpll_enable(struct intel_display *display,
				 struct intel_dpll *pll,
				 const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
	const enum intel_dpll_id id = pll->info->id;

	intel_de_write(display, WRPLL_CTL(id), hw_state->wrpll);
	intel_de_posting_read(display, WRPLL_CTL(id));
	/* brief settling delay after enabling */
	udelay(20);
}
722 
/* Program and enable the SPLL from the precomputed control value. */
static void hsw_ddi_spll_enable(struct intel_display *display,
				struct intel_dpll *pll,
				const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;

	intel_de_write(display, SPLL_CTL, hw_state->spll);
	intel_de_posting_read(display, SPLL_CTL);
	/* brief settling delay after enabling */
	udelay(20);
}
733 
/* Disable a WRPLL by clearing its enable bit. */
static void hsw_ddi_wrpll_disable(struct intel_display *display,
				  struct intel_dpll *pll)
{
	const enum intel_dpll_id id = pll->info->id;

	intel_de_rmw(display, WRPLL_CTL(id), WRPLL_PLL_ENABLE, 0);
	intel_de_posting_read(display, WRPLL_CTL(id));

	/*
	 * Try to set up the PCH reference clock once all DPLLs
	 * that depend on it have been shut down.
	 */
	if (display->dpll.pch_ssc_use & BIT(id))
		intel_init_pch_refclk(display);
}
749 
750 static void hsw_ddi_spll_disable(struct intel_display *display,
751 				 struct intel_dpll *pll)
752 {
753 	enum intel_dpll_id id = pll->info->id;
754 
755 	intel_de_rmw(display, SPLL_CTL, SPLL_PLL_ENABLE, 0);
756 	intel_de_posting_read(display, SPLL_CTL);
757 
758 	/*
759 	 * Try to set up the PCH reference clock once all DPLLs
760 	 * that depend on it have been shut down.
761 	 */
762 	if (display->dpll.pch_ssc_use & BIT(id))
763 		intel_init_pch_refclk(display);
764 }
765 
/*
 * Read the WRPLL control register into @dpll_hw_state.
 *
 * Returns true if the PLL is enabled (WRPLL_PLL_ENABLE set); returns false
 * without touching the hardware if the display core power domain is off.
 */
static bool hsw_ddi_wrpll_get_hw_state(struct intel_display *display,
				       struct intel_dpll *pll,
				       struct intel_dpll_hw_state *dpll_hw_state)
{
	struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
	const enum intel_dpll_id id = pll->info->id;
	struct ref_tracker *wakeref;
	u32 val;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, WRPLL_CTL(id));
	hw_state->wrpll = val;

	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return val & WRPLL_PLL_ENABLE;
}
787 
/*
 * Read the SPLL control register into @dpll_hw_state.
 *
 * Returns true if the PLL is enabled (SPLL_PLL_ENABLE set); returns false
 * without touching the hardware if the display core power domain is off.
 */
static bool hsw_ddi_spll_get_hw_state(struct intel_display *display,
				      struct intel_dpll *pll,
				      struct intel_dpll_hw_state *dpll_hw_state)
{
	struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
	struct ref_tracker *wakeref;
	u32 val;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, SPLL_CTL);
	hw_state->spll = val;

	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return val & SPLL_PLL_ENABLE;
}
808 
/* LC PLL reference frequency */
#define LC_FREQ 2700
/* LC_FREQ in the fixed-point units used by the WRPLL divider math below */
#define LC_FREQ_2K U64_C(LC_FREQ * 2000)

/* Post divider (P) search range; even values only */
#define P_MIN 2
#define P_MAX 64
#define P_INC 2

/* Constraints for PLL good behavior */
#define REF_MIN 48
#define REF_MAX 400
#define VCO_MIN 2400
#define VCO_MAX 4800

/* Candidate WRPLL dividers; n2 and r2 hold twice the actual N and R */
struct hsw_wrpll_rnp {
	unsigned p, n2, r2;
};
825 
/*
 * Return the allowed frequency deviation budget (in ppm) for the given
 * pixel clock (Hz). Clocks with an exact entry in the table use the
 * listed budget; everything else defaults to 1000.
 */
static unsigned hsw_wrpll_get_budget_for_freq(int clock)
{
	static const struct {
		int clock;
		unsigned budget;
	} budgets[] = {
		{  25175000,    0 }, {  25200000,    0 }, {  27000000,    0 },
		{  27027000,    0 }, {  37762500,    0 }, {  37800000,    0 },
		{  40500000,    0 }, {  40541000,    0 }, {  54000000,    0 },
		{  54054000,    0 }, {  59341000,    0 }, {  59400000,    0 },
		{  72000000,    0 }, {  74176000,    0 }, {  74250000,    0 },
		{  81000000,    0 }, {  81081000,    0 }, {  89012000,    0 },
		{  89100000,    0 }, { 108000000,    0 }, { 108108000,    0 },
		{ 111264000,    0 }, { 111375000,    0 }, { 148352000,    0 },
		{ 148500000,    0 }, { 162000000,    0 }, { 162162000,    0 },
		{ 222525000,    0 }, { 222750000,    0 }, { 296703000,    0 },
		{ 297000000,    0 },
		{ 233500000, 1500 }, { 245250000, 1500 }, { 247750000, 1500 },
		{ 253250000, 1500 }, { 298000000, 1500 },
		{ 169128000, 2000 }, { 169500000, 2000 }, { 179500000, 2000 },
		{ 202000000, 2000 },
		{ 256250000, 4000 }, { 262500000, 4000 }, { 270000000, 4000 },
		{ 272500000, 4000 }, { 273750000, 4000 }, { 280750000, 4000 },
		{ 281250000, 4000 }, { 286000000, 4000 }, { 291750000, 4000 },
		{ 267250000, 5000 }, { 268500000, 5000 },
	};
	unsigned int i;

	for (i = 0; i < sizeof(budgets) / sizeof(budgets[0]); i++) {
		if (budgets[i].clock == clock)
			return budgets[i].budget;
	}

	return 1000;
}
889 
/*
 * Evaluate the candidate divider triplet (r2, n2, p) against the current
 * @best for hitting @freq2k within the ppm @budget, and take it over as
 * @best when it is preferable per the criteria described below.
 */
static void hsw_wrpll_update_rnp(u64 freq2k, unsigned int budget,
				 unsigned int r2, unsigned int n2,
				 unsigned int p,
				 struct hsw_wrpll_rnp *best)
{
	u64 a, b, c, d, diff, diff_best;

	/* No best (r,n,p) yet */
	if (best->p == 0) {
		best->p = p;
		best->n2 = n2;
		best->r2 = r2;
		return;
	}

	/*
	 * Output clock is (LC_FREQ_2K / 2000) * N / (P * R), which compares to
	 * freq2k.
	 *
	 * delta = 1e6 *
	 *	   abs(freq2k - (LC_FREQ_2K * n2/(p * r2))) /
	 *	   freq2k;
	 *
	 * and we would like delta <= budget.
	 *
	 * If the discrepancy is above the PPM-based budget, always prefer to
	 * improve upon the previous solution.  However, if you're within the
	 * budget, try to maximize Ref * VCO, that is N / (P * R^2).
	 */
	a = freq2k * budget * p * r2;
	b = freq2k * budget * best->p * best->r2;
	diff = abs_diff(freq2k * p * r2, LC_FREQ_2K * n2);
	diff_best = abs_diff(freq2k * best->p * best->r2,
			     LC_FREQ_2K * best->n2);
	c = 1000000 * diff;
	d = 1000000 * diff_best;

	if (a < c && b < d) {
		/* If both are above the budget, pick the closer */
		if (best->p * best->r2 * diff < p * r2 * diff_best) {
			best->p = p;
			best->n2 = n2;
			best->r2 = r2;
		}
	} else if (a >= c && b < d) {
		/* If A is below the threshold but B is above it?  Update. */
		best->p = p;
		best->n2 = n2;
		best->r2 = r2;
	} else if (a >= c && b >= d) {
		/* Both are below the limit, so pick the higher n2/(r2*r2) */
		if (n2 * best->r2 * best->r2 > best->n2 * r2 * r2) {
			best->p = p;
			best->n2 = n2;
			best->r2 = r2;
		}
	}
	/* Otherwise a < c && b >= d, do nothing */
}
949 
/*
 * Compute WRPLL dividers for @clock. The r2/n2 outputs hold twice the
 * actual R/N divider values (see the comments below); *p_out is the
 * post divider.
 */
static void
hsw_ddi_calculate_wrpll(int clock /* in Hz */,
			unsigned *r2_out, unsigned *n2_out, unsigned *p_out)
{
	u64 freq2k;
	unsigned p, n2, r2;
	struct hsw_wrpll_rnp best = {};
	unsigned budget;

	freq2k = clock / 100;

	budget = hsw_wrpll_get_budget_for_freq(clock);

	/* Special case handling for 540 pixel clock: bypass WR PLL entirely
	 * and directly pass the LC PLL to it. */
	if (freq2k == 5400000) {
		*n2_out = 2;
		*p_out = 1;
		*r2_out = 2;
		return;
	}

	/*
	 * Ref = LC_FREQ / R, where Ref is the actual reference input seen by
	 * the WR PLL.
	 *
	 * We want R so that REF_MIN <= Ref <= REF_MAX.
	 * Injecting R2 = 2 * R gives:
	 *   REF_MAX * r2 > LC_FREQ * 2 and
	 *   REF_MIN * r2 < LC_FREQ * 2
	 *
	 * Which means the desired boundaries for r2 are:
	 *  LC_FREQ * 2 / REF_MAX < r2 < LC_FREQ * 2 / REF_MIN
	 *
	 */
	for (r2 = LC_FREQ * 2 / REF_MAX + 1;
	     r2 <= LC_FREQ * 2 / REF_MIN;
	     r2++) {

		/*
		 * VCO = N * Ref, that is: VCO = N * LC_FREQ / R
		 *
		 * Once again we want VCO_MIN <= VCO <= VCO_MAX.
		 * Injecting R2 = 2 * R and N2 = 2 * N, we get:
		 *   VCO_MAX * r2 > n2 * LC_FREQ and
		 *   VCO_MIN * r2 < n2 * LC_FREQ)
		 *
		 * Which means the desired boundaries for n2 are:
		 * VCO_MIN * r2 / LC_FREQ < n2 < VCO_MAX * r2 / LC_FREQ
		 */
		for (n2 = VCO_MIN * r2 / LC_FREQ + 1;
		     n2 <= VCO_MAX * r2 / LC_FREQ;
		     n2++) {

			/* Try every (even) post divider for this (r2, n2). */
			for (p = P_MIN; p <= P_MAX; p += P_INC)
				hsw_wrpll_update_rnp(freq2k, budget,
						     r2, n2, p, &best);
		}
	}

	*n2_out = best.n2;
	*p_out = best.p;
	*r2_out = best.r2;
}
1014 
/*
 * Calculate the WRPLL output frequency (in kHz) from the programmed
 * control register value in @dpll_hw_state. Returns 0 (after warning)
 * on an unknown reference clock selection.
 */
static int hsw_ddi_wrpll_get_freq(struct intel_display *display,
				  const struct intel_dpll *pll,
				  const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
	int refclk;
	int n, p, r;
	u32 wrpll = hw_state->wrpll;

	/* Determine the reference clock from the WRPLL reference select. */
	switch (wrpll & WRPLL_REF_MASK) {
	case WRPLL_REF_SPECIAL_HSW:
		/* Muxed-SSC for BDW, non-SSC for non-ULT HSW. */
		if (display->platform.haswell && !display->platform.haswell_ult) {
			refclk = display->dpll.ref_clks.nssc;
			break;
		}
		fallthrough;
	case WRPLL_REF_PCH_SSC:
		/*
		 * We could calculate spread here, but our checking
		 * code only cares about 5% accuracy, and spread is a max of
		 * 0.5% downspread.
		 */
		refclk = display->dpll.ref_clks.ssc;
		break;
	case WRPLL_REF_LCPLL:
		refclk = 2700000;
		break;
	default:
		MISSING_CASE(wrpll);
		return 0;
	}

	/* Extract the divider fields from the control value. */
	r = wrpll & WRPLL_DIVIDER_REF_MASK;
	p = (wrpll & WRPLL_DIVIDER_POST_MASK) >> WRPLL_DIVIDER_POST_SHIFT;
	n = (wrpll & WRPLL_DIVIDER_FB_MASK) >> WRPLL_DIVIDER_FB_SHIFT;

	/* Convert to KHz, p & r have a fixed point portion */
	return (refclk * n / 10) / (p * r) * 2;
}
1055 
1056 static int
1057 hsw_ddi_wrpll_compute_dpll(struct intel_atomic_state *state,
1058 			   struct intel_crtc *crtc)
1059 {
1060 	struct intel_display *display = to_intel_display(state);
1061 	struct intel_crtc_state *crtc_state =
1062 		intel_atomic_get_new_crtc_state(state, crtc);
1063 	struct hsw_dpll_hw_state *hw_state = &crtc_state->dpll_hw_state.hsw;
1064 	unsigned int p, n2, r2;
1065 
1066 	hsw_ddi_calculate_wrpll(crtc_state->port_clock * 1000, &r2, &n2, &p);
1067 
1068 	hw_state->wrpll =
1069 		WRPLL_PLL_ENABLE | WRPLL_REF_LCPLL |
1070 		WRPLL_DIVIDER_REFERENCE(r2) | WRPLL_DIVIDER_FEEDBACK(n2) |
1071 		WRPLL_DIVIDER_POST(p);
1072 
1073 	crtc_state->port_clock = hsw_ddi_wrpll_get_freq(display, NULL,
1074 							&crtc_state->dpll_hw_state);
1075 
1076 	return 0;
1077 }
1078 
1079 static struct intel_dpll *
1080 hsw_ddi_wrpll_get_dpll(struct intel_atomic_state *state,
1081 		       struct intel_crtc *crtc)
1082 {
1083 	struct intel_crtc_state *crtc_state =
1084 		intel_atomic_get_new_crtc_state(state, crtc);
1085 
1086 	return intel_find_dpll(state, crtc,
1087 				      &crtc_state->dpll_hw_state,
1088 				      BIT(DPLL_ID_WRPLL2) |
1089 				      BIT(DPLL_ID_WRPLL1));
1090 }
1091 
1092 static int
1093 hsw_ddi_lcpll_compute_dpll(struct intel_crtc_state *crtc_state)
1094 {
1095 	struct intel_display *display = to_intel_display(crtc_state);
1096 	int clock = crtc_state->port_clock;
1097 
1098 	switch (clock / 2) {
1099 	case 81000:
1100 	case 135000:
1101 	case 270000:
1102 		return 0;
1103 	default:
1104 		drm_dbg_kms(display->drm, "Invalid clock for DP: %d\n",
1105 			    clock);
1106 		return -EINVAL;
1107 	}
1108 }
1109 
1110 static struct intel_dpll *
1111 hsw_ddi_lcpll_get_dpll(struct intel_crtc_state *crtc_state)
1112 {
1113 	struct intel_display *display = to_intel_display(crtc_state);
1114 	struct intel_dpll *pll;
1115 	enum intel_dpll_id pll_id;
1116 	int clock = crtc_state->port_clock;
1117 
1118 	switch (clock / 2) {
1119 	case 81000:
1120 		pll_id = DPLL_ID_LCPLL_810;
1121 		break;
1122 	case 135000:
1123 		pll_id = DPLL_ID_LCPLL_1350;
1124 		break;
1125 	case 270000:
1126 		pll_id = DPLL_ID_LCPLL_2700;
1127 		break;
1128 	default:
1129 		MISSING_CASE(clock / 2);
1130 		return NULL;
1131 	}
1132 
1133 	pll = intel_get_dpll_by_id(display, pll_id);
1134 
1135 	if (!pll)
1136 		return NULL;
1137 
1138 	return pll;
1139 }
1140 
1141 static int hsw_ddi_lcpll_get_freq(struct intel_display *display,
1142 				  const struct intel_dpll *pll,
1143 				  const struct intel_dpll_hw_state *dpll_hw_state)
1144 {
1145 	int link_clock = 0;
1146 
1147 	switch (pll->info->id) {
1148 	case DPLL_ID_LCPLL_810:
1149 		link_clock = 81000;
1150 		break;
1151 	case DPLL_ID_LCPLL_1350:
1152 		link_clock = 135000;
1153 		break;
1154 	case DPLL_ID_LCPLL_2700:
1155 		link_clock = 270000;
1156 		break;
1157 	default:
1158 		drm_WARN(display->drm, 1, "bad port clock sel\n");
1159 		break;
1160 	}
1161 
1162 	return link_clock * 2;
1163 }
1164 
1165 static int
1166 hsw_ddi_spll_compute_dpll(struct intel_atomic_state *state,
1167 			  struct intel_crtc *crtc)
1168 {
1169 	struct intel_crtc_state *crtc_state =
1170 		intel_atomic_get_new_crtc_state(state, crtc);
1171 	struct hsw_dpll_hw_state *hw_state = &crtc_state->dpll_hw_state.hsw;
1172 
1173 	if (drm_WARN_ON(crtc->base.dev, crtc_state->port_clock / 2 != 135000))
1174 		return -EINVAL;
1175 
1176 	hw_state->spll =
1177 		SPLL_PLL_ENABLE | SPLL_FREQ_1350MHz | SPLL_REF_MUXED_SSC;
1178 
1179 	return 0;
1180 }
1181 
1182 static struct intel_dpll *
1183 hsw_ddi_spll_get_dpll(struct intel_atomic_state *state,
1184 		      struct intel_crtc *crtc)
1185 {
1186 	struct intel_crtc_state *crtc_state =
1187 		intel_atomic_get_new_crtc_state(state, crtc);
1188 
1189 	return intel_find_dpll(state, crtc, &crtc_state->dpll_hw_state,
1190 				      BIT(DPLL_ID_SPLL));
1191 }
1192 
1193 static int hsw_ddi_spll_get_freq(struct intel_display *display,
1194 				 const struct intel_dpll *pll,
1195 				 const struct intel_dpll_hw_state *dpll_hw_state)
1196 {
1197 	const struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
1198 	int link_clock = 0;
1199 
1200 	switch (hw_state->spll & SPLL_FREQ_MASK) {
1201 	case SPLL_FREQ_810MHz:
1202 		link_clock = 81000;
1203 		break;
1204 	case SPLL_FREQ_1350MHz:
1205 		link_clock = 135000;
1206 		break;
1207 	case SPLL_FREQ_2700MHz:
1208 		link_clock = 270000;
1209 		break;
1210 	default:
1211 		drm_WARN(display->drm, 1, "bad spll freq\n");
1212 		break;
1213 	}
1214 
1215 	return link_clock * 2;
1216 }
1217 
1218 static int hsw_compute_dpll(struct intel_atomic_state *state,
1219 			    struct intel_crtc *crtc,
1220 			    struct intel_encoder *encoder)
1221 {
1222 	struct intel_crtc_state *crtc_state =
1223 		intel_atomic_get_new_crtc_state(state, crtc);
1224 
1225 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1226 		return hsw_ddi_wrpll_compute_dpll(state, crtc);
1227 	else if (intel_crtc_has_dp_encoder(crtc_state))
1228 		return hsw_ddi_lcpll_compute_dpll(crtc_state);
1229 	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
1230 		return hsw_ddi_spll_compute_dpll(state, crtc);
1231 	else
1232 		return -EINVAL;
1233 }
1234 
1235 static int hsw_get_dpll(struct intel_atomic_state *state,
1236 			struct intel_crtc *crtc,
1237 			struct intel_encoder *encoder)
1238 {
1239 	struct intel_crtc_state *crtc_state =
1240 		intel_atomic_get_new_crtc_state(state, crtc);
1241 	struct intel_dpll *pll = NULL;
1242 
1243 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1244 		pll = hsw_ddi_wrpll_get_dpll(state, crtc);
1245 	else if (intel_crtc_has_dp_encoder(crtc_state))
1246 		pll = hsw_ddi_lcpll_get_dpll(crtc_state);
1247 	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
1248 		pll = hsw_ddi_spll_get_dpll(state, crtc);
1249 
1250 	if (!pll)
1251 		return -EINVAL;
1252 
1253 	intel_reference_dpll(state, crtc,
1254 			     pll, &crtc_state->dpll_hw_state);
1255 
1256 	crtc_state->intel_dpll = pll;
1257 
1258 	return 0;
1259 }
1260 
1261 static void hsw_update_dpll_ref_clks(struct intel_display *display)
1262 {
1263 	display->dpll.ref_clks.ssc = 135000;
1264 	/* Non-SSC is only used on non-ULT HSW. */
1265 	if (intel_de_read(display, FUSE_STRAP3) & HSW_REF_CLK_SELECT)
1266 		display->dpll.ref_clks.nssc = 24000;
1267 	else
1268 		display->dpll.ref_clks.nssc = 135000;
1269 }
1270 
1271 static void hsw_dump_hw_state(struct drm_printer *p,
1272 			      const struct intel_dpll_hw_state *dpll_hw_state)
1273 {
1274 	const struct hsw_dpll_hw_state *hw_state = &dpll_hw_state->hsw;
1275 
1276 	drm_printf(p, "dpll_hw_state: wrpll: 0x%x spll: 0x%x\n",
1277 		   hw_state->wrpll, hw_state->spll);
1278 }
1279 
1280 static bool hsw_compare_hw_state(const struct intel_dpll_hw_state *_a,
1281 				 const struct intel_dpll_hw_state *_b)
1282 {
1283 	const struct hsw_dpll_hw_state *a = &_a->hsw;
1284 	const struct hsw_dpll_hw_state *b = &_b->hsw;
1285 
1286 	return a->wrpll == b->wrpll &&
1287 		a->spll == b->spll;
1288 }
1289 
/* Hooks for the two on-demand WRPLLs (used for HDMI, see hsw_compute_dpll()). */
static const struct intel_dpll_funcs hsw_ddi_wrpll_funcs = {
	.enable = hsw_ddi_wrpll_enable,
	.disable = hsw_ddi_wrpll_disable,
	.get_hw_state = hsw_ddi_wrpll_get_hw_state,
	.get_freq = hsw_ddi_wrpll_get_freq,
};
1296 
/* Hooks for the SPLL (used for analog output, see hsw_compute_dpll()). */
static const struct intel_dpll_funcs hsw_ddi_spll_funcs = {
	.enable = hsw_ddi_spll_enable,
	.disable = hsw_ddi_spll_disable,
	.get_hw_state = hsw_ddi_spll_get_hw_state,
	.get_freq = hsw_ddi_spll_get_freq,
};
1303 
/*
 * The LCPLLs are always on (see .always_on in hsw_plls[]),
 * so enabling is a no-op.
 */
static void hsw_ddi_lcpll_enable(struct intel_display *display,
				 struct intel_dpll *pll,
				 const struct intel_dpll_hw_state *hw_state)
{
}
1309 
/* The always-on LCPLLs are never disabled; no-op. */
static void hsw_ddi_lcpll_disable(struct intel_display *display,
				  struct intel_dpll *pll)
{
}
1314 
/*
 * The always-on LCPLLs carry no per-PLL state to read back;
 * report them as enabled unconditionally.
 */
static bool hsw_ddi_lcpll_get_hw_state(struct intel_display *display,
				       struct intel_dpll *pll,
				       struct intel_dpll_hw_state *dpll_hw_state)
{
	return true;
}
1321 
/* Hooks for the fixed-frequency, always-on LCPLLs (DP). */
static const struct intel_dpll_funcs hsw_ddi_lcpll_funcs = {
	.enable = hsw_ddi_lcpll_enable,
	.disable = hsw_ddi_lcpll_disable,
	.get_hw_state = hsw_ddi_lcpll_get_hw_state,
	.get_freq = hsw_ddi_lcpll_get_freq,
};
1328 
/* All HSW/BDW PLLs; the three LCPLLs are fixed-frequency and always on. */
static const struct dpll_info hsw_plls[] = {
	{ .name = "WRPLL 1", .funcs = &hsw_ddi_wrpll_funcs, .id = DPLL_ID_WRPLL1, },
	{ .name = "WRPLL 2", .funcs = &hsw_ddi_wrpll_funcs, .id = DPLL_ID_WRPLL2, },
	{ .name = "SPLL", .funcs = &hsw_ddi_spll_funcs, .id = DPLL_ID_SPLL, },
	{ .name = "LCPLL 810", .funcs = &hsw_ddi_lcpll_funcs, .id = DPLL_ID_LCPLL_810,
	  .always_on = true, },
	{ .name = "LCPLL 1350", .funcs = &hsw_ddi_lcpll_funcs, .id = DPLL_ID_LCPLL_1350,
	  .always_on = true, },
	{ .name = "LCPLL 2700", .funcs = &hsw_ddi_lcpll_funcs, .id = DPLL_ID_LCPLL_2700,
	  .always_on = true, },
	{}
};
1341 
/* Top-level DPLL manager for HSW/BDW. */
static const struct intel_dpll_mgr hsw_pll_mgr = {
	.dpll_info = hsw_plls,
	.compute_dplls = hsw_compute_dpll,
	.get_dplls = hsw_get_dpll,
	.put_dplls = intel_put_dpll,
	.update_ref_clks = hsw_update_dpll_ref_clks,
	.dump_hw_state = hsw_dump_hw_state,
	.compare_hw_state = hsw_compare_hw_state,
};
1351 
/* Register set for one SKL DPLL: enable/control plus the two HDMI cfg regs. */
struct skl_dpll_regs {
	i915_reg_t ctl, cfgcr1, cfgcr2;
};
1355 
/* this array is indexed by the *shared* pll id */
static const struct skl_dpll_regs skl_dpll_regs[4] = {
	{
		/* DPLL 0 */
		.ctl = LCPLL1_CTL,
		/* DPLL 0 doesn't support HDMI mode, so no cfgcr1/cfgcr2 */
	},
	{
		/* DPLL 1 */
		.ctl = LCPLL2_CTL,
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL1),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL1),
	},
	{
		/* DPLL 2 */
		.ctl = WRPLL_CTL(0),
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL2),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL2),
	},
	{
		/* DPLL 3 */
		.ctl = WRPLL_CTL(1),
		.cfgcr1 = DPLL_CFGCR1(SKL_DPLL3),
		.cfgcr2 = DPLL_CFGCR2(SKL_DPLL3),
	},
};
1382 
/*
 * Update this PLL's fields in the shared DPLL_CTRL1 register.
 * Each PLL owns a 6-bit field at bit position id * 6.
 */
static void skl_ddi_pll_write_ctrl1(struct intel_display *display,
				    struct intel_dpll *pll,
				    const struct skl_dpll_hw_state *hw_state)
{
	const enum intel_dpll_id id = pll->info->id;

	intel_de_rmw(display, DPLL_CTRL1,
		     DPLL_CTRL1_HDMI_MODE(id) |
		     DPLL_CTRL1_SSC(id) |
		     DPLL_CTRL1_LINK_RATE_MASK(id),
		     hw_state->ctrl1 << (id * 6));
	/* posting read to flush the write before touching the PLL itself */
	intel_de_posting_read(display, DPLL_CTRL1);
}
1396 
/*
 * Enable one of DPLL1-3: program DPLL_CTRL1 and the CFGCR registers,
 * set the enable bit, then wait for the PLL to lock.
 */
static void skl_ddi_pll_enable(struct intel_display *display,
			       struct intel_dpll *pll,
			       const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
	const struct skl_dpll_regs *regs = skl_dpll_regs;
	const enum intel_dpll_id id = pll->info->id;

	skl_ddi_pll_write_ctrl1(display, pll, hw_state);

	/* Program dividers/DCO before enabling; posting reads flush them. */
	intel_de_write(display, regs[id].cfgcr1, hw_state->cfgcr1);
	intel_de_write(display, regs[id].cfgcr2, hw_state->cfgcr2);
	intel_de_posting_read(display, regs[id].cfgcr1);
	intel_de_posting_read(display, regs[id].cfgcr2);

	/* the enable bit is always bit 31 */
	intel_de_rmw(display, regs[id].ctl, 0, LCPLL_PLL_ENABLE);

	/* Lock is expected within 5 ms; log but carry on if it isn't. */
	if (intel_de_wait_for_set_ms(display, DPLL_STATUS, DPLL_LOCK(id), 5))
		drm_err(display->drm, "DPLL %d not locked\n", id);
}
1418 
/*
 * DPLL0 is always enabled (it also drives CDCLK, see
 * skl_ddi_dpll0_get_hw_state()), so only its DPLL_CTRL1 fields
 * are programmed here.
 */
static void skl_ddi_dpll0_enable(struct intel_display *display,
				 struct intel_dpll *pll,
				 const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;

	skl_ddi_pll_write_ctrl1(display, pll, hw_state);
}
1427 
/* Disable one of DPLL1-3 by clearing its enable bit. */
static void skl_ddi_pll_disable(struct intel_display *display,
				struct intel_dpll *pll)
{
	const struct skl_dpll_regs *regs = skl_dpll_regs;
	const enum intel_dpll_id id = pll->info->id;

	/* the enable bit is always bit 31 */
	intel_de_rmw(display, regs[id].ctl, LCPLL_PLL_ENABLE, 0);
	intel_de_posting_read(display, regs[id].ctl);
}
1438 
/*
 * DPLL0 stays enabled — it drives CDCLK (see
 * skl_ddi_dpll0_get_hw_state()) — so disabling is a no-op.
 */
static void skl_ddi_dpll0_disable(struct intel_display *display,
				  struct intel_dpll *pll)
{
}
1443 
/*
 * Read back the hardware state of one of DPLL1-3.
 * Returns false if display power is off or the PLL is disabled.
 */
static bool skl_ddi_pll_get_hw_state(struct intel_display *display,
				     struct intel_dpll *pll,
				     struct intel_dpll_hw_state *dpll_hw_state)
{
	struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
	const struct skl_dpll_regs *regs = skl_dpll_regs;
	const enum intel_dpll_id id = pll->info->id;
	struct ref_tracker *wakeref;
	bool ret;
	u32 val;

	/* Bail out if the display core power domain cannot be kept on. */
	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	ret = false;

	val = intel_de_read(display, regs[id].ctl);
	if (!(val & LCPLL_PLL_ENABLE))
		goto out;

	/* Each PLL owns a 6-bit field in the shared DPLL_CTRL1 register. */
	val = intel_de_read(display, DPLL_CTRL1);
	hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;

	/* avoid reading back stale values if HDMI mode is not enabled */
	if (val & DPLL_CTRL1_HDMI_MODE(id)) {
		hw_state->cfgcr1 = intel_de_read(display, regs[id].cfgcr1);
		hw_state->cfgcr2 = intel_de_read(display, regs[id].cfgcr2);
	}
	ret = true;

out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return ret;
}
1481 
/*
 * Read back DPLL0's state; only its DPLL_CTRL1 field exists (no
 * cfgcr registers, see skl_dpll_regs[]). Warns if the always-on
 * PLL is unexpectedly found disabled.
 */
static bool skl_ddi_dpll0_get_hw_state(struct intel_display *display,
				       struct intel_dpll *pll,
				       struct intel_dpll_hw_state *dpll_hw_state)
{
	struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
	const struct skl_dpll_regs *regs = skl_dpll_regs;
	const enum intel_dpll_id id = pll->info->id;
	struct ref_tracker *wakeref;
	u32 val;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	ret = false;

	/* DPLL0 is always enabled since it drives CDCLK */
	val = intel_de_read(display, regs[id].ctl);
	if (drm_WARN_ON(display->drm, !(val & LCPLL_PLL_ENABLE)))
		goto out;

	/* Extract this PLL's 6-bit field from the shared DPLL_CTRL1. */
	val = intel_de_read(display, DPLL_CTRL1);
	hw_state->ctrl1 = (val >> (id * 6)) & 0x3f;

	ret = true;

out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return ret;
}
1515 
/* Best-so-far state for the WRPLL divider search (skl_ddi_calculate_wrpll). */
struct skl_wrpll_context {
	u64 min_deviation;		/* current minimal deviation */
	u64 central_freq;		/* chosen central freq */
	u64 dco_freq;			/* chosen dco freq */
	unsigned int p;			/* chosen divider */
};

/* DCO freq must be within +1%/-6%  of the DCO central freq */
#define SKL_DCO_MAX_PDEVIATION	100
#define SKL_DCO_MAX_NDEVIATION	600
1526 
1527 static void skl_wrpll_try_divider(struct skl_wrpll_context *ctx,
1528 				  u64 central_freq,
1529 				  u64 dco_freq,
1530 				  unsigned int divider)
1531 {
1532 	u64 deviation;
1533 
1534 	deviation = div64_u64(10000 * abs_diff(dco_freq, central_freq),
1535 			      central_freq);
1536 
1537 	/* positive deviation */
1538 	if (dco_freq >= central_freq) {
1539 		if (deviation < SKL_DCO_MAX_PDEVIATION &&
1540 		    deviation < ctx->min_deviation) {
1541 			ctx->min_deviation = deviation;
1542 			ctx->central_freq = central_freq;
1543 			ctx->dco_freq = dco_freq;
1544 			ctx->p = divider;
1545 		}
1546 	/* negative deviation */
1547 	} else if (deviation < SKL_DCO_MAX_NDEVIATION &&
1548 		   deviation < ctx->min_deviation) {
1549 		ctx->min_deviation = deviation;
1550 		ctx->central_freq = central_freq;
1551 		ctx->dco_freq = dco_freq;
1552 		ctx->p = divider;
1553 	}
1554 }
1555 
/*
 * Factor the overall divider p into the hardware's p0 (pdiv),
 * p1 (qdiv) and p2 (kdiv) multipliers. Outputs are left untouched
 * when p is not one of the supported dividers.
 */
static void skl_wrpll_get_multipliers(unsigned int p,
				      unsigned int *p0 /* out */,
				      unsigned int *p1 /* out */,
				      unsigned int *p2 /* out */)
{
	if (p % 2 == 0) {
		/* even dividers */
		unsigned int q = p / 2;

		if (q == 1 || q == 2 || q == 3 || q == 5) {
			*p0 = 2;
			*p1 = 1;
			*p2 = q;
		} else if (q % 2 == 0) {
			*p0 = 2;
			*p1 = q / 2;
			*p2 = 2;
		} else if (q % 3 == 0) {
			*p0 = 3;
			*p1 = q / 3;
			*p2 = 2;
		} else if (q % 7 == 0) {
			*p0 = 7;
			*p1 = q / 7;
			*p2 = 2;
		}
		return;
	}

	/* odd dividers: 3, 5, 7, 9, 15, 21, 35 */
	switch (p) {
	case 3:
	case 9:
		*p0 = 3;
		*p1 = 1;
		*p2 = p / 3;
		break;
	case 5:
	case 7:
		*p0 = p;
		*p1 = 1;
		*p2 = 1;
		break;
	case 15:
		*p0 = 3;
		*p1 = 1;
		*p2 = 5;
		break;
	case 21:
		*p0 = 7;
		*p1 = 1;
		*p2 = 3;
		break;
	case 35:
		*p0 = 7;
		*p1 = 1;
		*p2 = 5;
		break;
	}
}
1604 
/* Encoded WRPLL settings written into DPLL_CFGCR1/CFGCR2. */
struct skl_wrpll_params {
	u32 dco_fraction;	/* 15-bit fractional part of the DCO ratio */
	u32 dco_integer;	/* integer part of the DCO ratio */
	u32 qdiv_ratio;		/* p1 */
	u32 qdiv_mode;		/* 1 if qdiv_ratio != 1 */
	u32 kdiv;		/* encoded p2 */
	u32 pdiv;		/* encoded p0 */
	u32 central_freq;	/* encoded DCO central frequency */
};
1614 
/*
 * Encode the chosen DCO central frequency, dividers (p0/p1/p2) and
 * DCO ratio into the register-field values of @params.
 *
 * central_freq is expected to be one of the three DCO central
 * frequencies picked by skl_ddi_calculate_wrpll(), hence no default
 * case in the first switch.
 */
static void skl_wrpll_params_populate(struct skl_wrpll_params *params,
				      u64 afe_clock,
				      int ref_clock,
				      u64 central_freq,
				      u32 p0, u32 p1, u32 p2)
{
	u64 dco_freq;

	switch (central_freq) {
	case 9600000000ULL:
		params->central_freq = 0;
		break;
	case 9000000000ULL:
		params->central_freq = 1;
		break;
	case 8400000000ULL:
		params->central_freq = 3;
		/* last case, no break needed */
	}

	/* p0 -> pdiv encoding */
	switch (p0) {
	case 1:
		params->pdiv = 0;
		break;
	case 2:
		params->pdiv = 1;
		break;
	case 3:
		params->pdiv = 2;
		break;
	case 7:
		params->pdiv = 4;
		break;
	default:
		WARN(1, "Incorrect PDiv\n");
	}

	/* p2 -> kdiv encoding */
	switch (p2) {
	case 5:
		params->kdiv = 0;
		break;
	case 2:
		params->kdiv = 1;
		break;
	case 3:
		params->kdiv = 2;
		break;
	case 1:
		params->kdiv = 3;
		break;
	default:
		WARN(1, "Incorrect KDiv\n");
	}

	params->qdiv_ratio = p1;
	params->qdiv_mode = (params->qdiv_ratio == 1) ? 0 : 1;

	dco_freq = p0 * p1 * p2 * afe_clock;

	/*
	 * Intermediate values are in Hz.
	 * Divide by MHz to match bspec
	 */
	params->dco_integer = div_u64(dco_freq, ref_clock * KHz(1));
	/* fractional part is the remainder scaled by 2^15 */
	params->dco_fraction =
		div_u64((div_u64(dco_freq, ref_clock / KHz(1)) -
			 params->dco_integer * MHz(1)) * 0x8000, MHz(1));
}
1682 
/*
 * Search all supported dividers and the three DCO central frequencies
 * for the combination whose DCO frequency deviates least from a
 * central frequency, then encode the result into @wrpll_params.
 * Even dividers are preferred over odd ones. Returns -EINVAL if no
 * divider stays within the allowed deviation band.
 */
static int
skl_ddi_calculate_wrpll(int clock,
			int ref_clock,
			struct skl_wrpll_params *wrpll_params)
{
	static const u64 dco_central_freq[3] = { 8400000000ULL,
						 9000000000ULL,
						 9600000000ULL };
	static const u8 even_dividers[] = {  4,  6,  8, 10, 12, 14, 16, 18, 20,
					    24, 28, 30, 32, 36, 40, 42, 44,
					    48, 52, 54, 56, 60, 64, 66, 68,
					    70, 72, 76, 78, 80, 84, 88, 90,
					    92, 96, 98 };
	static const u8 odd_dividers[] = { 3, 5, 7, 9, 15, 21, 35 };
	static const struct {
		const u8 *list;
		int n_dividers;
	} dividers[] = {
		{ even_dividers, ARRAY_SIZE(even_dividers) },
		{ odd_dividers, ARRAY_SIZE(odd_dividers) },
	};
	struct skl_wrpll_context ctx = {
		.min_deviation = U64_MAX,
	};
	unsigned int dco, d, i;
	unsigned int p0, p1, p2;
	u64 afe_clock = (u64)clock * 1000 * 5; /* AFE Clock is 5x Pixel clock, in Hz */

	for (d = 0; d < ARRAY_SIZE(dividers); d++) {
		for (dco = 0; dco < ARRAY_SIZE(dco_central_freq); dco++) {
			for (i = 0; i < dividers[d].n_dividers; i++) {
				unsigned int p = dividers[d].list[i];
				u64 dco_freq = p * afe_clock;

				skl_wrpll_try_divider(&ctx,
						      dco_central_freq[dco],
						      dco_freq,
						      p);
				/*
				 * Skip the remaining dividers if we're sure to
				 * have found the definitive divider, we can't
				 * improve a 0 deviation.
				 */
				if (ctx.min_deviation == 0)
					goto skip_remaining_dividers;
			}
		}

skip_remaining_dividers:
		/*
		 * If a solution is found with an even divider, prefer
		 * this one.
		 */
		if (d == 0 && ctx.p)
			break;
	}

	/* ctx.p == 0 means no divider passed the deviation check */
	if (!ctx.p)
		return -EINVAL;

	/*
	 * gcc incorrectly analyses that these can be used without being
	 * initialized. To be fair, it's hard to guess.
	 */
	p0 = p1 = p2 = 0;
	skl_wrpll_get_multipliers(ctx.p, &p0, &p1, &p2);
	skl_wrpll_params_populate(wrpll_params, afe_clock, ref_clock,
				  ctx.central_freq, p0, p1, p2);

	return 0;
}
1754 
/*
 * Decode the WRPLL dividers and DCO ratio from CFGCR1/CFGCR2 and
 * compute the resulting port clock (kHz). Returns 0 on bogus
 * register contents.
 */
static int skl_ddi_wrpll_get_freq(struct intel_display *display,
				  const struct intel_dpll *pll,
				  const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
	int ref_clock = display->dpll.ref_clks.nssc;
	u32 p0, p1, p2, dco_freq;

	p0 = hw_state->cfgcr2 & DPLL_CFGCR2_PDIV_MASK;
	p2 = hw_state->cfgcr2 & DPLL_CFGCR2_KDIV_MASK;

	/* qdiv ratio only applies when qdiv mode is enabled */
	if (hw_state->cfgcr2 &  DPLL_CFGCR2_QDIV_MODE(1))
		p1 = (hw_state->cfgcr2 & DPLL_CFGCR2_QDIV_RATIO_MASK) >> 8;
	else
		p1 = 1;


	switch (p0) {
	case DPLL_CFGCR2_PDIV_1:
		p0 = 1;
		break;
	case DPLL_CFGCR2_PDIV_2:
		p0 = 2;
		break;
	case DPLL_CFGCR2_PDIV_3:
		p0 = 3;
		break;
	case DPLL_CFGCR2_PDIV_7_INVALID:
		/*
		 * Incorrect ASUS-Z170M BIOS setting, the HW seems to ignore bit#0,
		 * handling it the same way as PDIV_7.
		 */
		drm_dbg_kms(display->drm, "Invalid WRPLL PDIV divider value, fixing it.\n");
		fallthrough;
	case DPLL_CFGCR2_PDIV_7:
		p0 = 7;
		break;
	default:
		MISSING_CASE(p0);
		return 0;
	}

	switch (p2) {
	case DPLL_CFGCR2_KDIV_5:
		p2 = 5;
		break;
	case DPLL_CFGCR2_KDIV_2:
		p2 = 2;
		break;
	case DPLL_CFGCR2_KDIV_3:
		p2 = 3;
		break;
	case DPLL_CFGCR2_KDIV_1:
		p2 = 1;
		break;
	default:
		MISSING_CASE(p2);
		return 0;
	}

	/* DCO frequency = (integer + fraction/2^15) * ref clock */
	dco_freq = (hw_state->cfgcr1 & DPLL_CFGCR1_DCO_INTEGER_MASK) *
		   ref_clock;

	dco_freq += ((hw_state->cfgcr1 & DPLL_CFGCR1_DCO_FRACTION_MASK) >> 9) *
		    ref_clock / 0x8000;

	if (drm_WARN_ON(display->drm, p0 == 0 || p1 == 0 || p2 == 0))
		return 0;

	/* port clock = DCO / (p0 * p1 * p2 * 5); AFE clock is 5x pixel clock */
	return dco_freq / (p0 * p1 * p2 * 5);
}
1826 
/*
 * Compute WRPLL settings for an HDMI port clock and fill in the SKL
 * ctrl1/cfgcr1/cfgcr2 state; port_clock is then re-derived from the
 * programmed dividers. Returns -EINVAL if no divider fits.
 */
static int skl_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct skl_dpll_hw_state *hw_state = &crtc_state->dpll_hw_state.skl;
	struct skl_wrpll_params wrpll_params = {};
	int ret;

	ret = skl_ddi_calculate_wrpll(crtc_state->port_clock,
				      display->dpll.ref_clks.nssc, &wrpll_params);
	if (ret)
		return ret;

	/*
	 * See comment in intel_dpll_hw_state to understand why we always use 0
	 * as the DPLL id in this function.
	 */
	hw_state->ctrl1 =
		DPLL_CTRL1_OVERRIDE(0) |
		DPLL_CTRL1_HDMI_MODE(0);

	hw_state->cfgcr1 =
		DPLL_CFGCR1_FREQ_ENABLE |
		DPLL_CFGCR1_DCO_FRACTION(wrpll_params.dco_fraction) |
		wrpll_params.dco_integer;

	hw_state->cfgcr2 =
		DPLL_CFGCR2_QDIV_RATIO(wrpll_params.qdiv_ratio) |
		DPLL_CFGCR2_QDIV_MODE(wrpll_params.qdiv_mode) |
		DPLL_CFGCR2_KDIV(wrpll_params.kdiv) |
		DPLL_CFGCR2_PDIV(wrpll_params.pdiv) |
		wrpll_params.central_freq;

	/* Refresh port_clock with what the dividers actually produce. */
	crtc_state->port_clock = skl_ddi_wrpll_get_freq(display, NULL,
							&crtc_state->dpll_hw_state);

	return 0;
}
1864 
/*
 * Build the DPLL_CTRL1 link-rate field for a DP port clock.
 * Unrecognized clocks leave only the override bit set.
 */
static int
skl_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
{
	struct skl_dpll_hw_state *hw_state = &crtc_state->dpll_hw_state.skl;
	u32 ctrl1;

	/*
	 * See comment in intel_dpll_hw_state to understand why we always use 0
	 * as the DPLL id in this function.
	 */
	ctrl1 = DPLL_CTRL1_OVERRIDE(0);
	switch (crtc_state->port_clock / 2) {
	case 81000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, 0);
		break;
	case 135000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, 0);
		break;
	case 270000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, 0);
		break;
		/* eDP 1.4 rates */
	case 162000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, 0);
		break;
	case 108000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, 0);
		break;
	case 216000:
		ctrl1 |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, 0);
		break;
	}

	hw_state->ctrl1 = ctrl1;

	return 0;
}
1902 
1903 static int skl_ddi_lcpll_get_freq(struct intel_display *display,
1904 				  const struct intel_dpll *pll,
1905 				  const struct intel_dpll_hw_state *dpll_hw_state)
1906 {
1907 	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
1908 	int link_clock = 0;
1909 
1910 	switch ((hw_state->ctrl1 & DPLL_CTRL1_LINK_RATE_MASK(0)) >>
1911 		DPLL_CTRL1_LINK_RATE_SHIFT(0)) {
1912 	case DPLL_CTRL1_LINK_RATE_810:
1913 		link_clock = 81000;
1914 		break;
1915 	case DPLL_CTRL1_LINK_RATE_1080:
1916 		link_clock = 108000;
1917 		break;
1918 	case DPLL_CTRL1_LINK_RATE_1350:
1919 		link_clock = 135000;
1920 		break;
1921 	case DPLL_CTRL1_LINK_RATE_1620:
1922 		link_clock = 162000;
1923 		break;
1924 	case DPLL_CTRL1_LINK_RATE_2160:
1925 		link_clock = 216000;
1926 		break;
1927 	case DPLL_CTRL1_LINK_RATE_2700:
1928 		link_clock = 270000;
1929 		break;
1930 	default:
1931 		drm_WARN(display->drm, 1, "Unsupported link rate\n");
1932 		break;
1933 	}
1934 
1935 	return link_clock * 2;
1936 }
1937 
1938 static int skl_compute_dpll(struct intel_atomic_state *state,
1939 			    struct intel_crtc *crtc,
1940 			    struct intel_encoder *encoder)
1941 {
1942 	struct intel_crtc_state *crtc_state =
1943 		intel_atomic_get_new_crtc_state(state, crtc);
1944 
1945 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
1946 		return skl_ddi_hdmi_pll_dividers(crtc_state);
1947 	else if (intel_crtc_has_dp_encoder(crtc_state))
1948 		return skl_ddi_dp_set_dpll_hw_state(crtc_state);
1949 	else
1950 		return -EINVAL;
1951 }
1952 
1953 static int skl_get_dpll(struct intel_atomic_state *state,
1954 			struct intel_crtc *crtc,
1955 			struct intel_encoder *encoder)
1956 {
1957 	struct intel_crtc_state *crtc_state =
1958 		intel_atomic_get_new_crtc_state(state, crtc);
1959 	struct intel_dpll *pll;
1960 
1961 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1962 		pll = intel_find_dpll(state, crtc,
1963 				      &crtc_state->dpll_hw_state,
1964 				      BIT(DPLL_ID_SKL_DPLL0));
1965 	else
1966 		pll = intel_find_dpll(state, crtc,
1967 				      &crtc_state->dpll_hw_state,
1968 				      BIT(DPLL_ID_SKL_DPLL3) |
1969 				      BIT(DPLL_ID_SKL_DPLL2) |
1970 				      BIT(DPLL_ID_SKL_DPLL1));
1971 	if (!pll)
1972 		return -EINVAL;
1973 
1974 	intel_reference_dpll(state, crtc,
1975 			     pll, &crtc_state->dpll_hw_state);
1976 
1977 	crtc_state->intel_dpll = pll;
1978 
1979 	return 0;
1980 }
1981 
1982 static int skl_ddi_pll_get_freq(struct intel_display *display,
1983 				const struct intel_dpll *pll,
1984 				const struct intel_dpll_hw_state *dpll_hw_state)
1985 {
1986 	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
1987 
1988 	/*
1989 	 * ctrl1 register is already shifted for each pll, just use 0 to get
1990 	 * the internal shift for each field
1991 	 */
1992 	if (hw_state->ctrl1 & DPLL_CTRL1_HDMI_MODE(0))
1993 		return skl_ddi_wrpll_get_freq(display, pll, dpll_hw_state);
1994 	else
1995 		return skl_ddi_lcpll_get_freq(display, pll, dpll_hw_state);
1996 }
1997 
/* The DPLL reference is the CDCLK reference clock. */
static void skl_update_dpll_ref_clks(struct intel_display *display)
{
	/* No SSC ref */
	display->dpll.ref_clks.nssc = display->cdclk.hw.ref;
}
2003 
2004 static void skl_dump_hw_state(struct drm_printer *p,
2005 			      const struct intel_dpll_hw_state *dpll_hw_state)
2006 {
2007 	const struct skl_dpll_hw_state *hw_state = &dpll_hw_state->skl;
2008 
2009 	drm_printf(p, "dpll_hw_state: ctrl1: 0x%x, cfgcr1: 0x%x, cfgcr2: 0x%x\n",
2010 		   hw_state->ctrl1, hw_state->cfgcr1, hw_state->cfgcr2);
2011 }
2012 
2013 static bool skl_compare_hw_state(const struct intel_dpll_hw_state *_a,
2014 				 const struct intel_dpll_hw_state *_b)
2015 {
2016 	const struct skl_dpll_hw_state *a = &_a->skl;
2017 	const struct skl_dpll_hw_state *b = &_b->skl;
2018 
2019 	return a->ctrl1 == b->ctrl1 &&
2020 		a->cfgcr1 == b->cfgcr1 &&
2021 		a->cfgcr2 == b->cfgcr2;
2022 }
2023 
/* Hooks for the on-demand DPLL1-3. */
static const struct intel_dpll_funcs skl_ddi_pll_funcs = {
	.enable = skl_ddi_pll_enable,
	.disable = skl_ddi_pll_disable,
	.get_hw_state = skl_ddi_pll_get_hw_state,
	.get_freq = skl_ddi_pll_get_freq,
};
2030 
/* Hooks for the always-on DPLL0 (enable/disable only touch DPLL_CTRL1). */
static const struct intel_dpll_funcs skl_ddi_dpll0_funcs = {
	.enable = skl_ddi_dpll0_enable,
	.disable = skl_ddi_dpll0_disable,
	.get_hw_state = skl_ddi_dpll0_get_hw_state,
	.get_freq = skl_ddi_pll_get_freq,
};
2037 
/* All SKL PLLs; DPLL0 is always on (it drives CDCLK). */
static const struct dpll_info skl_plls[] = {
	{ .name = "DPLL 0", .funcs = &skl_ddi_dpll0_funcs, .id = DPLL_ID_SKL_DPLL0,
	  .always_on = true, },
	{ .name = "DPLL 1", .funcs = &skl_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL1, },
	{ .name = "DPLL 2", .funcs = &skl_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL2, },
	{ .name = "DPLL 3", .funcs = &skl_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL3, },
	{}
};
2046 
/* Top-level DPLL manager for SKL. */
static const struct intel_dpll_mgr skl_pll_mgr = {
	.dpll_info = skl_plls,
	.compute_dplls = skl_compute_dpll,
	.get_dplls = skl_get_dpll,
	.put_dplls = intel_put_dpll,
	.update_ref_clks = skl_update_dpll_ref_clks,
	.dump_hw_state = skl_dump_hw_state,
	.compare_hw_state = skl_compare_hw_state,
};
2056 
/*
 * Enable a BXT/GLK port PLL: program the divider and loop filter values
 * from @dpll_hw_state into the PHY registers, trigger a recalibration and
 * wait for the PLL to lock.  The register write order below follows a
 * fixed programming sequence - do not reorder.
 */
static void bxt_ddi_pll_enable(struct intel_display *display,
			       struct intel_dpll *pll,
			       const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct bxt_dpll_hw_state *hw_state = &dpll_hw_state->bxt;
	enum port port = (enum port)pll->info->id; /* 1:1 port->PLL mapping */
	enum dpio_phy phy = DPIO_PHY0;
	enum dpio_channel ch = DPIO_CH0;
	u32 temp;
	int ret;

	bxt_port_to_phy_channel(display, port, &phy, &ch);

	/* Non-SSC reference */
	intel_de_rmw(display, BXT_PORT_PLL_ENABLE(port), 0, PORT_PLL_REF_SEL);

	/* GLK additionally needs the PLL power well enabled first. */
	if (display->platform.geminilake) {
		intel_de_rmw(display, BXT_PORT_PLL_ENABLE(port),
			     0, PORT_PLL_POWER_ENABLE);

		ret = intel_de_wait_for_set_us(display,
					       BXT_PORT_PLL_ENABLE(port),
					       PORT_PLL_POWER_STATE, 200);
		if (ret)
			drm_err(display->drm,
				"Power state not set for PLL:%d\n", port);
	}

	/* Disable 10 bit clock */
	intel_de_rmw(display, BXT_PORT_PLL_EBB_4(phy, ch),
		     PORT_PLL_10BIT_CLK_ENABLE, 0);

	/* Write P1 & P2 */
	intel_de_rmw(display, BXT_PORT_PLL_EBB_0(phy, ch),
		     PORT_PLL_P1_MASK | PORT_PLL_P2_MASK, hw_state->ebb0);

	/* Write M2 integer */
	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 0),
		     PORT_PLL_M2_INT_MASK, hw_state->pll0);

	/* Write N */
	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 1),
		     PORT_PLL_N_MASK, hw_state->pll1);

	/* Write M2 fraction */
	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 2),
		     PORT_PLL_M2_FRAC_MASK, hw_state->pll2);

	/* Write M2 fraction enable */
	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 3),
		     PORT_PLL_M2_FRAC_ENABLE, hw_state->pll3);

	/* Write coeff (proportional/integral coefficients and gain control) */
	temp = intel_de_read(display, BXT_PORT_PLL(phy, ch, 6));
	temp &= ~PORT_PLL_PROP_COEFF_MASK;
	temp &= ~PORT_PLL_INT_COEFF_MASK;
	temp &= ~PORT_PLL_GAIN_CTL_MASK;
	temp |= hw_state->pll6;
	intel_de_write(display, BXT_PORT_PLL(phy, ch, 6), temp);

	/* Write calibration val */
	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 8),
		     PORT_PLL_TARGET_CNT_MASK, hw_state->pll8);

	intel_de_rmw(display, BXT_PORT_PLL(phy, ch, 9),
		     PORT_PLL_LOCK_THRESHOLD_MASK, hw_state->pll9);

	temp = intel_de_read(display, BXT_PORT_PLL(phy, ch, 10));
	temp &= ~PORT_PLL_DCO_AMP_OVR_EN_H;
	temp &= ~PORT_PLL_DCO_AMP_MASK;
	temp |= hw_state->pll10;
	intel_de_write(display, BXT_PORT_PLL(phy, ch, 10), temp);

	/* Recalibrate with new settings */
	temp = intel_de_read(display, BXT_PORT_PLL_EBB_4(phy, ch));
	temp |= PORT_PLL_RECALIBRATE;
	intel_de_write(display, BXT_PORT_PLL_EBB_4(phy, ch), temp);
	temp &= ~PORT_PLL_10BIT_CLK_ENABLE;
	temp |= hw_state->ebb4;
	intel_de_write(display, BXT_PORT_PLL_EBB_4(phy, ch), temp);

	/* Enable PLL */
	intel_de_rmw(display, BXT_PORT_PLL_ENABLE(port), 0, PORT_PLL_ENABLE);
	intel_de_posting_read(display, BXT_PORT_PLL_ENABLE(port));

	/* Wait up to 200us for the PLL to report lock. */
	ret = intel_de_wait_for_set_us(display, BXT_PORT_PLL_ENABLE(port),
				       PORT_PLL_LOCK, 200);
	if (ret)
		drm_err(display->drm, "PLL %d not locked\n", port);

	if (display->platform.geminilake) {
		/*
		 * Read back lane 0 and apply DCC delay range to all lanes via
		 * the group register (lanes are programmed identically).
		 */
		temp = intel_de_read(display, BXT_PORT_TX_DW5_LN(phy, ch, 0));
		temp |= DCC_DELAY_RANGE_2;
		intel_de_write(display, BXT_PORT_TX_DW5_GRP(phy, ch), temp);
	}

	/*
	 * While we write to the group register to program all lanes at once we
	 * can read only lane registers and we pick lanes 0/1 for that.
	 */
	temp = intel_de_read(display, BXT_PORT_PCS_DW12_LN01(phy, ch));
	temp &= ~LANE_STAGGER_MASK;
	temp &= ~LANESTAGGER_STRAP_OVRD;
	temp |= hw_state->pcsdw12;
	intel_de_write(display, BXT_PORT_PCS_DW12_GRP(phy, ch), temp);
}
2163 
/*
 * Disable a BXT/GLK port PLL.  On GLK the PLL power well is also turned
 * off and its power state is polled to confirm the transition.
 */
static void bxt_ddi_pll_disable(struct intel_display *display,
				struct intel_dpll *pll)
{
	enum port port = (enum port)pll->info->id; /* 1:1 port->PLL mapping */
	int ret;

	intel_de_rmw(display, BXT_PORT_PLL_ENABLE(port), PORT_PLL_ENABLE, 0);
	intel_de_posting_read(display, BXT_PORT_PLL_ENABLE(port));

	if (display->platform.geminilake) {
		intel_de_rmw(display, BXT_PORT_PLL_ENABLE(port),
			     PORT_PLL_POWER_ENABLE, 0);

		ret = intel_de_wait_for_clear_us(display,
						 BXT_PORT_PLL_ENABLE(port),
						 PORT_PLL_POWER_STATE, 200);
		if (ret)
			drm_err(display->drm,
				"Power state not reset for PLL:%d\n", port);
	}
}
2185 
/*
 * Read back the current hardware state of a BXT/GLK port PLL into
 * @dpll_hw_state.  Each field is masked down to the bits that
 * bxt_ddi_set_dpll_hw_state() programs, so the result can be compared
 * against a computed state.  Returns true if the PLL is enabled.
 */
static bool bxt_ddi_pll_get_hw_state(struct intel_display *display,
				     struct intel_dpll *pll,
				     struct intel_dpll_hw_state *dpll_hw_state)
{
	struct bxt_dpll_hw_state *hw_state = &dpll_hw_state->bxt;
	enum port port = (enum port)pll->info->id; /* 1:1 port->PLL mapping */
	struct ref_tracker *wakeref;
	enum dpio_phy phy;
	enum dpio_channel ch;
	u32 val;
	bool ret;

	bxt_port_to_phy_channel(display, port, &phy, &ch);

	/* Bail out if the display core power domain isn't already up. */
	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	ret = false;

	val = intel_de_read(display, BXT_PORT_PLL_ENABLE(port));
	if (!(val & PORT_PLL_ENABLE))
		goto out;

	hw_state->ebb0 = intel_de_read(display, BXT_PORT_PLL_EBB_0(phy, ch));
	hw_state->ebb0 &= PORT_PLL_P1_MASK | PORT_PLL_P2_MASK;

	hw_state->ebb4 = intel_de_read(display, BXT_PORT_PLL_EBB_4(phy, ch));
	hw_state->ebb4 &= PORT_PLL_10BIT_CLK_ENABLE;

	hw_state->pll0 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 0));
	hw_state->pll0 &= PORT_PLL_M2_INT_MASK;

	hw_state->pll1 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 1));
	hw_state->pll1 &= PORT_PLL_N_MASK;

	hw_state->pll2 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 2));
	hw_state->pll2 &= PORT_PLL_M2_FRAC_MASK;

	hw_state->pll3 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 3));
	hw_state->pll3 &= PORT_PLL_M2_FRAC_ENABLE;

	hw_state->pll6 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 6));
	hw_state->pll6 &= PORT_PLL_PROP_COEFF_MASK |
			  PORT_PLL_INT_COEFF_MASK |
			  PORT_PLL_GAIN_CTL_MASK;

	hw_state->pll8 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 8));
	hw_state->pll8 &= PORT_PLL_TARGET_CNT_MASK;

	hw_state->pll9 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 9));
	hw_state->pll9 &= PORT_PLL_LOCK_THRESHOLD_MASK;

	hw_state->pll10 = intel_de_read(display, BXT_PORT_PLL(phy, ch, 10));
	hw_state->pll10 &= PORT_PLL_DCO_AMP_OVR_EN_H |
			   PORT_PLL_DCO_AMP_MASK;

	/*
	 * While we write to the group register to program all lanes at once we
	 * can read only lane registers. We configure all lanes the same way, so
	 * here just read out lanes 0/1 and output a note if lanes 2/3 differ.
	 */
	hw_state->pcsdw12 = intel_de_read(display,
					  BXT_PORT_PCS_DW12_LN01(phy, ch));
	if (intel_de_read(display, BXT_PORT_PCS_DW12_LN23(phy, ch)) != hw_state->pcsdw12)
		drm_dbg(display->drm,
			"lane stagger config different for lane 01 (%08x) and 23 (%08x)\n",
			hw_state->pcsdw12,
			intel_de_read(display,
				      BXT_PORT_PCS_DW12_LN23(phy, ch)));
	hw_state->pcsdw12 &= LANE_STAGGER_MASK | LANESTAGGER_STRAP_OVRD;

	ret = true;

out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);

	return ret;
}
2266 
/*
 * pre-calculated values for DP linkrates; looked up by .dot (port clock
 * in kHz) in bxt_ddi_dp_pll_dividers().
 */
static const struct dpll bxt_dp_clk_val[] = {
	/* m2 is .22 binary fixed point */
	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
	{ .dot = 540000, .p1 = 2, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
	{ .dot = 216000, .p1 = 3, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 243000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6133333 /* 24.3 */ },
	{ .dot = 324000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 432000, .p1 = 3, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
};
2278 
/*
 * Compute HDMI PLL dividers for @crtc_state into @clk_div.
 * Returns 0 on success, -EINVAL if no suitable divider exists.
 */
static int
bxt_ddi_hdmi_pll_dividers(struct intel_crtc_state *crtc_state,
			  struct dpll *clk_div)
{
	struct intel_display *display = to_intel_display(crtc_state);

	/* Calculate HDMI div */
	/*
	 * FIXME: tie the following calculation into
	 * i9xx_crtc_compute_clock
	 */
	if (!bxt_find_best_dpll(crtc_state, clk_div))
		return -EINVAL;

	/* BXT port PLLs always use m1 == 2. */
	drm_WARN_ON(display->drm, clk_div->m1 != 2);

	return 0;
}
2297 
2298 static void bxt_ddi_dp_pll_dividers(struct intel_crtc_state *crtc_state,
2299 				    struct dpll *clk_div)
2300 {
2301 	struct intel_display *display = to_intel_display(crtc_state);
2302 	int i;
2303 
2304 	*clk_div = bxt_dp_clk_val[0];
2305 	for (i = 0; i < ARRAY_SIZE(bxt_dp_clk_val); ++i) {
2306 		if (crtc_state->port_clock == bxt_dp_clk_val[i].dot) {
2307 			*clk_div = bxt_dp_clk_val[i];
2308 			break;
2309 		}
2310 	}
2311 
2312 	chv_calc_dpll_params(display->dpll.ref_clks.nssc, clk_div);
2313 
2314 	drm_WARN_ON(display->drm, clk_div->vco == 0 ||
2315 		    clk_div->dot != crtc_state->port_clock);
2316 }
2317 
/*
 * Translate the computed dividers in @clk_div into the raw BXT PLL
 * register values stored in crtc_state->dpll_hw_state.bxt.  The loop
 * filter coefficients and the lane stagger value are chosen from fixed
 * bands based on VCO and port clock frequency.
 */
static int bxt_ddi_set_dpll_hw_state(struct intel_crtc_state *crtc_state,
				     const struct dpll *clk_div)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct bxt_dpll_hw_state *hw_state = &crtc_state->dpll_hw_state.bxt;
	int clock = crtc_state->port_clock;
	int vco = clk_div->vco;
	u32 prop_coef, int_coef, gain_ctl, targ_cnt;
	u32 lanestagger;

	/* Loop filter coefficients per VCO frequency band (kHz). */
	if (vco >= 6200000 && vco <= 6700000) {
		prop_coef = 4;
		int_coef = 9;
		gain_ctl = 3;
		targ_cnt = 8;
	} else if ((vco > 5400000 && vco < 6200000) ||
			(vco >= 4800000 && vco < 5400000)) {
		prop_coef = 5;
		int_coef = 11;
		gain_ctl = 3;
		targ_cnt = 9;
	} else if (vco == 5400000) {
		prop_coef = 3;
		int_coef = 8;
		gain_ctl = 1;
		targ_cnt = 9;
	} else {
		drm_err(display->drm, "Invalid VCO\n");
		return -EINVAL;
	}

	/* Lane stagger value per port clock band (kHz). */
	if (clock > 270000)
		lanestagger = 0x18;
	else if (clock > 135000)
		lanestagger = 0x0d;
	else if (clock > 67000)
		lanestagger = 0x07;
	else if (clock > 33000)
		lanestagger = 0x04;
	else
		lanestagger = 0x02;

	hw_state->ebb0 = PORT_PLL_P1(clk_div->p1) | PORT_PLL_P2(clk_div->p2);
	hw_state->pll0 = PORT_PLL_M2_INT(clk_div->m2 >> 22);
	hw_state->pll1 = PORT_PLL_N(clk_div->n);
	/* m2 is .22 binary fixed point: low 22 bits are the fraction. */
	hw_state->pll2 = PORT_PLL_M2_FRAC(clk_div->m2 & 0x3fffff);

	if (clk_div->m2 & 0x3fffff)
		hw_state->pll3 = PORT_PLL_M2_FRAC_ENABLE;

	hw_state->pll6 = PORT_PLL_PROP_COEFF(prop_coef) |
		PORT_PLL_INT_COEFF(int_coef) |
		PORT_PLL_GAIN_CTL(gain_ctl);

	hw_state->pll8 = PORT_PLL_TARGET_CNT(targ_cnt);

	hw_state->pll9 = PORT_PLL_LOCK_THRESHOLD(5);

	hw_state->pll10 = PORT_PLL_DCO_AMP(15) |
		PORT_PLL_DCO_AMP_OVR_EN_H;

	hw_state->ebb4 = PORT_PLL_10BIT_CLK_ENABLE;

	hw_state->pcsdw12 = LANESTAGGER_STRAP_OVRD | lanestagger;

	return 0;
}
2385 
/*
 * Compute the port clock (kHz) from the raw BXT PLL register values,
 * reversing the packing done in bxt_ddi_set_dpll_hw_state().
 */
static int bxt_ddi_pll_get_freq(struct intel_display *display,
				const struct intel_dpll *pll,
				const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct bxt_dpll_hw_state *hw_state = &dpll_hw_state->bxt;
	struct dpll clock;

	clock.m1 = 2;
	/* m2 is .22 binary fixed point: integer part from pll0, fraction from pll2 */
	clock.m2 = REG_FIELD_GET(PORT_PLL_M2_INT_MASK, hw_state->pll0) << 22;
	if (hw_state->pll3 & PORT_PLL_M2_FRAC_ENABLE)
		clock.m2 |= REG_FIELD_GET(PORT_PLL_M2_FRAC_MASK,
					  hw_state->pll2);
	clock.n = REG_FIELD_GET(PORT_PLL_N_MASK, hw_state->pll1);
	clock.p1 = REG_FIELD_GET(PORT_PLL_P1_MASK, hw_state->ebb0);
	clock.p2 = REG_FIELD_GET(PORT_PLL_P2_MASK, hw_state->ebb0);

	return chv_calc_dpll_params(display->dpll.ref_clks.nssc, &clock);
}
2404 
2405 static int
2406 bxt_ddi_dp_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2407 {
2408 	struct dpll clk_div = {};
2409 
2410 	bxt_ddi_dp_pll_dividers(crtc_state, &clk_div);
2411 
2412 	return bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2413 }
2414 
2415 static int
2416 bxt_ddi_hdmi_set_dpll_hw_state(struct intel_crtc_state *crtc_state)
2417 {
2418 	struct intel_display *display = to_intel_display(crtc_state);
2419 	struct dpll clk_div = {};
2420 	int ret;
2421 
2422 	bxt_ddi_hdmi_pll_dividers(crtc_state, &clk_div);
2423 
2424 	ret = bxt_ddi_set_dpll_hw_state(crtc_state, &clk_div);
2425 	if (ret)
2426 		return ret;
2427 
2428 	crtc_state->port_clock = bxt_ddi_pll_get_freq(display, NULL,
2429 						      &crtc_state->dpll_hw_state);
2430 
2431 	return 0;
2432 }
2433 
2434 static int bxt_compute_dpll(struct intel_atomic_state *state,
2435 			    struct intel_crtc *crtc,
2436 			    struct intel_encoder *encoder)
2437 {
2438 	struct intel_crtc_state *crtc_state =
2439 		intel_atomic_get_new_crtc_state(state, crtc);
2440 
2441 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2442 		return bxt_ddi_hdmi_set_dpll_hw_state(crtc_state);
2443 	else if (intel_crtc_has_dp_encoder(crtc_state))
2444 		return bxt_ddi_dp_set_dpll_hw_state(crtc_state);
2445 	else
2446 		return -EINVAL;
2447 }
2448 
/*
 * Assign the PLL for @crtc.  On BXT each port has a dedicated PLL, so
 * this simply looks up the PLL matching the encoder's port and takes a
 * reference on it; no sharing/selection logic is needed.
 */
static int bxt_get_dpll(struct intel_atomic_state *state,
			struct intel_crtc *crtc,
			struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_dpll *pll;
	enum intel_dpll_id id;

	/* 1:1 mapping between ports and PLLs */
	id = (enum intel_dpll_id) encoder->port;
	pll = intel_get_dpll_by_id(display, id);

	drm_dbg_kms(display->drm, "[CRTC:%d:%s] using pre-allocated %s\n",
		    crtc->base.base.id, crtc->base.name, pll->info->name);

	intel_reference_dpll(state, crtc,
			     pll, &crtc_state->dpll_hw_state);

	crtc_state->intel_dpll = pll;

	return 0;
}
2473 
/* BXT uses a 100 MHz reference for both SSC and non-SSC PLL modes. */
static void bxt_update_dpll_ref_clks(struct intel_display *display)
{
	display->dpll.ref_clks.ssc = 100000;
	display->dpll.ref_clks.nssc = 100000;
	/* DSI non-SSC ref 19.2MHz */
}
2480 
2481 static void bxt_dump_hw_state(struct drm_printer *p,
2482 			      const struct intel_dpll_hw_state *dpll_hw_state)
2483 {
2484 	const struct bxt_dpll_hw_state *hw_state = &dpll_hw_state->bxt;
2485 
2486 	drm_printf(p, "dpll_hw_state: ebb0: 0x%x, ebb4: 0x%x,"
2487 		   "pll0: 0x%x, pll1: 0x%x, pll2: 0x%x, pll3: 0x%x, "
2488 		   "pll6: 0x%x, pll8: 0x%x, pll9: 0x%x, pll10: 0x%x, pcsdw12: 0x%x\n",
2489 		   hw_state->ebb0, hw_state->ebb4,
2490 		   hw_state->pll0, hw_state->pll1, hw_state->pll2, hw_state->pll3,
2491 		   hw_state->pll6, hw_state->pll8, hw_state->pll9, hw_state->pll10,
2492 		   hw_state->pcsdw12);
2493 }
2494 
2495 static bool bxt_compare_hw_state(const struct intel_dpll_hw_state *_a,
2496 				 const struct intel_dpll_hw_state *_b)
2497 {
2498 	const struct bxt_dpll_hw_state *a = &_a->bxt;
2499 	const struct bxt_dpll_hw_state *b = &_b->bxt;
2500 
2501 	return a->ebb0 == b->ebb0 &&
2502 		a->ebb4 == b->ebb4 &&
2503 		a->pll0 == b->pll0 &&
2504 		a->pll1 == b->pll1 &&
2505 		a->pll2 == b->pll2 &&
2506 		a->pll3 == b->pll3 &&
2507 		a->pll6 == b->pll6 &&
2508 		a->pll8 == b->pll8 &&
2509 		a->pll10 == b->pll10 &&
2510 		a->pcsdw12 == b->pcsdw12;
2511 }
2512 
/* PLL ops shared by all BXT port PLLs. */
static const struct intel_dpll_funcs bxt_ddi_pll_funcs = {
	.enable = bxt_ddi_pll_enable,
	.disable = bxt_ddi_pll_disable,
	.get_hw_state = bxt_ddi_pll_get_hw_state,
	.get_freq = bxt_ddi_pll_get_freq,
};
2519 
/* BXT PLL table; each PLL maps 1:1 to a port (A/B/C). */
static const struct dpll_info bxt_plls[] = {
	{ .name = "PORT PLL A", .funcs = &bxt_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL0, },
	{ .name = "PORT PLL B", .funcs = &bxt_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL1, },
	{ .name = "PORT PLL C", .funcs = &bxt_ddi_pll_funcs, .id = DPLL_ID_SKL_DPLL2, },
	{}
};
2526 
/* DPLL manager vtable for BXT/GLK. */
static const struct intel_dpll_mgr bxt_pll_mgr = {
	.dpll_info = bxt_plls,
	.compute_dplls = bxt_compute_dpll,
	.get_dplls = bxt_get_dpll,
	.put_dplls = intel_put_dpll,
	.update_ref_clks = bxt_update_dpll_ref_clks,
	.dump_hw_state = bxt_dump_hw_state,
	.compare_hw_state = bxt_compare_hw_state,
};
2536 
/*
 * Factor the overall divider @bestdiv into the P/Q/K multipliers used by
 * the ICL WRPLL (bestdiv == pdiv * qdiv * kdiv).  If no decomposition
 * matches (cannot happen with the divider list in icl_calc_wrpll()), the
 * outputs are left untouched.
 */
static void icl_wrpll_get_multipliers(int bestdiv, int *pdiv,
				      int *qdiv, int *kdiv)
{
	int p, q, k;

	if (bestdiv % 2) {
		/* odd dividers: 3/5/7 directly, 9/15/21 as (n / 3) * 3 */
		if (bestdiv == 3 || bestdiv == 5 || bestdiv == 7)
			p = bestdiv;
		else
			p = bestdiv / 3;
		q = 1;
		k = (p == bestdiv) ? 1 : 3;
	} else if (bestdiv == 2) {
		p = 2;
		q = 1;
		k = 1;
	} else if (bestdiv % 4 == 0) {
		p = 2;
		q = bestdiv / 4;
		k = 2;
	} else if (bestdiv % 6 == 0) {
		p = 3;
		q = bestdiv / 6;
		k = 2;
	} else if (bestdiv % 5 == 0) {
		p = 5;
		q = bestdiv / 10;
		k = 2;
	} else if (bestdiv % 14 == 0) {
		p = 7;
		q = bestdiv / 14;
		k = 2;
	} else {
		/* no matching even decomposition: leave outputs unchanged */
		return;
	}

	*pdiv = p;
	*qdiv = q;
	*kdiv = k;
}
2575 
/*
 * Encode the computed DCO frequency and P/Q/K dividers into the register
 * field encodings stored in @params.  The kdiv/pdiv switches map logical
 * divider values to their hardware encodings.
 */
static void icl_wrpll_params_populate(struct skl_wrpll_params *params,
				      u32 dco_freq, u32 ref_freq,
				      int pdiv, int qdiv, int kdiv)
{
	u32 dco;

	switch (kdiv) {
	case 1:
		params->kdiv = 1;
		break;
	case 2:
		params->kdiv = 2;
		break;
	case 3:
		params->kdiv = 4;
		break;
	default:
		WARN(1, "Incorrect KDiv\n");
	}

	switch (pdiv) {
	case 2:
		params->pdiv = 1;
		break;
	case 3:
		params->pdiv = 2;
		break;
	case 5:
		params->pdiv = 4;
		break;
	case 7:
		params->pdiv = 8;
		break;
	default:
		WARN(1, "Incorrect PDiv\n");
	}

	/* qdiv is only meaningful when kdiv == 2 (see icl_wrpll_get_multipliers()) */
	WARN_ON(kdiv != 2 && qdiv != 1);

	params->qdiv_ratio = qdiv;
	params->qdiv_mode = (qdiv == 1) ? 0 : 1;

	/* DCO ratio in .15 binary fixed point */
	dco = div_u64((u64)dco_freq << 15, ref_freq);

	params->dco_integer = dco >> 15;
	params->dco_fraction = dco & 0x7fff;
}
2623 
2624 /*
2625  * Display WA #22010492432: ehl, tgl, adl-s, adl-p
2626  * Program half of the nominal DCO divider fraction value.
2627  */
2628 static bool
2629 ehl_combo_pll_div_frac_wa_needed(struct intel_display *display)
2630 {
2631 	return ((display->platform.elkhartlake &&
2632 		 IS_DISPLAY_STEP(display, STEP_B0, STEP_FOREVER)) ||
2633 		DISPLAY_VER(display) >= 12) &&
2634 		display->dpll.ref_clks.nssc == 38400;
2635 }
2636 
/* Maps a DP link rate (kHz) to its pre-computed WRPLL parameters. */
struct icl_combo_pll_params {
	int clock;	/* port clock in kHz */
	struct skl_wrpll_params wrpll;
};
2641 
2642 /*
2643  * These values alrea already adjusted: they're the bits we write to the
2644  * registers, not the logical values.
2645  */
/* Pre-computed combo PHY DP PLL values for a 24 MHz reference clock. */
static const struct icl_combo_pll_params icl_dp_combo_pll_24MHz_values[] = {
	{ 540000,
	  { .dco_integer = 0x151, .dco_fraction = 0x4000,		/* [0]: 5.4 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 270000,
	  { .dco_integer = 0x151, .dco_fraction = 0x4000,		/* [1]: 2.7 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 162000,
	  { .dco_integer = 0x151, .dco_fraction = 0x4000,		/* [2]: 1.62 */
	    .pdiv = 0x4 /* 5 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 324000,
	  { .dco_integer = 0x151, .dco_fraction = 0x4000,		/* [3]: 3.24 */
	    .pdiv = 0x4 /* 5 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 216000,
	  { .dco_integer = 0x168, .dco_fraction = 0x0000,		/* [4]: 2.16 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
	{ 432000,
	  { .dco_integer = 0x168, .dco_fraction = 0x0000,		/* [5]: 4.32 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 648000,
	  { .dco_integer = 0x195, .dco_fraction = 0x0000,		/* [6]: 6.48 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 810000,
	  { .dco_integer = 0x151, .dco_fraction = 0x4000,		/* [7]: 8.1 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
};
2672 
2673 
/*
 * Pre-computed combo PHY DP PLL values for a 19.2 MHz reference clock.
 * Also used for 38.4 MHz values (the PLL divides 38.4 down to 19.2).
 */
static const struct icl_combo_pll_params icl_dp_combo_pll_19_2MHz_values[] = {
	{ 540000,
	  { .dco_integer = 0x1A5, .dco_fraction = 0x7000,		/* [0]: 5.4 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 270000,
	  { .dco_integer = 0x1A5, .dco_fraction = 0x7000,		/* [1]: 2.7 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 162000,
	  { .dco_integer = 0x1A5, .dco_fraction = 0x7000,		/* [2]: 1.62 */
	    .pdiv = 0x4 /* 5 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 324000,
	  { .dco_integer = 0x1A5, .dco_fraction = 0x7000,		/* [3]: 3.24 */
	    .pdiv = 0x4 /* 5 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 216000,
	  { .dco_integer = 0x1C2, .dco_fraction = 0x0000,		/* [4]: 2.16 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 2, .qdiv_mode = 1, .qdiv_ratio = 2, }, },
	{ 432000,
	  { .dco_integer = 0x1C2, .dco_fraction = 0x0000,		/* [5]: 4.32 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 2, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 648000,
	  { .dco_integer = 0x1FA, .dco_fraction = 0x2000,		/* [6]: 6.48 */
	    .pdiv = 0x2 /* 3 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
	{ 810000,
	  { .dco_integer = 0x1A5, .dco_fraction = 0x7000,		/* [7]: 8.1 */
	    .pdiv = 0x1 /* 2 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0, }, },
};
2701 
/* Thunderbolt PLL parameters per reference clock, selected in icl_calc_tbt_pll(). */
static const struct skl_wrpll_params icl_tbt_pll_24MHz_values = {
	.dco_integer = 0x151, .dco_fraction = 0x4000,
	.pdiv = 0x4 /* 5 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
};

static const struct skl_wrpll_params icl_tbt_pll_19_2MHz_values = {
	.dco_integer = 0x1A5, .dco_fraction = 0x7000,
	.pdiv = 0x4 /* 5 */, .kdiv = 1, .qdiv_mode = 0, .qdiv_ratio = 0,
};

static const struct skl_wrpll_params tgl_tbt_pll_19_2MHz_values = {
	.dco_integer = 0x54, .dco_fraction = 0x3000,
	/* the following params are unused */
	.pdiv = 0, .kdiv = 0, .qdiv_mode = 0, .qdiv_ratio = 0,
};

static const struct skl_wrpll_params tgl_tbt_pll_24MHz_values = {
	.dco_integer = 0x43, .dco_fraction = 0x4000,
	/* the following params are unused */
};
2722 
2723 static int icl_calc_dp_combo_pll(struct intel_crtc_state *crtc_state,
2724 				 struct skl_wrpll_params *pll_params)
2725 {
2726 	struct intel_display *display = to_intel_display(crtc_state);
2727 	const struct icl_combo_pll_params *params =
2728 		display->dpll.ref_clks.nssc == 24000 ?
2729 		icl_dp_combo_pll_24MHz_values :
2730 		icl_dp_combo_pll_19_2MHz_values;
2731 	int clock = crtc_state->port_clock;
2732 	int i;
2733 
2734 	for (i = 0; i < ARRAY_SIZE(icl_dp_combo_pll_24MHz_values); i++) {
2735 		if (clock == params[i].clock) {
2736 			*pll_params = params[i].wrpll;
2737 			return 0;
2738 		}
2739 	}
2740 
2741 	MISSING_CASE(clock);
2742 	return -EINVAL;
2743 }
2744 
/*
 * Select the fixed Thunderbolt PLL parameters based on display version
 * and reference clock.  38.4 MHz uses the 19.2 MHz values (the PLL
 * divides the reference by 2).  Always succeeds; unknown ref clocks are
 * flagged and fall back to the 19.2 MHz values.
 */
static int icl_calc_tbt_pll(struct intel_crtc_state *crtc_state,
			    struct skl_wrpll_params *pll_params)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 12) {
		switch (display->dpll.ref_clks.nssc) {
		default:
			MISSING_CASE(display->dpll.ref_clks.nssc);
			fallthrough;
		case 19200:
		case 38400:
			*pll_params = tgl_tbt_pll_19_2MHz_values;
			break;
		case 24000:
			*pll_params = tgl_tbt_pll_24MHz_values;
			break;
		}
	} else {
		switch (display->dpll.ref_clks.nssc) {
		default:
			MISSING_CASE(display->dpll.ref_clks.nssc);
			fallthrough;
		case 19200:
		case 38400:
			*pll_params = icl_tbt_pll_19_2MHz_values;
			break;
		case 24000:
			*pll_params = icl_tbt_pll_24MHz_values;
			break;
		}
	}

	return 0;
}
2780 
/*
 * The TBT PLL has no single output frequency, so this hook must never be
 * called; warn and return 0 if it is.
 */
static int icl_ddi_tbt_pll_get_freq(struct intel_display *display,
				    const struct intel_dpll *pll,
				    const struct intel_dpll_hw_state *dpll_hw_state)
{
	/*
	 * The PLL outputs multiple frequencies at the same time, selection is
	 * made at DDI clock mux level.
	 */
	drm_WARN_ON(display->drm, 1);

	return 0;
}
2793 
2794 static int icl_wrpll_ref_clock(struct intel_display *display)
2795 {
2796 	int ref_clock = display->dpll.ref_clks.nssc;
2797 
2798 	/*
2799 	 * For ICL+, the spec states: if reference frequency is 38.4,
2800 	 * use 19.2 because the DPLL automatically divides that by 2.
2801 	 */
2802 	if (ref_clock == 38400)
2803 		ref_clock = 19200;
2804 
2805 	return ref_clock;
2806 }
2807 
/*
 * Compute WRPLL parameters for @crtc_state: scan the allowed divider
 * list for the DCO frequency closest to the middle of the valid DCO
 * range, then factor the chosen divider into P/Q/K and encode the
 * result into @wrpll_params.  Returns -EINVAL if no divider puts the
 * DCO within range.
 */
static int
icl_calc_wrpll(struct intel_crtc_state *crtc_state,
	       struct skl_wrpll_params *wrpll_params)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int ref_clock = icl_wrpll_ref_clock(display);
	/* AFE clock is port clock * 5 (kHz) */
	u32 afe_clock = crtc_state->port_clock * 5;
	u32 dco_min = 7998000;
	u32 dco_max = 10000000;
	u32 dco_mid = (dco_min + dco_max) / 2;
	/* dividers that icl_wrpll_get_multipliers() can factor into P*Q*K */
	static const int dividers[] = {  2,  4,  6,  8, 10, 12,  14,  16,
					 18, 20, 24, 28, 30, 32,  36,  40,
					 42, 44, 48, 50, 52, 54,  56,  60,
					 64, 66, 68, 70, 72, 76,  78,  80,
					 84, 88, 90, 92, 96, 98, 100, 102,
					  3,  5,  7,  9, 15, 21 };
	u32 dco, best_dco = 0, dco_centrality = 0;
	u32 best_dco_centrality = U32_MAX; /* Spec meaning of 999999 MHz */
	int d, best_div = 0, pdiv = 0, qdiv = 0, kdiv = 0;

	for (d = 0; d < ARRAY_SIZE(dividers); d++) {
		dco = afe_clock * dividers[d];

		if (dco <= dco_max && dco >= dco_min) {
			dco_centrality = abs(dco - dco_mid);

			if (dco_centrality < best_dco_centrality) {
				best_dco_centrality = dco_centrality;
				best_div = dividers[d];
				best_dco = dco;
			}
		}
	}

	if (best_div == 0)
		return -EINVAL;

	icl_wrpll_get_multipliers(best_div, &pdiv, &qdiv, &kdiv);
	icl_wrpll_params_populate(wrpll_params, best_dco, ref_clock,
				  pdiv, qdiv, kdiv);

	return 0;
}
2851 
/*
 * Compute the port clock (kHz) from the combo PLL CFGCR register values,
 * reversing the encoding done by icl_calc_dpll_state(): decode the
 * P/Q/K dividers, reconstruct the DCO frequency (including the div-frac
 * workaround adjustment) and divide down to the port clock.
 */
static int icl_ddi_combo_pll_get_freq(struct intel_display *display,
				      const struct intel_dpll *pll,
				      const struct intel_dpll_hw_state *dpll_hw_state)
{
	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
	int ref_clock = icl_wrpll_ref_clock(display);
	u32 dco_fraction;
	u32 p0, p1, p2, dco_freq;

	p0 = hw_state->cfgcr1 & DPLL_CFGCR1_PDIV_MASK;
	p2 = hw_state->cfgcr1 & DPLL_CFGCR1_KDIV_MASK;

	/* qdiv ratio only applies when qdiv mode is enabled */
	if (hw_state->cfgcr1 & DPLL_CFGCR1_QDIV_MODE(1))
		p1 = (hw_state->cfgcr1 & DPLL_CFGCR1_QDIV_RATIO_MASK) >>
			DPLL_CFGCR1_QDIV_RATIO_SHIFT;
	else
		p1 = 1;

	/* map register encodings back to logical divider values */
	switch (p0) {
	case DPLL_CFGCR1_PDIV_2:
		p0 = 2;
		break;
	case DPLL_CFGCR1_PDIV_3:
		p0 = 3;
		break;
	case DPLL_CFGCR1_PDIV_5:
		p0 = 5;
		break;
	case DPLL_CFGCR1_PDIV_7:
		p0 = 7;
		break;
	}

	switch (p2) {
	case DPLL_CFGCR1_KDIV_1:
		p2 = 1;
		break;
	case DPLL_CFGCR1_KDIV_2:
		p2 = 2;
		break;
	case DPLL_CFGCR1_KDIV_3:
		p2 = 3;
		break;
	}

	dco_freq = (hw_state->cfgcr0 & DPLL_CFGCR0_DCO_INTEGER_MASK) *
		   ref_clock;

	dco_fraction = (hw_state->cfgcr0 & DPLL_CFGCR0_DCO_FRACTION_MASK) >>
		       DPLL_CFGCR0_DCO_FRACTION_SHIFT;

	/* undo the halved fraction programmed for WA #22010492432 */
	if (ehl_combo_pll_div_frac_wa_needed(display))
		dco_fraction *= 2;

	/* fraction is in .15 binary fixed point */
	dco_freq += (dco_fraction * ref_clock) / 0x8000;

	if (drm_WARN_ON(display->drm, p0 == 0 || p1 == 0 || p2 == 0))
		return 0;

	/* port clock = DCO / (P * Q * K * 5) */
	return dco_freq / (p0 * p1 * p2 * 5);
}
2913 
/*
 * Pack the computed WRPLL parameters into the CFGCR0/CFGCR1 (and div0)
 * register values, applying the div-frac workaround and the per-platform
 * central frequency / CFSELOVRD selection.
 */
static void icl_calc_dpll_state(struct intel_display *display,
				const struct skl_wrpll_params *pll_params,
				struct intel_dpll_hw_state *dpll_hw_state)
{
	struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
	u32 dco_fraction = pll_params->dco_fraction;

	/* WA #22010492432: program half the nominal DCO fraction */
	if (ehl_combo_pll_div_frac_wa_needed(display))
		dco_fraction = DIV_ROUND_CLOSEST(dco_fraction, 2);

	hw_state->cfgcr0 = DPLL_CFGCR0_DCO_FRACTION(dco_fraction) |
			    pll_params->dco_integer;

	hw_state->cfgcr1 = DPLL_CFGCR1_QDIV_RATIO(pll_params->qdiv_ratio) |
			    DPLL_CFGCR1_QDIV_MODE(pll_params->qdiv_mode) |
			    DPLL_CFGCR1_KDIV(pll_params->kdiv) |
			    DPLL_CFGCR1_PDIV(pll_params->pdiv);

	if (DISPLAY_VER(display) >= 12)
		hw_state->cfgcr1 |= TGL_DPLL_CFGCR1_CFSELOVRD_NORMAL_XTAL;
	else
		hw_state->cfgcr1 |= DPLL_CFGCR1_CENTRAL_FREQ_8400;

	/* optional VBT override of the AFC startup value */
	if (display->vbt.override_afc_startup)
		hw_state->div0 = TGL_DPLL0_DIV0_AFC_STARTUP(display->vbt.override_afc_startup_val);
}
2940 
/*
 * Find MG/DKL PHY PLL divisors (div1 * div2) that put the DCO frequency
 * within the valid range for @clock_khz.  On success, stores the DCO
 * frequency in @target_dco_khz and fills in the refclkin/clktop2
 * register values in @hw_state.  Returns -EINVAL if no divisor
 * combination fits.
 */
static int icl_mg_pll_find_divisors(int clock_khz, bool is_dp, bool use_ssc,
				    u32 *target_dco_khz,
				    struct icl_dpll_hw_state *hw_state,
				    bool is_dkl)
{
	/* candidate div1 values, tried largest first */
	static const u8 div1_vals[] = { 7, 5, 3, 2 };
	u32 dco_min_freq, dco_max_freq;
	unsigned int i;
	int div2;

	/* DP requires an exact 8.1 GHz DCO; HDMI allows a range */
	dco_min_freq = is_dp ? 8100000 : use_ssc ? 8000000 : 7992000;
	dco_max_freq = is_dp ? 8100000 : 10000000;

	for (i = 0; i < ARRAY_SIZE(div1_vals); i++) {
		int div1 = div1_vals[i];

		for (div2 = 10; div2 > 0; div2--) {
			/* DCO = div1 * div2 * port clock * 5 */
			int dco = div1 * div2 * clock_khz * 5;
			int a_divratio, tlinedrv, inputsel;
			u32 hsdiv;

			if (dco < dco_min_freq || dco > dco_max_freq)
				continue;

			if (div2 >= 2) {
				/*
				 * Note: a_divratio not matching TGL BSpec
				 * algorithm but matching hardcoded values and
				 * working on HW for DP alt-mode at least
				 */
				a_divratio = is_dp ? 10 : 5;
				tlinedrv = is_dkl ? 1 : 2;
			} else {
				a_divratio = 5;
				tlinedrv = 0;
			}
			inputsel = is_dp ? 0 : 1;

			switch (div1) {
			default:
				MISSING_CASE(div1);
				fallthrough;
			case 2:
				hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2;
				break;
			case 3:
				hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3;
				break;
			case 5:
				hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5;
				break;
			case 7:
				hsdiv = MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7;
				break;
			}

			*target_dco_khz = dco;

			hw_state->mg_refclkin_ctl = MG_REFCLKIN_CTL_OD_2_MUX(1);

			hw_state->mg_clktop2_coreclkctl1 =
				MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO(a_divratio);

			hw_state->mg_clktop2_hsclkctl =
				MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL(tlinedrv) |
				MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL(inputsel) |
				hsdiv |
				MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO(div2);

			return 0;
		}
	}

	return -EINVAL;
}
3016 
3017 /*
3018  * The specification for this function uses real numbers, so the math had to be
3019  * adapted to integer-only calculation, that's why it looks so different.
3020  */
3021 static int icl_calc_mg_pll_state(struct intel_crtc_state *crtc_state,
3022 				 struct intel_dpll_hw_state *dpll_hw_state)
3023 {
3024 	struct intel_display *display = to_intel_display(crtc_state);
3025 	struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
3026 	int refclk_khz = display->dpll.ref_clks.nssc;
3027 	int clock = crtc_state->port_clock;
3028 	u32 dco_khz, m1div, m2div_int, m2div_rem, m2div_frac;
3029 	u32 iref_ndiv, iref_trim, iref_pulse_w;
3030 	u32 prop_coeff, int_coeff;
3031 	u32 tdc_targetcnt, feedfwgain;
3032 	u64 ssc_stepsize, ssc_steplen, ssc_steplog;
3033 	u64 tmp;
3034 	bool use_ssc = false;
3035 	bool is_dp = !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI);
3036 	bool is_dkl = DISPLAY_VER(display) >= 12;
3037 	int ret;
3038 
3039 	ret = icl_mg_pll_find_divisors(clock, is_dp, use_ssc, &dco_khz,
3040 				       hw_state, is_dkl);
3041 	if (ret)
3042 		return ret;
3043 
3044 	m1div = 2;
3045 	m2div_int = dco_khz / (refclk_khz * m1div);
3046 	if (m2div_int > 255) {
3047 		if (!is_dkl) {
3048 			m1div = 4;
3049 			m2div_int = dco_khz / (refclk_khz * m1div);
3050 		}
3051 
3052 		if (m2div_int > 255)
3053 			return -EINVAL;
3054 	}
3055 	m2div_rem = dco_khz % (refclk_khz * m1div);
3056 
3057 	tmp = (u64)m2div_rem * (1 << 22);
3058 	do_div(tmp, refclk_khz * m1div);
3059 	m2div_frac = tmp;
3060 
3061 	switch (refclk_khz) {
3062 	case 19200:
3063 		iref_ndiv = 1;
3064 		iref_trim = 28;
3065 		iref_pulse_w = 1;
3066 		break;
3067 	case 24000:
3068 		iref_ndiv = 1;
3069 		iref_trim = 25;
3070 		iref_pulse_w = 2;
3071 		break;
3072 	case 38400:
3073 		iref_ndiv = 2;
3074 		iref_trim = 28;
3075 		iref_pulse_w = 1;
3076 		break;
3077 	default:
3078 		MISSING_CASE(refclk_khz);
3079 		return -EINVAL;
3080 	}
3081 
3082 	/*
3083 	 * tdc_res = 0.000003
3084 	 * tdc_targetcnt = int(2 / (tdc_res * 8 * 50 * 1.1) / refclk_mhz + 0.5)
3085 	 *
3086 	 * The multiplication by 1000 is due to refclk MHz to KHz conversion. It
3087 	 * was supposed to be a division, but we rearranged the operations of
3088 	 * the formula to avoid early divisions so we don't multiply the
3089 	 * rounding errors.
3090 	 *
3091 	 * 0.000003 * 8 * 50 * 1.1 = 0.00132, also known as 132 / 100000, which
3092 	 * we also rearrange to work with integers.
3093 	 *
3094 	 * The 0.5 transformed to 5 results in a multiplication by 10 and the
3095 	 * last division by 10.
3096 	 */
3097 	tdc_targetcnt = (2 * 1000 * 100000 * 10 / (132 * refclk_khz) + 5) / 10;
3098 
3099 	/*
3100 	 * Here we divide dco_khz by 10 in order to allow the dividend to fit in
3101 	 * 32 bits. That's not a problem since we round the division down
3102 	 * anyway.
3103 	 */
3104 	feedfwgain = (use_ssc || m2div_rem > 0) ?
3105 		m1div * 1000000 * 100 / (dco_khz * 3 / 10) : 0;
3106 
3107 	if (dco_khz >= 9000000) {
3108 		prop_coeff = 5;
3109 		int_coeff = 10;
3110 	} else {
3111 		prop_coeff = 4;
3112 		int_coeff = 8;
3113 	}
3114 
3115 	if (use_ssc) {
3116 		tmp = mul_u32_u32(dco_khz, 47 * 32);
3117 		do_div(tmp, refclk_khz * m1div * 10000);
3118 		ssc_stepsize = tmp;
3119 
3120 		tmp = mul_u32_u32(dco_khz, 1000);
3121 		ssc_steplen = DIV_ROUND_UP_ULL(tmp, 32 * 2 * 32);
3122 	} else {
3123 		ssc_stepsize = 0;
3124 		ssc_steplen = 0;
3125 	}
3126 	ssc_steplog = 4;
3127 
3128 	/* write pll_state calculations */
3129 	if (is_dkl) {
3130 		hw_state->mg_pll_div0 = DKL_PLL_DIV0_INTEG_COEFF(int_coeff) |
3131 					 DKL_PLL_DIV0_PROP_COEFF(prop_coeff) |
3132 					 DKL_PLL_DIV0_FBPREDIV(m1div) |
3133 					 DKL_PLL_DIV0_FBDIV_INT(m2div_int);
3134 		if (display->vbt.override_afc_startup) {
3135 			u8 val = display->vbt.override_afc_startup_val;
3136 
3137 			hw_state->mg_pll_div0 |= DKL_PLL_DIV0_AFC_STARTUP(val);
3138 		}
3139 
3140 		hw_state->mg_pll_div1 = DKL_PLL_DIV1_IREF_TRIM(iref_trim) |
3141 					 DKL_PLL_DIV1_TDC_TARGET_CNT(tdc_targetcnt);
3142 
3143 		hw_state->mg_pll_ssc = DKL_PLL_SSC_IREF_NDIV_RATIO(iref_ndiv) |
3144 					DKL_PLL_SSC_STEP_LEN(ssc_steplen) |
3145 					DKL_PLL_SSC_STEP_NUM(ssc_steplog) |
3146 					(use_ssc ? DKL_PLL_SSC_EN : 0);
3147 
3148 		hw_state->mg_pll_bias = (m2div_frac ? DKL_PLL_BIAS_FRAC_EN_H : 0) |
3149 					  DKL_PLL_BIAS_FBDIV_FRAC(m2div_frac);
3150 
3151 		hw_state->mg_pll_tdc_coldst_bias =
3152 				DKL_PLL_TDC_SSC_STEP_SIZE(ssc_stepsize) |
3153 				DKL_PLL_TDC_FEED_FWD_GAIN(feedfwgain);
3154 
3155 	} else {
3156 		hw_state->mg_pll_div0 =
3157 			(m2div_rem > 0 ? MG_PLL_DIV0_FRACNEN_H : 0) |
3158 			MG_PLL_DIV0_FBDIV_FRAC(m2div_frac) |
3159 			MG_PLL_DIV0_FBDIV_INT(m2div_int);
3160 
3161 		hw_state->mg_pll_div1 =
3162 			MG_PLL_DIV1_IREF_NDIVRATIO(iref_ndiv) |
3163 			MG_PLL_DIV1_DITHER_DIV_2 |
3164 			MG_PLL_DIV1_NDIVRATIO(1) |
3165 			MG_PLL_DIV1_FBPREDIV(m1div);
3166 
3167 		hw_state->mg_pll_lf =
3168 			MG_PLL_LF_TDCTARGETCNT(tdc_targetcnt) |
3169 			MG_PLL_LF_AFCCNTSEL_512 |
3170 			MG_PLL_LF_GAINCTRL(1) |
3171 			MG_PLL_LF_INT_COEFF(int_coeff) |
3172 			MG_PLL_LF_PROP_COEFF(prop_coeff);
3173 
3174 		hw_state->mg_pll_frac_lock =
3175 			MG_PLL_FRAC_LOCK_TRUELOCK_CRIT_32 |
3176 			MG_PLL_FRAC_LOCK_EARLYLOCK_CRIT_32 |
3177 			MG_PLL_FRAC_LOCK_LOCKTHRESH(10) |
3178 			MG_PLL_FRAC_LOCK_DCODITHEREN |
3179 			MG_PLL_FRAC_LOCK_FEEDFWRDGAIN(feedfwgain);
3180 		if (use_ssc || m2div_rem > 0)
3181 			hw_state->mg_pll_frac_lock |=
3182 				MG_PLL_FRAC_LOCK_FEEDFWRDCAL_EN;
3183 
3184 		hw_state->mg_pll_ssc =
3185 			(use_ssc ? MG_PLL_SSC_EN : 0) |
3186 			MG_PLL_SSC_TYPE(2) |
3187 			MG_PLL_SSC_STEPLENGTH(ssc_steplen) |
3188 			MG_PLL_SSC_STEPNUM(ssc_steplog) |
3189 			MG_PLL_SSC_FLLEN |
3190 			MG_PLL_SSC_STEPSIZE(ssc_stepsize);
3191 
3192 		hw_state->mg_pll_tdc_coldst_bias =
3193 			MG_PLL_TDC_COLDST_COLDSTART |
3194 			MG_PLL_TDC_COLDST_IREFINT_EN |
3195 			MG_PLL_TDC_COLDST_REFBIAS_START_PULSE_W(iref_pulse_w) |
3196 			MG_PLL_TDC_TDCOVCCORR_EN |
3197 			MG_PLL_TDC_TDCSEL(3);
3198 
3199 		hw_state->mg_pll_bias =
3200 			MG_PLL_BIAS_BIAS_GB_SEL(3) |
3201 			MG_PLL_BIAS_INIT_DCOAMP(0x3F) |
3202 			MG_PLL_BIAS_BIAS_BONUS(10) |
3203 			MG_PLL_BIAS_BIASCAL_EN |
3204 			MG_PLL_BIAS_CTRIM(12) |
3205 			MG_PLL_BIAS_VREF_RDAC(4) |
3206 			MG_PLL_BIAS_IREFTRIM(iref_trim);
3207 
3208 		if (refclk_khz == 38400) {
3209 			hw_state->mg_pll_tdc_coldst_bias_mask =
3210 				MG_PLL_TDC_COLDST_COLDSTART;
3211 			hw_state->mg_pll_bias_mask = 0;
3212 		} else {
3213 			hw_state->mg_pll_tdc_coldst_bias_mask = -1U;
3214 			hw_state->mg_pll_bias_mask = -1U;
3215 		}
3216 
3217 		hw_state->mg_pll_tdc_coldst_bias &=
3218 			hw_state->mg_pll_tdc_coldst_bias_mask;
3219 		hw_state->mg_pll_bias &= hw_state->mg_pll_bias_mask;
3220 	}
3221 
3222 	return 0;
3223 }
3224 
3225 static int icl_ddi_mg_pll_get_freq(struct intel_display *display,
3226 				   const struct intel_dpll *pll,
3227 				   const struct intel_dpll_hw_state *dpll_hw_state)
3228 {
3229 	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
3230 	u32 m1, m2_int, m2_frac, div1, div2, ref_clock;
3231 	u64 tmp;
3232 
3233 	ref_clock = display->dpll.ref_clks.nssc;
3234 
3235 	if (DISPLAY_VER(display) >= 12) {
3236 		m1 = hw_state->mg_pll_div0 & DKL_PLL_DIV0_FBPREDIV_MASK;
3237 		m1 = m1 >> DKL_PLL_DIV0_FBPREDIV_SHIFT;
3238 		m2_int = hw_state->mg_pll_div0 & DKL_PLL_DIV0_FBDIV_INT_MASK;
3239 
3240 		if (hw_state->mg_pll_bias & DKL_PLL_BIAS_FRAC_EN_H) {
3241 			m2_frac = hw_state->mg_pll_bias &
3242 				  DKL_PLL_BIAS_FBDIV_FRAC_MASK;
3243 			m2_frac = m2_frac >> DKL_PLL_BIAS_FBDIV_SHIFT;
3244 		} else {
3245 			m2_frac = 0;
3246 		}
3247 	} else {
3248 		m1 = hw_state->mg_pll_div1 & MG_PLL_DIV1_FBPREDIV_MASK;
3249 		m2_int = hw_state->mg_pll_div0 & MG_PLL_DIV0_FBDIV_INT_MASK;
3250 
3251 		if (hw_state->mg_pll_div0 & MG_PLL_DIV0_FRACNEN_H) {
3252 			m2_frac = hw_state->mg_pll_div0 &
3253 				  MG_PLL_DIV0_FBDIV_FRAC_MASK;
3254 			m2_frac = m2_frac >> MG_PLL_DIV0_FBDIV_FRAC_SHIFT;
3255 		} else {
3256 			m2_frac = 0;
3257 		}
3258 	}
3259 
3260 	switch (hw_state->mg_clktop2_hsclkctl &
3261 		MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK) {
3262 	case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_2:
3263 		div1 = 2;
3264 		break;
3265 	case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_3:
3266 		div1 = 3;
3267 		break;
3268 	case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_5:
3269 		div1 = 5;
3270 		break;
3271 	case MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_7:
3272 		div1 = 7;
3273 		break;
3274 	default:
3275 		MISSING_CASE(hw_state->mg_clktop2_hsclkctl);
3276 		return 0;
3277 	}
3278 
3279 	div2 = (hw_state->mg_clktop2_hsclkctl &
3280 		MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK) >>
3281 		MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_SHIFT;
3282 
3283 	/* div2 value of 0 is same as 1 means no div */
3284 	if (div2 == 0)
3285 		div2 = 1;
3286 
3287 	/*
3288 	 * Adjust the original formula to delay the division by 2^22 in order to
3289 	 * minimize possible rounding errors.
3290 	 */
3291 	tmp = (u64)m1 * m2_int * ref_clock +
3292 	      (((u64)m1 * m2_frac * ref_clock) >> 22);
3293 	tmp = div_u64(tmp, 5 * div1 * div2);
3294 
3295 	return tmp;
3296 }
3297 
3298 /**
3299  * icl_set_active_port_dpll - select the active port DPLL for a given CRTC
3300  * @crtc_state: state for the CRTC to select the DPLL for
3301  * @port_dpll_id: the active @port_dpll_id to select
3302  *
3303  * Select the given @port_dpll_id instance from the DPLLs reserved for the
3304  * CRTC.
3305  */
3306 void icl_set_active_port_dpll(struct intel_crtc_state *crtc_state,
3307 			      enum icl_port_dpll_id port_dpll_id)
3308 {
3309 	struct icl_port_dpll *port_dpll =
3310 		&crtc_state->icl_port_dplls[port_dpll_id];
3311 
3312 	crtc_state->intel_dpll = port_dpll->pll;
3313 	crtc_state->dpll_hw_state = port_dpll->hw_state;
3314 }
3315 
3316 static void icl_update_active_dpll(struct intel_atomic_state *state,
3317 				   struct intel_crtc *crtc,
3318 				   struct intel_encoder *encoder)
3319 {
3320 	struct intel_crtc_state *crtc_state =
3321 		intel_atomic_get_new_crtc_state(state, crtc);
3322 	struct intel_digital_port *primary_port;
3323 	enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT;
3324 
3325 	primary_port = encoder->type == INTEL_OUTPUT_DP_MST ?
3326 		enc_to_mst(encoder)->primary :
3327 		enc_to_dig_port(encoder);
3328 
3329 	if (primary_port &&
3330 	    (intel_tc_port_in_dp_alt_mode(primary_port) ||
3331 	     intel_tc_port_in_legacy_mode(primary_port)))
3332 		port_dpll_id = ICL_PORT_DPLL_MG_PHY;
3333 
3334 	icl_set_active_port_dpll(crtc_state, port_dpll_id);
3335 }
3336 
3337 static int icl_compute_combo_phy_dpll(struct intel_atomic_state *state,
3338 				      struct intel_crtc *crtc)
3339 {
3340 	struct intel_display *display = to_intel_display(state);
3341 	struct intel_crtc_state *crtc_state =
3342 		intel_atomic_get_new_crtc_state(state, crtc);
3343 	struct icl_port_dpll *port_dpll =
3344 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3345 	struct skl_wrpll_params pll_params = {};
3346 	int ret;
3347 
3348 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) ||
3349 	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI))
3350 		ret = icl_calc_wrpll(crtc_state, &pll_params);
3351 	else
3352 		ret = icl_calc_dp_combo_pll(crtc_state, &pll_params);
3353 
3354 	if (ret)
3355 		return ret;
3356 
3357 	icl_calc_dpll_state(display, &pll_params, &port_dpll->hw_state);
3358 
3359 	/* this is mainly for the fastset check */
3360 	icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);
3361 
3362 	crtc_state->port_clock = icl_ddi_combo_pll_get_freq(display, NULL,
3363 							    &port_dpll->hw_state);
3364 
3365 	return 0;
3366 }
3367 
3368 static int icl_get_combo_phy_dpll(struct intel_atomic_state *state,
3369 				  struct intel_crtc *crtc,
3370 				  struct intel_encoder *encoder)
3371 {
3372 	struct intel_display *display = to_intel_display(crtc);
3373 	struct intel_crtc_state *crtc_state =
3374 		intel_atomic_get_new_crtc_state(state, crtc);
3375 	struct icl_port_dpll *port_dpll =
3376 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3377 	enum port port = encoder->port;
3378 	unsigned long dpll_mask;
3379 
3380 	if (display->platform.alderlake_s) {
3381 		dpll_mask =
3382 			BIT(DPLL_ID_DG1_DPLL3) |
3383 			BIT(DPLL_ID_DG1_DPLL2) |
3384 			BIT(DPLL_ID_ICL_DPLL1) |
3385 			BIT(DPLL_ID_ICL_DPLL0);
3386 	} else if (display->platform.dg1) {
3387 		if (port == PORT_D || port == PORT_E) {
3388 			dpll_mask =
3389 				BIT(DPLL_ID_DG1_DPLL2) |
3390 				BIT(DPLL_ID_DG1_DPLL3);
3391 		} else {
3392 			dpll_mask =
3393 				BIT(DPLL_ID_DG1_DPLL0) |
3394 				BIT(DPLL_ID_DG1_DPLL1);
3395 		}
3396 	} else if (display->platform.rocketlake) {
3397 		dpll_mask =
3398 			BIT(DPLL_ID_EHL_DPLL4) |
3399 			BIT(DPLL_ID_ICL_DPLL1) |
3400 			BIT(DPLL_ID_ICL_DPLL0);
3401 	} else if ((display->platform.jasperlake ||
3402 		    display->platform.elkhartlake) &&
3403 		   port != PORT_A) {
3404 		dpll_mask =
3405 			BIT(DPLL_ID_EHL_DPLL4) |
3406 			BIT(DPLL_ID_ICL_DPLL1) |
3407 			BIT(DPLL_ID_ICL_DPLL0);
3408 	} else {
3409 		dpll_mask = BIT(DPLL_ID_ICL_DPLL1) | BIT(DPLL_ID_ICL_DPLL0);
3410 	}
3411 
3412 	/* Eliminate DPLLs from consideration if reserved by HTI */
3413 	dpll_mask &= ~intel_hti_dpll_mask(display);
3414 
3415 	port_dpll->pll = intel_find_dpll(state, crtc,
3416 					 &port_dpll->hw_state,
3417 					 dpll_mask);
3418 	if (!port_dpll->pll)
3419 		return -EINVAL;
3420 
3421 	intel_reference_dpll(state, crtc,
3422 			     port_dpll->pll, &port_dpll->hw_state);
3423 
3424 	icl_update_active_dpll(state, crtc, encoder);
3425 
3426 	return 0;
3427 }
3428 
3429 static int icl_compute_tc_phy_dplls(struct intel_atomic_state *state,
3430 				    struct intel_crtc *crtc)
3431 {
3432 	struct intel_display *display = to_intel_display(state);
3433 	struct intel_crtc_state *crtc_state =
3434 		intel_atomic_get_new_crtc_state(state, crtc);
3435 	const struct intel_crtc_state *old_crtc_state =
3436 		intel_atomic_get_old_crtc_state(state, crtc);
3437 	struct icl_port_dpll *port_dpll =
3438 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3439 	struct skl_wrpll_params pll_params = {};
3440 	int ret;
3441 
3442 	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3443 	ret = icl_calc_tbt_pll(crtc_state, &pll_params);
3444 	if (ret)
3445 		return ret;
3446 
3447 	icl_calc_dpll_state(display, &pll_params, &port_dpll->hw_state);
3448 
3449 	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
3450 	ret = icl_calc_mg_pll_state(crtc_state, &port_dpll->hw_state);
3451 	if (ret)
3452 		return ret;
3453 
3454 	/* this is mainly for the fastset check */
3455 	if (old_crtc_state->intel_dpll &&
3456 	    old_crtc_state->intel_dpll->info->id == DPLL_ID_ICL_TBTPLL)
3457 		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);
3458 	else
3459 		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_MG_PHY);
3460 
3461 	crtc_state->port_clock = icl_ddi_mg_pll_get_freq(display, NULL,
3462 							 &port_dpll->hw_state);
3463 
3464 	return 0;
3465 }
3466 
3467 static int icl_get_tc_phy_dplls(struct intel_atomic_state *state,
3468 				struct intel_crtc *crtc,
3469 				struct intel_encoder *encoder)
3470 {
3471 	struct intel_crtc_state *crtc_state =
3472 		intel_atomic_get_new_crtc_state(state, crtc);
3473 	struct icl_port_dpll *port_dpll =
3474 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3475 	enum intel_dpll_id dpll_id;
3476 	int ret;
3477 
3478 	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3479 	port_dpll->pll = intel_find_dpll(state, crtc,
3480 					 &port_dpll->hw_state,
3481 					 BIT(DPLL_ID_ICL_TBTPLL));
3482 	if (!port_dpll->pll)
3483 		return -EINVAL;
3484 	intel_reference_dpll(state, crtc,
3485 			     port_dpll->pll, &port_dpll->hw_state);
3486 
3487 	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
3488 	dpll_id = icl_tc_port_to_pll_id(intel_encoder_to_tc(encoder));
3489 	port_dpll->pll = intel_find_dpll(state, crtc,
3490 					 &port_dpll->hw_state,
3491 					 BIT(dpll_id));
3492 	if (!port_dpll->pll) {
3493 		ret = -EINVAL;
3494 		goto err_unreference_tbt_pll;
3495 	}
3496 	intel_reference_dpll(state, crtc,
3497 			     port_dpll->pll, &port_dpll->hw_state);
3498 
3499 	icl_update_active_dpll(state, crtc, encoder);
3500 
3501 	return 0;
3502 
3503 err_unreference_tbt_pll:
3504 	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3505 	intel_unreference_dpll(state, crtc, port_dpll->pll);
3506 
3507 	return ret;
3508 }
3509 
3510 /*
3511  * Get the PLL for either a port using a C10 PHY PLL, or for a port using a
3512  * C20 PHY PLL in the cases of:
3513  * - BMG port A/B
3514  * - PTL port B eDP over TypeC PHY
3515  */
3516 static int mtl_get_non_tc_phy_dpll(struct intel_atomic_state *state,
3517 				      struct intel_crtc *crtc,
3518 				      struct intel_encoder *encoder)
3519 {
3520 	struct intel_display *display = to_intel_display(crtc);
3521 	struct intel_crtc_state *crtc_state =
3522 		intel_atomic_get_new_crtc_state(state, crtc);
3523 	struct icl_port_dpll *port_dpll =
3524 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
3525 	enum intel_dpll_id pll_id = mtl_port_to_pll_id(display, encoder->port);
3526 
3527 	port_dpll->pll = intel_find_dpll(state, crtc,
3528 					 &port_dpll->hw_state,
3529 					 BIT(pll_id));
3530 	if (!port_dpll->pll)
3531 		return -EINVAL;
3532 
3533 	intel_reference_dpll(state, crtc,
3534 			     port_dpll->pll, &port_dpll->hw_state);
3535 
3536 	icl_update_active_dpll(state, crtc, encoder);
3537 
3538 	return 0;
3539 }
3540 
3541 static int icl_compute_dplls(struct intel_atomic_state *state,
3542 			     struct intel_crtc *crtc,
3543 			     struct intel_encoder *encoder)
3544 {
3545 	if (intel_encoder_is_combo(encoder))
3546 		return icl_compute_combo_phy_dpll(state, crtc);
3547 	else if (intel_encoder_is_tc(encoder))
3548 		return icl_compute_tc_phy_dplls(state, crtc);
3549 
3550 	MISSING_CASE(encoder->port);
3551 
3552 	return 0;
3553 }
3554 
3555 static int icl_get_dplls(struct intel_atomic_state *state,
3556 			 struct intel_crtc *crtc,
3557 			 struct intel_encoder *encoder)
3558 {
3559 	if (intel_encoder_is_combo(encoder))
3560 		return icl_get_combo_phy_dpll(state, crtc, encoder);
3561 	else if (intel_encoder_is_tc(encoder))
3562 		return icl_get_tc_phy_dplls(state, crtc, encoder);
3563 
3564 	MISSING_CASE(encoder->port);
3565 
3566 	return -EINVAL;
3567 }
3568 
3569 static void icl_put_dplls(struct intel_atomic_state *state,
3570 			  struct intel_crtc *crtc)
3571 {
3572 	const struct intel_crtc_state *old_crtc_state =
3573 		intel_atomic_get_old_crtc_state(state, crtc);
3574 	struct intel_crtc_state *new_crtc_state =
3575 		intel_atomic_get_new_crtc_state(state, crtc);
3576 	enum icl_port_dpll_id id;
3577 
3578 	new_crtc_state->intel_dpll = NULL;
3579 
3580 	for (id = ICL_PORT_DPLL_DEFAULT; id < ICL_PORT_DPLL_COUNT; id++) {
3581 		const struct icl_port_dpll *old_port_dpll =
3582 			&old_crtc_state->icl_port_dplls[id];
3583 		struct icl_port_dpll *new_port_dpll =
3584 			&new_crtc_state->icl_port_dplls[id];
3585 
3586 		new_port_dpll->pll = NULL;
3587 
3588 		if (!old_port_dpll->pll)
3589 			continue;
3590 
3591 		intel_unreference_dpll(state, crtc, old_port_dpll->pll);
3592 	}
3593 }
3594 
/*
 * Read out the MG PHY PLL state for an ICL TypeC port. Fills
 * @dpll_hw_state and returns true if the PLL is enabled; returns false if
 * display power is unavailable or the PLL is off. Reserved register bits
 * are masked off so the result can be compared against computed state.
 */
static bool mg_pll_get_hw_state(struct intel_display *display,
				struct intel_dpll *pll,
				struct intel_dpll_hw_state *dpll_hw_state)
{
	struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
	const enum intel_dpll_id id = pll->info->id;
	enum tc_port tc_port = icl_pll_id_to_tc_port(id);
	struct ref_tracker *wakeref;
	bool ret = false;
	u32 val;

	i915_reg_t enable_reg = intel_tc_pll_enable_reg(display, pll);

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, enable_reg);
	if (!(val & PLL_ENABLE))
		goto out;

	/* Keep only the fields we program; the rest are reserved. */
	hw_state->mg_refclkin_ctl = intel_de_read(display,
						  MG_REFCLKIN_CTL(tc_port));
	hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;

	hw_state->mg_clktop2_coreclkctl1 =
		intel_de_read(display, MG_CLKTOP2_CORECLKCTL1(tc_port));
	hw_state->mg_clktop2_coreclkctl1 &=
		MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;

	hw_state->mg_clktop2_hsclkctl =
		intel_de_read(display, MG_CLKTOP2_HSCLKCTL(tc_port));
	hw_state->mg_clktop2_hsclkctl &=
		MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
		MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
		MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
		MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;

	hw_state->mg_pll_div0 = intel_de_read(display, MG_PLL_DIV0(tc_port));
	hw_state->mg_pll_div1 = intel_de_read(display, MG_PLL_DIV1(tc_port));
	hw_state->mg_pll_lf = intel_de_read(display, MG_PLL_LF(tc_port));
	hw_state->mg_pll_frac_lock = intel_de_read(display,
						   MG_PLL_FRAC_LOCK(tc_port));
	hw_state->mg_pll_ssc = intel_de_read(display, MG_PLL_SSC(tc_port));

	hw_state->mg_pll_bias = intel_de_read(display, MG_PLL_BIAS(tc_port));
	hw_state->mg_pll_tdc_coldst_bias =
		intel_de_read(display, MG_PLL_TDC_COLDST_BIAS(tc_port));

	/*
	 * Use restricted compare masks for a 38.4 MHz refclk, matching the
	 * masks set up by icl_calc_mg_pll_state().
	 */
	if (display->dpll.ref_clks.nssc == 38400) {
		hw_state->mg_pll_tdc_coldst_bias_mask = MG_PLL_TDC_COLDST_COLDSTART;
		hw_state->mg_pll_bias_mask = 0;
	} else {
		hw_state->mg_pll_tdc_coldst_bias_mask = -1U;
		hw_state->mg_pll_bias_mask = -1U;
	}

	hw_state->mg_pll_tdc_coldst_bias &= hw_state->mg_pll_tdc_coldst_bias_mask;
	hw_state->mg_pll_bias &= hw_state->mg_pll_bias_mask;

	ret = true;
out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);
	return ret;
}
3661 
/*
 * Read out the Dekel PHY PLL state for a TGL+ TypeC port. Fills
 * @dpll_hw_state and returns true if the PLL is enabled; returns false if
 * display power is unavailable or the PLL is off. Each value is masked to
 * the fields we program so it can be compared against computed state.
 */
static bool dkl_pll_get_hw_state(struct intel_display *display,
				 struct intel_dpll *pll,
				 struct intel_dpll_hw_state *dpll_hw_state)
{
	struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
	const enum intel_dpll_id id = pll->info->id;
	enum tc_port tc_port = icl_pll_id_to_tc_port(id);
	struct ref_tracker *wakeref;
	bool ret = false;
	u32 val;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, intel_tc_pll_enable_reg(display, pll));
	if (!(val & PLL_ENABLE))
		goto out;

	/*
	 * All registers read here have the same HIP_INDEX_REG even though
	 * they are on different building blocks
	 */
	hw_state->mg_refclkin_ctl = intel_dkl_phy_read(display,
						       DKL_REFCLKIN_CTL(tc_port));
	hw_state->mg_refclkin_ctl &= MG_REFCLKIN_CTL_OD_2_MUX_MASK;

	hw_state->mg_clktop2_hsclkctl =
		intel_dkl_phy_read(display, DKL_CLKTOP2_HSCLKCTL(tc_port));
	hw_state->mg_clktop2_hsclkctl &=
		MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
		MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
		MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
		MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK;

	hw_state->mg_clktop2_coreclkctl1 =
		intel_dkl_phy_read(display, DKL_CLKTOP2_CORECLKCTL1(tc_port));
	hw_state->mg_clktop2_coreclkctl1 &=
		MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;

	/* AFC startup bits are only compared when the VBT overrides them. */
	hw_state->mg_pll_div0 = intel_dkl_phy_read(display, DKL_PLL_DIV0(tc_port));
	val = DKL_PLL_DIV0_MASK;
	if (display->vbt.override_afc_startup)
		val |= DKL_PLL_DIV0_AFC_STARTUP_MASK;
	hw_state->mg_pll_div0 &= val;

	hw_state->mg_pll_div1 = intel_dkl_phy_read(display, DKL_PLL_DIV1(tc_port));
	hw_state->mg_pll_div1 &= (DKL_PLL_DIV1_IREF_TRIM_MASK |
				  DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);

	hw_state->mg_pll_ssc = intel_dkl_phy_read(display, DKL_PLL_SSC(tc_port));
	hw_state->mg_pll_ssc &= (DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
				 DKL_PLL_SSC_STEP_LEN_MASK |
				 DKL_PLL_SSC_STEP_NUM_MASK |
				 DKL_PLL_SSC_EN);

	hw_state->mg_pll_bias = intel_dkl_phy_read(display, DKL_PLL_BIAS(tc_port));
	hw_state->mg_pll_bias &= (DKL_PLL_BIAS_FRAC_EN_H |
				  DKL_PLL_BIAS_FBDIV_FRAC_MASK);

	hw_state->mg_pll_tdc_coldst_bias =
		intel_dkl_phy_read(display, DKL_PLL_TDC_COLDST_BIAS(tc_port));
	hw_state->mg_pll_tdc_coldst_bias &= (DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
					     DKL_PLL_TDC_FEED_FWD_GAIN_MASK);

	ret = true;
out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);
	return ret;
}
3733 
/*
 * Common readout for combo/TBT PLLs: if the PLL at @enable_reg is
 * enabled, read the platform specific CFGCR0/CFGCR1 (and, on
 * DISPLAY_VER >= 12 with a VBT AFC override, DIV0) registers into
 * @dpll_hw_state and return true. Returns false if display power is
 * unavailable or the PLL is off.
 */
static bool icl_pll_get_hw_state(struct intel_display *display,
				 struct intel_dpll *pll,
				 struct intel_dpll_hw_state *dpll_hw_state,
				 i915_reg_t enable_reg)
{
	struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
	const enum intel_dpll_id id = pll->info->id;
	struct ref_tracker *wakeref;
	bool ret = false;
	u32 val;

	wakeref = intel_display_power_get_if_enabled(display,
						     POWER_DOMAIN_DISPLAY_CORE);
	if (!wakeref)
		return false;

	val = intel_de_read(display, enable_reg);
	if (!(val & PLL_ENABLE))
		goto out;

	/* CFGCR register offsets differ per platform. */
	if (display->platform.alderlake_s) {
		hw_state->cfgcr0 = intel_de_read(display, ADLS_DPLL_CFGCR0(id));
		hw_state->cfgcr1 = intel_de_read(display, ADLS_DPLL_CFGCR1(id));
	} else if (display->platform.dg1) {
		hw_state->cfgcr0 = intel_de_read(display, DG1_DPLL_CFGCR0(id));
		hw_state->cfgcr1 = intel_de_read(display, DG1_DPLL_CFGCR1(id));
	} else if (display->platform.rocketlake) {
		hw_state->cfgcr0 = intel_de_read(display,
						 RKL_DPLL_CFGCR0(id));
		hw_state->cfgcr1 = intel_de_read(display,
						 RKL_DPLL_CFGCR1(id));
	} else if (DISPLAY_VER(display) >= 12) {
		hw_state->cfgcr0 = intel_de_read(display,
						 TGL_DPLL_CFGCR0(id));
		hw_state->cfgcr1 = intel_de_read(display,
						 TGL_DPLL_CFGCR1(id));
		/* DIV0 is only read out when the VBT overrides AFC startup. */
		if (display->vbt.override_afc_startup) {
			hw_state->div0 = intel_de_read(display, TGL_DPLL0_DIV0(id));
			hw_state->div0 &= TGL_DPLL0_DIV0_AFC_STARTUP_MASK;
		}
	} else {
		/* On JSL/EHL, DPLL4's CFGCR registers live at index 4. */
		if ((display->platform.jasperlake || display->platform.elkhartlake) &&
		    id == DPLL_ID_EHL_DPLL4) {
			hw_state->cfgcr0 = intel_de_read(display,
							 ICL_DPLL_CFGCR0(4));
			hw_state->cfgcr1 = intel_de_read(display,
							 ICL_DPLL_CFGCR1(4));
		} else {
			hw_state->cfgcr0 = intel_de_read(display,
							 ICL_DPLL_CFGCR0(id));
			hw_state->cfgcr1 = intel_de_read(display,
							 ICL_DPLL_CFGCR1(id));
		}
	}

	ret = true;
out:
	intel_display_power_put(display, POWER_DOMAIN_DISPLAY_CORE, wakeref);
	return ret;
}
3794 
3795 static bool combo_pll_get_hw_state(struct intel_display *display,
3796 				   struct intel_dpll *pll,
3797 				   struct intel_dpll_hw_state *dpll_hw_state)
3798 {
3799 	i915_reg_t enable_reg = intel_combo_pll_enable_reg(display, pll);
3800 
3801 	return icl_pll_get_hw_state(display, pll, dpll_hw_state, enable_reg);
3802 }
3803 
3804 static bool icl_tbt_pll_get_hw_state(struct intel_display *display,
3805 				     struct intel_dpll *pll,
3806 				     struct intel_dpll_hw_state *dpll_hw_state)
3807 {
3808 	return icl_pll_get_hw_state(display, pll, dpll_hw_state, TBT_PLL_ENABLE);
3809 }
3810 
/*
 * Program the combo/TBT PLL configuration registers from @hw_state. The
 * CFGCR register offsets vary per platform; DIV0 (the AFC startup
 * override) is only programmed on DISPLAY_VER >= 12 and only when the VBT
 * requests it.
 */
static void icl_dpll_write(struct intel_display *display,
			   struct intel_dpll *pll,
			   const struct icl_dpll_hw_state *hw_state)
{
	const enum intel_dpll_id id = pll->info->id;
	i915_reg_t cfgcr0_reg, cfgcr1_reg, div0_reg = INVALID_MMIO_REG;

	if (display->platform.alderlake_s) {
		cfgcr0_reg = ADLS_DPLL_CFGCR0(id);
		cfgcr1_reg = ADLS_DPLL_CFGCR1(id);
	} else if (display->platform.dg1) {
		cfgcr0_reg = DG1_DPLL_CFGCR0(id);
		cfgcr1_reg = DG1_DPLL_CFGCR1(id);
	} else if (display->platform.rocketlake) {
		cfgcr0_reg = RKL_DPLL_CFGCR0(id);
		cfgcr1_reg = RKL_DPLL_CFGCR1(id);
	} else if (DISPLAY_VER(display) >= 12) {
		cfgcr0_reg = TGL_DPLL_CFGCR0(id);
		cfgcr1_reg = TGL_DPLL_CFGCR1(id);
		div0_reg = TGL_DPLL0_DIV0(id);
	} else {
		/* On JSL/EHL, DPLL4's CFGCR registers live at index 4. */
		if ((display->platform.jasperlake || display->platform.elkhartlake) &&
		    id == DPLL_ID_EHL_DPLL4) {
			cfgcr0_reg = ICL_DPLL_CFGCR0(4);
			cfgcr1_reg = ICL_DPLL_CFGCR1(4);
		} else {
			cfgcr0_reg = ICL_DPLL_CFGCR0(id);
			cfgcr1_reg = ICL_DPLL_CFGCR1(id);
		}
	}

	intel_de_write(display, cfgcr0_reg, hw_state->cfgcr0);
	intel_de_write(display, cfgcr1_reg, hw_state->cfgcr1);
	/* An AFC override without a DIV0 register indicates a bug. */
	drm_WARN_ON_ONCE(display->drm, display->vbt.override_afc_startup &&
			 !i915_mmio_reg_valid(div0_reg));
	if (display->vbt.override_afc_startup &&
	    i915_mmio_reg_valid(div0_reg))
		intel_de_rmw(display, div0_reg,
			     TGL_DPLL0_DIV0_AFC_STARTUP_MASK, hw_state->div0);
	intel_de_posting_read(display, cfgcr1_reg);
}
3852 
/*
 * Program the MG PHY PLL registers for an ICL TypeC port from @hw_state.
 */
static void icl_mg_pll_write(struct intel_display *display,
			     struct intel_dpll *pll,
			     const struct icl_dpll_hw_state *hw_state)
{
	enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);

	/*
	 * Some of the following registers have reserved fields, so program
	 * these with RMW based on a mask. The mask can be fixed or generated
	 * during the calc/readout phase if the mask depends on some other HW
	 * state like refclk, see icl_calc_mg_pll_state().
	 */
	intel_de_rmw(display, MG_REFCLKIN_CTL(tc_port),
		     MG_REFCLKIN_CTL_OD_2_MUX_MASK, hw_state->mg_refclkin_ctl);

	intel_de_rmw(display, MG_CLKTOP2_CORECLKCTL1(tc_port),
		     MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK,
		     hw_state->mg_clktop2_coreclkctl1);

	intel_de_rmw(display, MG_CLKTOP2_HSCLKCTL(tc_port),
		     MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
		     MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
		     MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
		     MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK,
		     hw_state->mg_clktop2_hsclkctl);

	/* These registers are fully owned by us, so a plain write is fine. */
	intel_de_write(display, MG_PLL_DIV0(tc_port), hw_state->mg_pll_div0);
	intel_de_write(display, MG_PLL_DIV1(tc_port), hw_state->mg_pll_div1);
	intel_de_write(display, MG_PLL_LF(tc_port), hw_state->mg_pll_lf);
	intel_de_write(display, MG_PLL_FRAC_LOCK(tc_port),
		       hw_state->mg_pll_frac_lock);
	intel_de_write(display, MG_PLL_SSC(tc_port), hw_state->mg_pll_ssc);

	intel_de_rmw(display, MG_PLL_BIAS(tc_port),
		     hw_state->mg_pll_bias_mask, hw_state->mg_pll_bias);

	intel_de_rmw(display, MG_PLL_TDC_COLDST_BIAS(tc_port),
		     hw_state->mg_pll_tdc_coldst_bias_mask,
		     hw_state->mg_pll_tdc_coldst_bias);

	intel_de_posting_read(display, MG_PLL_TDC_COLDST_BIAS(tc_port));
}
3895 
/*
 * Program the Dekel PHY PLL registers for a TGL+ TypeC port from
 * @hw_state. Every register is updated read-modify-write so fields
 * outside our masks are preserved.
 */
static void dkl_pll_write(struct intel_display *display,
			  struct intel_dpll *pll,
			  const struct icl_dpll_hw_state *hw_state)
{
	enum tc_port tc_port = icl_pll_id_to_tc_port(pll->info->id);
	u32 val;

	/*
	 * All registers programmed here have the same HIP_INDEX_REG even
	 * though on different building block
	 */
	/* All the registers are RMW */
	val = intel_dkl_phy_read(display, DKL_REFCLKIN_CTL(tc_port));
	val &= ~MG_REFCLKIN_CTL_OD_2_MUX_MASK;
	val |= hw_state->mg_refclkin_ctl;
	intel_dkl_phy_write(display, DKL_REFCLKIN_CTL(tc_port), val);

	val = intel_dkl_phy_read(display, DKL_CLKTOP2_CORECLKCTL1(tc_port));
	val &= ~MG_CLKTOP2_CORECLKCTL1_A_DIVRATIO_MASK;
	val |= hw_state->mg_clktop2_coreclkctl1;
	intel_dkl_phy_write(display, DKL_CLKTOP2_CORECLKCTL1(tc_port), val);

	val = intel_dkl_phy_read(display, DKL_CLKTOP2_HSCLKCTL(tc_port));
	val &= ~(MG_CLKTOP2_HSCLKCTL_TLINEDRV_CLKSEL_MASK |
		 MG_CLKTOP2_HSCLKCTL_CORE_INPUTSEL_MASK |
		 MG_CLKTOP2_HSCLKCTL_HSDIV_RATIO_MASK |
		 MG_CLKTOP2_HSCLKCTL_DSDIV_RATIO_MASK);
	val |= hw_state->mg_clktop2_hsclkctl;
	intel_dkl_phy_write(display, DKL_CLKTOP2_HSCLKCTL(tc_port), val);

	/* The AFC startup bits are only touched when the VBT overrides them. */
	val = DKL_PLL_DIV0_MASK;
	if (display->vbt.override_afc_startup)
		val |= DKL_PLL_DIV0_AFC_STARTUP_MASK;
	intel_dkl_phy_rmw(display, DKL_PLL_DIV0(tc_port), val,
			  hw_state->mg_pll_div0);

	val = intel_dkl_phy_read(display, DKL_PLL_DIV1(tc_port));
	val &= ~(DKL_PLL_DIV1_IREF_TRIM_MASK |
		 DKL_PLL_DIV1_TDC_TARGET_CNT_MASK);
	val |= hw_state->mg_pll_div1;
	intel_dkl_phy_write(display, DKL_PLL_DIV1(tc_port), val);

	val = intel_dkl_phy_read(display, DKL_PLL_SSC(tc_port));
	val &= ~(DKL_PLL_SSC_IREF_NDIV_RATIO_MASK |
		 DKL_PLL_SSC_STEP_LEN_MASK |
		 DKL_PLL_SSC_STEP_NUM_MASK |
		 DKL_PLL_SSC_EN);
	val |= hw_state->mg_pll_ssc;
	intel_dkl_phy_write(display, DKL_PLL_SSC(tc_port), val);

	val = intel_dkl_phy_read(display, DKL_PLL_BIAS(tc_port));
	val &= ~(DKL_PLL_BIAS_FRAC_EN_H |
		 DKL_PLL_BIAS_FBDIV_FRAC_MASK);
	val |= hw_state->mg_pll_bias;
	intel_dkl_phy_write(display, DKL_PLL_BIAS(tc_port), val);

	val = intel_dkl_phy_read(display, DKL_PLL_TDC_COLDST_BIAS(tc_port));
	val &= ~(DKL_PLL_TDC_SSC_STEP_SIZE_MASK |
		 DKL_PLL_TDC_FEED_FWD_GAIN_MASK);
	val |= hw_state->mg_pll_tdc_coldst_bias;
	intel_dkl_phy_write(display, DKL_PLL_TDC_COLDST_BIAS(tc_port), val);

	intel_dkl_phy_posting_read(display, DKL_PLL_TDC_COLDST_BIAS(tc_port));
}
3960 
3961 static void icl_pll_power_enable(struct intel_display *display,
3962 				 struct intel_dpll *pll,
3963 				 i915_reg_t enable_reg)
3964 {
3965 	intel_de_rmw(display, enable_reg, 0, PLL_POWER_ENABLE);
3966 
3967 	/*
3968 	 * The spec says we need to "wait" but it also says it should be
3969 	 * immediate.
3970 	 */
3971 	if (intel_de_wait_for_set_ms(display, enable_reg, PLL_POWER_STATE, 1))
3972 		drm_err(display->drm, "PLL %d Power not enabled\n",
3973 			pll->info->id);
3974 }
3975 
3976 static void icl_pll_enable(struct intel_display *display,
3977 			   struct intel_dpll *pll,
3978 			   i915_reg_t enable_reg)
3979 {
3980 	intel_de_rmw(display, enable_reg, 0, PLL_ENABLE);
3981 
3982 	/* Timeout is actually 600us. */
3983 	if (intel_de_wait_for_set_ms(display, enable_reg, PLL_LOCK, 1))
3984 		drm_err(display->drm, "PLL %d not locked\n", pll->info->id);
3985 }
3986 
/* Disable CMTG clock gating on ADL-P A0 while DPLL0 is enabled (Wa_16011069516). */
static void adlp_cmtg_clock_gating_wa(struct intel_display *display, struct intel_dpll *pll)
{
	u32 val;

	/* Only ADL-P display steppings A0..B0 and only DPLL0 are affected. */
	if (!(display->platform.alderlake_p && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) ||
	    pll->info->id != DPLL_ID_ICL_DPLL0)
		return;
	/*
	 * Wa_16011069516:adl-p[a0]
	 *
	 * All CMTG regs are unreliable until CMTG clock gating is disabled,
	 * so we can only assume the default TRANS_CMTG_CHICKEN reg value and
	 * sanity check this assumption with a double read, which presumably
	 * returns the correct value even with clock gating on.
	 *
	 * Instead of the usual place for workarounds we apply this one here,
	 * since TRANS_CMTG_CHICKEN is only accessible while DPLL0 is enabled.
	 */
	/* First read of the double read; its value is intentionally discarded. */
	val = intel_de_read(display, TRANS_CMTG_CHICKEN);
	/*
	 * NOTE: relies on intel_de_rmw() returning the value it read before
	 * modification - that is the second read of the double read above.
	 */
	val = intel_de_rmw(display, TRANS_CMTG_CHICKEN, ~0, DISABLE_DPT_CLK_GATING);
	if (drm_WARN_ON(display->drm, val & ~DISABLE_DPT_CLK_GATING))
		drm_dbg_kms(display->drm, "Unexpected flags in TRANS_CMTG_CHICKEN: %08x\n", val);
}
4010 
4011 static void combo_pll_enable(struct intel_display *display,
4012 			     struct intel_dpll *pll,
4013 			     const struct intel_dpll_hw_state *dpll_hw_state)
4014 {
4015 	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
4016 	i915_reg_t enable_reg = intel_combo_pll_enable_reg(display, pll);
4017 
4018 	icl_pll_power_enable(display, pll, enable_reg);
4019 
4020 	icl_dpll_write(display, pll, hw_state);
4021 
4022 	/*
4023 	 * DVFS pre sequence would be here, but in our driver the cdclk code
4024 	 * paths should already be setting the appropriate voltage, hence we do
4025 	 * nothing here.
4026 	 */
4027 
4028 	icl_pll_enable(display, pll, enable_reg);
4029 
4030 	adlp_cmtg_clock_gating_wa(display, pll);
4031 
4032 	/* DVFS post sequence would be here. See the comment above. */
4033 }
4034 
4035 static void icl_tbt_pll_enable(struct intel_display *display,
4036 			       struct intel_dpll *pll,
4037 			       const struct intel_dpll_hw_state *dpll_hw_state)
4038 {
4039 	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
4040 
4041 	icl_pll_power_enable(display, pll, TBT_PLL_ENABLE);
4042 
4043 	icl_dpll_write(display, pll, hw_state);
4044 
4045 	/*
4046 	 * DVFS pre sequence would be here, but in our driver the cdclk code
4047 	 * paths should already be setting the appropriate voltage, hence we do
4048 	 * nothing here.
4049 	 */
4050 
4051 	icl_pll_enable(display, pll, TBT_PLL_ENABLE);
4052 
4053 	/* DVFS post sequence would be here. See the comment above. */
4054 }
4055 
4056 static void mg_pll_enable(struct intel_display *display,
4057 			  struct intel_dpll *pll,
4058 			  const struct intel_dpll_hw_state *dpll_hw_state)
4059 {
4060 	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
4061 	i915_reg_t enable_reg = intel_tc_pll_enable_reg(display, pll);
4062 
4063 	icl_pll_power_enable(display, pll, enable_reg);
4064 
4065 	if (DISPLAY_VER(display) >= 12)
4066 		dkl_pll_write(display, pll, hw_state);
4067 	else
4068 		icl_mg_pll_write(display, pll, hw_state);
4069 
4070 	/*
4071 	 * DVFS pre sequence would be here, but in our driver the cdclk code
4072 	 * paths should already be setting the appropriate voltage, hence we do
4073 	 * nothing here.
4074 	 */
4075 
4076 	icl_pll_enable(display, pll, enable_reg);
4077 
4078 	/* DVFS post sequence would be here. See the comment above. */
4079 }
4080 
/*
 * Common ICL+ PLL disable sequence: disable the PLL, wait for lock to
 * drop, then remove power and wait for the power state to clear.
 */
static void icl_pll_disable(struct intel_display *display,
			    struct intel_dpll *pll,
			    i915_reg_t enable_reg)
{
	/* The first steps are done by intel_ddi_post_disable(). */

	/*
	 * DVFS pre sequence would be here, but in our driver the cdclk code
	 * paths should already be setting the appropriate voltage, hence we do
	 * nothing here.
	 */

	intel_de_rmw(display, enable_reg, PLL_ENABLE, 0);

	/* Timeout is actually 1us. */
	if (intel_de_wait_for_clear_ms(display, enable_reg, PLL_LOCK, 1))
		drm_err(display->drm, "PLL %d locked\n", pll->info->id);

	/* DVFS post sequence would be here. See the comment above. */

	intel_de_rmw(display, enable_reg, PLL_POWER_ENABLE, 0);

	/*
	 * The spec says we need to "wait" but it also says it should be
	 * immediate.
	 */
	if (intel_de_wait_for_clear_ms(display, enable_reg, PLL_POWER_STATE, 1))
		drm_err(display->drm, "PLL %d Power not disabled\n",
			pll->info->id);
}
4111 
/* Disable a combo PHY PLL via the common ICL+ disable sequence. */
static void combo_pll_disable(struct intel_display *display,
			      struct intel_dpll *pll)
{
	icl_pll_disable(display, pll,
			intel_combo_pll_enable_reg(display, pll));
}
4119 
/* Disable the Thunderbolt PLL via the common ICL+ disable sequence. */
static void icl_tbt_pll_disable(struct intel_display *display,
				struct intel_dpll *pll)
{
	icl_pll_disable(display, pll, TBT_PLL_ENABLE);
}
4125 
/* Disable a TC port (MG/DKL PHY) PLL via the common ICL+ disable sequence. */
static void mg_pll_disable(struct intel_display *display,
			   struct intel_dpll *pll)
{
	icl_pll_disable(display, pll,
			intel_tc_pll_enable_reg(display, pll));
}
4133 
/* Derive the non-SSC DPLL reference clock from the cdclk reference. */
static void icl_update_dpll_ref_clks(struct intel_display *display)
{
	/* No SSC ref */
	display->dpll.ref_clks.nssc = display->cdclk.hw.ref;
}
4139 
4140 static void icl_dump_hw_state(struct drm_printer *p,
4141 			      const struct intel_dpll_hw_state *dpll_hw_state)
4142 {
4143 	const struct icl_dpll_hw_state *hw_state = &dpll_hw_state->icl;
4144 
4145 	drm_printf(p, "dpll_hw_state: cfgcr0: 0x%x, cfgcr1: 0x%x, div0: 0x%x, "
4146 		   "mg_refclkin_ctl: 0x%x, hg_clktop2_coreclkctl1: 0x%x, "
4147 		   "mg_clktop2_hsclkctl: 0x%x, mg_pll_div0: 0x%x, "
4148 		   "mg_pll_div2: 0x%x, mg_pll_lf: 0x%x, "
4149 		   "mg_pll_frac_lock: 0x%x, mg_pll_ssc: 0x%x, "
4150 		   "mg_pll_bias: 0x%x, mg_pll_tdc_coldst_bias: 0x%x\n",
4151 		   hw_state->cfgcr0, hw_state->cfgcr1, hw_state->div0,
4152 		   hw_state->mg_refclkin_ctl,
4153 		   hw_state->mg_clktop2_coreclkctl1,
4154 		   hw_state->mg_clktop2_hsclkctl,
4155 		   hw_state->mg_pll_div0,
4156 		   hw_state->mg_pll_div1,
4157 		   hw_state->mg_pll_lf,
4158 		   hw_state->mg_pll_frac_lock,
4159 		   hw_state->mg_pll_ssc,
4160 		   hw_state->mg_pll_bias,
4161 		   hw_state->mg_pll_tdc_coldst_bias);
4162 }
4163 
4164 static bool icl_compare_hw_state(const struct intel_dpll_hw_state *_a,
4165 				 const struct intel_dpll_hw_state *_b)
4166 {
4167 	const struct icl_dpll_hw_state *a = &_a->icl;
4168 	const struct icl_dpll_hw_state *b = &_b->icl;
4169 
4170 	/* FIXME split combo vs. mg more thoroughly */
4171 	return a->cfgcr0 == b->cfgcr0 &&
4172 		a->cfgcr1 == b->cfgcr1 &&
4173 		a->div0 == b->div0 &&
4174 		a->mg_refclkin_ctl == b->mg_refclkin_ctl &&
4175 		a->mg_clktop2_coreclkctl1 == b->mg_clktop2_coreclkctl1 &&
4176 		a->mg_clktop2_hsclkctl == b->mg_clktop2_hsclkctl &&
4177 		a->mg_pll_div0 == b->mg_pll_div0 &&
4178 		a->mg_pll_div1 == b->mg_pll_div1 &&
4179 		a->mg_pll_lf == b->mg_pll_lf &&
4180 		a->mg_pll_frac_lock == b->mg_pll_frac_lock &&
4181 		a->mg_pll_ssc == b->mg_pll_ssc &&
4182 		a->mg_pll_bias == b->mg_pll_bias &&
4183 		a->mg_pll_tdc_coldst_bias == b->mg_pll_tdc_coldst_bias;
4184 }
4185 
/* PLL ops for ICL+ combo PHY DPLLs. */
static const struct intel_dpll_funcs combo_pll_funcs = {
	.enable = combo_pll_enable,
	.disable = combo_pll_disable,
	.get_hw_state = combo_pll_get_hw_state,
	.get_freq = icl_ddi_combo_pll_get_freq,
};
4192 
/* PLL ops for the ICL/TGL/ADL-P Thunderbolt PLL. */
static const struct intel_dpll_funcs icl_tbt_pll_funcs = {
	.enable = icl_tbt_pll_enable,
	.disable = icl_tbt_pll_disable,
	.get_hw_state = icl_tbt_pll_get_hw_state,
	.get_freq = icl_ddi_tbt_pll_get_freq,
};
4199 
/* PLL ops for the ICL MG PHY TC port PLLs. */
static const struct intel_dpll_funcs mg_pll_funcs = {
	.enable = mg_pll_enable,
	.disable = mg_pll_disable,
	.get_hw_state = mg_pll_get_hw_state,
	.get_freq = icl_ddi_mg_pll_get_freq,
};
4206 
/* ICL: two combo DPLLs, the TBT PLL and four MG PHY TC port PLLs. */
static const struct dpll_info icl_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "TBT PLL", .funcs = &icl_tbt_pll_funcs, .id = DPLL_ID_ICL_TBTPLL,
	  .is_alt_port_dpll = true, },
	{ .name = "MG PLL 1", .funcs = &mg_pll_funcs, .id = DPLL_ID_ICL_MGPLL1, },
	{ .name = "MG PLL 2", .funcs = &mg_pll_funcs, .id = DPLL_ID_ICL_MGPLL2, },
	{ .name = "MG PLL 3", .funcs = &mg_pll_funcs, .id = DPLL_ID_ICL_MGPLL3, },
	{ .name = "MG PLL 4", .funcs = &mg_pll_funcs, .id = DPLL_ID_ICL_MGPLL4, },
	{}
};
4218 
/* DPLL manager for ICL. */
static const struct intel_dpll_mgr icl_pll_mgr = {
	.dpll_info = icl_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_active_dpll = icl_update_active_dpll,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4229 
/* EHL/JSL: combo DPLLs only; DPLL4 additionally requires DC_OFF power. */
static const struct dpll_info ehl_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "DPLL 4", .funcs = &combo_pll_funcs, .id = DPLL_ID_EHL_DPLL4,
	  .power_domain = POWER_DOMAIN_DC_OFF, },
	{}
};
4237 
/* DPLL manager for EHL/JSL (no TC ports, so no update_active_dpll). */
static const struct intel_dpll_mgr ehl_pll_mgr = {
	.dpll_info = ehl_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4247 
/* PLL ops for the TGL+ Dekel PHY TC port PLLs (shares enable/disable with MG). */
static const struct intel_dpll_funcs dkl_pll_funcs = {
	.enable = mg_pll_enable,
	.disable = mg_pll_disable,
	.get_hw_state = dkl_pll_get_hw_state,
	.get_freq = icl_ddi_mg_pll_get_freq,
};
4254 
/* TGL: two combo DPLLs, the TBT PLL and six Dekel PHY TC port PLLs. */
static const struct dpll_info tgl_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "TBT PLL", .funcs = &icl_tbt_pll_funcs, .id = DPLL_ID_ICL_TBTPLL,
	  .is_alt_port_dpll = true, },
	{ .name = "TC PLL 1", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL1, },
	{ .name = "TC PLL 2", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL2, },
	{ .name = "TC PLL 3", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL3, },
	{ .name = "TC PLL 4", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL4, },
	{ .name = "TC PLL 5", .funcs = &dkl_pll_funcs, .id = DPLL_ID_TGL_MGPLL5, },
	{ .name = "TC PLL 6", .funcs = &dkl_pll_funcs, .id = DPLL_ID_TGL_MGPLL6, },
	{}
};
4268 
/* DPLL manager for TGL. */
static const struct intel_dpll_mgr tgl_pll_mgr = {
	.dpll_info = tgl_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_active_dpll = icl_update_active_dpll,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4279 
/* RKL: combo DPLLs only. */
static const struct dpll_info rkl_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "DPLL 4", .funcs = &combo_pll_funcs, .id = DPLL_ID_EHL_DPLL4, },
	{}
};
4286 
/* DPLL manager for RKL (no TC ports, so no update_active_dpll). */
static const struct intel_dpll_mgr rkl_pll_mgr = {
	.dpll_info = rkl_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4296 
/* DG1: four combo DPLLs. */
static const struct dpll_info dg1_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL1, },
	{ .name = "DPLL 2", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL2, },
	{ .name = "DPLL 3", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL3, },
	{}
};
4304 
/* DPLL manager for DG1. */
static const struct intel_dpll_mgr dg1_pll_mgr = {
	.dpll_info = dg1_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4314 
/* ADL-S: four combo DPLLs. */
static const struct dpll_info adls_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "DPLL 2", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL2, },
	{ .name = "DPLL 3", .funcs = &combo_pll_funcs, .id = DPLL_ID_DG1_DPLL3, },
	{}
};
4322 
/* DPLL manager for ADL-S. */
static const struct intel_dpll_mgr adls_pll_mgr = {
	.dpll_info = adls_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4332 
/* ADL-P: two combo DPLLs, the TBT PLL and four Dekel PHY TC port PLLs. */
static const struct dpll_info adlp_plls[] = {
	{ .name = "DPLL 0", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &combo_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "TBT PLL", .funcs = &icl_tbt_pll_funcs, .id = DPLL_ID_ICL_TBTPLL,
	  .is_alt_port_dpll = true, },
	{ .name = "TC PLL 1", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL1, },
	{ .name = "TC PLL 2", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL2, },
	{ .name = "TC PLL 3", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL3, },
	{ .name = "TC PLL 4", .funcs = &dkl_pll_funcs, .id = DPLL_ID_ICL_MGPLL4, },
	{}
};
4344 
/* DPLL manager for ADL-P. */
static const struct intel_dpll_mgr adlp_pll_mgr = {
	.dpll_info = adlp_plls,
	.compute_dplls = icl_compute_dplls,
	.get_dplls = icl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_active_dpll = icl_update_active_dpll,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = icl_dump_hw_state,
	.compare_hw_state = icl_compare_hw_state,
};
4355 
4356 static struct intel_encoder *get_intel_encoder(struct intel_display *display,
4357 					       const struct intel_dpll *pll)
4358 {
4359 	struct intel_encoder *encoder;
4360 	enum intel_dpll_id mtl_id;
4361 
4362 	for_each_intel_encoder(display->drm, encoder) {
4363 		mtl_id = mtl_port_to_pll_id(display, encoder->port);
4364 
4365 		if (mtl_id == pll->info->id)
4366 			return encoder;
4367 	}
4368 
4369 	return NULL;
4370 }
4371 
4372 static bool mtl_pll_get_hw_state(struct intel_display *display,
4373 				 struct intel_dpll *pll,
4374 				 struct intel_dpll_hw_state *dpll_hw_state)
4375 {
4376 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4377 
4378 	if (!encoder)
4379 		return false;
4380 
4381 	return intel_cx0pll_readout_hw_state(encoder, &dpll_hw_state->cx0pll);
4382 }
4383 
4384 static int mtl_pll_get_freq(struct intel_display *display,
4385 			    const struct intel_dpll *pll,
4386 			    const struct intel_dpll_hw_state *dpll_hw_state)
4387 {
4388 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4389 
4390 	if (drm_WARN_ON(display->drm, !encoder))
4391 		return -EINVAL;
4392 
4393 	return intel_cx0pll_calc_port_clock(encoder, &dpll_hw_state->cx0pll);
4394 }
4395 
4396 static void mtl_pll_enable(struct intel_display *display,
4397 			   struct intel_dpll *pll,
4398 			   const struct intel_dpll_hw_state *dpll_hw_state)
4399 {
4400 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4401 
4402 	if (drm_WARN_ON(display->drm, !encoder))
4403 		return;
4404 
4405 	intel_mtl_pll_enable(encoder, pll, dpll_hw_state);
4406 }
4407 
4408 static void mtl_pll_disable(struct intel_display *display,
4409 			    struct intel_dpll *pll)
4410 {
4411 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4412 
4413 	if (drm_WARN_ON(display->drm, !encoder))
4414 		return;
4415 
4416 	intel_mtl_pll_disable(encoder);
4417 }
4418 
/* PLL ops for MTL C10/C20 PHY PLLs. */
static const struct intel_dpll_funcs mtl_pll_funcs = {
	.enable = mtl_pll_enable,
	.disable = mtl_pll_disable,
	.get_hw_state = mtl_pll_get_hw_state,
	.get_freq = mtl_pll_get_freq,
};
4425 
/*
 * The MTL TBT PLL is modelled as always-on (see the .always_on flag in
 * mtl_plls[]), so there is nothing to do when asked to enable it.
 */
static void mtl_tbt_pll_enable(struct intel_display *display,
			       struct intel_dpll *pll,
			       const struct intel_dpll_hw_state *hw_state)
{
}
4431 
/* Counterpart of mtl_tbt_pll_enable(): the always-on TBT PLL is never disabled. */
static void mtl_tbt_pll_disable(struct intel_display *display,
				struct intel_dpll *pll)
{
}
4436 
/* The TBT PLL has no single frequency; warn if anyone asks for one. */
static int mtl_tbt_pll_get_freq(struct intel_display *display,
				const struct intel_dpll *pll,
				const struct intel_dpll_hw_state *dpll_hw_state)
{
	/*
	 * The PLL outputs multiple frequencies at the same time, selection is
	 * made at DDI clock mux level.
	 */
	drm_WARN_ON(display->drm, 1);

	return 0;
}
4449 
/* PLL ops for the MTL TBT PLL; enable/disable are no-ops (always-on). */
static const struct intel_dpll_funcs mtl_tbt_pll_funcs = {
	.enable = mtl_tbt_pll_enable,
	.disable = mtl_tbt_pll_disable,
	.get_hw_state = intel_mtl_tbt_pll_readout_hw_state,
	.get_freq = mtl_tbt_pll_get_freq,
};
4456 
/* MTL: C10/C20 PHY PLLs plus the always-on TBT PLL. */
static const struct dpll_info mtl_plls[] = {
	{ .name = "DPLL 0", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "TBT PLL", .funcs = &mtl_tbt_pll_funcs, .id = DPLL_ID_ICL_TBTPLL,
	  .is_alt_port_dpll = true, .always_on = true },
	{ .name = "TC PLL 1", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_MGPLL1, },
	{ .name = "TC PLL 2", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_MGPLL2, },
	{ .name = "TC PLL 3", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_MGPLL3, },
	{ .name = "TC PLL 4", .funcs = &mtl_pll_funcs, .id = DPLL_ID_ICL_MGPLL4, },
	{}
};
4468 
4469 /*
4470  * Compute the state for either a C10 PHY PLL, or in the case of the PTL port B,
4471  * eDP on TypeC PHY case for a C20 PHY PLL.
4472  */
4473 static int mtl_compute_non_tc_phy_dpll(struct intel_atomic_state *state,
4474 				       struct intel_crtc *crtc,
4475 				       struct intel_encoder *encoder)
4476 {
4477 	struct intel_crtc_state *crtc_state =
4478 		intel_atomic_get_new_crtc_state(state, crtc);
4479 	struct icl_port_dpll *port_dpll =
4480 		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
4481 	int ret;
4482 
4483 	ret = intel_cx0pll_calc_state(crtc_state, encoder, &port_dpll->hw_state);
4484 	if (ret)
4485 		return ret;
4486 
4487 	/* this is mainly for the fastset check */
4488 	icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);
4489 
4490 	crtc_state->port_clock = intel_cx0pll_calc_port_clock(encoder,
4491 							      &port_dpll->hw_state.cx0pll);
4492 
4493 	return 0;
4494 }
4495 
/*
 * Compute both candidate PLL states for a TC port: the TBT PLL state
 * (default slot) and the C10/C20 PHY PLL state (MG PHY slot). Which one
 * becomes active is chosen based on the old state, mainly so the fastset
 * check compares against the right one.
 */
static int mtl_compute_tc_phy_dplls(struct intel_atomic_state *state,
				    struct intel_crtc *crtc,
				    struct intel_encoder *encoder)
{
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct icl_port_dpll *port_dpll;
	int ret;

	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
	intel_mtl_tbt_pll_calc_state(&port_dpll->hw_state);

	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
	ret = intel_cx0pll_calc_state(crtc_state, encoder, &port_dpll->hw_state);
	if (ret)
		return ret;

	/* this is mainly for the fastset check */
	if (old_crtc_state->intel_dpll &&
	    old_crtc_state->intel_dpll->info->id == DPLL_ID_ICL_TBTPLL)
		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);
	else
		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_MG_PHY);

	/* port_dpll still points at the MG PHY slot computed above. */
	crtc_state->port_clock = intel_cx0pll_calc_port_clock(encoder,
							      &port_dpll->hw_state.cx0pll);

	return 0;
}
4527 
/* Dispatch DPLL state computation based on whether the port is TypeC. */
static int mtl_compute_dplls(struct intel_atomic_state *state,
			     struct intel_crtc *crtc,
			     struct intel_encoder *encoder)
{
	if (!intel_encoder_is_tc(encoder))
		return mtl_compute_non_tc_phy_dpll(state, crtc, encoder);

	return mtl_compute_tc_phy_dplls(state, crtc, encoder);
}
4537 
/* Dispatch DPLL reservation based on whether the port is TypeC. */
static int mtl_get_dplls(struct intel_atomic_state *state,
			 struct intel_crtc *crtc,
			 struct intel_encoder *encoder)
{
	if (!intel_encoder_is_tc(encoder))
		return mtl_get_non_tc_phy_dpll(state, crtc, encoder);

	return icl_get_tc_phy_dplls(state, crtc, encoder);
}
4547 
/* Dump the C10/C20 PHY PLL state; delegated to the cx0 PHY code. */
static void mtl_dump_hw_state(struct drm_printer *p,
			      const struct intel_dpll_hw_state *dpll_hw_state)
{
	intel_cx0pll_dump_hw_state(p, &dpll_hw_state->cx0pll);
}
4553 
4554 static bool mtl_compare_hw_state(const struct intel_dpll_hw_state *_a,
4555 				 const struct intel_dpll_hw_state *_b)
4556 {
4557 	const struct intel_cx0pll_state *a = &_a->cx0pll;
4558 	const struct intel_cx0pll_state *b = &_b->cx0pll;
4559 
4560 	return intel_cx0pll_compare_hw_state(a, b);
4561 }
4562 
/* DPLL manager for MTL+ (C10/C20 PHYs). */
static const struct intel_dpll_mgr mtl_pll_mgr = {
	.dpll_info = mtl_plls,
	.compute_dplls = mtl_compute_dplls,
	.get_dplls = mtl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_active_dpll = icl_update_active_dpll,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = mtl_dump_hw_state,
	.compare_hw_state = mtl_compare_hw_state,
};
4573 
4574 static bool xe3plpd_pll_get_hw_state(struct intel_display *display,
4575 				     struct intel_dpll *pll,
4576 				     struct intel_dpll_hw_state *dpll_hw_state)
4577 {
4578 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4579 
4580 	if (!encoder)
4581 		return false;
4582 
4583 	return intel_lt_phy_pll_readout_hw_state(encoder, &dpll_hw_state->ltpll);
4584 }
4585 
4586 static int xe3plpd_pll_get_freq(struct intel_display *display,
4587 				const struct intel_dpll *pll,
4588 				const struct intel_dpll_hw_state *dpll_hw_state)
4589 {
4590 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4591 
4592 	if (drm_WARN_ON(display->drm, !encoder))
4593 		return -EINVAL;
4594 
4595 	return intel_lt_phy_calc_port_clock(display, &dpll_hw_state->ltpll);
4596 }
4597 
4598 static void xe3plpd_pll_enable(struct intel_display *display,
4599 			       struct intel_dpll *pll,
4600 			       const struct intel_dpll_hw_state *dpll_hw_state)
4601 {
4602 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4603 
4604 	if (drm_WARN_ON(display->drm, !encoder))
4605 		return;
4606 
4607 	intel_xe3plpd_pll_enable(encoder, pll, dpll_hw_state);
4608 }
4609 
4610 static void xe3plpd_pll_disable(struct intel_display *display,
4611 				struct intel_dpll *pll)
4612 {
4613 	struct intel_encoder *encoder = get_intel_encoder(display, pll);
4614 
4615 	if (drm_WARN_ON(display->drm, !encoder))
4616 		return;
4617 
4618 	intel_xe3plpd_pll_disable(encoder);
4619 }
4620 
/* Xe3+ LPD TBT PLL ops; reuses the MTL no-op enable/disable (always-on). */
static const struct intel_dpll_funcs xe3plpd_tbt_pll_funcs = {
	.enable = mtl_tbt_pll_enable,
	.disable = mtl_tbt_pll_disable,
	.get_hw_state = intel_lt_phy_tbt_pll_readout_hw_state,
	.get_freq = mtl_tbt_pll_get_freq,
};
4627 
/* PLL ops for Xe3+ LPD LT PHY PLLs. */
static const struct intel_dpll_funcs xe3plpd_pll_funcs = {
	.enable = xe3plpd_pll_enable,
	.disable = xe3plpd_pll_disable,
	.get_hw_state = xe3plpd_pll_get_hw_state,
	.get_freq = xe3plpd_pll_get_freq,
};
4634 
/* Xe3+ LPD: LT PHY PLLs plus the always-on TBT PLL. */
static const struct dpll_info xe3plpd_plls[] = {
	{ .name = "DPLL 0", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_DPLL0, },
	{ .name = "DPLL 1", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_DPLL1, },
	{ .name = "TBT PLL", .funcs = &xe3plpd_tbt_pll_funcs, .id = DPLL_ID_ICL_TBTPLL,
	  .is_alt_port_dpll = true, .always_on = true },
	{ .name = "TC PLL 1", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_MGPLL1, },
	{ .name = "TC PLL 2", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_MGPLL2, },
	{ .name = "TC PLL 3", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_MGPLL3, },
	{ .name = "TC PLL 4", .funcs = &xe3plpd_pll_funcs, .id = DPLL_ID_ICL_MGPLL4, },
	{}
};
4646 
/*
 * Compute the LT PHY PLL state for a non-TC port, filling in the default
 * port DPLL slot and the CRTC's port_clock.
 */
static int xe3plpd_compute_non_tc_phy_dpll(struct intel_atomic_state *state,
					   struct intel_crtc *crtc,
					   struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct icl_port_dpll *port_dpll =
		&crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
	int ret;

	ret = intel_lt_phy_pll_calc_state(crtc_state, encoder, &port_dpll->hw_state);
	if (ret)
		return ret;

	/* this is mainly for the fastset check */
	icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);

	crtc_state->port_clock = intel_lt_phy_calc_port_clock(display, &port_dpll->hw_state.ltpll);

	return 0;
}
4669 
/*
 * Compute both candidate PLL states for a TC port: the TBT PLL state
 * (default slot) and the LT PHY PLL state (MG PHY slot). The active slot
 * is chosen from the old state, mainly for the fastset check.
 */
static int xe3plpd_compute_tc_phy_dplls(struct intel_atomic_state *state,
					struct intel_crtc *crtc,
					struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct icl_port_dpll *port_dpll;
	int ret;

	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_DEFAULT];
	intel_lt_phy_tbt_pll_calc_state(&port_dpll->hw_state);

	port_dpll = &crtc_state->icl_port_dplls[ICL_PORT_DPLL_MG_PHY];
	ret = intel_lt_phy_pll_calc_state(crtc_state, encoder, &port_dpll->hw_state);
	if (ret)
		return ret;

	/* this is mainly for the fastset check */
	if (old_crtc_state->intel_dpll &&
	    old_crtc_state->intel_dpll->info->id == DPLL_ID_ICL_TBTPLL)
		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_DEFAULT);
	else
		icl_set_active_port_dpll(crtc_state, ICL_PORT_DPLL_MG_PHY);

	/* port_dpll still points at the MG PHY slot computed above. */
	crtc_state->port_clock = intel_lt_phy_calc_port_clock(display, &port_dpll->hw_state.ltpll);

	return 0;
}
4701 
/* Dispatch DPLL state computation based on whether the port is TypeC. */
static int xe3plpd_compute_dplls(struct intel_atomic_state *state,
				 struct intel_crtc *crtc,
				 struct intel_encoder *encoder)
{
	if (!intel_encoder_is_tc(encoder))
		return xe3plpd_compute_non_tc_phy_dpll(state, crtc, encoder);

	return xe3plpd_compute_tc_phy_dplls(state, crtc, encoder);
}
4711 
/* Dump the LT PHY PLL state; delegated to the LT PHY code. */
static void xe3plpd_dump_hw_state(struct drm_printer *p,
				  const struct intel_dpll_hw_state *dpll_hw_state)
{
	intel_lt_phy_dump_hw_state(p, &dpll_hw_state->ltpll);
}
4717 
4718 static bool xe3plpd_compare_hw_state(const struct intel_dpll_hw_state *_a,
4719 				     const struct intel_dpll_hw_state *_b)
4720 {
4721 	const struct intel_lt_phy_pll_state *a = &_a->ltpll;
4722 	const struct intel_lt_phy_pll_state *b = &_b->ltpll;
4723 
4724 	return intel_lt_phy_pll_compare_hw_state(a, b);
4725 }
4726 
/* DPLL manager for Xe3+ LPD (LT PHYs). */
static const struct intel_dpll_mgr xe3plpd_pll_mgr = {
	.dpll_info = xe3plpd_plls,
	.compute_dplls = xe3plpd_compute_dplls,
	.get_dplls = mtl_get_dplls,
	.put_dplls = icl_put_dplls,
	.update_active_dpll = icl_update_active_dpll,
	.update_ref_clks = icl_update_dpll_ref_clks,
	.dump_hw_state = xe3plpd_dump_hw_state,
	.compare_hw_state = xe3plpd_compare_hw_state,
};
4737 
4738 /**
4739  * intel_dpll_init - Initialize DPLLs
4740  * @display: intel_display device
4741  *
4742  * Initialize DPLLs for @display.
4743  */
4744 void intel_dpll_init(struct intel_display *display)
4745 {
4746 	const struct intel_dpll_mgr *dpll_mgr = NULL;
4747 	const struct dpll_info *dpll_info;
4748 	int i;
4749 
4750 	mutex_init(&display->dpll.lock);
4751 
4752 	if (display->platform.dg2)
4753 		/* No shared DPLLs on DG2; port PLLs are part of the PHY */
4754 		dpll_mgr = NULL;
4755 	else if (DISPLAY_VER(display) >= 35)
4756 		dpll_mgr = &xe3plpd_pll_mgr;
4757 	else if (DISPLAY_VER(display) >= 14)
4758 		dpll_mgr = &mtl_pll_mgr;
4759 	else if (display->platform.alderlake_p)
4760 		dpll_mgr = &adlp_pll_mgr;
4761 	else if (display->platform.alderlake_s)
4762 		dpll_mgr = &adls_pll_mgr;
4763 	else if (display->platform.dg1)
4764 		dpll_mgr = &dg1_pll_mgr;
4765 	else if (display->platform.rocketlake)
4766 		dpll_mgr = &rkl_pll_mgr;
4767 	else if (DISPLAY_VER(display) >= 12)
4768 		dpll_mgr = &tgl_pll_mgr;
4769 	else if (display->platform.jasperlake || display->platform.elkhartlake)
4770 		dpll_mgr = &ehl_pll_mgr;
4771 	else if (DISPLAY_VER(display) >= 11)
4772 		dpll_mgr = &icl_pll_mgr;
4773 	else if (display->platform.geminilake || display->platform.broxton)
4774 		dpll_mgr = &bxt_pll_mgr;
4775 	else if (DISPLAY_VER(display) == 9)
4776 		dpll_mgr = &skl_pll_mgr;
4777 	else if (HAS_DDI(display))
4778 		dpll_mgr = &hsw_pll_mgr;
4779 	else if (HAS_PCH_IBX(display) || HAS_PCH_CPT(display))
4780 		dpll_mgr = &pch_pll_mgr;
4781 
4782 	if (!dpll_mgr)
4783 		goto out_verify;
4784 
4785 	dpll_info = dpll_mgr->dpll_info;
4786 
4787 	for (i = 0; dpll_info[i].name; i++) {
4788 		if (drm_WARN_ON(display->drm,
4789 				i >= ARRAY_SIZE(display->dpll.dplls)))
4790 			break;
4791 
4792 		/* must fit into unsigned long bitmask on 32bit */
4793 		if (drm_WARN_ON(display->drm, dpll_info[i].id >= 32))
4794 			break;
4795 
4796 		display->dpll.dplls[i].info = &dpll_info[i];
4797 		display->dpll.dplls[i].index = i;
4798 	}
4799 
4800 	display->dpll.mgr = dpll_mgr;
4801 	display->dpll.num_dpll = i;
4802 
4803 out_verify:
4804 	/*
4805 	 * TODO: Convert these to a KUnit test or dependent on a kconfig
4806 	 * debug option.
4807 	 */
4808 	intel_cx0pll_verify_plls(display);
4809 	intel_lt_phy_verify_plls(display);
4810 }
4811 
4812 /**
4813  * intel_dpll_compute - compute DPLL state CRTC and encoder combination
4814  * @state: atomic state
4815  * @crtc: CRTC to compute DPLLs for
4816  * @encoder: encoder
4817  *
4818  * This function computes the DPLL state for the given CRTC and encoder.
4819  *
4820  * The new configuration in the atomic commit @state is made effective by
4821  * calling intel_dpll_swap_state().
4822  *
4823  * Returns:
4824  * 0 on success, negative error code on failure.
4825  */
4826 int intel_dpll_compute(struct intel_atomic_state *state,
4827 		       struct intel_crtc *crtc,
4828 		       struct intel_encoder *encoder)
4829 {
4830 	struct intel_display *display = to_intel_display(state);
4831 	const struct intel_dpll_mgr *dpll_mgr = display->dpll.mgr;
4832 
4833 	if (drm_WARN_ON(display->drm, !dpll_mgr))
4834 		return -EINVAL;
4835 
4836 	return dpll_mgr->compute_dplls(state, crtc, encoder);
4837 }
4838 
4839 /**
4840  * intel_dpll_reserve - reserve DPLLs for CRTC and encoder combination
4841  * @state: atomic state
4842  * @crtc: CRTC to reserve DPLLs for
4843  * @encoder: encoder
4844  *
4845  * This function reserves all required DPLLs for the given CRTC and encoder
4846  * combination in the current atomic commit @state and the new @crtc atomic
4847  * state.
4848  *
4849  * The new configuration in the atomic commit @state is made effective by
4850  * calling intel_dpll_swap_state().
4851  *
4852  * The reserved DPLLs should be released by calling
4853  * intel_dpll_release().
4854  *
4855  * Returns:
4856  * 0 if all required DPLLs were successfully reserved,
4857  * negative error code otherwise.
4858  */
4859 int intel_dpll_reserve(struct intel_atomic_state *state,
4860 		       struct intel_crtc *crtc,
4861 		       struct intel_encoder *encoder)
4862 {
4863 	struct intel_display *display = to_intel_display(state);
4864 	const struct intel_dpll_mgr *dpll_mgr = display->dpll.mgr;
4865 
4866 	if (drm_WARN_ON(display->drm, !dpll_mgr))
4867 		return -EINVAL;
4868 
4869 	return dpll_mgr->get_dplls(state, crtc, encoder);
4870 }
4871 
4872 /**
4873  * intel_dpll_release - end use of DPLLs by CRTC in atomic state
4874  * @state: atomic state
4875  * @crtc: crtc from which the DPLLs are to be released
4876  *
4877  * This function releases all DPLLs reserved by intel_dpll_reserve()
4878  * from the current atomic commit @state and the old @crtc atomic state.
4879  *
4880  * The new configuration in the atomic commit @state is made effective by
4881  * calling intel_dpll_swap_state().
4882  */
4883 void intel_dpll_release(struct intel_atomic_state *state,
4884 			struct intel_crtc *crtc)
4885 {
4886 	struct intel_display *display = to_intel_display(state);
4887 	const struct intel_dpll_mgr *dpll_mgr = display->dpll.mgr;
4888 
4889 	/*
4890 	 * FIXME: this function is called for every platform having a
4891 	 * compute_clock hook, even though the platform doesn't yet support
4892 	 * the DPLL framework and intel_dpll_reserve() is not
4893 	 * called on those.
4894 	 */
4895 	if (!dpll_mgr)
4896 		return;
4897 
4898 	dpll_mgr->put_dplls(state, crtc);
4899 }
4900 
4901 /**
4902  * intel_dpll_update_active - update the active DPLL for a CRTC/encoder
4903  * @state: atomic state
4904  * @crtc: the CRTC for which to update the active DPLL
4905  * @encoder: encoder determining the type of port DPLL
4906  *
4907  * Update the active DPLL for the given @crtc/@encoder in @crtc's atomic state,
4908  * from the port DPLLs reserved previously by intel_dpll_reserve(). The
4909  * DPLL selected will be based on the current mode of the encoder's port.
4910  */
4911 void intel_dpll_update_active(struct intel_atomic_state *state,
4912 			      struct intel_crtc *crtc,
4913 			      struct intel_encoder *encoder)
4914 {
4915 	struct intel_display *display = to_intel_display(encoder);
4916 	const struct intel_dpll_mgr *dpll_mgr = display->dpll.mgr;
4917 
4918 	if (drm_WARN_ON(display->drm, !dpll_mgr))
4919 		return;
4920 
4921 	dpll_mgr->update_active_dpll(state, crtc, encoder);
4922 }
4923 
4924 /**
4925  * intel_dpll_get_freq - calculate the DPLL's output frequency
4926  * @display: intel_display device
4927  * @pll: DPLL for which to calculate the output frequency
4928  * @dpll_hw_state: DPLL state from which to calculate the output frequency
4929  *
4930  * Return the output frequency corresponding to @pll's passed in @dpll_hw_state.
4931  */
4932 int intel_dpll_get_freq(struct intel_display *display,
4933 			const struct intel_dpll *pll,
4934 			const struct intel_dpll_hw_state *dpll_hw_state)
4935 {
4936 	if (drm_WARN_ON(display->drm, !pll->info->funcs->get_freq))
4937 		return 0;
4938 
4939 	return pll->info->funcs->get_freq(display, pll, dpll_hw_state);
4940 }
4941 
4942 /**
4943  * intel_dpll_get_hw_state - readout the DPLL's hardware state
4944  * @display: intel_display device instance
4945  * @pll: DPLL for which to calculate the output frequency
4946  * @dpll_hw_state: DPLL's hardware state
4947  *
4948  * Read out @pll's hardware state into @dpll_hw_state.
4949  */
4950 bool intel_dpll_get_hw_state(struct intel_display *display,
4951 			     struct intel_dpll *pll,
4952 			     struct intel_dpll_hw_state *dpll_hw_state)
4953 {
4954 	return pll->info->funcs->get_hw_state(display, pll, dpll_hw_state);
4955 }
4956 
4957 static void readout_dpll_hw_state(struct intel_display *display,
4958 				  struct intel_dpll *pll)
4959 {
4960 	struct intel_crtc *crtc;
4961 
4962 	pll->on = intel_dpll_get_hw_state(display, pll, &pll->state.hw_state);
4963 
4964 	if (pll->on && pll->info->power_domain)
4965 		pll->wakeref = intel_display_power_get(display, pll->info->power_domain);
4966 
4967 	pll->state.pipe_mask = 0;
4968 	for_each_intel_crtc(display->drm, crtc) {
4969 		struct intel_crtc_state *crtc_state =
4970 			to_intel_crtc_state(crtc->base.state);
4971 
4972 		if (crtc_state->hw.active && crtc_state->intel_dpll == pll)
4973 			intel_dpll_crtc_get(crtc, pll, &pll->state);
4974 	}
4975 	pll->active_mask = pll->state.pipe_mask;
4976 
4977 	drm_dbg_kms(display->drm,
4978 		    "%s hw state readout: pipe_mask 0x%x, on %i\n",
4979 		    pll->info->name, pll->state.pipe_mask, pll->on);
4980 }
4981 
4982 void intel_dpll_update_ref_clks(struct intel_display *display)
4983 {
4984 	if (display->dpll.mgr && display->dpll.mgr->update_ref_clks)
4985 		display->dpll.mgr->update_ref_clks(display);
4986 }
4987 
4988 void intel_dpll_readout_hw_state(struct intel_display *display)
4989 {
4990 	struct intel_dpll *pll;
4991 	int i;
4992 
4993 	for_each_dpll(display, pll, i)
4994 		readout_dpll_hw_state(display, pll);
4995 }
4996 
4997 static void sanitize_dpll_state(struct intel_display *display,
4998 				struct intel_dpll *pll)
4999 {
5000 	if (!pll->on)
5001 		return;
5002 
5003 	adlp_cmtg_clock_gating_wa(display, pll);
5004 
5005 	if (pll->active_mask)
5006 		return;
5007 
5008 	drm_dbg_kms(display->drm,
5009 		    "%s enabled but not in use, disabling\n",
5010 		    pll->info->name);
5011 
5012 	_intel_disable_shared_dpll(display, pll);
5013 }
5014 
5015 void intel_dpll_sanitize_state(struct intel_display *display)
5016 {
5017 	struct intel_dpll *pll;
5018 	int i;
5019 
5020 	intel_cx0_pll_power_save_wa(display);
5021 
5022 	for_each_dpll(display, pll, i)
5023 		sanitize_dpll_state(display, pll);
5024 }
5025 
5026 /**
5027  * intel_dpll_dump_hw_state - dump hw_state
5028  * @display: intel_display structure
5029  * @p: where to print the state to
5030  * @dpll_hw_state: hw state to be dumped
5031  *
5032  * Dumo out the relevant values in @dpll_hw_state.
5033  */
5034 void intel_dpll_dump_hw_state(struct intel_display *display,
5035 			      struct drm_printer *p,
5036 			      const struct intel_dpll_hw_state *dpll_hw_state)
5037 {
5038 	if (display->dpll.mgr) {
5039 		display->dpll.mgr->dump_hw_state(p, dpll_hw_state);
5040 	} else {
5041 		/* fallback for platforms that don't use the shared dpll
5042 		 * infrastructure
5043 		 */
5044 		ibx_dump_hw_state(p, dpll_hw_state);
5045 	}
5046 }
5047 
5048 /**
5049  * intel_dpll_compare_hw_state - compare the two states
5050  * @display: intel_display structure
5051  * @a: first DPLL hw state
5052  * @b: second DPLL hw state
5053  *
5054  * Compare DPLL hw states @a and @b.
5055  *
5056  * Returns: true if the states are equal, false if the differ
5057  */
5058 bool intel_dpll_compare_hw_state(struct intel_display *display,
5059 				 const struct intel_dpll_hw_state *a,
5060 				 const struct intel_dpll_hw_state *b)
5061 {
5062 	if (display->dpll.mgr) {
5063 		return display->dpll.mgr->compare_hw_state(a, b);
5064 	} else {
5065 		/* fallback for platforms that don't use the shared dpll
5066 		 * infrastructure
5067 		 */
5068 		return ibx_compare_hw_state(a, b);
5069 	}
5070 }
5071 
/*
 * Cross-check one PLL's software tracking against its hardware state.
 *
 * With @crtc/@new_crtc_state given, additionally verify that the pipe's
 * membership in the PLL's active and reference masks matches the CRTC's
 * active state. With @crtc == NULL only the global consistency of the
 * masks is checked.
 */
static void
verify_single_dpll_state(struct intel_display *display,
			 struct intel_dpll *pll,
			 struct intel_crtc *crtc,
			 const struct intel_crtc_state *new_crtc_state)
{
	struct intel_dpll_hw_state dpll_hw_state = {};
	bool pll_mismatch = false;
	u8 pipe_mask;
	bool active;

	active = intel_dpll_get_hw_state(display, pll, &dpll_hw_state);

	/* Always-on PLLs are exempt from the on/active cross-checks. */
	if (!pll->info->always_on) {
		INTEL_DISPLAY_STATE_WARN(display, !pll->on && pll->active_mask,
					 "%s: pll in active use but not on in sw tracking\n",
					 pll->info->name);
		INTEL_DISPLAY_STATE_WARN(display, pll->on && !pll->active_mask,
					 "%s: pll is on but not used by any active pipe\n",
					 pll->info->name);
		INTEL_DISPLAY_STATE_WARN(display, pll->on != active,
					 "%s: pll on state mismatch (expected %i, found %i)\n",
					 pll->info->name, pll->on, active);
	}

	if (!crtc) {
		/* Global check only: active pipes must be a subset of references. */
		INTEL_DISPLAY_STATE_WARN(display,
					 pll->active_mask & ~pll->state.pipe_mask,
					 "%s: more active pll users than references: 0x%x vs 0x%x\n",
					 pll->info->name, pll->active_mask, pll->state.pipe_mask);

		return;
	}

	pipe_mask = BIT(crtc->pipe);

	/* The pipe must be in the active mask iff the CRTC is active. */
	if (new_crtc_state->hw.active)
		INTEL_DISPLAY_STATE_WARN(display, !(pll->active_mask & pipe_mask),
					 "%s: pll active mismatch (expected pipe %c in active mask 0x%x)\n",
					 pll->info->name, pipe_name(crtc->pipe), pll->active_mask);
	else
		INTEL_DISPLAY_STATE_WARN(display, pll->active_mask & pipe_mask,
					 "%s: pll active mismatch (didn't expect pipe %c in active mask 0x%x)\n",
					 pll->info->name, pipe_name(crtc->pipe), pll->active_mask);

	/* The pipe must hold a reference on the PLL either way. */
	INTEL_DISPLAY_STATE_WARN(display, !(pll->state.pipe_mask & pipe_mask),
				 "%s: pll enabled crtcs mismatch (expected 0x%x in 0x%x)\n",
				 pll->info->name, pipe_mask, pll->state.pipe_mask);

	if (pll->on) {
		const struct intel_dpll_mgr *dpll_mgr = display->dpll.mgr;

		/*
		 * LT PHY platforms use the manager's compare hook; other
		 * platforms compare the raw hw state bytes.
		 */
		if (HAS_LT_PHY(display))
			pll_mismatch = !dpll_mgr->compare_hw_state(&pll->state.hw_state,
								   &dpll_hw_state);
		else
			pll_mismatch = memcmp(&pll->state.hw_state, &dpll_hw_state,
					      sizeof(dpll_hw_state));
	}

	/* Dump both states to the debug log on mismatch. */
	if (INTEL_DISPLAY_STATE_WARN(display, pll_mismatch,
				     "%s: pll hw state mismatch\n",
				     pll->info->name)) {
		struct drm_printer p = drm_dbg_printer(display->drm, DRM_UT_KMS, NULL);

		drm_printf(&p, "PLL %s HW state:\n", pll->info->name);
		intel_dpll_dump_hw_state(display, &p, &dpll_hw_state);
		drm_printf(&p, "PLL %s SW state:\n", pll->info->name);
		intel_dpll_dump_hw_state(display, &p, &pll->state.hw_state);
	}
}
5143 
5144 static bool has_alt_port_dpll(const struct intel_dpll *old_pll,
5145 			      const struct intel_dpll *new_pll)
5146 {
5147 	return old_pll && new_pll && old_pll != new_pll &&
5148 		(old_pll->info->is_alt_port_dpll || new_pll->info->is_alt_port_dpll);
5149 }
5150 
/**
 * intel_dpll_state_verify - verify the DPLL state for a CRTC
 * @state: atomic state
 * @crtc: CRTC to verify the DPLL state for
 *
 * Verify the new DPLL of @crtc against the hardware, and check that the
 * old DPLL (if the CRTC switched PLLs) no longer references @crtc's pipe.
 */
void intel_dpll_state_verify(struct intel_atomic_state *state,
			     struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	const struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);

	if (new_crtc_state->intel_dpll)
		verify_single_dpll_state(display, new_crtc_state->intel_dpll,
					 crtc, new_crtc_state);

	/* If the CRTC switched PLLs, the old PLL must have dropped this pipe. */
	if (old_crtc_state->intel_dpll &&
	    old_crtc_state->intel_dpll != new_crtc_state->intel_dpll) {
		u8 pipe_mask = BIT(crtc->pipe);
		struct intel_dpll *pll = old_crtc_state->intel_dpll;

		INTEL_DISPLAY_STATE_WARN(display, pll->active_mask & pipe_mask,
					 "%s: pll active mismatch (didn't expect pipe %c in active mask (0x%x))\n",
					 pll->info->name, pipe_name(crtc->pipe), pll->active_mask);

		/* TC ports have both MG/TC and TBT PLL referenced simultaneously */
		INTEL_DISPLAY_STATE_WARN(display, !has_alt_port_dpll(old_crtc_state->intel_dpll,
								     new_crtc_state->intel_dpll) &&
					 pll->state.pipe_mask & pipe_mask,
					 "%s: pll enabled crtcs mismatch (found pipe %c in enabled mask (0x%x))\n",
					 pll->info->name, pipe_name(crtc->pipe), pll->state.pipe_mask);
	}
}
5181 
5182 void intel_dpll_verify_disabled(struct intel_atomic_state *state)
5183 {
5184 	struct intel_display *display = to_intel_display(state);
5185 	struct intel_dpll *pll;
5186 	int i;
5187 
5188 	for_each_dpll(display, pll, i)
5189 		verify_single_dpll_state(display, pll, NULL, NULL);
5190 }
5191