xref: /linux/drivers/gpu/drm/i915/display/intel_lt_phy.c (revision 5ea5880764cbb164afb17a62e76ca75dc371409d)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2025 Intel Corporation
4  */
5 
6 #include <drm/drm_print.h>
7 
8 #include "intel_cx0_phy.h"
9 #include "intel_cx0_phy_regs.h"
10 #include "intel_ddi.h"
11 #include "intel_ddi_buf_trans.h"
12 #include "intel_de.h"
13 #include "intel_display.h"
14 #include "intel_display_regs.h"
15 #include "intel_display_types.h"
16 #include "intel_display_utils.h"
17 #include "intel_dpll.h"
18 #include "intel_dpll_mgr.h"
19 #include "intel_hdmi.h"
20 #include "intel_lt_phy.h"
21 #include "intel_lt_phy_regs.h"
22 #include "intel_panel.h"
23 #include "intel_psr.h"
24 #include "intel_tc.h"
25 
/* Iterate over the LT PHY lanes (0..1) whose bit is set in @__lane_mask. */
#define for_each_lt_phy_lane_in_mask(__lane_mask, __lane) \
	for ((__lane) = 0; (__lane) < 2; (__lane)++) \
		for_each_if((__lane_mask) & BIT(__lane))

#define INTEL_LT_PHY_LANE0		BIT(0)
#define INTEL_LT_PHY_LANE1		BIT(1)
#define INTEL_LT_PHY_BOTH_LANES		(INTEL_LT_PHY_LANE1 |\
					 INTEL_LT_PHY_LANE0)
/* Output mode encodings stored in the VDR config byte (LT_PHY_VDR_MODE_ENCODING_MASK) */
#define MODE_DP				3
#define MODE_HDMI_20			4
#define MODE_HDMI_FRL			5
/* Split a Q32.32 fixed-point value into its integer and fractional parts */
#define Q32_TO_INT(x)	((x) >> 32)
#define Q32_TO_FRAC(x)	((x) & 0xFFFFFFFF)
#define DCO_MIN_FREQ_MHZ	11850
#define REF_CLK_KHZ	38400
#define TDC_RES_MULTIPLIER	10000000ULL
42 
/* A single PHY register setting: the value to program and its register address. */
struct phy_param_t {
	u32 val;
	u32 addr;
};
47 
/*
 * Per-frequency PLL tuning parameters, one register setting per field.
 * Field names follow the PHY register naming (loop filter, TDC, SSC,
 * bias and DCO trims) - presumably these correspond one-to-one to the
 * 13 rows of intel_lt_phy_pll_state::data; TODO confirm against Bspec.
 */
struct lt_phy_params {
	struct phy_param_t pll_reg4;
	struct phy_param_t pll_reg3;
	struct phy_param_t pll_reg5;
	struct phy_param_t pll_reg57;
	struct phy_param_t lf;
	struct phy_param_t tdc;
	struct phy_param_t ssc;
	struct phy_param_t bias2;
	struct phy_param_t bias_trim;
	struct phy_param_t dco_med;
	struct phy_param_t dco_fine;
	struct phy_param_t ssc_inj;
	struct phy_param_t surv_bonus;
};
63 
/* PLL state for DP RBR (1.62 Gbps, port clock 162000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_rbr = {
	.config = {
		0x83,
		0x2d,
		0x0,
	},
	/* VDR register addresses: addr_msb[i]:addr_lsb[i] holds data[i] */
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x5,  0xa,  0x2a, 0x20 },
		{ 0x80, 0x0,  0x0,  0x0  },
		{ 0x4,  0x4,  0x82, 0x28 },
		{ 0xfa, 0x16, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x5,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x4b, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0a },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
116 
/* PLL state for DP HBR1 (2.7 Gbps, port clock 270000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_hbr1 = {
	.config = {
		0x8b,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x3,  0xca, 0x34, 0xa0 },
		{ 0xe0, 0x0,  0x0,  0x0  },
		{ 0x5,  0x4,  0x81, 0xad },
		{ 0xfa, 0x11, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x7,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x43, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0d },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
169 
/* PLL state for DP HBR2 (5.4 Gbps, port clock 540000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_hbr2 = {
	.config = {
		0x93,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0x4d, 0x34, 0xa0 },
		{ 0xe0, 0x0,  0x0,  0x0  },
		{ 0xa,  0x4,  0x81, 0xda },
		{ 0xfa, 0x11, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x7,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x43, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0d },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
222 
/* PLL state for DP HBR3 (8.1 Gbps, port clock 810000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_hbr3 = {
	.config = {
		0x9b,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0x4a, 0x34, 0xa0 },
		{ 0xe0, 0x0,  0x0,  0x0  },
		{ 0x5,  0x4,  0x80, 0xa8 },
		{ 0xfa, 0x11, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x7,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x43, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0d },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
275 
/*
 * PLL state for DP UHBR10 (10 Gbps, port clock 1000000 kHz).
 * Note the UHBR10/UHBR20 rates address a different VDR register bank
 * (0x85/0x86 MSB) than the legacy rates (0x87/0x88).
 */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_uhbr10 = {
	.config = {
		0x43,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x85,
		0x85,
		0x85,
		0x85,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0xa,  0x20, 0x80 },
		{ 0x6a, 0xaa, 0xaa, 0xab },
		{ 0x0,  0x3,  0x4,  0x94 },
		{ 0xfa, 0x1c, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x4,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x45, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x14, 0x2a, 0x14 },
		{ 0x0,  0x5b, 0xe0, 0x8  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
328 
/* PLL state for DP UHBR13.5 (13.5 Gbps, port clock 1350000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_uhbr13_5 = {
	.config = {
		0xcb,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x2,  0x9,  0x2b, 0xe0 },
		{ 0x90, 0x0,  0x0,  0x0  },
		{ 0x8,  0x4,  0x80, 0xe0 },
		{ 0xfa, 0x15, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x6,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x49, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x57, 0xe0, 0x0c },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
381 
/*
 * PLL state for DP UHBR20 (20 Gbps, port clock 2000000 kHz).
 * Register payload matches UHBR10; only config[0] (rate encoding) differs.
 */
static const struct intel_lt_phy_pll_state xe3plpd_lt_dp_uhbr20 = {
	.config = {
		0x53,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x85,
		0x85,
		0x85,
		0x85,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
		0x86,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0xa,  0x20, 0x80 },
		{ 0x6a, 0xaa, 0xaa, 0xab },
		{ 0x0,  0x3,  0x4,  0x94 },
		{ 0xfa, 0x1c, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x4,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x45, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x14, 0x2a, 0x14 },
		{ 0x0,  0x5b, 0xe0, 0x8  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
434 
/*
 * One entry of a PLL frequency table: ties a link/pixel clock rate (kHz)
 * to the canned register state that programs the PLL for it.
 */
struct intel_lt_phy_pll_params {
	const char *name;	/* stringified name of @state, for debug output */
	bool is_hdmi;
	int clock_rate;		/* port clock in kHz */
	const struct intel_lt_phy_pll_state *state;
};
441 
/* Build an intel_lt_phy_pll_params entry, deriving .name from the state symbol. */
#define __LT_PHY_PLL_PARAMS(__is_hdmi, __clock_rate, __state)    { \
	.name = __stringify(__state), \
	.is_hdmi = __is_hdmi, \
	.clock_rate = __clock_rate, \
	.state = &__state, \
}

#define LT_PHY_PLL_HDMI_PARAMS(__clock_rate, __state)	__LT_PHY_PLL_PARAMS(true, __clock_rate, __state)
#define LT_PHY_PLL_DP_PARAMS(__clock_rate, __state)	__LT_PHY_PLL_PARAMS(false, __clock_rate, __state)
451 
/* DP link rates, ascending, terminated by an empty sentinel entry. */
static const struct intel_lt_phy_pll_params xe3plpd_lt_dp_tables[] = {
	LT_PHY_PLL_DP_PARAMS(162000, xe3plpd_lt_dp_rbr),
	LT_PHY_PLL_DP_PARAMS(270000, xe3plpd_lt_dp_hbr1),
	LT_PHY_PLL_DP_PARAMS(540000, xe3plpd_lt_dp_hbr2),
	LT_PHY_PLL_DP_PARAMS(810000, xe3plpd_lt_dp_hbr3),
	LT_PHY_PLL_DP_PARAMS(1000000, xe3plpd_lt_dp_uhbr10),
	LT_PHY_PLL_DP_PARAMS(1350000, xe3plpd_lt_dp_uhbr13_5),
	LT_PHY_PLL_DP_PARAMS(2000000, xe3plpd_lt_dp_uhbr20),
	{}
};
462 
/* PLL state for the eDP intermediate rate 2.16 Gbps (port clock 216000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_edp_2_16 = {
	.config = {
		0xa3,
		0x2d,
		0x1,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x3,  0xca, 0x2a, 0x20 },
		{ 0x80, 0x0,  0x0,  0x0  },
		{ 0x6,  0x4,  0x81, 0xbc },
		{ 0xfa, 0x16, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x5,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x4b, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0a },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
515 
/* PLL state for the eDP intermediate rate 2.43 Gbps (port clock 243000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_edp_2_43 = {
	.config = {
		0xab,
		0x2d,
		0x1,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x3,  0xca, 0x2f, 0x60 },
		{ 0xb0, 0x0,  0x0,  0x0  },
		{ 0x6,  0x4,  0x81, 0xbc },
		{ 0xfa, 0x13, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x6,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x47, 0x48, 0x0,  0x0  },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0c },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
568 
/* PLL state for the eDP intermediate rate 3.24 Gbps (port clock 324000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_edp_3_24 = {
	.config = {
		0xb3,
		0x2d,
		0x1,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x2,  0x8a, 0x2a, 0x20 },
		{ 0x80, 0x0,  0x0,  0x0  },
		{ 0x6,  0x4,  0x81, 0x28 },
		{ 0xfa, 0x16, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x5,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x4b, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0a },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
621 
/* PLL state for the eDP intermediate rate 4.32 Gbps (port clock 432000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_edp_4_32 = {
	.config = {
		0xbb,
		0x2d,
		0x1,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0x4d, 0x2a, 0x20 },
		{ 0x80, 0x0,  0x0,  0x0  },
		{ 0xc,  0x4,  0x81, 0xbc },
		{ 0xfa, 0x16, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x5,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x4b, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x5b, 0xe0, 0x0a },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
674 
/* PLL state for the eDP intermediate rate 6.75 Gbps (port clock 675000 kHz). */
static const struct intel_lt_phy_pll_state xe3plpd_lt_edp_6_75 = {
	.config = {
		0xdb,
		0x2d,
		0x1,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x1,  0x4a, 0x2b, 0xe0 },
		{ 0x90, 0x0,  0x0,  0x0  },
		{ 0x6,  0x4,  0x80, 0xa8 },
		{ 0xfa, 0x15, 0x83, 0x11 },
		{ 0x80, 0x0f, 0xf9, 0x53 },
		{ 0x84, 0x26, 0x6,  0x4  },
		{ 0x0,  0xe0, 0x1,  0x0  },
		{ 0x49, 0x48, 0x0,  0x0  },
		{ 0x27, 0x8,  0x0,  0x0  },
		{ 0x5a, 0x13, 0x29, 0x13 },
		{ 0x0,  0x57, 0xe0, 0x0c },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
727 
/*
 * eDP rates, ascending, terminated by an empty sentinel: the standard DP
 * rates plus the eDP 1.4 intermediate link rates.
 */
static const struct intel_lt_phy_pll_params xe3plpd_lt_edp_tables[] = {
	LT_PHY_PLL_DP_PARAMS(162000, xe3plpd_lt_dp_rbr),
	LT_PHY_PLL_DP_PARAMS(216000, xe3plpd_lt_edp_2_16),
	LT_PHY_PLL_DP_PARAMS(243000, xe3plpd_lt_edp_2_43),
	LT_PHY_PLL_DP_PARAMS(270000, xe3plpd_lt_dp_hbr1),
	LT_PHY_PLL_DP_PARAMS(324000, xe3plpd_lt_edp_3_24),
	LT_PHY_PLL_DP_PARAMS(432000, xe3plpd_lt_edp_4_32),
	LT_PHY_PLL_DP_PARAMS(540000, xe3plpd_lt_dp_hbr2),
	LT_PHY_PLL_DP_PARAMS(675000, xe3plpd_lt_edp_6_75),
	LT_PHY_PLL_DP_PARAMS(810000, xe3plpd_lt_dp_hbr3),
	{}
};
740 
/* PLL state for HDMI TMDS at 25.2 MHz pixel clock. */
static const struct intel_lt_phy_pll_state xe3plpd_lt_hdmi_252 = {
	.config = {
		0x84,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x0c, 0x15, 0x27, 0x60 },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x8,  0x4,  0x98, 0x28 },
		{ 0x42, 0x0,  0x84, 0x10 },
		{ 0x80, 0x0f, 0xd9, 0xb5 },
		{ 0x86, 0x0,  0x0,  0x0  },
		{ 0x1,  0xa0, 0x1,  0x0  },
		{ 0x4b, 0x0,  0x0,  0x0  },
		{ 0x28, 0x0,  0x0,  0x0  },
		{ 0x0,  0x14, 0x2a, 0x14 },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
793 
/* PLL state for HDMI TMDS at 74.25 MHz pixel clock. */
static const struct intel_lt_phy_pll_state xe3plpd_lt_hdmi_742p5 = {
	.config = {
		0x84,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x4,  0x15, 0x26, 0xa0 },
		{ 0x60, 0x0,  0x0,  0x0  },
		{ 0x8,  0x4,  0x88, 0x28 },
		{ 0xfa, 0x0c, 0x84, 0x11 },
		{ 0x80, 0x0f, 0xd9, 0x53 },
		{ 0x86, 0x0,  0x0,  0x0  },
		{ 0x1,  0xa0, 0x1,  0x0  },
		{ 0x4b, 0x0,  0x0,  0x0  },
		{ 0x28, 0x0,  0x0,  0x0  },
		{ 0x0,  0x14, 0x2a, 0x14 },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
846 
/* PLL state for HDMI TMDS at 148.5 MHz pixel clock. */
static const struct intel_lt_phy_pll_state xe3plpd_lt_hdmi_1p485 = {
	.config = {
		0x84,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x2,  0x15, 0x26, 0xa0 },
		{ 0x60, 0x0,  0x0,  0x0  },
		{ 0x8,  0x4,  0x84, 0x28 },
		{ 0xfa, 0x0c, 0x84, 0x11 },
		{ 0x80, 0x0f, 0xd9, 0x53 },
		{ 0x86, 0x0,  0x0,  0x0  },
		{ 0x1,  0xa0, 0x1,  0x0  },
		{ 0x4b, 0x0,  0x0,  0x0  },
		{ 0x28, 0x0,  0x0,  0x0  },
		{ 0x0,  0x14, 0x2a, 0x14 },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
899 
/* PLL state for HDMI TMDS at 594 MHz pixel clock. */
static const struct intel_lt_phy_pll_state xe3plpd_lt_hdmi_5p94 = {
	.config = {
		0x84,
		0x2d,
		0x0,
	},
	.addr_msb = {
		0x87,
		0x87,
		0x87,
		0x87,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
		0x88,
	},
	.addr_lsb = {
		0x10,
		0x0c,
		0x14,
		0xe4,
		0x0c,
		0x10,
		0x14,
		0x18,
		0x48,
		0x40,
		0x4c,
		0x24,
		0x44,
	},
	.data = {
		{ 0x0,  0x4c, 0x2,  0x0  },
		{ 0x0,  0x95, 0x26, 0xa0 },
		{ 0x60, 0x0,  0x0,  0x0  },
		{ 0x8,  0x4,  0x81, 0x28 },
		{ 0xfa, 0x0c, 0x84, 0x11 },
		{ 0x80, 0x0f, 0xd9, 0x53 },
		{ 0x86, 0x0,  0x0,  0x0  },
		{ 0x1,  0xa0, 0x1,  0x0  },
		{ 0x4b, 0x0,  0x0,  0x0  },
		{ 0x28, 0x0,  0x0,  0x0  },
		{ 0x0,  0x14, 0x2a, 0x14 },
		{ 0x0,  0x0,  0x0,  0x0  },
		{ 0x0,  0x0,  0x0,  0x0  },
	},
};
952 
/* Fixed HDMI TMDS pixel clocks, ascending, terminated by an empty sentinel. */
static const struct intel_lt_phy_pll_params xe3plpd_lt_hdmi_tables[] = {
	LT_PHY_PLL_HDMI_PARAMS(25200, xe3plpd_lt_hdmi_252),
	LT_PHY_PLL_HDMI_PARAMS(74250, xe3plpd_lt_hdmi_742p5),
	LT_PHY_PLL_HDMI_PARAMS(148500, xe3plpd_lt_hdmi_1p485),
	LT_PHY_PLL_HDMI_PARAMS(594000, xe3plpd_lt_hdmi_5p94),
	{}
};
960 
961 static u8 intel_lt_phy_get_owned_lane_mask(struct intel_encoder *encoder)
962 {
963 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
964 
965 	if (!intel_tc_port_in_dp_alt_mode(dig_port))
966 		return INTEL_LT_PHY_BOTH_LANES;
967 
968 	return intel_tc_port_max_lane_count(dig_port) > 2
969 		? INTEL_LT_PHY_BOTH_LANES : INTEL_LT_PHY_LANE0;
970 }
971 
972 static u8 intel_lt_phy_read(struct intel_encoder *encoder, u8 lane_mask, u16 addr)
973 {
974 	return intel_cx0_read(encoder, lane_mask, addr);
975 }
976 
/*
 * Write a LT PHY VDR register on every lane in @lane_mask via the shared
 * CX0 message bus; @committed selects an acknowledged (committed) write.
 */
static void intel_lt_phy_write(struct intel_encoder *encoder,
			       u8 lane_mask, u16 addr, u8 data, bool committed)
{
	intel_cx0_write(encoder, lane_mask, addr, data, committed);
}
982 
/*
 * Read-modify-write a LT PHY VDR register on every lane in @lane_mask:
 * bits in @clear are cleared, then bits in @set are set.
 */
static void intel_lt_phy_rmw(struct intel_encoder *encoder,
			     u8 lane_mask, u16 addr, u8 clear, u8 set, bool committed)
{
	intel_cx0_rmw(encoder, lane_mask, addr, clear, set, committed);
}
988 
/*
 * Clear the RESPONSE_READY flag in the PHY-to-MAC P2P message bus status
 * register for @lane, acknowledging a completed P2P transaction.
 */
static void intel_lt_phy_clear_status_p2p(struct intel_encoder *encoder,
					  int lane)
{
	struct intel_display *display = to_intel_display(encoder);

	intel_de_rmw(display,
		     XE3PLPD_PORT_P2M_MSGBUS_STATUS_P2P(encoder->port, lane),
		     XELPDP_PORT_P2M_RESPONSE_READY, 0);
}
998 
999 static void
1000 assert_dc_off(struct intel_display *display)
1001 {
1002 	bool enabled;
1003 
1004 	enabled = intel_display_power_is_enabled(display, POWER_DOMAIN_DC_OFF);
1005 	drm_WARN_ON(display->drm, !enabled);
1006 }
1007 
/*
 * Perform one attempt at a committed P2P write of @data to VDR address
 * @addr on @lane.
 *
 * Sequence: wait for any pending M2P transaction to drain, clear stale
 * P2M status, issue the committed write, then wait for the write ack and
 * check it for an error indication.
 *
 * NOTE(review): @mac_reg_addr and @expected_mac_val are currently unused
 * here - presumably reserved for a follow-up MAC register readback check;
 * confirm before removing.
 *
 * Returns 0 on success, -ETIMEDOUT/-EINVAL (after resetting the bus) on
 * failure; the caller retries.
 */
static int __intel_lt_phy_p2p_write_once(struct intel_encoder *encoder,
					 int lane, u16 addr, u8 data,
					 i915_reg_t mac_reg_addr,
					 u8 expected_mac_val)
{
	struct intel_display *display = to_intel_display(encoder);
	enum port port = encoder->port;
	enum phy phy = intel_encoder_to_phy(encoder);
	int ack;
	u32 val;

	if (intel_de_wait_for_clear_ms(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
				       XELPDP_PORT_P2P_TRANSACTION_PENDING,
				       XELPDP_MSGBUS_TIMEOUT_MS)) {
		drm_dbg_kms(display->drm,
			    "PHY %c Timeout waiting for previous transaction to complete. Resetting bus.\n",
			    phy_name(phy));
		intel_cx0_bus_reset(encoder, lane);
		return -ETIMEDOUT;
	}

	/* Write-to-clear any stale status bits before starting the transaction */
	intel_de_rmw(display, XELPDP_PORT_P2M_MSGBUS_STATUS(display, port, lane), 0, 0);

	intel_de_write(display, XELPDP_PORT_M2P_MSGBUS_CTL(display, port, lane),
		       XELPDP_PORT_P2P_TRANSACTION_PENDING |
		       XELPDP_PORT_M2P_COMMAND_WRITE_COMMITTED |
		       XELPDP_PORT_M2P_DATA(data) |
		       XELPDP_PORT_M2P_ADDRESS(addr));

	ack = intel_cx0_wait_for_ack(encoder, XELPDP_PORT_P2M_COMMAND_WRITE_ACK, lane, &val);
	if (ack < 0)
		return ack;

	if (val & XELPDP_PORT_P2M_ERROR_SET) {
		drm_dbg_kms(display->drm,
			    "PHY %c Error occurred during P2P write command. Status: 0x%x\n",
			    phy_name(phy), val);
		intel_lt_phy_clear_status_p2p(encoder, lane);
		intel_cx0_bus_reset(encoder, lane);
		return -EINVAL;
	}

	/*
	 * RE-VISIT:
	 * This needs to be added to give PHY time to set everything up this was a requirement
	 * to get the display up and running
	 * This is the time PHY takes to settle down after programming the PHY.
	 */
	udelay(150);
	intel_cx0_clear_response_ready_flag(encoder, lane);
	intel_lt_phy_clear_status_p2p(encoder, lane);

	return 0;
}
1062 
1063 static void __intel_lt_phy_p2p_write(struct intel_encoder *encoder,
1064 				     int lane, u16 addr, u8 data,
1065 				     i915_reg_t mac_reg_addr,
1066 				     u8 expected_mac_val)
1067 {
1068 	struct intel_display *display = to_intel_display(encoder);
1069 	enum phy phy = intel_encoder_to_phy(encoder);
1070 	int i, status;
1071 
1072 	assert_dc_off(display);
1073 
1074 	/* 3 tries is assumed to be enough to write successfully */
1075 	for (i = 0; i < 3; i++) {
1076 		status = __intel_lt_phy_p2p_write_once(encoder, lane, addr, data, mac_reg_addr,
1077 						       expected_mac_val);
1078 
1079 		if (status == 0)
1080 			return;
1081 	}
1082 
1083 	drm_err_once(display->drm,
1084 		     "PHY %c P2P Write %04x failed after %d retries.\n", phy_name(phy), addr, i);
1085 }
1086 
/*
 * Committed P2P write of @data to VDR address @addr on every lane set in
 * @lane_mask; each lane is written (and retried) independently.
 */
static void intel_lt_phy_p2p_write(struct intel_encoder *encoder,
				   u8 lane_mask, u16 addr, u8 data,
				   i915_reg_t mac_reg_addr,
				   u8 expected_mac_val)
{
	int lane;

	for_each_lt_phy_lane_in_mask(lane_mask, lane)
		__intel_lt_phy_p2p_write(encoder, lane, addr, data, mac_reg_addr, expected_mac_val);
}
1097 
/*
 * Configure the PHY powerdown state machine.
 *
 * NOTE(review): @lane_count is currently unused - kept for the anticipated
 * PORT_BUF_CTL6 programming described below; confirm before removing.
 */
static void
intel_lt_phy_setup_powerdown(struct intel_encoder *encoder, u8 lane_count)
{
	/*
	 * The new PORT_BUF_CTL6 stuff for dc5 entry and exit needs to be handled
	 * by dmc firmware not explicitly mentioned in Bspec. This leaves this
	 * function as a wrapper only but keeping it expecting future changes.
	 */
	intel_cx0_setup_powerdown(encoder);
}
1108 
/*
 * Move the lanes in @lane_mask to powerdown @state; the sequence is shared
 * with the CX0 PHY implementation.
 */
static void
intel_lt_phy_powerdown_change_sequence(struct intel_encoder *encoder,
				       u8 lane_mask, u8 state)
{
	intel_cx0_powerdown_change_sequence(encoder, lane_mask, state);
}
1115 
/*
 * Bring the owned PHY lanes out of reset and hand them to the display.
 *
 * The steps below are order-sensitive: program the MAC clock rate and PHY
 * mode, park the lanes in the RESET powerdown state, release the MAC clock
 * reset, request and wait for the lane 0 PLL clock, ungate the forward
 * clock, then deassert the per-lane pipe resets and wait for the PHY to
 * report the lanes out of reset and rate calibration done.
 */
static void
intel_lt_phy_lane_reset(struct intel_encoder *encoder,
			u8 lane_count)
{
	struct intel_display *display = to_intel_display(encoder);
	enum port port = encoder->port;
	enum phy phy = intel_encoder_to_phy(encoder);
	u8 owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
	/* Only touch lane 1 bits when the display owns both lanes */
	u32 lane_pipe_reset = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
				? XELPDP_LANE_PIPE_RESET(0) | XELPDP_LANE_PIPE_RESET(1)
				: XELPDP_LANE_PIPE_RESET(0);
	u32 lane_phy_current_status = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
					? (XELPDP_LANE_PHY_CURRENT_STATUS(0) |
					   XELPDP_LANE_PHY_CURRENT_STATUS(1))
					: XELPDP_LANE_PHY_CURRENT_STATUS(0);
	u32 lane_phy_pulse_status = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
					? (XE3PLPDP_LANE_PHY_PULSE_STATUS(0) |
					   XE3PLPDP_LANE_PHY_PULSE_STATUS(1))
					: XE3PLPDP_LANE_PHY_PULSE_STATUS(0);

	intel_de_rmw(display, XE3PLPD_PORT_BUF_CTL5(port),
		     XE3PLPD_MACCLK_RATE_MASK, XE3PLPD_MACCLK_RATE_DEF);

	intel_de_rmw(display, XELPDP_PORT_BUF_CTL1(display, port),
		     XE3PLPDP_PHY_MODE_MASK, XE3PLPDP_PHY_MODE_DP);

	intel_lt_phy_setup_powerdown(encoder, lane_count);
	intel_lt_phy_powerdown_change_sequence(encoder, owned_lane_mask,
					       XELPDP_P2_STATE_RESET);

	/* Release the MAC clock reset before requesting the PLL clock */
	intel_de_rmw(display, XE3PLPD_PORT_BUF_CTL5(port),
		     XE3PLPD_MACCLK_RESET_0, 0);

	intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
		     XELPDP_LANE_PCLK_PLL_REQUEST(0),
		     XELPDP_LANE_PCLK_PLL_REQUEST(0));

	if (intel_de_wait_for_set_ms(display, XELPDP_PORT_CLOCK_CTL(display, port),
				     XELPDP_LANE_PCLK_PLL_ACK(0),
				     XE3PLPD_MACCLK_TURNON_LATENCY_MS))
		drm_warn(display->drm, "PHY %c PLL MacCLK assertion ack not done\n",
			 phy_name(phy));

	intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
		     XELPDP_FORWARD_CLOCK_UNGATE,
		     XELPDP_FORWARD_CLOCK_UNGATE);

	/* Deassert pipe resets and clear any stale pulse status bits */
	intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
		     lane_pipe_reset | lane_phy_pulse_status, 0);

	if (intel_de_wait_for_clear_ms(display, XELPDP_PORT_BUF_CTL2(display, port),
				       lane_phy_current_status,
				       XE3PLPD_RESET_END_LATENCY_MS))
		drm_warn(display->drm, "PHY %c failed to bring out of lane reset\n",
			 phy_name(phy));

	if (intel_de_wait_for_set_ms(display, XELPDP_PORT_BUF_CTL2(display, port),
				     lane_phy_pulse_status,
				     XE3PLPD_RATE_CALIB_DONE_LATENCY_MS))
		drm_warn(display->drm, "PHY %c PLL rate not changed\n",
			 phy_name(phy));

	/* Acknowledge the rate-calibration-done pulse (write-to-clear) */
	intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port), lane_phy_pulse_status, 0);
}
1180 
1181 static bool intel_lt_phy_is_hdmi(const struct intel_lt_phy_pll_state *ltpll)
1182 {
1183 	u8 mode = REG_FIELD_GET8(LT_PHY_VDR_MODE_ENCODING_MASK, ltpll->config[0]);
1184 
1185 	if (mode == MODE_HDMI_20 || mode == MODE_HDMI_FRL)
1186 		return true;
1187 
1188 	return false;
1189 }
1190 
1191 static bool intel_lt_phy_is_dp(const struct intel_lt_phy_pll_state *ltpll)
1192 {
1193 	u8 mode = REG_FIELD_GET8(LT_PHY_VDR_MODE_ENCODING_MASK, ltpll->config[0]);
1194 
1195 	if (mode == MODE_DP)
1196 		return true;
1197 
1198 	return false;
1199 }
1200 
/*
 * Program PORT_CLOCK_CTL (and lane reversal in PORT_BUF_CTL1) for the
 * requested PLL state: clock source selection, forward clock ungating and
 * SSC enables.
 */
static void
intel_lt_phy_program_port_clock_ctl(struct intel_encoder *encoder,
				    const struct intel_lt_phy_pll_state *ltpll,
				    int port_clock,
				    bool lane_reversal)
{
	struct intel_display *display = to_intel_display(encoder);
	u32 val = 0;

	intel_de_rmw(display, XELPDP_PORT_BUF_CTL1(display, encoder->port),
		     XELPDP_PORT_REVERSAL,
		     lane_reversal ? XELPDP_PORT_REVERSAL : 0);

	val |= XELPDP_FORWARD_CLOCK_UNGATE;

	/*
	 * We actually mean MACCLK here and not MAXPCLK when using LT Phy
	 * but since the register bits still remain the same we use
	 * the same definition
	 */
	if (intel_lt_phy_is_hdmi(ltpll) && intel_hdmi_is_frl(port_clock))
		val |= XELPDP_DDI_CLOCK_SELECT_PREP(display, XELPDP_DDI_CLOCK_SELECT_DIV18CLK);
	else
		val |= XELPDP_DDI_CLOCK_SELECT_PREP(display, XELPDP_DDI_CLOCK_SELECT_MAXPCLK);

	/* DP 2.0 UHBR10/UHBR20 rates run on MPLLA; everything else uses PLLB */
	if (port_clock == 1000000 || port_clock == 2000000)
		val |= XELPDP_SSC_ENABLE_PLLA;
	else
		val |= ltpll->ssc_enabled ? XELPDP_SSC_ENABLE_PLLB : 0;

	intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port),
		     XELPDP_LANE1_PHY_CLOCK_SELECT | XELPDP_FORWARD_CLOCK_UNGATE |
		     XELPDP_DDI_CLOCK_SELECT_MASK(display) | XELPDP_SSC_ENABLE_PLLA |
		     XELPDP_SSC_ENABLE_PLLB, val);
}
1237 
1238 static u32 intel_lt_phy_get_dp_clock(u8 rate)
1239 {
1240 	switch (rate) {
1241 	case 0:
1242 		return 162000;
1243 	case 1:
1244 		return 270000;
1245 	case 2:
1246 		return 540000;
1247 	case 3:
1248 		return 810000;
1249 	case 4:
1250 		return 216000;
1251 	case 5:
1252 		return 243000;
1253 	case 6:
1254 		return 324000;
1255 	case 7:
1256 		return 432000;
1257 	case 8:
1258 		return 1000000;
1259 	case 9:
1260 		return 1350000;
1261 	case 10:
1262 		return 2000000;
1263 	case 11:
1264 		return 675000;
1265 	default:
1266 		MISSING_CASE(rate);
1267 		return 0;
1268 	}
1269 }
1270 
1271 static bool
1272 intel_lt_phy_config_changed(struct intel_encoder *encoder,
1273 			    const struct intel_lt_phy_pll_state *ltpll,
1274 			    u32 port_clock)
1275 {
1276 	u8 val, rate;
1277 	u32 clock;
1278 
1279 	val = intel_lt_phy_read(encoder, INTEL_LT_PHY_LANE0,
1280 				LT_PHY_VDR_0_CONFIG);
1281 	rate = REG_FIELD_GET8(LT_PHY_VDR_RATE_ENCODING_MASK, val);
1282 
1283 	/*
1284 	 * The only time we do not reconfigure the PLL is when we are
1285 	 * using 1.62 Gbps clock since PHY PLL defaults to that
1286 	 * otherwise we always need to reconfigure it.
1287 	 */
1288 	if (intel_lt_phy_is_dp(ltpll)) {
1289 		clock = intel_lt_phy_get_dp_clock(rate);
1290 		if (port_clock == 1620000 && port_clock == clock)
1291 			return false;
1292 	}
1293 
1294 	return true;
1295 }
1296 
1297 static struct ref_tracker *intel_lt_phy_transaction_begin(struct intel_encoder *encoder)
1298 {
1299 	struct intel_display *display = to_intel_display(encoder);
1300 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1301 	struct ref_tracker *wakeref;
1302 
1303 	intel_psr_pause(intel_dp);
1304 	wakeref = intel_display_power_get(display, POWER_DOMAIN_DC_OFF);
1305 
1306 	return wakeref;
1307 }
1308 
/*
 * Counterpart of intel_lt_phy_transaction_begin(): resume PSR and drop the
 * DC_OFF power reference taken for the message-bus transaction.
 */
static void intel_lt_phy_transaction_end(struct intel_encoder *encoder, struct ref_tracker *wakeref)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_psr_resume(intel_dp);
	intel_display_power_put(display, POWER_DOMAIN_DC_OFF, wakeref);
}
1317 
1318 static const struct intel_lt_phy_pll_params *
1319 intel_lt_phy_pll_tables_get(struct intel_crtc_state *crtc_state,
1320 			    struct intel_encoder *encoder)
1321 {
1322 	if (intel_crtc_has_dp_encoder(crtc_state)) {
1323 		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1324 			return xe3plpd_lt_edp_tables;
1325 
1326 		return xe3plpd_lt_dp_tables;
1327 	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
1328 		return xe3plpd_lt_hdmi_tables;
1329 	}
1330 
1331 	MISSING_CASE(encoder->type);
1332 	return NULL;
1333 }
1334 
1335 static bool
1336 intel_lt_phy_pll_is_ssc_enabled(struct intel_crtc_state *crtc_state,
1337 				struct intel_encoder *encoder)
1338 {
1339 	struct intel_display *display = to_intel_display(encoder);
1340 
1341 	if (intel_crtc_has_dp_encoder(crtc_state)) {
1342 		if (intel_panel_use_ssc(display)) {
1343 			struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1344 
1345 			return (intel_dp->dpcd[DP_MAX_DOWNSPREAD] & DP_MAX_DOWNSPREAD_0_5);
1346 		}
1347 	}
1348 
1349 	return false;
1350 }
1351 
/*
 * Multiply a Q32.32 fixed-point value by a u32, returning a Q32.32 result.
 *
 * The previous open-coded split into 32-bit halves reconstructed exactly
 * the low 64 bits of the (up to 96-bit) product — which is precisely what
 * a plain u64 multiplication yields modulo 2^64 — so the decomposition
 * was redundant. Callers are expected to keep the true result within
 * 64 bits; on overflow the value wraps, as before.
 */
static u64 mul_q32_u32(u64 a_q32, u32 b)
{
	return a_q32 * (u64)b;
}
1365 
/*
 * Search for a DCO frequency and feedback loop count able to produce
 * @frequency_khz.
 *
 * Tries even loop counts (y) from 2 to 254, first against the preferred
 * DCO band [DCO_MIN_FREQ_MHZ, 16200] and then against a lower fallback
 * band [10000, 12000], progressively relaxing the allowed integer error
 * bound across up to five passes. On success writes the target DCO
 * frequency (Q32.32, MHz, rounded to two decimal places) to
 * @target_dco_mhz and the loop count to @loop_cnt, returning true;
 * returns false when no combination fits.
 */
static bool
calculate_target_dco_and_loop_cnt(u32 frequency_khz, u64 *target_dco_mhz, u32 *loop_cnt)
{
	u32 ppm_value = 1;
	u32 dco_min_freq = DCO_MIN_FREQ_MHZ;
	u32 dco_max_freq = 16200;
	u32 dco_min_freq_low = 10000;
	u32 dco_max_freq_low = 12000;
	u64 val = 0;
	u64 refclk_khz = REF_CLK_KHZ;
	u64 m2div = 0;
	u64 val_with_frac = 0;
	u64 ppm = 0;
	u64 temp0 = 0, temp1, scale;
	int ppm_cnt, dco_count, y;

	for (ppm_cnt = 0; ppm_cnt < 5; ppm_cnt++) {
		/*
		 * NOTE(review): only pass 2 widens the error bound to 2;
		 * the remaining passes repeat the bound of 1 and thus
		 * rescan identical candidates — confirm this is intended.
		 */
		ppm_value = ppm_cnt == 2 ? 2 : 1;
		for (dco_count = 0; dco_count < 2; dco_count++) {
			/* Second iteration: fall back to the low DCO band. */
			if (dco_count == 1) {
				dco_min_freq = dco_min_freq_low;
				dco_max_freq = dco_max_freq_low;
			}
			for (y = 2; y <= 255; y += 2) {
				/* Candidate DCO in MHz: y * freq / 200. */
				val = div64_u64((u64)y * frequency_khz, 200);
				/* Q32.32 divider, scaled via *500/500 to keep precision. */
				m2div = div64_u64(((u64)(val) << 32), refclk_khz);
				m2div = mul_q32_u32(m2div, 500);
				val_with_frac = mul_q32_u32(m2div, refclk_khz);
				val_with_frac = div64_u64(val_with_frac, 500);
				temp1 = Q32_TO_INT(val_with_frac);
				/* |reconstructed - candidate|, integer MHz. */
				temp0 = (temp1 > val) ? (temp1 - val) :
					(val - temp1);
				/*
				 * NOTE(review): despite the name, this is an
				 * integer-truncated ratio, not parts-per-million;
				 * with ppm_value <= 2 it effectively requires the
				 * error to be below 1x/2x the candidate value.
				 */
				ppm = div64_u64(temp0, val);
				if (temp1 >= dco_min_freq &&
				    temp1 <= dco_max_freq &&
				    ppm < ppm_value) {
					/* Round to two places */
					scale = (1ULL << 32) / 100;
					temp0 = DIV_ROUND_UP_ULL(val_with_frac,
								 scale);
					*target_dco_mhz = temp0 * scale;
					*loop_cnt = y;
					return true;
				}
			}
		}
	}

	return false;
}
1416 
1417 static void set_phy_vdr_addresses(struct lt_phy_params *p, int pll_type)
1418 {
1419 	p->pll_reg4.addr = PLL_REG_ADDR(PLL_REG4_ADDR, pll_type);
1420 	p->pll_reg3.addr = PLL_REG_ADDR(PLL_REG3_ADDR, pll_type);
1421 	p->pll_reg5.addr = PLL_REG_ADDR(PLL_REG5_ADDR, pll_type);
1422 	p->pll_reg57.addr = PLL_REG_ADDR(PLL_REG57_ADDR, pll_type);
1423 	p->lf.addr = PLL_REG_ADDR(PLL_LF_ADDR, pll_type);
1424 	p->tdc.addr = PLL_REG_ADDR(PLL_TDC_ADDR, pll_type);
1425 	p->ssc.addr = PLL_REG_ADDR(PLL_SSC_ADDR, pll_type);
1426 	p->bias2.addr = PLL_REG_ADDR(PLL_BIAS2_ADDR, pll_type);
1427 	p->bias_trim.addr = PLL_REG_ADDR(PLL_BIAS_TRIM_ADDR, pll_type);
1428 	p->dco_med.addr = PLL_REG_ADDR(PLL_DCO_MED_ADDR, pll_type);
1429 	p->dco_fine.addr = PLL_REG_ADDR(PLL_DCO_FINE_ADDR, pll_type);
1430 	p->ssc_inj.addr = PLL_REG_ADDR(PLL_SSC_INJ_ADDR, pll_type);
1431 	p->surv_bonus.addr = PLL_REG_ADDR(PLL_SURV_BONUS_ADDR, pll_type);
1432 }
1433 
1434 static void compute_ssc(struct lt_phy_params *p, u32 ana_cfg)
1435 {
1436 	int ssc_stepsize = 0;
1437 	int ssc_steplen = 0;
1438 	int ssc_steplog = 0;
1439 
1440 	p->ssc.val = (1 << 31) | (ana_cfg << 24) | (ssc_steplog << 16) |
1441 		(ssc_stepsize << 8) | ssc_steplen;
1442 }
1443 
1444 static void compute_bias2(struct lt_phy_params *p)
1445 {
1446 	u32 ssc_en_local = 0;
1447 	u64 dynctrl_ovrd_en = 0;
1448 
1449 	p->bias2.val = (dynctrl_ovrd_en << 31) | (ssc_en_local << 30) |
1450 		(1 << 23) | (1 << 24) | (32 << 16) | (1 << 8);
1451 }
1452 
1453 static void compute_tdc(struct lt_phy_params *p, u64 tdc_fine)
1454 {
1455 	u32 settling_time = 15;
1456 	u32 bias_ovr_en = 1;
1457 	u32 coldstart = 1;
1458 	u32 true_lock = 2;
1459 	u32 early_lock = 1;
1460 	u32 lock_ovr_en = 1;
1461 	u32 lock_thr = tdc_fine ? 3 : 5;
1462 	u32 unlock_thr = tdc_fine ? 5 : 11;
1463 
1464 	p->tdc.val = (u32)((2 << 30) + (settling_time << 16) + (bias_ovr_en << 15) +
1465 		    (lock_ovr_en << 14) + (coldstart << 12) + (true_lock << 10) +
1466 		    (early_lock << 8) + (unlock_thr << 4) + lock_thr);
1467 }
1468 
/*
 * Build the PLL medium-band DCO control register value. Both the
 * enable and dynamic-adjust controls are left off here.
 */
static void compute_dco_med(struct lt_phy_params *p)
{
	u32 cselmed_en = 0;
	u32 cselmed_dyn_adj = 0;
	u32 cselmed_ratio = 39;
	u32 cselmed_thr = 8;

	/*
	 * NOTE(review): cselmed_thr = 8 shifted left by 21 sets bit 24,
	 * which overlaps the ratio field at bits 24+; because '+' (not
	 * '|') is used, the carry bumps the ratio from 39 to 40. Confirm
	 * whether the threshold field is really 3 bits wide and this
	 * carry is intended.
	 */
	p->dco_med.val = (cselmed_en << 31) + (cselmed_dyn_adj << 30) +
		(cselmed_ratio << 24) + (cselmed_thr << 21);
}
1479 
1480 static void compute_dco_fine(struct lt_phy_params *p, u32 dco_12g)
1481 {
1482 	u32 dco_fine0_tune_2_0 = 0;
1483 	u32 dco_fine1_tune_2_0 = 0;
1484 	u32 dco_fine2_tune_2_0 = 0;
1485 	u32 dco_fine3_tune_2_0 = 0;
1486 	u32 dco_dith0_tune_2_0 = 0;
1487 	u32 dco_dith1_tune_2_0 = 0;
1488 
1489 	dco_fine0_tune_2_0 = dco_12g ? 4 : 3;
1490 	dco_fine1_tune_2_0 = 2;
1491 	dco_fine2_tune_2_0 = dco_12g ? 2 : 1;
1492 	dco_fine3_tune_2_0 = 5;
1493 	dco_dith0_tune_2_0 = dco_12g ? 4 : 3;
1494 	dco_dith1_tune_2_0 = 2;
1495 
1496 	p->dco_fine.val = (dco_dith1_tune_2_0 << 19) +
1497 		(dco_dith0_tune_2_0 << 16) +
1498 		(dco_fine3_tune_2_0 << 11) +
1499 		(dco_fine2_tune_2_0 << 8) +
1500 		(dco_fine1_tune_2_0 << 3) +
1501 		dco_fine0_tune_2_0;
1502 }
1503 
/*
 * intel_lt_phy_calculate_hdmi_state - compute PLL VDR programming for an
 * arbitrary HDMI clock
 * @lt_state: PLL state to fill (config bytes, VDR addresses and data)
 * @frequency_khz: target port clock in kHz
 *
 * Finds a DCO frequency/loop-count pair for @frequency_khz, derives the
 * feedback divider, loop filter, TDC, SSC, bias and DCO tuning register
 * values, and packs them (address + 4 data bytes each) into @lt_state.
 *
 * Returns 0 on success, -EINVAL when no DCO configuration fits or the
 * integer feedback divider would exceed its 9-bit range.
 */
int
intel_lt_phy_calculate_hdmi_state(struct intel_lt_phy_pll_state *lt_state,
				  u32 frequency_khz)
{
/* Split a 32-bit register value into bytes; data[i][0] holds the MSB. */
#define DATA_ASSIGN(i, pll_reg)	\
	do {			\
		lt_state->data[i][0] = (u8)((((pll_reg).val) & 0xFF000000) >> 24); \
		lt_state->data[i][1] = (u8)((((pll_reg).val) & 0x00FF0000) >> 16); \
		lt_state->data[i][2] = (u8)((((pll_reg).val) & 0x0000FF00) >> 8); \
		lt_state->data[i][3] = (u8)((((pll_reg).val) & 0x000000FF));	\
	} while (0)
/* Split a 16-bit VDR register address into MSB/LSB bytes. */
#define ADDR_ASSIGN(i, pll_reg)	\
	do {			\
		lt_state->addr_msb[i] = ((pll_reg).addr >> 8) & 0xFF;	\
		lt_state->addr_lsb[i] = (pll_reg).addr & 0xFF;		\
	} while (0)

	bool found = false;
	struct lt_phy_params p;
	u32 dco_fmin = DCO_MIN_FREQ_MHZ;
	u64 refclk_khz = REF_CLK_KHZ;
	u32 refclk_mhz_int = REF_CLK_KHZ / 1000;
	u64 m2div = 0;			/* feedback divider, Q32.32 */
	u64 target_dco_mhz = 0;		/* Q32.32 MHz */
	u64 tdc_fine, tdc_targetcnt;
	u64 feedfwd_gain ,feedfwd_cal_en;
	u64 tdc_res = 30;
	u32 prop_coeff;
	u32 int_coeff;
	u32 ndiv = 1;
	u32 m1div = 1, m2div_int, m2div_frac;
	u32 frac_en;
	u32 ana_cfg;
	u32 loop_cnt = 0;
	u32 gain_ctrl = 2;
	u32 postdiv = 0;
	u32 dco_12g = 0;		/* 1 when the low DCO band is selected */
	u32 pll_type = 0;
	u32 d1 = 2, d3 = 5, d4 = 0, d5 = 0;
	u32 d6 = 0, d6_new = 0;
	u32 d7, d8 = 0;
	u32 bonus_7_0 = 0;
	u32 csel2fo = 11;
	u32 csel2fo_ovrd_en = 1;
	u64 temp0, temp1, temp2, temp3;

	/* Registers with fixed contents, independent of the target rate. */
	p.surv_bonus.val = (bonus_7_0 << 16);
	p.pll_reg4.val = (refclk_mhz_int << 17) +
		(ndiv << 9) + (1 << 4);
	p.bias_trim.val = (csel2fo_ovrd_en << 30) + (csel2fo << 24);
	p.ssc_inj.val = 0;
	found = calculate_target_dco_and_loop_cnt(frequency_khz, &target_dco_mhz, &loop_cnt);
	if (!found)
		return -EINVAL;

	/* m2div = DCO / (refclk * ndiv * m1div), kept in Q32.32. */
	m2div = div64_u64(target_dco_mhz, (refclk_khz * ndiv * m1div));
	m2div = mul_q32_u32(m2div, 1000);
	/* The integer part of the feedback divider is a 9-bit field. */
	if (Q32_TO_INT(m2div) > 511)
		return -EINVAL;

	m2div_int = (u32)Q32_TO_INT(m2div);
	m2div_frac = (u32)(Q32_TO_FRAC(m2div));
	frac_en = (m2div_frac > 0) ? 1 : 0;

	/* TDC resolution differs between fractional and integer mode. */
	if (frac_en > 0)
		tdc_res = 70;
	else
		tdc_res = 36;
	tdc_fine = tdc_res > 50 ? 1 : 0;
	/* Two target-count formulas, selected by TDC resolution. */
	temp0 = tdc_res * 40 * 11;
	temp1 = div64_u64(((4 * TDC_RES_MULTIPLIER) + temp0) * 500, temp0 * refclk_khz);
	temp2 = div64_u64(temp0 * refclk_khz, 1000);
	temp3 = div64_u64(((8 * TDC_RES_MULTIPLIER) + temp2), temp2);
	tdc_targetcnt = tdc_res < 50 ? (int)(temp1) : (int)(temp3);
	tdc_targetcnt = (int)(tdc_targetcnt / 2);
	/* Feed-forward gain only applies in fractional mode. */
	temp0 = mul_q32_u32(target_dco_mhz, tdc_res);
	temp0 >>= 32;
	feedfwd_gain = (m2div_frac > 0) ? div64_u64(m1div * TDC_RES_MULTIPLIER, temp0) : 0;
	feedfwd_cal_en = frac_en;

	/* Loop filter coefficients and analog config depend on the DCO band. */
	temp0 = (u32)Q32_TO_INT(target_dco_mhz);
	prop_coeff = (temp0 >= dco_fmin) ? 3 : 4;
	int_coeff = (temp0 >= dco_fmin) ? 7 : 8;
	ana_cfg = (temp0 >= dco_fmin) ? 8 : 6;
	dco_12g = (temp0 >= dco_fmin) ? 0 : 1;

	if (temp0 > 12960)
		d7 = 10;
	else
		d7 = 8;

	/* loop_cnt is always even, so d4 == loop_cnt. */
	d8 = loop_cnt / 2;
	d4 = d8 * 2;

	/* Compute pll_reg3,5,57 & lf */
	p.pll_reg3.val = (u32)((d4 << 21) + (d3 << 18) + (d1 << 15) + (m2div_int << 5));
	p.pll_reg5.val = m2div_frac;
	/* d5/d6 are currently always 0, so the defaults 9/40 apply. */
	postdiv = (d5 == 0) ? 9 : d5;
	d6_new = (d6 == 0) ? 40 : d6;
	p.pll_reg57.val = (d7 << 24) + (postdiv << 15) + (d8 << 7) + d6_new;
	p.lf.val = (u32)((frac_en << 31) + (1 << 30) + (frac_en << 29) +
		   (feedfwd_cal_en << 28) + (tdc_fine << 27) +
		   (gain_ctrl << 24) + (feedfwd_gain << 16) +
		   (int_coeff << 12) + (prop_coeff << 8) + tdc_targetcnt);

	compute_ssc(&p, ana_cfg);
	compute_bias2(&p);
	compute_tdc(&p, tdc_fine);
	compute_dco_med(&p);
	compute_dco_fine(&p, dco_12g);

	/* Select the PLL instance and resolve its register addresses. */
	pll_type = ((frequency_khz == 10000) || (frequency_khz == 20000) ||
		    (frequency_khz == 2500) || (dco_12g == 1)) ? 0 : 1;
	set_phy_vdr_addresses(&p, pll_type);

	/* Pack the configuration bytes and all 13 registers into lt_state. */
	lt_state->config[0] = 0x84;
	lt_state->config[1] = 0x2d;
	ADDR_ASSIGN(0, p.pll_reg4);
	ADDR_ASSIGN(1, p.pll_reg3);
	ADDR_ASSIGN(2, p.pll_reg5);
	ADDR_ASSIGN(3, p.pll_reg57);
	ADDR_ASSIGN(4, p.lf);
	ADDR_ASSIGN(5, p.tdc);
	ADDR_ASSIGN(6, p.ssc);
	ADDR_ASSIGN(7, p.bias2);
	ADDR_ASSIGN(8, p.bias_trim);
	ADDR_ASSIGN(9, p.dco_med);
	ADDR_ASSIGN(10, p.dco_fine);
	ADDR_ASSIGN(11, p.ssc_inj);
	ADDR_ASSIGN(12, p.surv_bonus);
	DATA_ASSIGN(0, p.pll_reg4);
	DATA_ASSIGN(1, p.pll_reg3);
	DATA_ASSIGN(2, p.pll_reg5);
	DATA_ASSIGN(3, p.pll_reg57);
	DATA_ASSIGN(4, p.lf);
	DATA_ASSIGN(5, p.tdc);
	DATA_ASSIGN(6, p.ssc);
	DATA_ASSIGN(7, p.bias2);
	DATA_ASSIGN(8, p.bias_trim);
	DATA_ASSIGN(9, p.dco_med);
	DATA_ASSIGN(10, p.dco_fine);
	DATA_ASSIGN(11, p.ssc_inj);
	DATA_ASSIGN(12, p.surv_bonus);

	return 0;
}
1650 
1651 static int
1652 intel_lt_phy_calc_hdmi_port_clock(struct intel_display *display,
1653 				  const struct intel_lt_phy_pll_state *lt_state)
1654 {
1655 #define REGVAL(i) (				\
1656 	(lt_state->data[i][3])		|	\
1657 	(lt_state->data[i][2] << 8)	|	\
1658 	(lt_state->data[i][1] << 16)	|	\
1659 	(lt_state->data[i][0] << 24)		\
1660 )
1661 
1662 	int clk = 0;
1663 	u32 d8, pll_reg_5, pll_reg_3, pll_reg_57, m2div_frac, m2div_int;
1664 	u64 temp0, temp1;
1665 	/*
1666 	 * The algorithm uses '+' to combine bitfields when
1667 	 * constructing PLL_reg3 and PLL_reg57:
1668 	 * PLL_reg57 = (D7 << 24) + (postdiv << 15) + (D8 << 7) + D6_new;
1669 	 * PLL_reg3 = (D4 << 21) + (D3 << 18) + (D1 << 15) + (m2div_int << 5);
1670 	 *
1671 	 * However, this is likely intended to be a bitwise OR operation,
1672 	 * as each field occupies distinct, non-overlapping bits in the register.
1673 	 *
1674 	 * PLL_reg57 is composed of following fields packed into a 32-bit value:
1675 	 * - D7: max value 10 -> fits in 4 bits -> placed at bits 24-27
1676 	 * - postdiv: max value 9 -> fits in 4 bits -> placed at bits 15-18
1677 	 * - D8: derived from loop_cnt / 2, max 127 -> fits in 7 bits
1678 	 *	(though 8 bits are given to it) -> placed at bits 7-14
1679 	 * - D6_new: fits in lower 7 bits -> placed at bits 0-6
1680 	 * PLL_reg57 = (D7 << 24) | (postdiv << 15) | (D8 << 7) | D6_new;
1681 	 *
1682 	 * Similarly, PLL_reg3 is packed as:
1683 	 * - D4: max value 256 -> fits in 9 bits -> placed at bits 21-29
1684 	 * - D3: max value 9 -> fits in 4 bits -> placed at bits 18-21
1685 	 * - D1: max value 2 -> fits in 2 bits -> placed at bits 15-16
1686 	 * - m2div_int: max value 511 -> fits in 9 bits (10 bits allocated)
1687 	 *   -> placed at bits 5-14
1688 	 * PLL_reg3 = (D4 << 21) | (D3 << 18) | (D1 << 15) | (m2div_int << 5);
1689 	 */
1690 	pll_reg_5 = REGVAL(2);
1691 	pll_reg_3 = REGVAL(1);
1692 	pll_reg_57 = REGVAL(3);
1693 	m2div_frac = pll_reg_5;
1694 
1695 	/*
1696 	 * From forward algorithm we know
1697 	 * m2div = 2 * m2
1698 	 * val = y * frequency * 5
1699 	 * So now,
1700 	 * frequency = (m2 * 2 * refclk_khz / (d8 * 10))
1701 	 * frequency = (m2div * refclk_khz / (d8 * 10))
1702 	 */
1703 	d8 = (pll_reg_57 & REG_GENMASK(14, 7)) >> 7;
1704 	if (d8 == 0) {
1705 		drm_WARN_ON(display->drm,
1706 			    "Invalid port clock using lowest HDMI portclock\n");
1707 		return xe3plpd_lt_hdmi_tables[0].clock_rate;
1708 	}
1709 	m2div_int = (pll_reg_3  & REG_GENMASK(14, 5)) >> 5;
1710 	temp0 = ((u64)m2div_frac * REF_CLK_KHZ) >> 32;
1711 	temp1 = (u64)m2div_int * REF_CLK_KHZ;
1712 
1713 	clk = div_u64((temp1 + temp0), d8 * 10);
1714 
1715 	return clk;
1716 }
1717 
1718 int
1719 intel_lt_phy_calc_port_clock(struct intel_display *display,
1720 			     const struct intel_lt_phy_pll_state *lt_state)
1721 {
1722 	int clk;
1723 	u8 mode, rate;
1724 
1725 	mode = REG_FIELD_GET8(LT_PHY_VDR_MODE_ENCODING_MASK,
1726 			      lt_state->config[0]);
1727 	/*
1728 	 * For edp/dp read the clock value from the tables
1729 	 * and return the clock as the algorithm used for
1730 	 * calculating the port clock does not exactly matches
1731 	 * with edp/dp clock.
1732 	 */
1733 	if (mode == MODE_DP) {
1734 		rate = REG_FIELD_GET8(LT_PHY_VDR_RATE_ENCODING_MASK,
1735 				      lt_state->config[0]);
1736 		clk = intel_lt_phy_get_dp_clock(rate);
1737 	} else if (mode == MODE_HDMI_20) {
1738 		clk = intel_lt_phy_calc_hdmi_port_clock(display, lt_state);
1739 	} else {
1740 		drm_WARN_ON(display->drm, "Unsupported LT PHY Mode!\n");
1741 		clk = 25200;
1742 	}
1743 
1744 	return clk;
1745 }
1746 
1747 int
1748 intel_lt_phy_pll_calc_state(struct intel_crtc_state *crtc_state,
1749 			    struct intel_encoder *encoder,
1750 			    struct intel_dpll_hw_state *hw_state)
1751 {
1752 	struct intel_display *display = to_intel_display(crtc_state);
1753 	const struct intel_lt_phy_pll_params *tables;
1754 	int i;
1755 
1756 	memset(hw_state, 0, sizeof(*hw_state));
1757 
1758 	tables = intel_lt_phy_pll_tables_get(crtc_state, encoder);
1759 	if (!tables)
1760 		return -EINVAL;
1761 
1762 	for (i = 0; tables[i].name; i++) {
1763 		int clock = intel_lt_phy_calc_port_clock(display, tables[i].state);
1764 
1765 		drm_WARN_ON(display->drm, !intel_dpll_clock_matches(clock, tables[i].clock_rate));
1766 		if (intel_dpll_clock_matches(crtc_state->port_clock, clock)) {
1767 			hw_state->ltpll = *tables[i].state;
1768 			if (intel_crtc_has_dp_encoder(crtc_state)) {
1769 				if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1770 					hw_state->ltpll.config[2] = 1;
1771 			}
1772 			hw_state->ltpll.ssc_enabled =
1773 				intel_lt_phy_pll_is_ssc_enabled(crtc_state, encoder);
1774 			hw_state->ltpll.lane_count = crtc_state->lane_count;
1775 			return 0;
1776 		}
1777 	}
1778 
1779 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
1780 		hw_state->ltpll.lane_count = crtc_state->lane_count;
1781 		return intel_lt_phy_calculate_hdmi_state(&hw_state->ltpll,
1782 							 crtc_state->port_clock);
1783 	}
1784 
1785 	return -EINVAL;
1786 }
1787 
/*
 * Fill @hw_state for Thunderbolt alt mode: the LT PHY PLL is not used,
 * so the state is zeroed and only the tbt_mode flag is set.
 */
void intel_lt_phy_tbt_pll_calc_state(struct intel_dpll_hw_state *hw_state)
{
	memset(hw_state, 0, sizeof(*hw_state));

	hw_state->ltpll.tbt_mode = true;
}
1794 
1795 static void
1796 intel_lt_phy_program_pll(struct intel_encoder *encoder,
1797 			 const struct intel_lt_phy_pll_state *ltpll)
1798 {
1799 	u8 owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
1800 	int i, j, k;
1801 
1802 	intel_lt_phy_write(encoder, owned_lane_mask, LT_PHY_VDR_0_CONFIG,
1803 			   ltpll->config[0], MB_WRITE_COMMITTED);
1804 	intel_lt_phy_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_VDR_1_CONFIG,
1805 			   ltpll->config[1], MB_WRITE_COMMITTED);
1806 	intel_lt_phy_write(encoder, owned_lane_mask, LT_PHY_VDR_2_CONFIG,
1807 			   ltpll->config[2], MB_WRITE_COMMITTED);
1808 
1809 	for (i = 0; i <= 12; i++) {
1810 		intel_lt_phy_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_VDR_X_ADDR_MSB(i),
1811 				   ltpll->addr_msb[i],
1812 				   MB_WRITE_COMMITTED);
1813 		intel_lt_phy_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_VDR_X_ADDR_LSB(i),
1814 				   ltpll->addr_lsb[i],
1815 				   MB_WRITE_COMMITTED);
1816 
1817 		for (j = 3, k = 0; j >= 0; j--, k++)
1818 			intel_lt_phy_write(encoder, INTEL_LT_PHY_LANE0,
1819 					   LT_PHY_VDR_X_DATAY(i, j),
1820 					   ltpll->data[i][k],
1821 					   MB_WRITE_COMMITTED);
1822 	}
1823 }
1824 
1825 static void
1826 intel_lt_phy_enable_disable_tx(struct intel_encoder *encoder,
1827 			       const struct intel_lt_phy_pll_state *ltpll)
1828 {
1829 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1830 	bool lane_reversal = dig_port->lane_reversal;
1831 	u8 lane_count = ltpll->lane_count;
1832 	bool is_dp_alt =
1833 		intel_tc_port_in_dp_alt_mode(dig_port);
1834 	enum intel_tc_pin_assignment tc_pin =
1835 		intel_tc_port_get_pin_assignment(dig_port);
1836 	u8 transmitter_mask = 0;
1837 
1838 	/*
1839 	 * We have a two transmitters per lane and total of 2 PHY lanes so a total
1840 	 * of 4 transmitters. We prepare a mask of the lanes that need to be activated
1841 	 * and the transmitter which need to be activated for each lane. TX 0,1 correspond
1842 	 * to LANE0 and TX 2, 3 correspond to LANE1.
1843 	 */
1844 
1845 	switch (lane_count) {
1846 	case 1:
1847 		transmitter_mask = lane_reversal ? REG_BIT8(3) : REG_BIT8(0);
1848 		if (is_dp_alt) {
1849 			if (tc_pin == INTEL_TC_PIN_ASSIGNMENT_D)
1850 				transmitter_mask = REG_BIT8(0);
1851 			else
1852 				transmitter_mask = REG_BIT8(1);
1853 		}
1854 		break;
1855 	case 2:
1856 		transmitter_mask = lane_reversal ? REG_GENMASK8(3, 2) : REG_GENMASK8(1, 0);
1857 		if (is_dp_alt)
1858 			transmitter_mask = REG_GENMASK8(1, 0);
1859 		break;
1860 	case 3:
1861 		transmitter_mask = lane_reversal ? REG_GENMASK8(3, 1) : REG_GENMASK8(2, 0);
1862 		if (is_dp_alt)
1863 			transmitter_mask = REG_GENMASK8(2, 0);
1864 		break;
1865 	case 4:
1866 		transmitter_mask = REG_GENMASK8(3, 0);
1867 		break;
1868 	default:
1869 		MISSING_CASE(lane_count);
1870 		transmitter_mask = REG_GENMASK8(3, 0);
1871 		break;
1872 	}
1873 
1874 	if (transmitter_mask & BIT(0)) {
1875 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_TXY_CTL10(0),
1876 				       LT_PHY_TX_LANE_ENABLE, LT_PHY_TXY_CTL10_MAC(0),
1877 				       LT_PHY_TX_LANE_ENABLE);
1878 	} else {
1879 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_TXY_CTL10(0),
1880 				       0, LT_PHY_TXY_CTL10_MAC(0), 0);
1881 	}
1882 
1883 	if (transmitter_mask & BIT(1)) {
1884 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_TXY_CTL10(1),
1885 				       LT_PHY_TX_LANE_ENABLE, LT_PHY_TXY_CTL10_MAC(1),
1886 				       LT_PHY_TX_LANE_ENABLE);
1887 	} else {
1888 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE0, LT_PHY_TXY_CTL10(1),
1889 				       0, LT_PHY_TXY_CTL10_MAC(1), 0);
1890 	}
1891 
1892 	if (transmitter_mask & BIT(2)) {
1893 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE1, LT_PHY_TXY_CTL10(0),
1894 				       LT_PHY_TX_LANE_ENABLE, LT_PHY_TXY_CTL10_MAC(0),
1895 				       LT_PHY_TX_LANE_ENABLE);
1896 	} else {
1897 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE1, LT_PHY_TXY_CTL10(0),
1898 				       0, LT_PHY_TXY_CTL10_MAC(0), 0);
1899 	}
1900 
1901 	if (transmitter_mask & BIT(3)) {
1902 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE1, LT_PHY_TXY_CTL10(1),
1903 				       LT_PHY_TX_LANE_ENABLE, LT_PHY_TXY_CTL10_MAC(1),
1904 				       LT_PHY_TX_LANE_ENABLE);
1905 	} else {
1906 		intel_lt_phy_p2p_write(encoder, INTEL_LT_PHY_LANE1, LT_PHY_TXY_CTL10(1),
1907 				       0, LT_PHY_TXY_CTL10_MAC(1), 0);
1908 	}
1909 }
1910 
1911 void intel_lt_phy_pll_enable(struct intel_encoder *encoder,
1912 			     struct intel_dpll *pll,
1913 			     const struct intel_dpll_hw_state *dpll_hw_state)
1914 {
1915 	struct intel_display *display = to_intel_display(encoder);
1916 	int port_clock = intel_lt_phy_calc_port_clock(display, &dpll_hw_state->ltpll);
1917 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1918 	bool lane_reversal = dig_port->lane_reversal;
1919 	u8 owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
1920 	enum phy phy = intel_encoder_to_phy(encoder);
1921 	enum port port = encoder->port;
1922 	struct ref_tracker *wakeref = 0;
1923 	u32 lane_phy_pulse_status = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
1924 					? (XE3PLPDP_LANE_PHY_PULSE_STATUS(0) |
1925 					   XE3PLPDP_LANE_PHY_PULSE_STATUS(1))
1926 					: XE3PLPDP_LANE_PHY_PULSE_STATUS(0);
1927 	u8 rate_update;
1928 
1929 	wakeref = intel_lt_phy_transaction_begin(encoder);
1930 
1931 	/* 1. Enable MacCLK at default 162 MHz frequency. */
1932 	intel_lt_phy_lane_reset(encoder, dpll_hw_state->ltpll.lane_count);
1933 
1934 	/* 2. Program PORT_CLOCK_CTL register to configure clock muxes, gating, and SSC. */
1935 	intel_lt_phy_program_port_clock_ctl(encoder, &dpll_hw_state->ltpll,
1936 					    port_clock, lane_reversal);
1937 
1938 	/* 3. Change owned PHY lanes power to Ready state. */
1939 	intel_lt_phy_powerdown_change_sequence(encoder, owned_lane_mask,
1940 					       XELPDP_P2_STATE_READY);
1941 
1942 	/*
1943 	 * 4. Read the PHY message bus VDR register PHY_VDR_0_Config check enabled PLL type,
1944 	 * encoded rate and encoded mode.
1945 	 */
1946 	if (intel_lt_phy_config_changed(encoder, &dpll_hw_state->ltpll, port_clock)) {
1947 		/*
1948 		 * 5. Program the PHY internal PLL registers over PHY message bus for the desired
1949 		 * frequency and protocol type
1950 		 */
1951 		intel_lt_phy_program_pll(encoder, &dpll_hw_state->ltpll);
1952 
1953 		/* 6. Use the P2P transaction flow */
1954 		/*
1955 		 * 6.1. Set the PHY VDR register 0xCC4[Rate Control VDR Update] = 1 over PHY message
1956 		 * bus for Owned PHY Lanes.
1957 		 */
1958 		/*
1959 		 * 6.2. Poll for P2P Transaction Ready = "1" and read the MAC message bus VDR
1960 		 * register at offset 0xC00 for Owned PHY Lanes*.
1961 		 */
1962 		/* 6.3. Clear P2P transaction Ready bit. */
1963 		intel_lt_phy_p2p_write(encoder, owned_lane_mask, LT_PHY_RATE_UPDATE,
1964 				       LT_PHY_RATE_CONTROL_VDR_UPDATE, LT_PHY_MAC_VDR,
1965 				       LT_PHY_PCLKIN_GATE);
1966 
1967 		/* 7. Program PORT_CLOCK_CTL[PCLK PLL Request LN0] = 0. */
1968 		intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
1969 			     XELPDP_LANE_PCLK_PLL_REQUEST(0), 0);
1970 
1971 		/* 8. Poll for PORT_CLOCK_CTL[PCLK PLL Ack LN0]= 0. */
1972 		if (intel_de_wait_for_clear_us(display, XELPDP_PORT_CLOCK_CTL(display, port),
1973 					       XELPDP_LANE_PCLK_PLL_ACK(0),
1974 					       XE3PLPD_MACCLK_TURNOFF_LATENCY_US))
1975 			drm_warn(display->drm, "PHY %c PLL MacCLK ack deassertion timeout\n",
1976 				 phy_name(phy));
1977 
1978 		/*
1979 		 * 9. Follow the Display Voltage Frequency Switching - Sequence Before Frequency
1980 		 * Change. We handle this step in bxt_set_cdclk().
1981 		 */
1982 		/* 10. Program DDI_CLK_VALFREQ to match intended DDI clock frequency. */
1983 		intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), port_clock);
1984 
1985 		/* 11. Program PORT_CLOCK_CTL[PCLK PLL Request LN0] = 1. */
1986 		intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
1987 			     XELPDP_LANE_PCLK_PLL_REQUEST(0),
1988 			     XELPDP_LANE_PCLK_PLL_REQUEST(0));
1989 
1990 		/* 12. Poll for PORT_CLOCK_CTL[PCLK PLL Ack LN0]= 1. */
1991 		if (intel_de_wait_for_set_ms(display, XELPDP_PORT_CLOCK_CTL(display, port),
1992 					     XELPDP_LANE_PCLK_PLL_ACK(0),
1993 					     XE3PLPD_MACCLK_TURNON_LATENCY_MS))
1994 			drm_warn(display->drm, "PHY %c PLL MacCLK ack assertion timeout\n",
1995 				 phy_name(phy));
1996 
1997 		/*
1998 		 * 13. Ungate the forward clock by setting
1999 		 * PORT_CLOCK_CTL[Forward Clock Ungate] = 1.
2000 		 */
2001 		intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
2002 			     XELPDP_FORWARD_CLOCK_UNGATE,
2003 			     XELPDP_FORWARD_CLOCK_UNGATE);
2004 
2005 		/* 14. SW clears PORT_BUF_CTL2 [PHY Pulse Status]. */
2006 		intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
2007 			     lane_phy_pulse_status,
2008 			     lane_phy_pulse_status);
2009 		/*
2010 		 * 15. Clear the PHY VDR register 0xCC4[Rate Control VDR Update] over
2011 		 * PHY message bus for Owned PHY Lanes.
2012 		 */
2013 		rate_update = intel_lt_phy_read(encoder, INTEL_LT_PHY_LANE0, LT_PHY_RATE_UPDATE);
2014 		rate_update &= ~LT_PHY_RATE_CONTROL_VDR_UPDATE;
2015 		intel_lt_phy_write(encoder, owned_lane_mask, LT_PHY_RATE_UPDATE,
2016 				   rate_update, MB_WRITE_COMMITTED);
2017 
2018 		/* 16. Poll for PORT_BUF_CTL2 register PHY Pulse Status = 1 for Owned PHY Lanes. */
2019 		if (intel_de_wait_for_set_ms(display, XELPDP_PORT_BUF_CTL2(display, port),
2020 					     lane_phy_pulse_status,
2021 					     XE3PLPD_RATE_CALIB_DONE_LATENCY_MS))
2022 			drm_warn(display->drm, "PHY %c PLL rate not changed\n",
2023 				 phy_name(phy));
2024 
2025 		/* 17. SW clears PORT_BUF_CTL2 [PHY Pulse Status]. */
2026 		intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
2027 			     lane_phy_pulse_status,
2028 			     lane_phy_pulse_status);
2029 	} else {
2030 		intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), port_clock);
2031 	}
2032 
2033 	/*
2034 	 * 18. Follow the Display Voltage Frequency Switching - Sequence After Frequency Change.
2035 	 * We handle this step in bxt_set_cdclk()
2036 	 */
2037 	/* 19. Move the PHY powerdown state to Active and program to enable/disable transmitters */
2038 	intel_lt_phy_powerdown_change_sequence(encoder, owned_lane_mask,
2039 					       XELPDP_P0_STATE_ACTIVE);
2040 
2041 	intel_lt_phy_enable_disable_tx(encoder, &dpll_hw_state->ltpll);
2042 	intel_lt_phy_transaction_end(encoder, wakeref);
2043 }
2044 
/*
 * intel_lt_phy_pll_disable - power down the LT PHY PLL for @encoder
 *
 * Implements the hardware PLL-disable sequence (steps numbered inline):
 * reset the owned PHY lanes, drop the MacCLK PLL request, zero
 * DDI_CLK_VALFREQ, gate/deselect the port clocks and finally assert
 * MacCLK reset. The voltage/frequency switching steps are handled in
 * bxt_set_cdclk() as noted inline.
 */
void intel_lt_phy_pll_disable(struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(encoder);
	enum phy phy = intel_encoder_to_phy(encoder);
	enum port port = encoder->port;
	struct ref_tracker *wakeref;
	u8 owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
	/* Each mask covers lane 0, plus lane 1 when both lanes are owned. */
	u32 lane_pipe_reset = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
				? (XELPDP_LANE_PIPE_RESET(0) |
				   XELPDP_LANE_PIPE_RESET(1))
				: XELPDP_LANE_PIPE_RESET(0);
	u32 lane_phy_current_status = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
					? (XELPDP_LANE_PHY_CURRENT_STATUS(0) |
					   XELPDP_LANE_PHY_CURRENT_STATUS(1))
					: XELPDP_LANE_PHY_CURRENT_STATUS(0);
	u32 lane_phy_pulse_status = owned_lane_mask == INTEL_LT_PHY_BOTH_LANES
					? (XE3PLPDP_LANE_PHY_PULSE_STATUS(0) |
					   XE3PLPDP_LANE_PHY_PULSE_STATUS(1))
					: XE3PLPDP_LANE_PHY_PULSE_STATUS(0);

	wakeref = intel_lt_phy_transaction_begin(encoder);

	/* 1. Clear PORT_BUF_CTL2 [PHY Pulse Status]. */
	intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
		     lane_phy_pulse_status,
		     lane_phy_pulse_status);

	/* 2. Set PORT_BUF_CTL2<port> Lane<PHY Lanes Owned> Pipe Reset to 1. */
	intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port), lane_pipe_reset,
		     lane_pipe_reset);

	/* 3. Poll for PORT_BUF_CTL2<port> Lane<PHY Lanes Owned> PHY Current Status == 1. */
	if (intel_de_wait_for_set_us(display, XELPDP_PORT_BUF_CTL2(display, port),
				     lane_phy_current_status,
				     XE3PLPD_RESET_START_LATENCY_US))
		drm_warn(display->drm, "PHY %c failed to reset lane\n",
			 phy_name(phy));

	/* 4. Clear for PHY pulse status on owned PHY lanes. */
	intel_de_rmw(display, XELPDP_PORT_BUF_CTL2(display, port),
		     lane_phy_pulse_status,
		     lane_phy_pulse_status);

	/*
	 * 5. Follow the Display Voltage Frequency Switching -
	 * Sequence Before Frequency Change. We handle this step in bxt_set_cdclk().
	 */
	/* 6. Program PORT_CLOCK_CTL[PCLK PLL Request LN0] = 0. */
	intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
		     XELPDP_LANE_PCLK_PLL_REQUEST(0), 0);

	/* 7. Program DDI_CLK_VALFREQ to 0. */
	intel_de_write(display, DDI_CLK_VALFREQ(encoder->port), 0);

	/* 8. Poll for PORT_CLOCK_CTL[PCLK PLL Ack LN0]= 0. */
	if (intel_de_wait_for_clear_us(display, XELPDP_PORT_CLOCK_CTL(display, port),
				       XELPDP_LANE_PCLK_PLL_ACK(0),
				       XE3PLPD_MACCLK_TURNOFF_LATENCY_US))
		drm_warn(display->drm, "PHY %c PLL MacCLK ack deassertion timeout\n",
			 phy_name(phy));

	/*
	 *  9. Follow the Display Voltage Frequency Switching -
	 *  Sequence After Frequency Change. We handle this step in bxt_set_cdclk().
	 */
	/* 10. Program PORT_CLOCK_CTL register to disable and gate clocks. */
	intel_de_rmw(display, XELPDP_PORT_CLOCK_CTL(display, port),
		     XELPDP_DDI_CLOCK_SELECT_MASK(display) | XELPDP_FORWARD_CLOCK_UNGATE, 0);

	/* 11. Program PORT_BUF_CTL5[MacCLK Reset_0] = 1 to assert MacCLK reset. */
	intel_de_rmw(display, XE3PLPD_PORT_BUF_CTL5(port),
		     XE3PLPD_MACCLK_RESET_0, XE3PLPD_MACCLK_RESET_0);

	intel_lt_phy_transaction_end(encoder, wakeref);
}
2120 
2121 void intel_lt_phy_set_signal_levels(struct intel_encoder *encoder,
2122 				    const struct intel_crtc_state *crtc_state)
2123 {
2124 	struct intel_display *display = to_intel_display(encoder);
2125 	const struct intel_ddi_buf_trans *trans;
2126 	u8 owned_lane_mask;
2127 	struct ref_tracker *wakeref;
2128 	int n_entries, ln;
2129 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2130 
2131 	if (intel_tc_port_in_tbt_alt_mode(dig_port))
2132 		return;
2133 
2134 	owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
2135 
2136 	wakeref = intel_lt_phy_transaction_begin(encoder);
2137 
2138 	trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries);
2139 	if (drm_WARN_ON_ONCE(display->drm, !trans)) {
2140 		intel_lt_phy_transaction_end(encoder, wakeref);
2141 		return;
2142 	}
2143 
2144 	for (ln = 0; ln < crtc_state->lane_count; ln++) {
2145 		int level = intel_ddi_level(encoder, crtc_state, ln);
2146 		int lane = ln / 2;
2147 		int tx = ln % 2;
2148 		u8 lane_mask = lane == 0 ? INTEL_LT_PHY_LANE0 : INTEL_LT_PHY_LANE1;
2149 
2150 		if (!(lane_mask & owned_lane_mask))
2151 			continue;
2152 
2153 		intel_lt_phy_rmw(encoder, lane_mask, LT_PHY_TXY_CTL8(tx),
2154 				 LT_PHY_TX_SWING_LEVEL_MASK | LT_PHY_TX_SWING_MASK,
2155 				 LT_PHY_TX_SWING_LEVEL(trans->entries[level].lt.txswing_level) |
2156 				 LT_PHY_TX_SWING(trans->entries[level].lt.txswing),
2157 				 MB_WRITE_COMMITTED);
2158 
2159 		intel_lt_phy_rmw(encoder, lane_mask, LT_PHY_TXY_CTL2(tx),
2160 				 LT_PHY_TX_CURSOR_MASK,
2161 				 LT_PHY_TX_CURSOR(trans->entries[level].lt.pre_cursor),
2162 				 MB_WRITE_COMMITTED);
2163 		intel_lt_phy_rmw(encoder, lane_mask, LT_PHY_TXY_CTL3(tx),
2164 				 LT_PHY_TX_CURSOR_MASK,
2165 				 LT_PHY_TX_CURSOR(trans->entries[level].lt.main_cursor),
2166 				 MB_WRITE_COMMITTED);
2167 		intel_lt_phy_rmw(encoder, lane_mask, LT_PHY_TXY_CTL4(tx),
2168 				 LT_PHY_TX_CURSOR_MASK,
2169 				 LT_PHY_TX_CURSOR(trans->entries[level].lt.post_cursor),
2170 				 MB_WRITE_COMMITTED);
2171 	}
2172 
2173 	intel_lt_phy_transaction_end(encoder, wakeref);
2174 }
2175 
2176 void intel_lt_phy_dump_hw_state(struct drm_printer *p,
2177 				const struct intel_lt_phy_pll_state *hw_state)
2178 {
2179 	int i, j;
2180 
2181 	drm_printf(p, "lt_phy_pll_hw_state: lane count: %d, ssc enabled: %d, tbt mode: %d\n",
2182 		   hw_state->lane_count, hw_state->ssc_enabled, hw_state->tbt_mode);
2183 
2184 	for (i = 0; i < 3; i++) {
2185 		drm_printf(p, "config[%d] = 0x%.4x,\n",
2186 			   i, hw_state->config[i]);
2187 	}
2188 
2189 	for (i = 0; i <= 12; i++)
2190 		for (j = 3; j >= 0; j--)
2191 			drm_printf(p, "vdr_data[%d][%d] = 0x%.4x,\n",
2192 				   i, j, hw_state->data[i][j]);
2193 }
2194 
2195 bool
2196 intel_lt_phy_pll_compare_hw_state(const struct intel_lt_phy_pll_state *a,
2197 				  const struct intel_lt_phy_pll_state *b)
2198 {
2199 	if (a->tbt_mode || b->tbt_mode)
2200 		return true;
2201 
2202 	/*
2203 	 * With LT PHY values other than VDR0_CONFIG and VDR2_CONFIG are
2204 	 * unreliable. They cannot always be read back since internally
2205 	 * after power gating values are not restored back to the
2206 	 * shadow VDR registers. Thus we do not compare the whole state
2207 	 * just the two VDR registers.
2208 	 */
2209 	if (a->config[0] == b->config[0] &&
2210 	    a->config[2] == b->config[2])
2211 		return true;
2212 
2213 	return false;
2214 }
2215 
2216 static bool intel_lt_phy_pll_is_enabled(struct intel_encoder *encoder)
2217 {
2218 	struct intel_display *display = to_intel_display(encoder);
2219 
2220 	return intel_de_read(display, XELPDP_PORT_CLOCK_CTL(display, encoder->port)) &
2221 			     XELPDP_LANE_PCLK_PLL_ACK(0);
2222 }
2223 
2224 bool intel_lt_phy_tbt_pll_readout_hw_state(struct intel_display *display,
2225 					   struct intel_dpll *pll,
2226 					   struct intel_dpll_hw_state *hw_state)
2227 {
2228 	memset(hw_state, 0, sizeof(*hw_state));
2229 
2230 	hw_state->ltpll.tbt_mode = true;
2231 
2232 	return true;
2233 }
2234 
2235 bool intel_lt_phy_pll_readout_hw_state(struct intel_encoder *encoder,
2236 				       struct intel_lt_phy_pll_state *pll_state)
2237 {
2238 	u8 owned_lane_mask;
2239 	u8 lane;
2240 	struct ref_tracker *wakeref;
2241 	int i, j, k;
2242 
2243 	if (!intel_lt_phy_pll_is_enabled(encoder))
2244 		return false;
2245 
2246 	pll_state->tbt_mode = intel_tc_port_in_tbt_alt_mode(enc_to_dig_port(encoder));
2247 	if (pll_state->tbt_mode)
2248 		return false;
2249 
2250 	owned_lane_mask = intel_lt_phy_get_owned_lane_mask(encoder);
2251 	lane = owned_lane_mask & INTEL_LT_PHY_LANE0 ? : INTEL_LT_PHY_LANE1;
2252 	wakeref = intel_lt_phy_transaction_begin(encoder);
2253 
2254 	pll_state->lane_count = intel_readout_lane_count(encoder, INTEL_LT_PHY_LANE0,
2255 							 INTEL_LT_PHY_LANE1);
2256 	pll_state->config[0] = intel_lt_phy_read(encoder, lane, LT_PHY_VDR_0_CONFIG);
2257 	pll_state->config[1] = intel_lt_phy_read(encoder, INTEL_LT_PHY_LANE0, LT_PHY_VDR_1_CONFIG);
2258 	pll_state->config[2] = intel_lt_phy_read(encoder, lane, LT_PHY_VDR_2_CONFIG);
2259 
2260 	for (i = 0; i <= 12; i++) {
2261 		for (j = 3, k = 0; j >= 0; j--, k++)
2262 			pll_state->data[i][k] =
2263 				intel_lt_phy_read(encoder, INTEL_LT_PHY_LANE0,
2264 						  LT_PHY_VDR_X_DATAY(i, j));
2265 	}
2266 
2267 	intel_lt_phy_transaction_end(encoder, wakeref);
2268 
2269 	return true;
2270 }
2271 
/*
 * Enable the port PLL on an Xe3+ LPD platform by delegating to the
 * common LT PHY PLL enable path.
 *
 * NOTE(review): unlike intel_xe3plpd_pll_disable() there is no TBT-alt
 * mode special case here — presumably handled inside the callee or by
 * the caller; confirm.
 */
void intel_xe3plpd_pll_enable(struct intel_encoder *encoder,
			      struct intel_dpll *pll,
			      const struct intel_dpll_hw_state *dpll_hw_state)
{
	intel_lt_phy_pll_enable(encoder, pll, dpll_hw_state);
}
2278 
/*
 * Disable the port PLL on an Xe3+ LPD platform: TBT-alt mode ports go
 * through the Thunderbolt PLL clock disable path, everything else uses
 * the LT PHY PLL disable sequence.
 */
void intel_xe3plpd_pll_disable(struct intel_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	if (intel_tc_port_in_tbt_alt_mode(dig_port))
		intel_mtl_tbt_pll_disable_clock(encoder);
	else
		intel_lt_phy_pll_disable(encoder);
}
2289 
2290 static void intel_lt_phy_pll_verify_clock(struct intel_display *display,
2291 					  int precomputed_clock,
2292 					  const char *pll_state_name,
2293 					  const struct intel_lt_phy_pll_state *pll_state,
2294 					  bool is_precomputed_state)
2295 {
2296 	struct drm_printer p;
2297 	int clock;
2298 
2299 	clock = intel_lt_phy_calc_port_clock(display, pll_state);
2300 
2301 	if (intel_dpll_clock_matches(clock, precomputed_clock))
2302 		return;
2303 
2304 	drm_warn(display->drm,
2305 		 "PLL state %s (%s): clock difference too high: computed %d, pre-computed %d\n",
2306 		 pll_state_name,
2307 		 is_precomputed_state ? "precomputed" : "computed",
2308 		 clock, precomputed_clock);
2309 
2310 	if (!drm_debug_enabled(DRM_UT_KMS))
2311 		return;
2312 
2313 	p = drm_dbg_printer(display->drm, DRM_UT_KMS, NULL);
2314 
2315 	drm_printf(&p, "PLL state %s (%s):\n",
2316 		   pll_state_name,
2317 		   is_precomputed_state ? "precomputed" : "computed");
2318 	intel_lt_phy_dump_hw_state(&p, pll_state);
2319 }
2320 
2321 static void intel_lt_phy_pll_verify_params(struct intel_display *display,
2322 					   const struct intel_lt_phy_pll_params *pll_params)
2323 {
2324 	struct intel_lt_phy_pll_state pll_state;
2325 
2326 	intel_lt_phy_pll_verify_clock(display, pll_params->clock_rate, pll_params->name, pll_params->state, true);
2327 
2328 	if (!pll_params->is_hdmi)
2329 		return;
2330 
2331 	if (intel_lt_phy_calculate_hdmi_state(&pll_state, pll_params->clock_rate) != 0)
2332 		return;
2333 
2334 	intel_lt_phy_pll_verify_clock(display, pll_params->clock_rate, pll_params->name, &pll_state, false);
2335 }
2336 
2337 static void intel_lt_phy_pll_verify_tables(struct intel_display *display,
2338 					   const struct intel_lt_phy_pll_params *tables)
2339 {
2340 	int i;
2341 
2342 	for (i = 0; tables[i].name; i++)
2343 		intel_lt_phy_pll_verify_params(display, &tables[i]);
2344 }
2345 
/*
 * Sanity-check all precomputed LT PHY PLL tables (DP, eDP and HDMI)
 * by recomputing each entry's port clock from its register state.
 */
void intel_lt_phy_verify_plls(struct intel_display *display)
{
	intel_lt_phy_pll_verify_tables(display, xe3plpd_lt_dp_tables);
	intel_lt_phy_pll_verify_tables(display, xe3plpd_lt_edp_tables);
	intel_lt_phy_pll_verify_tables(display, xe3plpd_lt_hdmi_tables);
}
2352