xref: /linux/drivers/gpu/drm/i915/display/intel_display_device.c (revision 2c1ed907520c50326b8f604907a8478b27881a2e)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2023 Intel Corporation
4  */
5 
6 #include <drm/intel/pciids.h>
7 #include <drm/drm_color_mgmt.h>
8 #include <linux/pci.h>
9 
10 #include "i915_drv.h"
11 #include "i915_reg.h"
12 #include "intel_cx0_phy_regs.h"
13 #include "intel_de.h"
14 #include "intel_display.h"
15 #include "intel_display_device.h"
16 #include "intel_display_params.h"
17 #include "intel_display_power.h"
18 #include "intel_display_reg_defs.h"
19 #include "intel_display_types.h"
20 #include "intel_fbc.h"
21 #include "intel_step.h"
22 
23 __diag_push();
24 __diag_ignore_all("-Woverride-init", "Allow field initialization overrides for display info");
25 
26 struct stepping_desc {
27 	const enum intel_step *map; /* revid to step map */
28 	size_t size; /* map size */
29 };
30 
31 #define STEP_INFO(_map)				\
32 	.step_info.map = _map,			\
33 	.step_info.size = ARRAY_SIZE(_map)
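/*
 * Note: the stepping arrays referenced via STEP_INFO() below are indexed by
 * PCI revision ID and map each known revision to a display stepping. Slots
 * that are left out default to STEP_NONE (0); get_pre_gmdid_step() treats
 * those as gaps and falls forward to the next known revision.
 */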
34 
35 struct subplatform_desc {
36 	struct intel_display_platforms platforms;
37 	const char *name;
38 	const u16 *pciidlist;
39 	struct stepping_desc step_info;
40 };
41 
42 #define SUBPLATFORM(_platform, _subplatform)				\
43 	.platforms._platform##_##_subplatform = 1,			\
44 	.name = #_subplatform
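/*
 * For illustration: SUBPLATFORM(haswell, ult) expands to
 * .platforms.haswell_ult = 1, .name = "ult" -- the platform and subplatform
 * names are pasted together to select the matching bitfield in
 * struct intel_display_platforms.
 */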
45 
46 /*
47  * Group subplatform alias that matches multiple subplatforms. For making ult
48  * cover both ult and ulx on HSW/BDW.
49  */
50 #define SUBPLATFORM_GROUP(_platform, _subplatform)			\
51 	.platforms._platform##_##_subplatform = 1
52 
53 struct platform_desc {
54 	struct intel_display_platforms platforms;
55 	const char *name;
56 	const struct subplatform_desc *subplatforms;
57 	const struct intel_display_device_info *info; /* NULL for GMD ID */
58 	struct stepping_desc step_info;
59 };
60 
61 #define PLATFORM(_platform)			 \
62 	.platforms._platform = 1,		 \
63 	.name = #_platform
64 
65 /*
66  * Group platform alias that matches multiple platforms. For aliases such as g4x
67  * that covers both g45 and gm45.
68  */
69 #define PLATFORM_GROUP(_platform)		\
70 	.platforms._platform = 1
71 
72 #define ID(id) (id)
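/*
 * ID() is the per-device callback handed to the INTEL_*_IDS() list macros
 * from <drm/intel/pciids.h>, making them expand to a plain comma-separated
 * list of device IDs for the u16 pciidlist arrays below (e.g. hsw_ult_ids[]).
 */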
73 
74 static const struct intel_display_device_info no_display = {};
75 
76 #define PIPE_A_OFFSET		0x70000
77 #define PIPE_B_OFFSET		0x71000
78 #define PIPE_C_OFFSET		0x72000
79 #define PIPE_D_OFFSET		0x73000
80 #define CHV_PIPE_C_OFFSET	0x74000
81 /*
82  * There's actually no pipe EDP. Some pipe registers have
83  * simply shifted from the pipe to the transcoder, while
84  * keeping their original offset. Thus we need PIPE_EDP_OFFSET
85  * to access such registers in transcoder EDP.
86  */
87 #define PIPE_EDP_OFFSET	0x7f000
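/*
 * HSW_PIPE_OFFSETS and ICL_DISPLAY below map TRANSCODER_EDP to this offset in
 * pipe_offsets[], so per-transcoder lookups of such "pipe" registers resolve
 * correctly for transcoder EDP.
 */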
88 
89 /* ICL DSI 0 and 1 */
90 #define PIPE_DSI0_OFFSET	0x7b000
91 #define PIPE_DSI1_OFFSET	0x7b800
92 
93 #define TRANSCODER_A_OFFSET 0x60000
94 #define TRANSCODER_B_OFFSET 0x61000
95 #define TRANSCODER_C_OFFSET 0x62000
96 #define CHV_TRANSCODER_C_OFFSET 0x63000
97 #define TRANSCODER_D_OFFSET 0x63000
98 #define TRANSCODER_EDP_OFFSET 0x6f000
99 #define TRANSCODER_DSI0_OFFSET	0x6b000
100 #define TRANSCODER_DSI1_OFFSET	0x6b800
101 
102 #define CURSOR_A_OFFSET 0x70080
103 #define CURSOR_B_OFFSET 0x700c0
104 #define CHV_CURSOR_C_OFFSET 0x700e0
105 #define IVB_CURSOR_B_OFFSET 0x71080
106 #define IVB_CURSOR_C_OFFSET 0x72080
107 #define TGL_CURSOR_D_OFFSET 0x73080
108 
109 #define I845_PIPE_OFFSETS \
110 	.pipe_offsets = { \
111 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
112 	}, \
113 	.trans_offsets = { \
114 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
115 	}
116 
117 #define I9XX_PIPE_OFFSETS \
118 	.pipe_offsets = { \
119 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
120 		[TRANSCODER_B] = PIPE_B_OFFSET, \
121 	}, \
122 	.trans_offsets = { \
123 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
124 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
125 	}
126 
127 #define IVB_PIPE_OFFSETS \
128 	.pipe_offsets = { \
129 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
130 		[TRANSCODER_B] = PIPE_B_OFFSET, \
131 		[TRANSCODER_C] = PIPE_C_OFFSET, \
132 	}, \
133 	.trans_offsets = { \
134 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
135 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
136 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
137 	}
138 
139 #define HSW_PIPE_OFFSETS \
140 	.pipe_offsets = { \
141 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
142 		[TRANSCODER_B] = PIPE_B_OFFSET, \
143 		[TRANSCODER_C] = PIPE_C_OFFSET, \
144 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
145 	}, \
146 	.trans_offsets = { \
147 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
148 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
149 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
150 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
151 	}
152 
153 #define CHV_PIPE_OFFSETS \
154 	.pipe_offsets = { \
155 		[TRANSCODER_A] = PIPE_A_OFFSET, \
156 		[TRANSCODER_B] = PIPE_B_OFFSET, \
157 		[TRANSCODER_C] = CHV_PIPE_C_OFFSET, \
158 	}, \
159 	.trans_offsets = { \
160 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
161 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
162 		[TRANSCODER_C] = CHV_TRANSCODER_C_OFFSET, \
163 	}
164 
165 #define I845_CURSOR_OFFSETS \
166 	.cursor_offsets = { \
167 		[PIPE_A] = CURSOR_A_OFFSET, \
168 	}
169 
170 #define I9XX_CURSOR_OFFSETS \
171 	.cursor_offsets = { \
172 		[PIPE_A] = CURSOR_A_OFFSET, \
173 		[PIPE_B] = CURSOR_B_OFFSET, \
174 	}
175 
176 #define CHV_CURSOR_OFFSETS \
177 	.cursor_offsets = { \
178 		[PIPE_A] = CURSOR_A_OFFSET, \
179 		[PIPE_B] = CURSOR_B_OFFSET, \
180 		[PIPE_C] = CHV_CURSOR_C_OFFSET, \
181 	}
182 
183 #define IVB_CURSOR_OFFSETS \
184 	.cursor_offsets = { \
185 		[PIPE_A] = CURSOR_A_OFFSET, \
186 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
187 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
188 	}
189 
190 #define TGL_CURSOR_OFFSETS \
191 	.cursor_offsets = { \
192 		[PIPE_A] = CURSOR_A_OFFSET, \
193 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
194 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
195 		[PIPE_D] = TGL_CURSOR_D_OFFSET, \
196 	}
197 
198 #define I845_COLORS \
199 	.color = { .gamma_lut_size = 256 }
200 #define I9XX_COLORS \
201 	.color = { .gamma_lut_size = 129, \
202 		   .gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
203 	}
204 #define ILK_COLORS \
205 	.color = { .gamma_lut_size = 1024 }
206 #define IVB_COLORS \
207 	.color = { .degamma_lut_size = 1024, .gamma_lut_size = 1024 }
208 #define CHV_COLORS \
209 	.color = { \
210 		.degamma_lut_size = 65, .gamma_lut_size = 257, \
211 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
212 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
213 	}
214 #define GLK_COLORS \
215 	.color = { \
216 		.degamma_lut_size = 33, .gamma_lut_size = 1024, \
217 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
218 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
219 	}
220 #define ICL_COLORS \
221 	.color = { \
222 		.degamma_lut_size = 33, .gamma_lut_size = 262145, \
223 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
224 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
225 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
226 	}
227 
228 #define I830_DISPLAY \
229 	.has_overlay = 1, \
230 	.cursor_needs_physical = 1, \
231 	.overlay_needs_physical = 1, \
232 	.has_gmch = 1, \
233 	I9XX_PIPE_OFFSETS, \
234 	I9XX_CURSOR_OFFSETS, \
235 	I9XX_COLORS, \
236 	\
237 	.__runtime_defaults.ip.ver = 2, \
238 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
239 	.__runtime_defaults.cpu_transcoder_mask = \
240 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
241 
242 #define I845_DISPLAY \
243 	.has_overlay = 1, \
244 	.overlay_needs_physical = 1, \
245 	.has_gmch = 1, \
246 	I845_PIPE_OFFSETS, \
247 	I845_CURSOR_OFFSETS, \
248 	I845_COLORS, \
249 	\
250 	.__runtime_defaults.ip.ver = 2, \
251 	.__runtime_defaults.pipe_mask = BIT(PIPE_A), \
252 	.__runtime_defaults.cpu_transcoder_mask = BIT(TRANSCODER_A)
253 
254 static const struct platform_desc i830_desc = {
255 	PLATFORM(i830),
256 	PLATFORM_GROUP(mobile),
257 	.info = &(const struct intel_display_device_info) {
258 		I830_DISPLAY,
259 
260 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C), /* DVO A/B/C */
261 	},
262 };
263 
264 static const struct platform_desc i845_desc = {
265 	PLATFORM(i845g),
266 	.info = &(const struct intel_display_device_info) {
267 		I845_DISPLAY,
268 
269 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
270 	},
271 };
272 
273 static const struct platform_desc i85x_desc = {
274 	PLATFORM(i85x),
275 	PLATFORM_GROUP(mobile),
276 	.info = &(const struct intel_display_device_info) {
277 		I830_DISPLAY,
278 
279 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
280 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
281 	},
282 };
283 
284 static const struct platform_desc i865g_desc = {
285 	PLATFORM(i865g),
286 	.info = &(const struct intel_display_device_info) {
287 		I845_DISPLAY,
288 
289 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
290 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
291 	},
292 };
293 
294 #define GEN3_DISPLAY   \
295 	.has_gmch = 1, \
296 	.has_overlay = 1, \
297 	I9XX_PIPE_OFFSETS, \
298 	I9XX_CURSOR_OFFSETS, \
299 	\
300 	.__runtime_defaults.ip.ver = 3, \
301 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
302 	.__runtime_defaults.cpu_transcoder_mask = \
303 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
304 	.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) /* SDVO B/C */
305 
306 static const struct platform_desc i915g_desc = {
307 	PLATFORM(i915g),
308 	.info = &(const struct intel_display_device_info) {
309 		GEN3_DISPLAY,
310 		I845_COLORS,
311 		.cursor_needs_physical = 1,
312 		.overlay_needs_physical = 1,
313 	},
314 };
315 
316 static const struct platform_desc i915gm_desc = {
317 	PLATFORM(i915gm),
318 	PLATFORM_GROUP(mobile),
319 	.info = &(const struct intel_display_device_info) {
320 		GEN3_DISPLAY,
321 		I9XX_COLORS,
322 		.cursor_needs_physical = 1,
323 		.overlay_needs_physical = 1,
324 		.supports_tv = 1,
325 
326 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
327 	},
328 };
329 
330 static const struct platform_desc i945g_desc = {
331 	PLATFORM(i945g),
332 	.info = &(const struct intel_display_device_info) {
333 		GEN3_DISPLAY,
334 		I845_COLORS,
335 		.has_hotplug = 1,
336 		.cursor_needs_physical = 1,
337 		.overlay_needs_physical = 1,
338 	},
339 };
340 
341 static const struct platform_desc i945gm_desc = {
342 	PLATFORM(i915gm),
343 	PLATFORM_GROUP(mobile),
344 	.info = &(const struct intel_display_device_info) {
345 		GEN3_DISPLAY,
346 		I9XX_COLORS,
347 		.has_hotplug = 1,
348 		.cursor_needs_physical = 1,
349 		.overlay_needs_physical = 1,
350 		.supports_tv = 1,
351 
352 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
353 	},
354 };
355 
356 static const struct platform_desc g33_desc = {
357 	PLATFORM(g33),
358 	.info = &(const struct intel_display_device_info) {
359 		GEN3_DISPLAY,
360 		I845_COLORS,
361 		.has_hotplug = 1,
362 	},
363 };
364 
365 static const struct intel_display_device_info pnv_display = {
366 	GEN3_DISPLAY,
367 	I9XX_COLORS,
368 	.has_hotplug = 1,
369 };
370 
371 static const struct platform_desc pnv_g_desc = {
372 	PLATFORM(pineview),
373 	.info = &pnv_display,
374 };
375 
376 static const struct platform_desc pnv_m_desc = {
377 	PLATFORM(pineview),
378 	PLATFORM_GROUP(mobile),
379 	.info = &pnv_display,
380 };
381 
382 #define GEN4_DISPLAY \
383 	.has_hotplug = 1, \
384 	.has_gmch = 1, \
385 	I9XX_PIPE_OFFSETS, \
386 	I9XX_CURSOR_OFFSETS, \
387 	I9XX_COLORS, \
388 	\
389 	.__runtime_defaults.ip.ver = 4, \
390 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
391 	.__runtime_defaults.cpu_transcoder_mask = \
392 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
393 
394 static const struct platform_desc i965g_desc = {
395 	PLATFORM(i965g),
396 	.info = &(const struct intel_display_device_info) {
397 		GEN4_DISPLAY,
398 		.has_overlay = 1,
399 
400 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
401 	},
402 };
403 
404 static const struct platform_desc i965gm_desc = {
405 	PLATFORM(i965gm),
406 	PLATFORM_GROUP(mobile),
407 	.info = &(const struct intel_display_device_info) {
408 		GEN4_DISPLAY,
409 		.has_overlay = 1,
410 		.supports_tv = 1,
411 
412 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
413 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
414 	},
415 };
416 
417 static const struct platform_desc g45_desc = {
418 	PLATFORM(g45),
419 	PLATFORM_GROUP(g4x),
420 	.info = &(const struct intel_display_device_info) {
421 		GEN4_DISPLAY,
422 
423 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
424 	},
425 };
426 
427 static const struct platform_desc gm45_desc = {
428 	PLATFORM(gm45),
429 	PLATFORM_GROUP(g4x),
430 	PLATFORM_GROUP(mobile),
431 	.info = &(const struct intel_display_device_info) {
432 		GEN4_DISPLAY,
433 		.supports_tv = 1,
434 
435 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
436 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
437 	},
438 };
439 
440 #define ILK_DISPLAY \
441 	.has_hotplug = 1, \
442 	I9XX_PIPE_OFFSETS, \
443 	I9XX_CURSOR_OFFSETS, \
444 	ILK_COLORS, \
445 	\
446 	.__runtime_defaults.ip.ver = 5, \
447 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
448 	.__runtime_defaults.cpu_transcoder_mask = \
449 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
450 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
451 
452 static const struct platform_desc ilk_d_desc = {
453 	PLATFORM(ironlake),
454 	.info = &(const struct intel_display_device_info) {
455 		ILK_DISPLAY,
456 	},
457 };
458 
459 static const struct platform_desc ilk_m_desc = {
460 	PLATFORM(ironlake),
461 	PLATFORM_GROUP(mobile),
462 	.info = &(const struct intel_display_device_info) {
463 		ILK_DISPLAY,
464 
465 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
466 	},
467 };
468 
469 static const struct intel_display_device_info snb_display = {
470 	.has_hotplug = 1,
471 	I9XX_PIPE_OFFSETS,
472 	I9XX_CURSOR_OFFSETS,
473 	ILK_COLORS,
474 
475 	.__runtime_defaults.ip.ver = 6,
476 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
477 	.__runtime_defaults.cpu_transcoder_mask =
478 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
479 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
480 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
481 };
482 
483 static const struct platform_desc snb_d_desc = {
484 	PLATFORM(sandybridge),
485 	.info = &snb_display,
486 };
487 
488 static const struct platform_desc snb_m_desc = {
489 	PLATFORM(sandybridge),
490 	PLATFORM_GROUP(mobile),
491 	.info = &snb_display,
492 };
493 
494 static const struct intel_display_device_info ivb_display = {
495 	.has_hotplug = 1,
496 	IVB_PIPE_OFFSETS,
497 	IVB_CURSOR_OFFSETS,
498 	IVB_COLORS,
499 
500 	.__runtime_defaults.ip.ver = 7,
501 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
502 	.__runtime_defaults.cpu_transcoder_mask =
503 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
504 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
505 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
506 };
507 
508 static const struct platform_desc ivb_d_desc = {
509 	PLATFORM(ivybridge),
510 	.info = &ivb_display,
511 };
512 
513 static const struct platform_desc ivb_m_desc = {
514 	PLATFORM(ivybridge),
515 	PLATFORM_GROUP(mobile),
516 	.info = &ivb_display,
517 };
518 
519 static const struct platform_desc vlv_desc = {
520 	PLATFORM(valleyview),
521 	.info = &(const struct intel_display_device_info) {
522 		.has_gmch = 1,
523 		.has_hotplug = 1,
524 		.mmio_offset = VLV_DISPLAY_BASE,
525 		I9XX_PIPE_OFFSETS,
526 		I9XX_CURSOR_OFFSETS,
527 		I9XX_COLORS,
528 
529 		.__runtime_defaults.ip.ver = 7,
530 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
531 		.__runtime_defaults.cpu_transcoder_mask =
532 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
533 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* HDMI/DP B/C */
534 	},
535 };
536 
537 static const u16 hsw_ult_ids[] = {
538 	INTEL_HSW_ULT_GT1_IDS(ID),
539 	INTEL_HSW_ULT_GT2_IDS(ID),
540 	INTEL_HSW_ULT_GT3_IDS(ID),
541 	0
542 };
543 
544 static const u16 hsw_ulx_ids[] = {
545 	INTEL_HSW_ULX_GT1_IDS(ID),
546 	INTEL_HSW_ULX_GT2_IDS(ID),
547 	0
548 };
549 
550 static const struct platform_desc hsw_desc = {
551 	PLATFORM(haswell),
552 	.subplatforms = (const struct subplatform_desc[]) {
553 		/* Special case: Use ult both as group and subplatform. */
554 		{
555 			SUBPLATFORM(haswell, ult),
556 			SUBPLATFORM_GROUP(haswell, ult),
557 			.pciidlist = hsw_ult_ids,
558 		},
559 		{
560 			SUBPLATFORM(haswell, ulx),
561 			SUBPLATFORM_GROUP(haswell, ult),
562 			.pciidlist = hsw_ulx_ids,
563 		},
564 		{},
565 	},
566 	.info = &(const struct intel_display_device_info) {
567 		.has_ddi = 1,
568 		.has_dp_mst = 1,
569 		.has_fpga_dbg = 1,
570 		.has_hotplug = 1,
571 		.has_psr = 1,
572 		.has_psr_hw_tracking = 1,
573 		HSW_PIPE_OFFSETS,
574 		IVB_CURSOR_OFFSETS,
575 		IVB_COLORS,
576 
577 		.__runtime_defaults.ip.ver = 7,
578 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
579 		.__runtime_defaults.cpu_transcoder_mask =
580 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
581 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
582 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
583 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
584 	},
585 };
586 
587 static const u16 bdw_ult_ids[] = {
588 	INTEL_BDW_ULT_GT1_IDS(ID),
589 	INTEL_BDW_ULT_GT2_IDS(ID),
590 	INTEL_BDW_ULT_GT3_IDS(ID),
591 	INTEL_BDW_ULT_RSVD_IDS(ID),
592 	0
593 };
594 
595 static const u16 bdw_ulx_ids[] = {
596 	INTEL_BDW_ULX_GT1_IDS(ID),
597 	INTEL_BDW_ULX_GT2_IDS(ID),
598 	INTEL_BDW_ULX_GT3_IDS(ID),
599 	INTEL_BDW_ULX_RSVD_IDS(ID),
600 	0
601 };
602 
603 static const struct platform_desc bdw_desc = {
604 	PLATFORM(broadwell),
605 	.subplatforms = (const struct subplatform_desc[]) {
606 		/* Special case: Use ult both as group and subplatform. */
607 		{
608 			SUBPLATFORM(broadwell, ult),
609 			SUBPLATFORM_GROUP(broadwell, ult),
610 			.pciidlist = bdw_ult_ids,
611 		},
612 		{
613 			SUBPLATFORM(broadwell, ulx),
614 			SUBPLATFORM_GROUP(broadwell, ult),
615 			.pciidlist = bdw_ulx_ids,
616 		},
617 		{},
618 	},
619 	.info = &(const struct intel_display_device_info) {
620 		.has_ddi = 1,
621 		.has_dp_mst = 1,
622 		.has_fpga_dbg = 1,
623 		.has_hotplug = 1,
624 		.has_psr = 1,
625 		.has_psr_hw_tracking = 1,
626 		HSW_PIPE_OFFSETS,
627 		IVB_CURSOR_OFFSETS,
628 		IVB_COLORS,
629 
630 		.__runtime_defaults.ip.ver = 8,
631 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
632 		.__runtime_defaults.cpu_transcoder_mask =
633 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
634 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
635 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
636 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
637 	},
638 };
639 
640 static const struct platform_desc chv_desc = {
641 	PLATFORM(cherryview),
642 	.info = &(const struct intel_display_device_info) {
643 		.has_hotplug = 1,
644 		.has_gmch = 1,
645 		.mmio_offset = VLV_DISPLAY_BASE,
646 		CHV_PIPE_OFFSETS,
647 		CHV_CURSOR_OFFSETS,
648 		CHV_COLORS,
649 
650 		.__runtime_defaults.ip.ver = 8,
651 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
652 		.__runtime_defaults.cpu_transcoder_mask =
653 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
654 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* HDMI/DP B/C/D */
655 	},
656 };
657 
658 static const struct intel_display_device_info skl_display = {
659 	.dbuf.size = 896 - 4, /* 4 blocks for bypass path allocation */
660 	.dbuf.slice_mask = BIT(DBUF_S1),
661 	.has_ddi = 1,
662 	.has_dp_mst = 1,
663 	.has_fpga_dbg = 1,
664 	.has_hotplug = 1,
665 	.has_ipc = 1,
666 	.has_psr = 1,
667 	.has_psr_hw_tracking = 1,
668 	HSW_PIPE_OFFSETS,
669 	IVB_CURSOR_OFFSETS,
670 	IVB_COLORS,
671 
672 	.__runtime_defaults.ip.ver = 9,
673 	.__runtime_defaults.has_dmc = 1,
674 	.__runtime_defaults.has_hdcp = 1,
675 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
676 	.__runtime_defaults.cpu_transcoder_mask =
677 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
678 	BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
679 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
680 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
681 };
682 
683 static const u16 skl_ult_ids[] = {
684 	INTEL_SKL_ULT_GT1_IDS(ID),
685 	INTEL_SKL_ULT_GT2_IDS(ID),
686 	INTEL_SKL_ULT_GT3_IDS(ID),
687 	0
688 };
689 
690 static const u16 skl_ulx_ids[] = {
691 	INTEL_SKL_ULX_GT1_IDS(ID),
692 	INTEL_SKL_ULX_GT2_IDS(ID),
693 	0
694 };
695 
696 static const enum intel_step skl_steppings[] = {
697 	[0x6] = STEP_G0,
698 	[0x7] = STEP_H0,
699 	[0x9] = STEP_J0,
700 	[0xA] = STEP_I1,
701 };
702 
703 static const struct platform_desc skl_desc = {
704 	PLATFORM(skylake),
705 	.subplatforms = (const struct subplatform_desc[]) {
706 		{
707 			SUBPLATFORM(skylake, ult),
708 			.pciidlist = skl_ult_ids,
709 		},
710 		{
711 			SUBPLATFORM(skylake, ulx),
712 			.pciidlist = skl_ulx_ids,
713 		},
714 		{},
715 	},
716 	.info = &skl_display,
717 	STEP_INFO(skl_steppings),
718 };
719 
720 static const u16 kbl_ult_ids[] = {
721 	INTEL_KBL_ULT_GT1_IDS(ID),
722 	INTEL_KBL_ULT_GT2_IDS(ID),
723 	INTEL_KBL_ULT_GT3_IDS(ID),
724 	0
725 };
726 
727 static const u16 kbl_ulx_ids[] = {
728 	INTEL_KBL_ULX_GT1_IDS(ID),
729 	INTEL_KBL_ULX_GT2_IDS(ID),
730 	INTEL_AML_KBL_GT2_IDS(ID),
731 	0
732 };
733 
734 static const enum intel_step kbl_steppings[] = {
735 	[1] = STEP_B0,
736 	[2] = STEP_B0,
737 	[3] = STEP_B0,
738 	[4] = STEP_C0,
739 	[5] = STEP_B1,
740 	[6] = STEP_B1,
741 	[7] = STEP_C0,
742 };
743 
744 static const struct platform_desc kbl_desc = {
745 	PLATFORM(kabylake),
746 	.subplatforms = (const struct subplatform_desc[]) {
747 		{
748 			SUBPLATFORM(kabylake, ult),
749 			.pciidlist = kbl_ult_ids,
750 		},
751 		{
752 			SUBPLATFORM(kabylake, ulx),
753 			.pciidlist = kbl_ulx_ids,
754 		},
755 		{},
756 	},
757 	.info = &skl_display,
758 	STEP_INFO(kbl_steppings),
759 };
760 
761 static const u16 cfl_ult_ids[] = {
762 	INTEL_CFL_U_GT2_IDS(ID),
763 	INTEL_CFL_U_GT3_IDS(ID),
764 	INTEL_WHL_U_GT1_IDS(ID),
765 	INTEL_WHL_U_GT2_IDS(ID),
766 	INTEL_WHL_U_GT3_IDS(ID),
767 	0
768 };
769 
770 static const u16 cfl_ulx_ids[] = {
771 	INTEL_AML_CFL_GT2_IDS(ID),
772 	0
773 };
774 
775 static const struct platform_desc cfl_desc = {
776 	PLATFORM(coffeelake),
777 	.subplatforms = (const struct subplatform_desc[]) {
778 		{
779 			SUBPLATFORM(coffeelake, ult),
780 			.pciidlist = cfl_ult_ids,
781 		},
782 		{
783 			SUBPLATFORM(coffeelake, ulx),
784 			.pciidlist = cfl_ulx_ids,
785 		},
786 		{},
787 	},
788 	.info = &skl_display,
789 };
790 
791 static const u16 cml_ult_ids[] = {
792 	INTEL_CML_U_GT1_IDS(ID),
793 	INTEL_CML_U_GT2_IDS(ID),
794 	0
795 };
796 
797 static const struct platform_desc cml_desc = {
798 	PLATFORM(cometlake),
799 	.subplatforms = (const struct subplatform_desc[]) {
800 		{
801 			SUBPLATFORM(cometlake, ult),
802 			.pciidlist = cml_ult_ids,
803 		},
804 		{},
805 	},
806 	.info = &skl_display,
807 };
808 
809 #define GEN9_LP_DISPLAY			 \
810 	.dbuf.slice_mask = BIT(DBUF_S1), \
811 	.has_dp_mst = 1, \
812 	.has_ddi = 1, \
813 	.has_fpga_dbg = 1, \
814 	.has_hotplug = 1, \
815 	.has_ipc = 1, \
816 	.has_psr = 1, \
817 	.has_psr_hw_tracking = 1, \
818 	HSW_PIPE_OFFSETS, \
819 	IVB_CURSOR_OFFSETS, \
820 	IVB_COLORS, \
821 	\
822 	.__runtime_defaults.has_dmc = 1, \
823 	.__runtime_defaults.has_hdcp = 1, \
824 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A), \
825 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
826 	.__runtime_defaults.cpu_transcoder_mask = \
827 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
828 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
829 		BIT(TRANSCODER_DSI_A) | BIT(TRANSCODER_DSI_C), \
830 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C)
831 
832 static const enum intel_step bxt_steppings[] = {
833 	[0xA] = STEP_C0,
834 	[0xB] = STEP_C0,
835 	[0xC] = STEP_D0,
836 	[0xD] = STEP_E0,
837 };
838 
839 static const struct platform_desc bxt_desc = {
840 	PLATFORM(broxton),
841 	.info = &(const struct intel_display_device_info) {
842 		GEN9_LP_DISPLAY,
843 		.dbuf.size = 512 - 4, /* 4 blocks for bypass path allocation */
844 
845 		.__runtime_defaults.ip.ver = 9,
846 	},
847 	STEP_INFO(bxt_steppings),
848 };
849 
850 static const enum intel_step glk_steppings[] = {
851 	[3] = STEP_B0,
852 };
853 
854 static const struct platform_desc glk_desc = {
855 	PLATFORM(geminilake),
856 	.info = &(const struct intel_display_device_info) {
857 		GEN9_LP_DISPLAY,
858 		.dbuf.size = 1024 - 4, /* 4 blocks for bypass path allocation */
859 		GLK_COLORS,
860 
861 		.__runtime_defaults.ip.ver = 10,
862 	},
863 	STEP_INFO(glk_steppings),
864 };
865 
866 #define ICL_DISPLAY \
867 	.abox_mask = BIT(0), \
868 	.dbuf.size = 2048, \
869 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
870 	.has_ddi = 1, \
871 	.has_dp_mst = 1, \
872 	.has_fpga_dbg = 1, \
873 	.has_hotplug = 1, \
874 	.has_ipc = 1, \
875 	.has_psr = 1, \
876 	.has_psr_hw_tracking = 1, \
877 	.pipe_offsets = { \
878 		[TRANSCODER_A] = PIPE_A_OFFSET, \
879 		[TRANSCODER_B] = PIPE_B_OFFSET, \
880 		[TRANSCODER_C] = PIPE_C_OFFSET, \
881 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
882 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
883 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
884 	}, \
885 	.trans_offsets = { \
886 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
887 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
888 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
889 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
890 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
891 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
892 	}, \
893 	IVB_CURSOR_OFFSETS, \
894 	ICL_COLORS, \
895 	\
896 	.__runtime_defaults.ip.ver = 11, \
897 	.__runtime_defaults.has_dmc = 1, \
898 	.__runtime_defaults.has_dsc = 1, \
899 	.__runtime_defaults.has_hdcp = 1, \
900 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
901 	.__runtime_defaults.cpu_transcoder_mask = \
902 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
903 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
904 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
905 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
906 
907 static const u16 icl_port_f_ids[] = {
908 	INTEL_ICL_PORT_F_IDS(ID),
909 	0
910 };
911 
912 static const enum intel_step icl_steppings[] = {
913 	[7] = STEP_D0,
914 };
915 
916 static const struct platform_desc icl_desc = {
917 	PLATFORM(icelake),
918 	.subplatforms = (const struct subplatform_desc[]) {
919 		{
920 			SUBPLATFORM(icelake, port_f),
921 			.pciidlist = icl_port_f_ids,
922 		},
923 		{},
924 	},
925 	.info = &(const struct intel_display_device_info) {
926 		ICL_DISPLAY,
927 
928 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
929 	},
930 	STEP_INFO(icl_steppings),
931 };
932 
933 static const struct intel_display_device_info jsl_ehl_display = {
934 	ICL_DISPLAY,
935 
936 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D),
937 };
938 
939 static const enum intel_step jsl_ehl_steppings[] = {
940 	[0] = STEP_A0,
941 	[1] = STEP_B0,
942 };
943 
944 static const struct platform_desc jsl_desc = {
945 	PLATFORM(jasperlake),
946 	.info = &jsl_ehl_display,
947 	STEP_INFO(jsl_ehl_steppings),
948 };
949 
950 static const struct platform_desc ehl_desc = {
951 	PLATFORM(elkhartlake),
952 	.info = &jsl_ehl_display,
953 	STEP_INFO(jsl_ehl_steppings),
954 };
955 
956 #define XE_D_DISPLAY \
957 	.abox_mask = GENMASK(2, 1), \
958 	.dbuf.size = 2048, \
959 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
960 	.has_ddi = 1, \
961 	.has_dp_mst = 1, \
962 	.has_dsb = 1, \
963 	.has_fpga_dbg = 1, \
964 	.has_hotplug = 1, \
965 	.has_ipc = 1, \
966 	.has_psr = 1, \
967 	.has_psr_hw_tracking = 1, \
968 	.pipe_offsets = { \
969 		[TRANSCODER_A] = PIPE_A_OFFSET, \
970 		[TRANSCODER_B] = PIPE_B_OFFSET, \
971 		[TRANSCODER_C] = PIPE_C_OFFSET, \
972 		[TRANSCODER_D] = PIPE_D_OFFSET, \
973 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
974 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
975 	}, \
976 	.trans_offsets = { \
977 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
978 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
979 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
980 		[TRANSCODER_D] = TRANSCODER_D_OFFSET, \
981 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
982 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
983 	}, \
984 	TGL_CURSOR_OFFSETS, \
985 	ICL_COLORS, \
986 	\
987 	.__runtime_defaults.ip.ver = 12, \
988 	.__runtime_defaults.has_dmc = 1, \
989 	.__runtime_defaults.has_dsc = 1, \
990 	.__runtime_defaults.has_hdcp = 1, \
991 	.__runtime_defaults.pipe_mask = \
992 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D), \
993 	.__runtime_defaults.cpu_transcoder_mask = \
994 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
995 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) | \
996 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
997 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
998 
999 static const u16 tgl_uy_ids[] = {
1000 	INTEL_TGL_GT2_IDS(ID),
1001 	0
1002 };
1003 
1004 static const enum intel_step tgl_steppings[] = {
1005 	[0] = STEP_B0,
1006 	[1] = STEP_D0,
1007 };
1008 
1009 static const enum intel_step tgl_uy_steppings[] = {
1010 	[0] = STEP_A0,
1011 	[1] = STEP_C0,
1012 	[2] = STEP_C0,
1013 	[3] = STEP_D0,
1014 };
1015 
1016 static const struct platform_desc tgl_desc = {
1017 	PLATFORM(tigerlake),
1018 	.subplatforms = (const struct subplatform_desc[]) {
1019 		{
1020 			SUBPLATFORM(tigerlake, uy),
1021 			.pciidlist = tgl_uy_ids,
1022 			STEP_INFO(tgl_uy_steppings),
1023 		},
1024 		{},
1025 	},
1026 	.info = &(const struct intel_display_device_info) {
1027 		XE_D_DISPLAY,
1028 
1029 		/*
1030 		 * FIXME DDI C/combo PHY C missing due to combo PHY
1031 		 * code making a mess on SKUs where the PHY is missing.
1032 		 */
1033 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1034 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4) | BIT(PORT_TC5) | BIT(PORT_TC6),
1035 	},
1036 	STEP_INFO(tgl_steppings),
1037 };
1038 
1039 static const enum intel_step dg1_steppings[] = {
1040 	[0] = STEP_A0,
1041 	[1] = STEP_B0,
1042 };
1043 
1044 static const struct platform_desc dg1_desc = {
1045 	PLATFORM(dg1),
1046 	PLATFORM_GROUP(dgfx),
1047 	.info = &(const struct intel_display_device_info) {
1048 		XE_D_DISPLAY,
1049 
1050 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1051 		BIT(PORT_TC1) | BIT(PORT_TC2),
1052 	},
1053 	STEP_INFO(dg1_steppings),
1054 };
1055 
1056 static const enum intel_step rkl_steppings[] = {
1057 	[0] = STEP_A0,
1058 	[1] = STEP_B0,
1059 	[4] = STEP_C0,
1060 };
1061 
1062 static const struct platform_desc rkl_desc = {
1063 	PLATFORM(rocketlake),
1064 	.info = &(const struct intel_display_device_info) {
1065 		XE_D_DISPLAY,
1066 		.abox_mask = BIT(0),
1067 		.has_hti = 1,
1068 		.has_psr_hw_tracking = 0,
1069 
1070 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
1071 		.__runtime_defaults.cpu_transcoder_mask =
1072 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
1073 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1074 		BIT(PORT_TC1) | BIT(PORT_TC2),
1075 	},
1076 	STEP_INFO(rkl_steppings),
1077 };
1078 
1079 static const u16 adls_rpls_ids[] = {
1080 	INTEL_RPLS_IDS(ID),
1081 	0
1082 };
1083 
1084 static const enum intel_step adl_s_steppings[] = {
1085 	[0x0] = STEP_A0,
1086 	[0x1] = STEP_A2,
1087 	[0x4] = STEP_B0,
1088 	[0x8] = STEP_B0,
1089 	[0xC] = STEP_C0,
1090 };
1091 
1092 static const enum intel_step adl_s_rpl_s_steppings[] = {
1093 	[0x4] = STEP_D0,
1094 	[0xC] = STEP_C0,
1095 };
1096 
1097 static const struct platform_desc adl_s_desc = {
1098 	PLATFORM(alderlake_s),
1099 	.subplatforms = (const struct subplatform_desc[]) {
1100 		{
1101 			SUBPLATFORM(alderlake_s, raptorlake_s),
1102 			.pciidlist = adls_rpls_ids,
1103 			STEP_INFO(adl_s_rpl_s_steppings),
1104 		},
1105 		{},
1106 	},
1107 	.info = &(const struct intel_display_device_info) {
1108 		XE_D_DISPLAY,
1109 		.has_hti = 1,
1110 		.has_psr_hw_tracking = 0,
1111 
1112 		.__runtime_defaults.port_mask = BIT(PORT_A) |
1113 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1114 	},
1115 	STEP_INFO(adl_s_steppings),
1116 };
1117 
1118 #define XE_LPD_FEATURES \
1119 	.abox_mask = GENMASK(1, 0),						\
1120 	.color = {								\
1121 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1122 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1123 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1124 	},									\
1125 	.dbuf.size = 4096,							\
1126 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1127 		BIT(DBUF_S4),							\
1128 	.has_ddi = 1,								\
1129 	.has_dp_mst = 1,							\
1130 	.has_dsb = 1,								\
1131 	.has_fpga_dbg = 1,							\
1132 	.has_hotplug = 1,							\
1133 	.has_ipc = 1,								\
1134 	.has_psr = 1,								\
1135 	.pipe_offsets = {							\
1136 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1137 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1138 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1139 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1140 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET,				\
1141 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET,				\
1142 	},									\
1143 	.trans_offsets = {							\
1144 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1145 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1146 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1147 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1148 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET,			\
1149 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET,			\
1150 	},									\
1151 	TGL_CURSOR_OFFSETS,							\
1152 										\
1153 	.__runtime_defaults.ip.ver = 13,					\
1154 	.__runtime_defaults.has_dmc = 1,					\
1155 	.__runtime_defaults.has_dsc = 1,					\
1156 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),			\
1157 	.__runtime_defaults.has_hdcp = 1,					\
1158 	.__runtime_defaults.pipe_mask =						\
1159 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D)
1160 
1161 static const struct intel_display_device_info xe_lpd_display = {
1162 	XE_LPD_FEATURES,
1163 	.has_cdclk_crawl = 1,
1164 	.has_psr_hw_tracking = 0,
1165 
1166 	.__runtime_defaults.cpu_transcoder_mask =
1167 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1168 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) |
1169 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1),
1170 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1171 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1172 };
1173 
1174 static const u16 adlp_adln_ids[] = {
1175 	INTEL_ADLN_IDS(ID),
1176 	0
1177 };
1178 
1179 static const u16 adlp_rplu_ids[] = {
1180 	INTEL_RPLU_IDS(ID),
1181 	0
1182 };
1183 
1184 static const u16 adlp_rplp_ids[] = {
1185 	INTEL_RPLP_IDS(ID),
1186 	0
1187 };
1188 
1189 static const enum intel_step adl_p_steppings[] = {
1190 	[0x0] = STEP_A0,
1191 	[0x4] = STEP_B0,
1192 	[0x8] = STEP_C0,
1193 	[0xC] = STEP_D0,
1194 };
1195 
1196 static const enum intel_step adl_p_adl_n_steppings[] = {
1197 	[0x0] = STEP_D0,
1198 };
1199 
1200 static const enum intel_step adl_p_rpl_pu_steppings[] = {
1201 	[0x4] = STEP_E0,
1202 };
1203 
1204 static const struct platform_desc adl_p_desc = {
1205 	PLATFORM(alderlake_p),
1206 	.subplatforms = (const struct subplatform_desc[]) {
1207 		{
1208 			SUBPLATFORM(alderlake_p, alderlake_n),
1209 			.pciidlist = adlp_adln_ids,
1210 			STEP_INFO(adl_p_adl_n_steppings),
1211 		},
1212 		{
1213 			SUBPLATFORM(alderlake_p, raptorlake_p),
1214 			.pciidlist = adlp_rplp_ids,
1215 			STEP_INFO(adl_p_rpl_pu_steppings),
1216 		},
1217 		{
1218 			SUBPLATFORM(alderlake_p, raptorlake_u),
1219 			.pciidlist = adlp_rplu_ids,
1220 			STEP_INFO(adl_p_rpl_pu_steppings),
1221 		},
1222 		{},
1223 	},
1224 	.info = &xe_lpd_display,
1225 	STEP_INFO(adl_p_steppings),
1226 };
1227 
1228 static const struct intel_display_device_info xe_hpd_display = {
1229 	XE_LPD_FEATURES,
1230 	.has_cdclk_squash = 1,
1231 
1232 	.__runtime_defaults.cpu_transcoder_mask =
1233 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1234 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),
1235 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D_XELPD) |
1236 		BIT(PORT_TC1),
1237 };
1238 
1239 static const u16 dg2_g10_ids[] = {
1240 	INTEL_DG2_G10_IDS(ID),
1241 	0
1242 };
1243 
1244 static const u16 dg2_g11_ids[] = {
1245 	INTEL_DG2_G11_IDS(ID),
1246 	0
1247 };
1248 
1249 static const u16 dg2_g12_ids[] = {
1250 	INTEL_DG2_G12_IDS(ID),
1251 	0
1252 };
1253 
1254 static const enum intel_step dg2_g10_steppings[] = {
1255 	[0x0] = STEP_A0,
1256 	[0x1] = STEP_A0,
1257 	[0x4] = STEP_B0,
1258 	[0x8] = STEP_C0,
1259 };
1260 
1261 static const enum intel_step dg2_g11_steppings[] = {
1262 	[0x0] = STEP_B0,
1263 	[0x4] = STEP_C0,
1264 	[0x5] = STEP_C0,
1265 };
1266 
1267 static const enum intel_step dg2_g12_steppings[] = {
1268 	[0x0] = STEP_C0,
1269 	[0x1] = STEP_C0,
1270 };
1271 
1272 static const struct platform_desc dg2_desc = {
1273 	PLATFORM(dg2),
1274 	PLATFORM_GROUP(dgfx),
1275 	.subplatforms = (const struct subplatform_desc[]) {
1276 		{
1277 			SUBPLATFORM(dg2, g10),
1278 			.pciidlist = dg2_g10_ids,
1279 			STEP_INFO(dg2_g10_steppings),
1280 		},
1281 		{
1282 			SUBPLATFORM(dg2, g11),
1283 			.pciidlist = dg2_g11_ids,
1284 			STEP_INFO(dg2_g11_steppings),
1285 		},
1286 		{
1287 			SUBPLATFORM(dg2, g12),
1288 			.pciidlist = dg2_g12_ids,
1289 			STEP_INFO(dg2_g12_steppings),
1290 		},
1291 		{},
1292 	},
1293 	.info = &xe_hpd_display,
1294 };
1295 
1296 #define XE_LPDP_FEATURES							\
1297 	.abox_mask = GENMASK(1, 0),						\
1298 	.color = {								\
1299 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1300 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1301 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1302 	},									\
1303 	.dbuf.size = 4096,							\
1304 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1305 		BIT(DBUF_S4),							\
1306 	.has_cdclk_crawl = 1,							\
1307 	.has_cdclk_squash = 1,							\
1308 	.has_ddi = 1,								\
1309 	.has_dp_mst = 1,							\
1310 	.has_dsb = 1,								\
1311 	.has_fpga_dbg = 1,							\
1312 	.has_hotplug = 1,							\
1313 	.has_ipc = 1,								\
1314 	.has_psr = 1,								\
1315 	.pipe_offsets = {							\
1316 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1317 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1318 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1319 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1320 	},									\
1321 	.trans_offsets = {							\
1322 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1323 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1324 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1325 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1326 	},									\
1327 	TGL_CURSOR_OFFSETS,							\
1328 										\
1329 	.__runtime_defaults.cpu_transcoder_mask =				\
1330 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |				\
1331 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),				\
1332 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B),	\
1333 	.__runtime_defaults.has_dmc = 1,					\
1334 	.__runtime_defaults.has_dsc = 1,					\
1335 	.__runtime_defaults.has_hdcp = 1,					\
1336 	.__runtime_defaults.pipe_mask =						\
1337 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D),		\
1338 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |		\
1339 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4)
1340 
1341 static const struct intel_display_device_info xe_lpdp_display = {
1342 	XE_LPDP_FEATURES,
1343 };
1344 
1345 static const struct intel_display_device_info xe2_lpd_display = {
1346 	XE_LPDP_FEATURES,
1347 
1348 	.__runtime_defaults.fbc_mask =
1349 		BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B) |
1350 		BIT(INTEL_FBC_C) | BIT(INTEL_FBC_D),
1351 	.__runtime_defaults.has_dbuf_overlap_detection = true,
1352 };
1353 
1354 static const struct intel_display_device_info xe2_hpd_display = {
1355 	XE_LPDP_FEATURES,
1356 	.__runtime_defaults.port_mask = BIT(PORT_A) |
1357 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1358 };
1359 
1360 /*
1361  * Do not initialize the .info member of the platform desc for GMD ID based
1362  * platforms. Their display will be probed automatically based on the IP version
1363  * reported by the hardware.
1364  */
1365 static const struct platform_desc mtl_desc = {
1366 	PLATFORM(meteorlake),
1367 };
1368 
1369 static const struct platform_desc lnl_desc = {
1370 	PLATFORM(lunarlake),
1371 };
1372 
1373 static const struct platform_desc bmg_desc = {
1374 	PLATFORM(battlemage),
1375 	PLATFORM_GROUP(dgfx),
1376 };
1377 
1378 static const struct platform_desc ptl_desc = {
1379 	PLATFORM(pantherlake),
1380 };
1381 
1382 __diag_pop();
1383 
1384 /*
1385  * Separate detection for no display cases to keep the display id array simple.
1386  *
1387  * IVB Q requires subvendor and subdevice matching to differentiate from IVB D
1388  * GT2 server.
1389  */
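/*
 * That is also why this uses a full struct pci_device_id table and
 * pci_match_id() (which can honor subvendor/subdevice) instead of the
 * device-ID-only lookup done against intel_display_ids[] further below.
 */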
1390 static bool has_no_display(struct pci_dev *pdev)
1391 {
1392 	static const struct pci_device_id ids[] = {
1393 		INTEL_IVB_Q_IDS(INTEL_VGA_DEVICE, 0),
1394 		{}
1395 	};
1396 
1397 	return pci_match_id(ids, pdev);
1398 }
1399 
1400 #define INTEL_DISPLAY_DEVICE(_id, _desc) { .devid = (_id), .desc = (_desc) }
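/*
 * Each INTEL_*_IDS() entry below applies INTEL_DISPLAY_DEVICE() to every PCI
 * device ID of that family, so intel_display_ids[] ends up as a flat
 * devid -> platform_desc table consumed by find_platform_desc().
 */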
1401 
1402 static const struct {
1403 	u32 devid;
1404 	const struct platform_desc *desc;
1405 } intel_display_ids[] = {
1406 	INTEL_I830_IDS(INTEL_DISPLAY_DEVICE, &i830_desc),
1407 	INTEL_I845G_IDS(INTEL_DISPLAY_DEVICE, &i845_desc),
1408 	INTEL_I85X_IDS(INTEL_DISPLAY_DEVICE, &i85x_desc),
1409 	INTEL_I865G_IDS(INTEL_DISPLAY_DEVICE, &i865g_desc),
1410 	INTEL_I915G_IDS(INTEL_DISPLAY_DEVICE, &i915g_desc),
1411 	INTEL_I915GM_IDS(INTEL_DISPLAY_DEVICE, &i915gm_desc),
1412 	INTEL_I945G_IDS(INTEL_DISPLAY_DEVICE, &i945g_desc),
1413 	INTEL_I945GM_IDS(INTEL_DISPLAY_DEVICE, &i945gm_desc),
1414 	INTEL_I965G_IDS(INTEL_DISPLAY_DEVICE, &i965g_desc),
1415 	INTEL_G33_IDS(INTEL_DISPLAY_DEVICE, &g33_desc),
1416 	INTEL_I965GM_IDS(INTEL_DISPLAY_DEVICE, &i965gm_desc),
1417 	INTEL_GM45_IDS(INTEL_DISPLAY_DEVICE, &gm45_desc),
1418 	INTEL_G45_IDS(INTEL_DISPLAY_DEVICE, &g45_desc),
1419 	INTEL_PNV_G_IDS(INTEL_DISPLAY_DEVICE, &pnv_g_desc),
1420 	INTEL_PNV_M_IDS(INTEL_DISPLAY_DEVICE, &pnv_m_desc),
1421 	INTEL_ILK_D_IDS(INTEL_DISPLAY_DEVICE, &ilk_d_desc),
1422 	INTEL_ILK_M_IDS(INTEL_DISPLAY_DEVICE, &ilk_m_desc),
1423 	INTEL_SNB_D_IDS(INTEL_DISPLAY_DEVICE, &snb_d_desc),
1424 	INTEL_SNB_M_IDS(INTEL_DISPLAY_DEVICE, &snb_m_desc),
1425 	INTEL_IVB_D_IDS(INTEL_DISPLAY_DEVICE, &ivb_d_desc),
1426 	INTEL_IVB_M_IDS(INTEL_DISPLAY_DEVICE, &ivb_m_desc),
1427 	INTEL_HSW_IDS(INTEL_DISPLAY_DEVICE, &hsw_desc),
1428 	INTEL_VLV_IDS(INTEL_DISPLAY_DEVICE, &vlv_desc),
1429 	INTEL_BDW_IDS(INTEL_DISPLAY_DEVICE, &bdw_desc),
1430 	INTEL_CHV_IDS(INTEL_DISPLAY_DEVICE, &chv_desc),
1431 	INTEL_SKL_IDS(INTEL_DISPLAY_DEVICE, &skl_desc),
1432 	INTEL_BXT_IDS(INTEL_DISPLAY_DEVICE, &bxt_desc),
1433 	INTEL_GLK_IDS(INTEL_DISPLAY_DEVICE, &glk_desc),
1434 	INTEL_KBL_IDS(INTEL_DISPLAY_DEVICE, &kbl_desc),
1435 	INTEL_CFL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1436 	INTEL_WHL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1437 	INTEL_CML_IDS(INTEL_DISPLAY_DEVICE, &cml_desc),
1438 	INTEL_ICL_IDS(INTEL_DISPLAY_DEVICE, &icl_desc),
1439 	INTEL_EHL_IDS(INTEL_DISPLAY_DEVICE, &ehl_desc),
1440 	INTEL_JSL_IDS(INTEL_DISPLAY_DEVICE, &jsl_desc),
1441 	INTEL_TGL_IDS(INTEL_DISPLAY_DEVICE, &tgl_desc),
1442 	INTEL_DG1_IDS(INTEL_DISPLAY_DEVICE, &dg1_desc),
1443 	INTEL_RKL_IDS(INTEL_DISPLAY_DEVICE, &rkl_desc),
1444 	INTEL_ADLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1445 	INTEL_RPLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1446 	INTEL_ADLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1447 	INTEL_ADLN_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1448 	INTEL_RPLU_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1449 	INTEL_RPLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1450 	INTEL_DG2_IDS(INTEL_DISPLAY_DEVICE, &dg2_desc),
1451 	INTEL_ARL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1452 	INTEL_MTL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1453 	INTEL_LNL_IDS(INTEL_DISPLAY_DEVICE, &lnl_desc),
1454 	INTEL_BMG_IDS(INTEL_DISPLAY_DEVICE, &bmg_desc),
1455 	INTEL_PTL_IDS(INTEL_DISPLAY_DEVICE, &ptl_desc),
1456 };
1457 
1458 static const struct {
1459 	u16 ver;
1460 	u16 rel;
1461 	const struct intel_display_device_info *display;
1462 } gmdid_display_map[] = {
1463 	{ 14,  0, &xe_lpdp_display },
1464 	{ 14,  1, &xe2_hpd_display },
1465 	{ 20,  0, &xe2_lpd_display },
1466 	{ 30,  0, &xe2_lpd_display },
1467 };
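/*
 * Platforms with a GMD ID register (MTL and later) leave .info unset in their
 * platform_desc; probe_gmdid_display() reads the IP version from hardware and
 * selects the display info from this table instead.
 */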
1468 
1469 static const struct intel_display_device_info *
probe_gmdid_display(struct intel_display * display,struct intel_display_ip_ver * ip_ver)1470 probe_gmdid_display(struct intel_display *display, struct intel_display_ip_ver *ip_ver)
1471 {
1472 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1473 	struct intel_display_ip_ver gmd_id;
1474 	void __iomem *addr;
1475 	u32 val;
1476 	int i;
1477 
1478 	addr = pci_iomap_range(pdev, 0, i915_mmio_reg_offset(GMD_ID_DISPLAY), sizeof(u32));
1479 	if (!addr) {
1480 		drm_err(display->drm,
1481 			"Cannot map MMIO BAR to read display GMD_ID\n");
1482 		return NULL;
1483 	}
1484 
1485 	val = ioread32(addr);
1486 	pci_iounmap(pdev, addr);
1487 
1488 	if (val == 0) {
1489 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1490 		return NULL;
1491 	}
1492 
1493 	gmd_id.ver = REG_FIELD_GET(GMD_ID_ARCH_MASK, val);
1494 	gmd_id.rel = REG_FIELD_GET(GMD_ID_RELEASE_MASK, val);
1495 	gmd_id.step = REG_FIELD_GET(GMD_ID_STEP, val);
1496 
1497 	for (i = 0; i < ARRAY_SIZE(gmdid_display_map); i++) {
1498 		if (gmd_id.ver == gmdid_display_map[i].ver &&
1499 		    gmd_id.rel == gmdid_display_map[i].rel) {
1500 			*ip_ver = gmd_id;
1501 			return gmdid_display_map[i].display;
1502 		}
1503 	}
1504 
1505 	drm_err(display->drm,
1506 		"Unrecognized display IP version %d.%02d; disabling display.\n",
1507 		gmd_id.ver, gmd_id.rel);
1508 	return NULL;
1509 }
1510 
1511 static const struct platform_desc *find_platform_desc(struct pci_dev *pdev)
1512 {
1513 	int i;
1514 
1515 	for (i = 0; i < ARRAY_SIZE(intel_display_ids); i++) {
1516 		if (intel_display_ids[i].devid == pdev->device)
1517 			return intel_display_ids[i].desc;
1518 	}
1519 
1520 	return NULL;
1521 }
1522 
1523 static const struct subplatform_desc *
1524 find_subplatform_desc(struct pci_dev *pdev, const struct platform_desc *desc)
1525 {
1526 	const struct subplatform_desc *sp;
1527 	const u16 *id;
1528 
1529 	for (sp = desc->subplatforms; sp && sp->pciidlist; sp++)
1530 		for (id = sp->pciidlist; *id; id++)
1531 			if (*id == pdev->device)
1532 				return sp;
1533 
1534 	return NULL;
1535 }
1536 
1537 static enum intel_step get_pre_gmdid_step(struct intel_display *display,
1538 					  const struct stepping_desc *main,
1539 					  const struct stepping_desc *sub)
1540 {
1541 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1542 	const enum intel_step *map = main->map;
1543 	int size = main->size;
1544 	int revision = pdev->revision;
1545 	enum intel_step step;
1546 
1547 	/* subplatform stepping info trumps main platform info */
1548 	if (sub && sub->map && sub->size) {
1549 		map = sub->map;
1550 		size = sub->size;
1551 	}
1552 
1553 	/* not all platforms define steppings, and it's fine */
1554 	if (!map || !size)
1555 		return STEP_NONE;
1556 
1557 	if (revision < size && map[revision] != STEP_NONE) {
1558 		step = map[revision];
1559 	} else {
1560 		drm_warn(display->drm, "Unknown revision 0x%02x\n", revision);
1561 
1562 		/*
1563 		 * If we hit a gap in the revision to step map, use the information
1564 		 * for the next revision.
1565 		 *
1566 		 * This may be wrong in all sorts of ways, especially if the
1567 		 * steppings in the array are not monotonically increasing, but
1568 		 * it's better than defaulting to 0.
1569 		 */
1570 		while (revision < size && map[revision] == STEP_NONE)
1571 			revision++;
1572 
1573 		if (revision < size) {
1574 			drm_dbg_kms(display->drm, "Using display stepping for revision 0x%02x\n",
1575 				    revision);
1576 			step = map[revision];
1577 		} else {
1578 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1579 			step = STEP_FUTURE;
1580 		}
1581 	}
1582 
1583 	drm_WARN_ON(display->drm, step == STEP_NONE);
1584 
1585 	return step;
1586 }
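/*
 * Worked example using skl_steppings[] above: revision 0x8 is a gap, so the
 * loop advances to 0x9 and returns STEP_J0 after the "Unknown revision"
 * warning; revision 0xB is past the end of the map and returns STEP_FUTURE.
 */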
1587 
1588 /* Size of the entire bitmap, not the number of platforms */
1589 static unsigned int display_platforms_num_bits(void)
1590 {
1591 	return sizeof(((struct intel_display_platforms *)0)->bitmap) * BITS_PER_BYTE;
1592 }
1593 
1594 /* Number of platform bits set */
1595 static unsigned int display_platforms_weight(const struct intel_display_platforms *p)
1596 {
1597 	return bitmap_weight(p->bitmap, display_platforms_num_bits());
1598 }
1599 
1600 /* Merge the subplatform information from src to dst */
1601 static void display_platforms_or(struct intel_display_platforms *dst,
1602 				 const struct intel_display_platforms *src)
1603 {
1604 	bitmap_or(dst->bitmap, dst->bitmap, src->bitmap, display_platforms_num_bits());
1605 }
1606 
1607 struct intel_display *intel_display_device_probe(struct pci_dev *pdev)
1608 {
1609 	struct intel_display *display = to_intel_display(pdev);
1610 	const struct intel_display_device_info *info;
1611 	struct intel_display_ip_ver ip_ver = {};
1612 	const struct platform_desc *desc;
1613 	const struct subplatform_desc *subdesc;
1614 	enum intel_step step;
1615 
1616 	/* Add drm device backpointer as early as possible. */
1617 	display->drm = pci_get_drvdata(pdev);
1618 
1619 	intel_display_params_copy(&display->params);
1620 
1621 	if (has_no_display(pdev)) {
1622 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1623 		goto no_display;
1624 	}
1625 
1626 	desc = find_platform_desc(pdev);
1627 	if (!desc) {
1628 		drm_dbg_kms(display->drm,
1629 			    "Unknown device ID %04x; disabling display.\n",
1630 			    pdev->device);
1631 		goto no_display;
1632 	}
1633 
1634 	info = desc->info;
1635 	if (!info)
1636 		info = probe_gmdid_display(display, &ip_ver);
1637 	if (!info)
1638 		goto no_display;
1639 
1640 	DISPLAY_INFO(display) = info;
1641 
1642 	memcpy(DISPLAY_RUNTIME_INFO(display),
1643 	       &DISPLAY_INFO(display)->__runtime_defaults,
1644 	       sizeof(*DISPLAY_RUNTIME_INFO(display)));
1645 
1646 	drm_WARN_ON(display->drm, !desc->name ||
1647 		    !display_platforms_weight(&desc->platforms));
1648 
1649 	display->platform = desc->platforms;
1650 
1651 	subdesc = find_subplatform_desc(pdev, desc);
1652 	if (subdesc) {
1653 		drm_WARN_ON(display->drm, !subdesc->name ||
1654 			    !display_platforms_weight(&subdesc->platforms));
1655 
1656 		display_platforms_or(&display->platform, &subdesc->platforms);
1657 
1658 		/* Ensure platform and subplatform are distinct */
1659 		drm_WARN_ON(display->drm,
1660 			    display_platforms_weight(&display->platform) !=
1661 			    display_platforms_weight(&desc->platforms) +
1662 			    display_platforms_weight(&subdesc->platforms));
1663 	}
1664 
1665 	if (ip_ver.ver || ip_ver.rel || ip_ver.step) {
1666 		DISPLAY_RUNTIME_INFO(display)->ip = ip_ver;
1667 		step = STEP_A0 + ip_ver.step;
1668 		if (step > STEP_FUTURE) {
1669 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1670 			step = STEP_FUTURE;
1671 		}
1672 	} else {
1673 		step = get_pre_gmdid_step(display, &desc->step_info,
1674 					  subdesc ? &subdesc->step_info : NULL);
1675 	}
1676 
1677 	DISPLAY_RUNTIME_INFO(display)->step = step;
1678 
1679 	drm_info(display->drm, "Found %s%s%s (device ID %04x) %s display version %u.%02u stepping %s\n",
1680 		 desc->name, subdesc ? "/" : "", subdesc ? subdesc->name : "",
1681 		 pdev->device, display->platform.dgfx ? "discrete" : "integrated",
1682 		 DISPLAY_RUNTIME_INFO(display)->ip.ver,
1683 		 DISPLAY_RUNTIME_INFO(display)->ip.rel,
1684 		 step != STEP_NONE ? intel_step_name(step) : "N/A");
1685 
1686 	return display;
1687 
1688 no_display:
1689 	DISPLAY_INFO(display) = &no_display;
1690 
1691 	return display;
1692 }
1693 
1694 void intel_display_device_remove(struct intel_display *display)
1695 {
1696 	intel_display_params_free(&display->params);
1697 }
1698 
1699 static void __intel_display_device_info_runtime_init(struct intel_display *display)
1700 {
1701 	struct drm_i915_private *i915 = to_i915(display->drm);
1702 	struct intel_display_runtime_info *display_runtime = DISPLAY_RUNTIME_INFO(display);
1703 	enum pipe pipe;
1704 
1705 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->pipe_mask) < I915_MAX_PIPES);
1706 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->cpu_transcoder_mask) < I915_MAX_TRANSCODERS);
1707 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->port_mask) < I915_MAX_PORTS);
1708 
1709 	/* This covers both ULT and ULX */
1710 	if (display->platform.haswell_ult || display->platform.broadwell_ult)
1711 		display_runtime->port_mask &= ~BIT(PORT_D);
1712 
1713 	if (display->platform.icelake_port_f)
1714 		display_runtime->port_mask |= BIT(PORT_F);
1715 
1716 	/* Wa_14011765242: adl-s A0,A1 */
1717 	if (display->platform.alderlake_s && IS_DISPLAY_STEP(display, STEP_A0, STEP_A2))
1718 		for_each_pipe(display, pipe)
1719 			display_runtime->num_scalers[pipe] = 0;
1720 	else if (DISPLAY_VER(display) >= 11) {
1721 		for_each_pipe(display, pipe)
1722 			display_runtime->num_scalers[pipe] = 2;
1723 	} else if (DISPLAY_VER(display) >= 9) {
1724 		display_runtime->num_scalers[PIPE_A] = 2;
1725 		display_runtime->num_scalers[PIPE_B] = 2;
1726 		display_runtime->num_scalers[PIPE_C] = 1;
1727 	}
1728 
1729 	if (DISPLAY_VER(display) >= 13 || HAS_D12_PLANE_MINIMIZATION(display))
1730 		for_each_pipe(display, pipe)
1731 			display_runtime->num_sprites[pipe] = 4;
1732 	else if (DISPLAY_VER(display) >= 11)
1733 		for_each_pipe(display, pipe)
1734 			display_runtime->num_sprites[pipe] = 6;
1735 	else if (DISPLAY_VER(display) == 10)
1736 		for_each_pipe(display, pipe)
1737 			display_runtime->num_sprites[pipe] = 3;
1738 	else if (display->platform.broxton) {
1739 		/*
1740 		 * Skylake and Broxton currently don't expose the topmost plane as its
1741 		 * use is exclusive with the legacy cursor and we only want to expose
1742 		 * one of those, not both. Until we can safely expose the topmost plane
1743 		 * as a DRM_PLANE_TYPE_CURSOR with all the features exposed/supported,
1744 		 * we don't expose the topmost plane at all to prevent ABI breakage
1745 		 * down the line.
1746 		 */
1747 
1748 		display_runtime->num_sprites[PIPE_A] = 2;
1749 		display_runtime->num_sprites[PIPE_B] = 2;
1750 		display_runtime->num_sprites[PIPE_C] = 1;
1751 	} else if (display->platform.valleyview || display->platform.cherryview) {
1752 		for_each_pipe(display, pipe)
1753 			display_runtime->num_sprites[pipe] = 2;
1754 	} else if (DISPLAY_VER(display) >= 5 || display->platform.g4x) {
1755 		for_each_pipe(display, pipe)
1756 			display_runtime->num_sprites[pipe] = 1;
1757 	}
1758 
1759 	if ((display->platform.dgfx || DISPLAY_VER(display) >= 14) &&
1760 	    !(intel_de_read(display, GU_CNTL_PROTECTED) & DEPRESENT)) {
1761 		drm_info(display->drm, "Display not present, disabling\n");
1762 		goto display_fused_off;
1763 	}
1764 
1765 	if (IS_DISPLAY_VER(display, 7, 8) && HAS_PCH_SPLIT(i915)) {
1766 		u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
1767 		u32 sfuse_strap = intel_de_read(display, SFUSE_STRAP);
1768 
1769 		/*
1770 		 * SFUSE_STRAP is supposed to have a bit signalling the display
1771 		 * is fused off. Unfortunately it seems that, at least in
1772 		 * certain cases, fused off display means that PCH display
1773 		 * reads don't land anywhere. In that case, we read 0s.
1774 		 *
1775 		 * On CPT/PPT, we can detect this case as SFUSE_STRAP_FUSE_LOCK
1776 		 * should be set when taking over after the firmware.
1777 		 */
1778 		if (fuse_strap & ILK_INTERNAL_DISPLAY_DISABLE ||
1779 		    sfuse_strap & SFUSE_STRAP_DISPLAY_DISABLED ||
1780 		    (HAS_PCH_CPT(i915) &&
1781 		     !(sfuse_strap & SFUSE_STRAP_FUSE_LOCK))) {
1782 			drm_info(display->drm,
1783 				 "Display fused off, disabling\n");
1784 			goto display_fused_off;
1785 		} else if (fuse_strap & IVB_PIPE_C_DISABLE) {
1786 			drm_info(display->drm, "PipeC fused off\n");
1787 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1788 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1789 		}
1790 	} else if (DISPLAY_VER(display) >= 9) {
1791 		u32 dfsm = intel_de_read(display, SKL_DFSM);
1792 
1793 		if (dfsm & SKL_DFSM_PIPE_A_DISABLE) {
1794 			display_runtime->pipe_mask &= ~BIT(PIPE_A);
1795 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_A);
1796 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_A);
1797 		}
1798 		if (dfsm & SKL_DFSM_PIPE_B_DISABLE) {
1799 			display_runtime->pipe_mask &= ~BIT(PIPE_B);
1800 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_B);
1801 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_B);
1802 		}
1803 		if (dfsm & SKL_DFSM_PIPE_C_DISABLE) {
1804 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1805 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1806 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_C);
1807 		}
1808 
1809 		if (DISPLAY_VER(display) >= 12 &&
1810 		    (dfsm & TGL_DFSM_PIPE_D_DISABLE)) {
1811 			display_runtime->pipe_mask &= ~BIT(PIPE_D);
1812 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_D);
1813 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_D);
1814 		}
1815 
1816 		if (!display_runtime->pipe_mask)
1817 			goto display_fused_off;
1818 
1819 		if (dfsm & SKL_DFSM_DISPLAY_HDCP_DISABLE)
1820 			display_runtime->has_hdcp = 0;
1821 
1822 		if (display->platform.dg2 || DISPLAY_VER(display) < 13) {
1823 			if (dfsm & SKL_DFSM_DISPLAY_PM_DISABLE)
1824 				display_runtime->fbc_mask = 0;
1825 		}
1826 
1827 		if (DISPLAY_VER(display) >= 11 && (dfsm & ICL_DFSM_DMC_DISABLE))
1828 			display_runtime->has_dmc = 0;
1829 
1830 		if (IS_DISPLAY_VER(display, 10, 12) &&
1831 		    (dfsm & GLK_DFSM_DISPLAY_DSC_DISABLE))
1832 			display_runtime->has_dsc = 0;
1833 
1834 		if (DISPLAY_VER(display) >= 20 &&
1835 		    (dfsm & XE2LPD_DFSM_DBUF_OVERLAP_DISABLE))
1836 			display_runtime->has_dbuf_overlap_detection = false;
1837 	}
1838 
1839 	if (DISPLAY_VER(display) >= 20) {
1840 		u32 cap = intel_de_read(display, XE2LPD_DE_CAP);
1841 
1842 		if (REG_FIELD_GET(XE2LPD_DE_CAP_DSC_MASK, cap) ==
1843 		    XE2LPD_DE_CAP_DSC_REMOVED)
1844 			display_runtime->has_dsc = 0;
1845 
1846 		if (REG_FIELD_GET(XE2LPD_DE_CAP_SCALER_MASK, cap) ==
1847 		    XE2LPD_DE_CAP_SCALER_SINGLE) {
1848 			for_each_pipe(display, pipe)
1849 				if (display_runtime->num_scalers[pipe])
1850 					display_runtime->num_scalers[pipe] = 1;
1851 		}
1852 	}
1853 
1854 	if (DISPLAY_VER(display) >= 30)
1855 		display_runtime->edp_typec_support =
1856 			intel_de_read(display, PICA_PHY_CONFIG_CONTROL) & EDP_ON_TYPEC;
1857 
1858 	display_runtime->rawclk_freq = intel_read_rawclk(display);
1859 	drm_dbg_kms(display->drm, "rawclk rate: %d kHz\n",
1860 		    display_runtime->rawclk_freq);
1861 
1862 	return;
1863 
1864 display_fused_off:
1865 	memset(display_runtime, 0, sizeof(*display_runtime));
1866 }
1867 
1868 void intel_display_device_info_runtime_init(struct intel_display *display)
1869 {
1870 	if (HAS_DISPLAY(display))
1871 		__intel_display_device_info_runtime_init(display);
1872 
1873 	/* Display may have been disabled by runtime init */
1874 	if (!HAS_DISPLAY(display)) {
1875 		display->drm->driver_features &= ~(DRIVER_MODESET | DRIVER_ATOMIC);
1876 		display->info.__device_info = &no_display;
1877 	}
1878 
1879 	/* Disable nuclear pageflip by default on pre-g4x */
1880 	if (!display->params.nuclear_pageflip &&
1881 	    DISPLAY_VER(display) < 5 && !display->platform.g4x)
1882 		display->drm->driver_features &= ~DRIVER_ATOMIC;
1883 }
1884 
1885 void intel_display_device_info_print(const struct intel_display_device_info *info,
1886 				     const struct intel_display_runtime_info *runtime,
1887 				     struct drm_printer *p)
1888 {
1889 	if (runtime->ip.rel)
1890 		drm_printf(p, "display version: %u.%02u\n",
1891 			   runtime->ip.ver,
1892 			   runtime->ip.rel);
1893 	else
1894 		drm_printf(p, "display version: %u\n",
1895 			   runtime->ip.ver);
1896 
1897 	drm_printf(p, "display stepping: %s\n", intel_step_name(runtime->step));
1898 
1899 #define PRINT_FLAG(name) drm_printf(p, "%s: %s\n", #name, str_yes_no(info->name))
1900 	DEV_INFO_DISPLAY_FOR_EACH_FLAG(PRINT_FLAG);
1901 #undef PRINT_FLAG
1902 
1903 	drm_printf(p, "has_hdcp: %s\n", str_yes_no(runtime->has_hdcp));
1904 	drm_printf(p, "has_dmc: %s\n", str_yes_no(runtime->has_dmc));
1905 	drm_printf(p, "has_dsc: %s\n", str_yes_no(runtime->has_dsc));
1906 
1907 	drm_printf(p, "rawclk rate: %u kHz\n", runtime->rawclk_freq);
1908 }
1909 
1910 /*
1911  * Assuming the device has display hardware, should it be enabled?
1912  *
1913  * It's an error to call this function if the device does not have display
1914  * hardware.
1915  *
1916  * Disabling display means taking over the display hardware, putting it to
1917  * sleep, and preventing connectors from being connected via any means.
1918  */
1919 bool intel_display_device_enabled(struct intel_display *display)
1920 {
1921 	/* Only valid when HAS_DISPLAY() is true */
1922 	drm_WARN_ON(display->drm, !HAS_DISPLAY(display));
1923 
1924 	return !display->params.disable_display &&
1925 		!intel_opregion_headless_sku(display);
1926 }
1927