xref: /linux/drivers/gpu/drm/i915/display/intel_display_device.c (revision 25489a4f556414445d342951615178368ee45cde)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2023 Intel Corporation
4  */
5 
6 #include <linux/pci.h>
7 
8 #include <drm/drm_color_mgmt.h>
9 #include <drm/drm_drv.h>
10 #include <drm/drm_print.h>
11 #include <drm/intel/pciids.h>
12 
13 #include "i915_reg.h"
14 #include "intel_cx0_phy_regs.h"
15 #include "intel_de.h"
16 #include "intel_display.h"
17 #include "intel_display_device.h"
18 #include "intel_display_params.h"
19 #include "intel_display_power.h"
20 #include "intel_display_reg_defs.h"
21 #include "intel_display_types.h"
22 #include "intel_fbc.h"
23 #include "intel_step.h"
24 
25 __diag_push();
26 __diag_ignore_all("-Woverride-init", "Allow field initialization overrides for display info");
27 
28 struct stepping_desc {
29 	const enum intel_step *map; /* revid to step map */
30 	size_t size; /* map size */
31 };
32 
33 #define STEP_INFO(_map)				\
34 	.step_info.map = _map,			\
35 	.step_info.size = ARRAY_SIZE(_map)
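/*
 * A stepping map is a sparse array indexed by PCI revision ID; revisions
 * that are not listed remain STEP_NONE.  STEP_INFO() fills in both members
 * at once, e.g. STEP_INFO(skl_steppings) expands to
 *
 *	.step_info.map = skl_steppings,
 *	.step_info.size = ARRAY_SIZE(skl_steppings)
 *
 * and the result is consumed by get_pre_gmdid_step() below.
 */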
36 
37 struct subplatform_desc {
38 	struct intel_display_platforms platforms;
39 	const char *name;
40 	const u16 *pciidlist;
41 	struct stepping_desc step_info;
42 };
43 
44 #define SUBPLATFORM(_platform, _subplatform)				\
45 	.platforms._platform##_##_subplatform = 1,			\
46 	.name = #_subplatform
47 
48 /*
49  * Group subplatform alias that matches multiple subplatforms, e.g. making ult
50  * cover both ult and ulx on HSW/BDW.
51  */
52 #define SUBPLATFORM_GROUP(_platform, _subplatform)			\
53 	.platforms._platform##_##_subplatform = 1
54 
55 struct platform_desc {
56 	struct intel_display_platforms platforms;
57 	const char *name;
58 	const struct subplatform_desc *subplatforms;
59 	const struct intel_display_device_info *info; /* NULL for GMD ID */
60 	struct stepping_desc step_info;
61 };
62 
63 #define PLATFORM(_platform)			 \
64 	.platforms._platform = 1,		 \
65 	.name = #_platform
66 
67 /*
68  * Group platform alias that matches multiple platforms, e.g. g4x, which
69  * covers both g45 and gm45.
70  */
71 #define PLATFORM_GROUP(_platform)		\
72 	.platforms._platform = 1
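/*
 * Both PLATFORM() and PLATFORM_GROUP() set one bit in the platform bitmap;
 * only PLATFORM() also provides the human readable name printed at probe
 * time.  For example, in gm45_desc below,
 *
 *	PLATFORM(gm45),
 *	PLATFORM_GROUP(g4x),
 *	PLATFORM_GROUP(mobile),
 *
 * sets .platforms.gm45, .platforms.g4x and .platforms.mobile, with
 * .name = "gm45".
 */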
73 
74 #define ID(id) (id)
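/*
 * ID() is the per-device macro handed to the INTEL_*_IDS() tables from
 * <drm/intel/pciids.h>, so lists such as hsw_ult_ids[] below expand to
 * plain, 0-terminated arrays of PCI device IDs.
 */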
75 
76 static const struct intel_display_device_info no_display = {};
77 
78 #define PIPE_A_OFFSET		0x70000
79 #define PIPE_B_OFFSET		0x71000
80 #define PIPE_C_OFFSET		0x72000
81 #define PIPE_D_OFFSET		0x73000
82 #define CHV_PIPE_C_OFFSET	0x74000
83 /*
84  * There's actually no pipe EDP. Some pipe registers have
85  * simply shifted from the pipe to the transcoder, while
86  * keeping their original offset. Thus we need PIPE_EDP_OFFSET
87  * to access such registers in transcoder EDP.
88  */
89 #define PIPE_EDP_OFFSET	0x7f000
90 
91 /* ICL DSI 0 and 1 */
92 #define PIPE_DSI0_OFFSET	0x7b000
93 #define PIPE_DSI1_OFFSET	0x7b800
94 
95 #define TRANSCODER_A_OFFSET 0x60000
96 #define TRANSCODER_B_OFFSET 0x61000
97 #define TRANSCODER_C_OFFSET 0x62000
98 #define CHV_TRANSCODER_C_OFFSET 0x63000
99 #define TRANSCODER_D_OFFSET 0x63000
100 #define TRANSCODER_EDP_OFFSET 0x6f000
101 #define TRANSCODER_DSI0_OFFSET	0x6b000
102 #define TRANSCODER_DSI1_OFFSET	0x6b800
103 
104 #define CURSOR_A_OFFSET 0x70080
105 #define CURSOR_B_OFFSET 0x700c0
106 #define CHV_CURSOR_C_OFFSET 0x700e0
107 #define IVB_CURSOR_B_OFFSET 0x71080
108 #define IVB_CURSOR_C_OFFSET 0x72080
109 #define TGL_CURSOR_D_OFFSET 0x73080
110 
111 #define I845_PIPE_OFFSETS \
112 	.pipe_offsets = { \
113 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
114 	}, \
115 	.trans_offsets = { \
116 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
117 	}
118 
119 #define I9XX_PIPE_OFFSETS \
120 	.pipe_offsets = { \
121 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
122 		[TRANSCODER_B] = PIPE_B_OFFSET, \
123 	}, \
124 	.trans_offsets = { \
125 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
126 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
127 	}
128 
129 #define IVB_PIPE_OFFSETS \
130 	.pipe_offsets = { \
131 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
132 		[TRANSCODER_B] = PIPE_B_OFFSET, \
133 		[TRANSCODER_C] = PIPE_C_OFFSET, \
134 	}, \
135 	.trans_offsets = { \
136 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
137 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
138 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
139 	}
140 
141 #define HSW_PIPE_OFFSETS \
142 	.pipe_offsets = { \
143 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
144 		[TRANSCODER_B] = PIPE_B_OFFSET, \
145 		[TRANSCODER_C] = PIPE_C_OFFSET, \
146 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
147 	}, \
148 	.trans_offsets = { \
149 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
150 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
151 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
152 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
153 	}
154 
155 #define CHV_PIPE_OFFSETS \
156 	.pipe_offsets = { \
157 		[TRANSCODER_A] = PIPE_A_OFFSET, \
158 		[TRANSCODER_B] = PIPE_B_OFFSET, \
159 		[TRANSCODER_C] = CHV_PIPE_C_OFFSET, \
160 	}, \
161 	.trans_offsets = { \
162 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
163 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
164 		[TRANSCODER_C] = CHV_TRANSCODER_C_OFFSET, \
165 	}
166 
167 #define I845_CURSOR_OFFSETS \
168 	.cursor_offsets = { \
169 		[PIPE_A] = CURSOR_A_OFFSET, \
170 	}
171 
172 #define I9XX_CURSOR_OFFSETS \
173 	.cursor_offsets = { \
174 		[PIPE_A] = CURSOR_A_OFFSET, \
175 		[PIPE_B] = CURSOR_B_OFFSET, \
176 	}
177 
178 #define CHV_CURSOR_OFFSETS \
179 	.cursor_offsets = { \
180 		[PIPE_A] = CURSOR_A_OFFSET, \
181 		[PIPE_B] = CURSOR_B_OFFSET, \
182 		[PIPE_C] = CHV_CURSOR_C_OFFSET, \
183 	}
184 
185 #define IVB_CURSOR_OFFSETS \
186 	.cursor_offsets = { \
187 		[PIPE_A] = CURSOR_A_OFFSET, \
188 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
189 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
190 	}
191 
192 #define TGL_CURSOR_OFFSETS \
193 	.cursor_offsets = { \
194 		[PIPE_A] = CURSOR_A_OFFSET, \
195 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
196 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
197 		[PIPE_D] = TGL_CURSOR_D_OFFSET, \
198 	}
199 
200 #define I845_COLORS \
201 	.color = { .gamma_lut_size = 256 }
202 #define I9XX_COLORS \
203 	.color = { .gamma_lut_size = 129, \
204 		   .gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
205 	}
206 #define ILK_COLORS \
207 	.color = { .gamma_lut_size = 1024 }
208 #define IVB_COLORS \
209 	.color = { .degamma_lut_size = 1024, .gamma_lut_size = 1024 }
210 #define CHV_COLORS \
211 	.color = { \
212 		.degamma_lut_size = 65, .gamma_lut_size = 257, \
213 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
214 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
215 	}
216 #define GLK_COLORS \
217 	.color = { \
218 		.degamma_lut_size = 33, .gamma_lut_size = 1024, \
219 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
220 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
221 	}
222 #define ICL_COLORS \
223 	.color = { \
224 		.degamma_lut_size = 33, .gamma_lut_size = 262145, \
225 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
226 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
227 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
228 	}
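/*
 * The .color capabilities above back the DRM color management properties
 * (see <drm/drm_color_mgmt.h>): the *_lut_size values are the LUT sizes
 * advertised to userspace, and the *_lut_tests flags are the extra checks
 * (non-decreasing, equal channels) applied to LUTs submitted by userspace.
 */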
229 
230 #define I830_DISPLAY \
231 	.has_overlay = 1, \
232 	.cursor_needs_physical = 1, \
233 	.overlay_needs_physical = 1, \
234 	.has_gmch = 1, \
235 	I9XX_PIPE_OFFSETS, \
236 	I9XX_CURSOR_OFFSETS, \
237 	I9XX_COLORS, \
238 	\
239 	.__runtime_defaults.ip.ver = 2, \
240 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
241 	.__runtime_defaults.cpu_transcoder_mask = \
242 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
243 
244 #define I845_DISPLAY \
245 	.has_overlay = 1, \
246 	.overlay_needs_physical = 1, \
247 	.has_gmch = 1, \
248 	I845_PIPE_OFFSETS, \
249 	I845_CURSOR_OFFSETS, \
250 	I845_COLORS, \
251 	\
252 	.__runtime_defaults.ip.ver = 2, \
253 	.__runtime_defaults.pipe_mask = BIT(PIPE_A), \
254 	.__runtime_defaults.cpu_transcoder_mask = BIT(TRANSCODER_A)
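/*
 * Note the split in these initializers: plain fields describe fixed hardware
 * features, while .__runtime_defaults only seeds the runtime info that
 * intel_display_device_probe() copies into DISPLAY_RUNTIME_INFO() and that
 * __intel_display_device_info_runtime_init() may later trim based on fuse
 * and strap registers (fused off pipes, HDCP, DSC, etc.).
 */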
255 
256 static const struct platform_desc i830_desc = {
257 	PLATFORM(i830),
258 	PLATFORM_GROUP(mobile),
259 	.info = &(const struct intel_display_device_info) {
260 		I830_DISPLAY,
261 
262 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C), /* DVO A/B/C */
263 	},
264 };
265 
266 static const struct platform_desc i845_desc = {
267 	PLATFORM(i845g),
268 	.info = &(const struct intel_display_device_info) {
269 		I845_DISPLAY,
270 
271 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
272 	},
273 };
274 
275 static const struct platform_desc i85x_desc = {
276 	PLATFORM(i85x),
277 	PLATFORM_GROUP(mobile),
278 	.info = &(const struct intel_display_device_info) {
279 		I830_DISPLAY,
280 
281 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
282 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
283 	},
284 };
285 
286 static const struct platform_desc i865g_desc = {
287 	PLATFORM(i865g),
288 	.info = &(const struct intel_display_device_info) {
289 		I845_DISPLAY,
290 
291 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
292 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
293 	},
294 };
295 
296 #define GEN3_DISPLAY   \
297 	.has_gmch = 1, \
298 	.has_overlay = 1, \
299 	I9XX_PIPE_OFFSETS, \
300 	I9XX_CURSOR_OFFSETS, \
301 	\
302 	.__runtime_defaults.ip.ver = 3, \
303 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
304 	.__runtime_defaults.cpu_transcoder_mask = \
305 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
306 	.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) /* SDVO B/C */
307 
308 static const struct platform_desc i915g_desc = {
309 	PLATFORM(i915g),
310 	.info = &(const struct intel_display_device_info) {
311 		GEN3_DISPLAY,
312 		I845_COLORS,
313 		.cursor_needs_physical = 1,
314 		.overlay_needs_physical = 1,
315 	},
316 };
317 
318 static const struct platform_desc i915gm_desc = {
319 	PLATFORM(i915gm),
320 	PLATFORM_GROUP(mobile),
321 	.info = &(const struct intel_display_device_info) {
322 		GEN3_DISPLAY,
323 		I9XX_COLORS,
324 		.cursor_needs_physical = 1,
325 		.overlay_needs_physical = 1,
326 		.supports_tv = 1,
327 
328 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
329 	},
330 };
331 
332 static const struct platform_desc i945g_desc = {
333 	PLATFORM(i945g),
334 	.info = &(const struct intel_display_device_info) {
335 		GEN3_DISPLAY,
336 		I845_COLORS,
337 		.has_hotplug = 1,
338 		.cursor_needs_physical = 1,
339 		.overlay_needs_physical = 1,
340 	},
341 };
342 
343 static const struct platform_desc i945gm_desc = {
344 	PLATFORM(i945gm),
345 	PLATFORM_GROUP(mobile),
346 	.info = &(const struct intel_display_device_info) {
347 		GEN3_DISPLAY,
348 		I9XX_COLORS,
349 		.has_hotplug = 1,
350 		.cursor_needs_physical = 1,
351 		.overlay_needs_physical = 1,
352 		.supports_tv = 1,
353 
354 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
355 	},
356 };
357 
358 static const struct platform_desc g33_desc = {
359 	PLATFORM(g33),
360 	.info = &(const struct intel_display_device_info) {
361 		GEN3_DISPLAY,
362 		I845_COLORS,
363 		.has_hotplug = 1,
364 	},
365 };
366 
367 static const struct intel_display_device_info pnv_display = {
368 	GEN3_DISPLAY,
369 	I9XX_COLORS,
370 	.has_hotplug = 1,
371 };
372 
373 static const struct platform_desc pnv_g_desc = {
374 	PLATFORM(pineview),
375 	.info = &pnv_display,
376 };
377 
378 static const struct platform_desc pnv_m_desc = {
379 	PLATFORM(pineview),
380 	PLATFORM_GROUP(mobile),
381 	.info = &pnv_display,
382 };
383 
384 #define GEN4_DISPLAY \
385 	.has_hotplug = 1, \
386 	.has_gmch = 1, \
387 	I9XX_PIPE_OFFSETS, \
388 	I9XX_CURSOR_OFFSETS, \
389 	I9XX_COLORS, \
390 	\
391 	.__runtime_defaults.ip.ver = 4, \
392 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
393 	.__runtime_defaults.cpu_transcoder_mask = \
394 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
395 
396 static const struct platform_desc i965g_desc = {
397 	PLATFORM(i965g),
398 	.info = &(const struct intel_display_device_info) {
399 		GEN4_DISPLAY,
400 		.has_overlay = 1,
401 
402 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
403 	},
404 };
405 
406 static const struct platform_desc i965gm_desc = {
407 	PLATFORM(i965gm),
408 	PLATFORM_GROUP(mobile),
409 	.info = &(const struct intel_display_device_info) {
410 		GEN4_DISPLAY,
411 		.has_overlay = 1,
412 		.supports_tv = 1,
413 
414 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
415 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
416 	},
417 };
418 
419 static const struct platform_desc g45_desc = {
420 	PLATFORM(g45),
421 	PLATFORM_GROUP(g4x),
422 	.info = &(const struct intel_display_device_info) {
423 		GEN4_DISPLAY,
424 
425 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
426 	},
427 };
428 
429 static const struct platform_desc gm45_desc = {
430 	PLATFORM(gm45),
431 	PLATFORM_GROUP(g4x),
432 	PLATFORM_GROUP(mobile),
433 	.info = &(const struct intel_display_device_info) {
434 		GEN4_DISPLAY,
435 		.supports_tv = 1,
436 
437 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
438 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
439 	},
440 };
441 
442 #define ILK_DISPLAY \
443 	.has_hotplug = 1, \
444 	I9XX_PIPE_OFFSETS, \
445 	I9XX_CURSOR_OFFSETS, \
446 	ILK_COLORS, \
447 	\
448 	.__runtime_defaults.ip.ver = 5, \
449 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
450 	.__runtime_defaults.cpu_transcoder_mask = \
451 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
452 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
453 
454 static const struct platform_desc ilk_d_desc = {
455 	PLATFORM(ironlake),
456 	.info = &(const struct intel_display_device_info) {
457 		ILK_DISPLAY,
458 	},
459 };
460 
461 static const struct platform_desc ilk_m_desc = {
462 	PLATFORM(ironlake),
463 	PLATFORM_GROUP(mobile),
464 	.info = &(const struct intel_display_device_info) {
465 		ILK_DISPLAY,
466 
467 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
468 	},
469 };
470 
471 static const struct intel_display_device_info snb_display = {
472 	.has_hotplug = 1,
473 	I9XX_PIPE_OFFSETS,
474 	I9XX_CURSOR_OFFSETS,
475 	ILK_COLORS,
476 
477 	.__runtime_defaults.ip.ver = 6,
478 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
479 	.__runtime_defaults.cpu_transcoder_mask =
480 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
481 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
482 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
483 };
484 
485 static const struct platform_desc snb_d_desc = {
486 	PLATFORM(sandybridge),
487 	.info = &snb_display,
488 };
489 
490 static const struct platform_desc snb_m_desc = {
491 	PLATFORM(sandybridge),
492 	PLATFORM_GROUP(mobile),
493 	.info = &snb_display,
494 };
495 
496 static const struct intel_display_device_info ivb_display = {
497 	.has_hotplug = 1,
498 	IVB_PIPE_OFFSETS,
499 	IVB_CURSOR_OFFSETS,
500 	IVB_COLORS,
501 
502 	.__runtime_defaults.ip.ver = 7,
503 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
504 	.__runtime_defaults.cpu_transcoder_mask =
505 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
506 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
507 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
508 };
509 
510 static const struct platform_desc ivb_d_desc = {
511 	PLATFORM(ivybridge),
512 	.info = &ivb_display,
513 };
514 
515 static const struct platform_desc ivb_m_desc = {
516 	PLATFORM(ivybridge),
517 	PLATFORM_GROUP(mobile),
518 	.info = &ivb_display,
519 };
520 
521 static const struct platform_desc vlv_desc = {
522 	PLATFORM(valleyview),
523 	.info = &(const struct intel_display_device_info) {
524 		.has_gmch = 1,
525 		.has_hotplug = 1,
526 		.mmio_offset = VLV_DISPLAY_BASE,
527 		I9XX_PIPE_OFFSETS,
528 		I9XX_CURSOR_OFFSETS,
529 		I9XX_COLORS,
530 
531 		.__runtime_defaults.ip.ver = 7,
532 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
533 		.__runtime_defaults.cpu_transcoder_mask =
534 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
535 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* HDMI/DP B/C */
536 	},
537 };
538 
539 static const u16 hsw_ult_ids[] = {
540 	INTEL_HSW_ULT_GT1_IDS(ID),
541 	INTEL_HSW_ULT_GT2_IDS(ID),
542 	INTEL_HSW_ULT_GT3_IDS(ID),
543 	0
544 };
545 
546 static const u16 hsw_ulx_ids[] = {
547 	INTEL_HSW_ULX_GT1_IDS(ID),
548 	INTEL_HSW_ULX_GT2_IDS(ID),
549 	0
550 };
551 
552 static const struct platform_desc hsw_desc = {
553 	PLATFORM(haswell),
554 	.subplatforms = (const struct subplatform_desc[]) {
555 		/* Special case: Use ult both as group and subplatform. */
556 		{
557 			SUBPLATFORM(haswell, ult),
558 			SUBPLATFORM_GROUP(haswell, ult),
559 			.pciidlist = hsw_ult_ids,
560 		},
561 		{
562 			SUBPLATFORM(haswell, ulx),
563 			SUBPLATFORM_GROUP(haswell, ult),
564 			.pciidlist = hsw_ulx_ids,
565 		},
566 		{},
567 	},
568 	.info = &(const struct intel_display_device_info) {
569 		.has_ddi = 1,
570 		.has_dp_mst = 1,
571 		.has_fpga_dbg = 1,
572 		.has_hotplug = 1,
573 		.has_psr = 1,
574 		.has_psr_hw_tracking = 1,
575 		HSW_PIPE_OFFSETS,
576 		IVB_CURSOR_OFFSETS,
577 		IVB_COLORS,
578 
579 		.__runtime_defaults.ip.ver = 7,
580 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
581 		.__runtime_defaults.cpu_transcoder_mask =
582 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
583 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
584 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
585 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
586 	},
587 };
588 
589 static const u16 bdw_ult_ids[] = {
590 	INTEL_BDW_ULT_GT1_IDS(ID),
591 	INTEL_BDW_ULT_GT2_IDS(ID),
592 	INTEL_BDW_ULT_GT3_IDS(ID),
593 	INTEL_BDW_ULT_RSVD_IDS(ID),
594 	0
595 };
596 
597 static const u16 bdw_ulx_ids[] = {
598 	INTEL_BDW_ULX_GT1_IDS(ID),
599 	INTEL_BDW_ULX_GT2_IDS(ID),
600 	INTEL_BDW_ULX_GT3_IDS(ID),
601 	INTEL_BDW_ULX_RSVD_IDS(ID),
602 	0
603 };
604 
605 static const struct platform_desc bdw_desc = {
606 	PLATFORM(broadwell),
607 	.subplatforms = (const struct subplatform_desc[]) {
608 		/* Special case: Use ult both as group and subplatform. */
609 		{
610 			SUBPLATFORM(broadwell, ult),
611 			SUBPLATFORM_GROUP(broadwell, ult),
612 			.pciidlist = bdw_ult_ids,
613 		},
614 		{
615 			SUBPLATFORM(broadwell, ulx),
616 			SUBPLATFORM_GROUP(broadwell, ult),
617 			.pciidlist = bdw_ulx_ids,
618 		},
619 		{},
620 	},
621 	.info = &(const struct intel_display_device_info) {
622 		.has_ddi = 1,
623 		.has_dp_mst = 1,
624 		.has_fpga_dbg = 1,
625 		.has_hotplug = 1,
626 		.has_psr = 1,
627 		.has_psr_hw_tracking = 1,
628 		HSW_PIPE_OFFSETS,
629 		IVB_CURSOR_OFFSETS,
630 		IVB_COLORS,
631 
632 		.__runtime_defaults.ip.ver = 8,
633 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
634 		.__runtime_defaults.cpu_transcoder_mask =
635 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
636 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
637 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
638 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
639 	},
640 };
641 
642 static const struct platform_desc chv_desc = {
643 	PLATFORM(cherryview),
644 	.info = &(const struct intel_display_device_info) {
645 		.has_hotplug = 1,
646 		.has_gmch = 1,
647 		.mmio_offset = VLV_DISPLAY_BASE,
648 		CHV_PIPE_OFFSETS,
649 		CHV_CURSOR_OFFSETS,
650 		CHV_COLORS,
651 
652 		.__runtime_defaults.ip.ver = 8,
653 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
654 		.__runtime_defaults.cpu_transcoder_mask =
655 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
656 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* HDMI/DP B/C/D */
657 	},
658 };
659 
660 static const struct intel_display_device_info skl_display = {
661 	.dbuf.size = 896 - 4, /* 4 blocks for bypass path allocation */
662 	.dbuf.slice_mask = BIT(DBUF_S1),
663 	.has_ddi = 1,
664 	.has_dp_mst = 1,
665 	.has_fpga_dbg = 1,
666 	.has_hotplug = 1,
667 	.has_ipc = 1,
668 	.has_psr = 1,
669 	.has_psr_hw_tracking = 1,
670 	HSW_PIPE_OFFSETS,
671 	IVB_CURSOR_OFFSETS,
672 	IVB_COLORS,
673 
674 	.__runtime_defaults.ip.ver = 9,
675 	.__runtime_defaults.has_dmc = 1,
676 	.__runtime_defaults.has_hdcp = 1,
677 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
678 	.__runtime_defaults.cpu_transcoder_mask =
679 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
680 	BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
681 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
682 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
683 };
684 
685 static const u16 skl_ult_ids[] = {
686 	INTEL_SKL_ULT_GT1_IDS(ID),
687 	INTEL_SKL_ULT_GT2_IDS(ID),
688 	INTEL_SKL_ULT_GT3_IDS(ID),
689 	0
690 };
691 
692 static const u16 skl_ulx_ids[] = {
693 	INTEL_SKL_ULX_GT1_IDS(ID),
694 	INTEL_SKL_ULX_GT2_IDS(ID),
695 	0
696 };
697 
698 static const enum intel_step skl_steppings[] = {
699 	[0x6] = STEP_G0,
700 	[0x7] = STEP_H0,
701 	[0x9] = STEP_J0,
702 	[0xA] = STEP_I1,
703 };
704 
705 static const struct platform_desc skl_desc = {
706 	PLATFORM(skylake),
707 	.subplatforms = (const struct subplatform_desc[]) {
708 		{
709 			SUBPLATFORM(skylake, ult),
710 			.pciidlist = skl_ult_ids,
711 		},
712 		{
713 			SUBPLATFORM(skylake, ulx),
714 			.pciidlist = skl_ulx_ids,
715 		},
716 		{},
717 	},
718 	.info = &skl_display,
719 	STEP_INFO(skl_steppings),
720 };
721 
722 static const u16 kbl_ult_ids[] = {
723 	INTEL_KBL_ULT_GT1_IDS(ID),
724 	INTEL_KBL_ULT_GT2_IDS(ID),
725 	INTEL_KBL_ULT_GT3_IDS(ID),
726 	0
727 };
728 
729 static const u16 kbl_ulx_ids[] = {
730 	INTEL_KBL_ULX_GT1_IDS(ID),
731 	INTEL_KBL_ULX_GT2_IDS(ID),
732 	INTEL_AML_KBL_GT2_IDS(ID),
733 	0
734 };
735 
736 static const enum intel_step kbl_steppings[] = {
737 	[1] = STEP_B0,
738 	[2] = STEP_B0,
739 	[3] = STEP_B0,
740 	[4] = STEP_C0,
741 	[5] = STEP_B1,
742 	[6] = STEP_B1,
743 	[7] = STEP_C0,
744 };
745 
746 static const struct platform_desc kbl_desc = {
747 	PLATFORM(kabylake),
748 	.subplatforms = (const struct subplatform_desc[]) {
749 		{
750 			SUBPLATFORM(kabylake, ult),
751 			.pciidlist = kbl_ult_ids,
752 		},
753 		{
754 			SUBPLATFORM(kabylake, ulx),
755 			.pciidlist = kbl_ulx_ids,
756 		},
757 		{},
758 	},
759 	.info = &skl_display,
760 	STEP_INFO(kbl_steppings),
761 };
762 
763 static const u16 cfl_ult_ids[] = {
764 	INTEL_CFL_U_GT2_IDS(ID),
765 	INTEL_CFL_U_GT3_IDS(ID),
766 	INTEL_WHL_U_GT1_IDS(ID),
767 	INTEL_WHL_U_GT2_IDS(ID),
768 	INTEL_WHL_U_GT3_IDS(ID),
769 	0
770 };
771 
772 static const u16 cfl_ulx_ids[] = {
773 	INTEL_AML_CFL_GT2_IDS(ID),
774 	0
775 };
776 
777 static const struct platform_desc cfl_desc = {
778 	PLATFORM(coffeelake),
779 	.subplatforms = (const struct subplatform_desc[]) {
780 		{
781 			SUBPLATFORM(coffeelake, ult),
782 			.pciidlist = cfl_ult_ids,
783 		},
784 		{
785 			SUBPLATFORM(coffeelake, ulx),
786 			.pciidlist = cfl_ulx_ids,
787 		},
788 		{},
789 	},
790 	.info = &skl_display,
791 };
792 
793 static const u16 cml_ult_ids[] = {
794 	INTEL_CML_U_GT1_IDS(ID),
795 	INTEL_CML_U_GT2_IDS(ID),
796 	0
797 };
798 
799 static const struct platform_desc cml_desc = {
800 	PLATFORM(cometlake),
801 	.subplatforms = (const struct subplatform_desc[]) {
802 		{
803 			SUBPLATFORM(cometlake, ult),
804 			.pciidlist = cml_ult_ids,
805 		},
806 		{},
807 	},
808 	.info = &skl_display,
809 };
810 
811 #define GEN9_LP_DISPLAY			 \
812 	.dbuf.slice_mask = BIT(DBUF_S1), \
813 	.has_dp_mst = 1, \
814 	.has_ddi = 1, \
815 	.has_fpga_dbg = 1, \
816 	.has_hotplug = 1, \
817 	.has_ipc = 1, \
818 	.has_psr = 1, \
819 	.has_psr_hw_tracking = 1, \
820 	HSW_PIPE_OFFSETS, \
821 	IVB_CURSOR_OFFSETS, \
822 	IVB_COLORS, \
823 	\
824 	.__runtime_defaults.has_dmc = 1, \
825 	.__runtime_defaults.has_hdcp = 1, \
826 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A), \
827 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
828 	.__runtime_defaults.cpu_transcoder_mask = \
829 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
830 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
831 		BIT(TRANSCODER_DSI_A) | BIT(TRANSCODER_DSI_C), \
832 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C)
833 
834 static const enum intel_step bxt_steppings[] = {
835 	[0xA] = STEP_C0,
836 	[0xB] = STEP_C0,
837 	[0xC] = STEP_D0,
838 	[0xD] = STEP_E0,
839 };
840 
841 static const struct platform_desc bxt_desc = {
842 	PLATFORM(broxton),
843 	.info = &(const struct intel_display_device_info) {
844 		GEN9_LP_DISPLAY,
845 		.dbuf.size = 512 - 4, /* 4 blocks for bypass path allocation */
846 
847 		.__runtime_defaults.ip.ver = 9,
848 	},
849 	STEP_INFO(bxt_steppings),
850 };
851 
852 static const enum intel_step glk_steppings[] = {
853 	[3] = STEP_B0,
854 };
855 
856 static const struct platform_desc glk_desc = {
857 	PLATFORM(geminilake),
858 	.info = &(const struct intel_display_device_info) {
859 		GEN9_LP_DISPLAY,
860 		.dbuf.size = 1024 - 4, /* 4 blocks for bypass path allocation */
861 		GLK_COLORS,
862 
863 		.__runtime_defaults.ip.ver = 10,
864 	},
865 	STEP_INFO(glk_steppings),
866 };
867 
868 #define ICL_DISPLAY \
869 	.abox_mask = BIT(0), \
870 	.dbuf.size = 2048, \
871 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
872 	.has_ddi = 1, \
873 	.has_dp_mst = 1, \
874 	.has_fpga_dbg = 1, \
875 	.has_hotplug = 1, \
876 	.has_ipc = 1, \
877 	.has_psr = 1, \
878 	.has_psr_hw_tracking = 1, \
879 	.pipe_offsets = { \
880 		[TRANSCODER_A] = PIPE_A_OFFSET, \
881 		[TRANSCODER_B] = PIPE_B_OFFSET, \
882 		[TRANSCODER_C] = PIPE_C_OFFSET, \
883 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
884 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
885 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
886 	}, \
887 	.trans_offsets = { \
888 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
889 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
890 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
891 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
892 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
893 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
894 	}, \
895 	IVB_CURSOR_OFFSETS, \
896 	ICL_COLORS, \
897 	\
898 	.__runtime_defaults.ip.ver = 11, \
899 	.__runtime_defaults.has_dmc = 1, \
900 	.__runtime_defaults.has_dsc = 1, \
901 	.__runtime_defaults.has_hdcp = 1, \
902 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
903 	.__runtime_defaults.cpu_transcoder_mask = \
904 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
905 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
906 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
907 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
908 
909 static const u16 icl_port_f_ids[] = {
910 	INTEL_ICL_PORT_F_IDS(ID),
911 	0
912 };
913 
914 static const enum intel_step icl_steppings[] = {
915 	[7] = STEP_D0,
916 };
917 
918 static const struct platform_desc icl_desc = {
919 	PLATFORM(icelake),
920 	.subplatforms = (const struct subplatform_desc[]) {
921 		{
922 			SUBPLATFORM(icelake, port_f),
923 			.pciidlist = icl_port_f_ids,
924 		},
925 		{},
926 	},
927 	.info = &(const struct intel_display_device_info) {
928 		ICL_DISPLAY,
929 
930 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
931 	},
932 	STEP_INFO(icl_steppings),
933 };
934 
935 static const struct intel_display_device_info jsl_ehl_display = {
936 	ICL_DISPLAY,
937 
938 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D),
939 };
940 
941 static const enum intel_step jsl_ehl_steppings[] = {
942 	[0] = STEP_A0,
943 	[1] = STEP_B0,
944 };
945 
946 static const struct platform_desc jsl_desc = {
947 	PLATFORM(jasperlake),
948 	.info = &jsl_ehl_display,
949 	STEP_INFO(jsl_ehl_steppings),
950 };
951 
952 static const struct platform_desc ehl_desc = {
953 	PLATFORM(elkhartlake),
954 	.info = &jsl_ehl_display,
955 	STEP_INFO(jsl_ehl_steppings),
956 };
957 
958 #define XE_D_DISPLAY \
959 	.abox_mask = GENMASK(2, 1), \
960 	.dbuf.size = 2048, \
961 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
962 	.has_ddi = 1, \
963 	.has_dp_mst = 1, \
964 	.has_dsb = 1, \
965 	.has_fpga_dbg = 1, \
966 	.has_hotplug = 1, \
967 	.has_ipc = 1, \
968 	.has_psr = 1, \
969 	.has_psr_hw_tracking = 1, \
970 	.pipe_offsets = { \
971 		[TRANSCODER_A] = PIPE_A_OFFSET, \
972 		[TRANSCODER_B] = PIPE_B_OFFSET, \
973 		[TRANSCODER_C] = PIPE_C_OFFSET, \
974 		[TRANSCODER_D] = PIPE_D_OFFSET, \
975 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
976 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
977 	}, \
978 	.trans_offsets = { \
979 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
980 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
981 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
982 		[TRANSCODER_D] = TRANSCODER_D_OFFSET, \
983 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
984 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
985 	}, \
986 	TGL_CURSOR_OFFSETS, \
987 	ICL_COLORS, \
988 	\
989 	.__runtime_defaults.ip.ver = 12, \
990 	.__runtime_defaults.has_dmc = 1, \
991 	.__runtime_defaults.has_dsc = 1, \
992 	.__runtime_defaults.has_hdcp = 1, \
993 	.__runtime_defaults.pipe_mask = \
994 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D), \
995 	.__runtime_defaults.cpu_transcoder_mask = \
996 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
997 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) | \
998 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
999 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
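/*
 * Unlike ICL_DISPLAY above, display version 12 no longer has an eDP
 * transcoder: TRANSCODER_EDP is gone from the offset tables and default
 * masks, and a fourth pipe/transcoder (D) takes its place.
 */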
1000 
1001 static const u16 tgl_uy_ids[] = {
1002 	INTEL_TGL_GT2_IDS(ID),
1003 	0
1004 };
1005 
1006 static const enum intel_step tgl_steppings[] = {
1007 	[0] = STEP_B0,
1008 	[1] = STEP_D0,
1009 };
1010 
1011 static const enum intel_step tgl_uy_steppings[] = {
1012 	[0] = STEP_A0,
1013 	[1] = STEP_C0,
1014 	[2] = STEP_C0,
1015 	[3] = STEP_D0,
1016 };
1017 
1018 static const struct platform_desc tgl_desc = {
1019 	PLATFORM(tigerlake),
1020 	.subplatforms = (const struct subplatform_desc[]) {
1021 		{
1022 			SUBPLATFORM(tigerlake, uy),
1023 			.pciidlist = tgl_uy_ids,
1024 			STEP_INFO(tgl_uy_steppings),
1025 		},
1026 		{},
1027 	},
1028 	.info = &(const struct intel_display_device_info) {
1029 		XE_D_DISPLAY,
1030 
1031 		/*
1032 		 * FIXME DDI C/combo PHY C missing due to combo PHY
1033 		 * code making a mess on SKUs where the PHY is missing.
1034 		 */
1035 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1036 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4) | BIT(PORT_TC5) | BIT(PORT_TC6),
1037 	},
1038 	STEP_INFO(tgl_steppings),
1039 };
1040 
1041 static const enum intel_step dg1_steppings[] = {
1042 	[0] = STEP_A0,
1043 	[1] = STEP_B0,
1044 };
1045 
1046 static const struct platform_desc dg1_desc = {
1047 	PLATFORM(dg1),
1048 	PLATFORM_GROUP(dgfx),
1049 	.info = &(const struct intel_display_device_info) {
1050 		XE_D_DISPLAY,
1051 
1052 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1053 		BIT(PORT_TC1) | BIT(PORT_TC2),
1054 	},
1055 	STEP_INFO(dg1_steppings),
1056 };
1057 
1058 static const enum intel_step rkl_steppings[] = {
1059 	[0] = STEP_A0,
1060 	[1] = STEP_B0,
1061 	[4] = STEP_C0,
1062 };
1063 
1064 static const struct platform_desc rkl_desc = {
1065 	PLATFORM(rocketlake),
1066 	.info = &(const struct intel_display_device_info) {
1067 		XE_D_DISPLAY,
1068 		.abox_mask = BIT(0),
1069 		.has_hti = 1,
1070 		.has_psr_hw_tracking = 0,
1071 
1072 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
1073 		.__runtime_defaults.cpu_transcoder_mask =
1074 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
1075 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1076 		BIT(PORT_TC1) | BIT(PORT_TC2),
1077 	},
1078 	STEP_INFO(rkl_steppings),
1079 };
1080 
1081 static const u16 adls_rpls_ids[] = {
1082 	INTEL_RPLS_IDS(ID),
1083 	0
1084 };
1085 
1086 static const enum intel_step adl_s_steppings[] = {
1087 	[0x0] = STEP_A0,
1088 	[0x1] = STEP_A2,
1089 	[0x4] = STEP_B0,
1090 	[0x8] = STEP_B0,
1091 	[0xC] = STEP_C0,
1092 };
1093 
1094 static const enum intel_step adl_s_rpl_s_steppings[] = {
1095 	[0x4] = STEP_D0,
1096 	[0xC] = STEP_C0,
1097 };
1098 
1099 static const struct platform_desc adl_s_desc = {
1100 	PLATFORM(alderlake_s),
1101 	.subplatforms = (const struct subplatform_desc[]) {
1102 		{
1103 			SUBPLATFORM(alderlake_s, raptorlake_s),
1104 			.pciidlist = adls_rpls_ids,
1105 			STEP_INFO(adl_s_rpl_s_steppings),
1106 		},
1107 		{},
1108 	},
1109 	.info = &(const struct intel_display_device_info) {
1110 		XE_D_DISPLAY,
1111 		.has_hti = 1,
1112 		.has_psr_hw_tracking = 0,
1113 
1114 		.__runtime_defaults.port_mask = BIT(PORT_A) |
1115 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1116 	},
1117 	STEP_INFO(adl_s_steppings),
1118 };
1119 
1120 #define XE_LPD_FEATURES \
1121 	.abox_mask = GENMASK(1, 0),						\
1122 	.color = {								\
1123 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1124 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1125 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1126 	},									\
1127 	.dbuf.size = 4096,							\
1128 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1129 		BIT(DBUF_S4),							\
1130 	.has_ddi = 1,								\
1131 	.has_dp_mst = 1,							\
1132 	.has_dsb = 1,								\
1133 	.has_fpga_dbg = 1,							\
1134 	.has_hotplug = 1,							\
1135 	.has_ipc = 1,								\
1136 	.has_psr = 1,								\
1137 	.pipe_offsets = {							\
1138 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1139 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1140 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1141 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1142 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET,				\
1143 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET,				\
1144 	},									\
1145 	.trans_offsets = {							\
1146 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1147 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1148 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1149 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1150 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET,			\
1151 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET,			\
1152 	},									\
1153 	TGL_CURSOR_OFFSETS,							\
1154 										\
1155 	.__runtime_defaults.ip.ver = 13,					\
1156 	.__runtime_defaults.has_dmc = 1,					\
1157 	.__runtime_defaults.has_dsc = 1,					\
1158 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),			\
1159 	.__runtime_defaults.has_hdcp = 1,					\
1160 	.__runtime_defaults.pipe_mask =						\
1161 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D)
1162 
1163 static const struct intel_display_device_info xe_lpd_display = {
1164 	XE_LPD_FEATURES,
1165 	.has_cdclk_crawl = 1,
1166 	.has_psr_hw_tracking = 0,
1167 
1168 	.__runtime_defaults.cpu_transcoder_mask =
1169 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1170 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) |
1171 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1),
1172 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1173 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1174 };
1175 
1176 static const u16 adlp_adln_ids[] = {
1177 	INTEL_ADLN_IDS(ID),
1178 	0
1179 };
1180 
1181 static const u16 adlp_rplu_ids[] = {
1182 	INTEL_RPLU_IDS(ID),
1183 	0
1184 };
1185 
1186 static const u16 adlp_rplp_ids[] = {
1187 	INTEL_RPLP_IDS(ID),
1188 	0
1189 };
1190 
1191 static const enum intel_step adl_p_steppings[] = {
1192 	[0x0] = STEP_A0,
1193 	[0x4] = STEP_B0,
1194 	[0x8] = STEP_C0,
1195 	[0xC] = STEP_D0,
1196 };
1197 
1198 static const enum intel_step adl_p_adl_n_steppings[] = {
1199 	[0x0] = STEP_D0,
1200 };
1201 
1202 static const enum intel_step adl_p_rpl_pu_steppings[] = {
1203 	[0x4] = STEP_E0,
1204 };
1205 
1206 static const struct platform_desc adl_p_desc = {
1207 	PLATFORM(alderlake_p),
1208 	.subplatforms = (const struct subplatform_desc[]) {
1209 		{
1210 			SUBPLATFORM(alderlake_p, alderlake_n),
1211 			.pciidlist = adlp_adln_ids,
1212 			STEP_INFO(adl_p_adl_n_steppings),
1213 		},
1214 		{
1215 			SUBPLATFORM(alderlake_p, raptorlake_p),
1216 			.pciidlist = adlp_rplp_ids,
1217 			STEP_INFO(adl_p_rpl_pu_steppings),
1218 		},
1219 		{
1220 			SUBPLATFORM(alderlake_p, raptorlake_u),
1221 			.pciidlist = adlp_rplu_ids,
1222 			STEP_INFO(adl_p_rpl_pu_steppings),
1223 		},
1224 		{},
1225 	},
1226 	.info = &xe_lpd_display,
1227 	STEP_INFO(adl_p_steppings),
1228 };
1229 
1230 static const struct intel_display_device_info xe_hpd_display = {
1231 	XE_LPD_FEATURES,
1232 	.has_cdclk_squash = 1,
1233 
1234 	.__runtime_defaults.cpu_transcoder_mask =
1235 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1236 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),
1237 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D_XELPD) |
1238 		BIT(PORT_TC1),
1239 };
1240 
1241 static const u16 dg2_g10_ids[] = {
1242 	INTEL_DG2_G10_IDS(ID),
1243 	0
1244 };
1245 
1246 static const u16 dg2_g11_ids[] = {
1247 	INTEL_DG2_G11_IDS(ID),
1248 	0
1249 };
1250 
1251 static const u16 dg2_g12_ids[] = {
1252 	INTEL_DG2_G12_IDS(ID),
1253 	0
1254 };
1255 
1256 static const enum intel_step dg2_g10_steppings[] = {
1257 	[0x0] = STEP_A0,
1258 	[0x1] = STEP_A0,
1259 	[0x4] = STEP_B0,
1260 	[0x8] = STEP_C0,
1261 };
1262 
1263 static const enum intel_step dg2_g11_steppings[] = {
1264 	[0x0] = STEP_B0,
1265 	[0x4] = STEP_C0,
1266 	[0x5] = STEP_C0,
1267 };
1268 
1269 static const enum intel_step dg2_g12_steppings[] = {
1270 	[0x0] = STEP_C0,
1271 	[0x1] = STEP_C0,
1272 };
1273 
1274 static const struct platform_desc dg2_desc = {
1275 	PLATFORM(dg2),
1276 	PLATFORM_GROUP(dgfx),
1277 	.subplatforms = (const struct subplatform_desc[]) {
1278 		{
1279 			SUBPLATFORM(dg2, g10),
1280 			.pciidlist = dg2_g10_ids,
1281 			STEP_INFO(dg2_g10_steppings),
1282 		},
1283 		{
1284 			SUBPLATFORM(dg2, g11),
1285 			.pciidlist = dg2_g11_ids,
1286 			STEP_INFO(dg2_g11_steppings),
1287 		},
1288 		{
1289 			SUBPLATFORM(dg2, g12),
1290 			.pciidlist = dg2_g12_ids,
1291 			STEP_INFO(dg2_g12_steppings),
1292 		},
1293 		{},
1294 	},
1295 	.info = &xe_hpd_display,
1296 };
1297 
1298 #define XE_LPDP_FEATURES							\
1299 	.abox_mask = GENMASK(1, 0),						\
1300 	.color = {								\
1301 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1302 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1303 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1304 	},									\
1305 	.dbuf.size = 4096,							\
1306 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1307 		BIT(DBUF_S4),							\
1308 	.has_cdclk_crawl = 1,							\
1309 	.has_cdclk_squash = 1,							\
1310 	.has_ddi = 1,								\
1311 	.has_dp_mst = 1,							\
1312 	.has_dsb = 1,								\
1313 	.has_fpga_dbg = 1,							\
1314 	.has_hotplug = 1,							\
1315 	.has_ipc = 1,								\
1316 	.has_psr = 1,								\
1317 	.pipe_offsets = {							\
1318 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1319 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1320 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1321 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1322 	},									\
1323 	.trans_offsets = {							\
1324 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1325 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1326 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1327 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1328 	},									\
1329 	TGL_CURSOR_OFFSETS,							\
1330 										\
1331 	.__runtime_defaults.cpu_transcoder_mask =				\
1332 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |				\
1333 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),				\
1334 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B),	\
1335 	.__runtime_defaults.has_dmc = 1,					\
1336 	.__runtime_defaults.has_dsc = 1,					\
1337 	.__runtime_defaults.has_hdcp = 1,					\
1338 	.__runtime_defaults.pipe_mask =						\
1339 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D),		\
1340 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |		\
1341 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4)
1342 
1343 static const struct intel_display_device_info xe_lpdp_display = {
1344 	XE_LPDP_FEATURES,
1345 };
1346 
1347 static const struct intel_display_device_info xe2_lpd_display = {
1348 	XE_LPDP_FEATURES,
1349 
1350 	.__runtime_defaults.fbc_mask =
1351 		BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B) |
1352 		BIT(INTEL_FBC_C) | BIT(INTEL_FBC_D),
1353 	.__runtime_defaults.has_dbuf_overlap_detection = true,
1354 };
1355 
1356 static const struct intel_display_device_info xe2_hpd_display = {
1357 	XE_LPDP_FEATURES,
1358 	.__runtime_defaults.port_mask = BIT(PORT_A) |
1359 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1360 };
1361 
1362 static const u16 mtl_u_ids[] = {
1363 	INTEL_MTL_U_IDS(ID),
1364 	INTEL_ARL_U_IDS(ID),
1365 	0
1366 };
1367 
1368 /*
1369  * Do not initialize the .info member of the platform desc for GMD ID based
1370  * platforms. Their display will be probed automatically based on the IP version
1371  * reported by the hardware.
1372  */
1373 static const struct platform_desc mtl_desc = {
1374 	PLATFORM(meteorlake),
1375 	.subplatforms = (const struct subplatform_desc[]) {
1376 		{
1377 			SUBPLATFORM(meteorlake, u),
1378 			.pciidlist = mtl_u_ids,
1379 		},
1380 		{},
1381 	}
1382 };
1383 
1384 static const struct platform_desc lnl_desc = {
1385 	PLATFORM(lunarlake),
1386 };
1387 
1388 static const struct platform_desc bmg_desc = {
1389 	PLATFORM(battlemage),
1390 	PLATFORM_GROUP(dgfx),
1391 };
1392 
1393 static const struct platform_desc ptl_desc = {
1394 	PLATFORM(pantherlake),
1395 };
1396 
1397 __diag_pop();
1398 
1399 /*
1400  * Separate detection for no display cases to keep the display id array simple.
1401  *
1402  * IVB Q requires subvendor and subdevice matching to differentiate from IVB D
1403  * GT2 server.
1404  */
1405 static bool has_no_display(struct pci_dev *pdev)
1406 {
1407 	static const struct pci_device_id ids[] = {
1408 		INTEL_IVB_Q_IDS(INTEL_VGA_DEVICE, 0),
1409 		{}
1410 	};
1411 
1412 	return pci_match_id(ids, pdev);
1413 }
1414 
1415 #define INTEL_DISPLAY_DEVICE(_id, _desc) { .devid = (_id), .desc = (_desc) }
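/*
 * Each INTEL_*_IDS(INTEL_DISPLAY_DEVICE, &foo_desc) invocation below expands
 * to one { .devid, .desc } entry per PCI device ID, so intel_display_ids[]
 * maps every known device ID to its platform descriptor.
 */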
1416 
1417 static const struct {
1418 	u32 devid;
1419 	const struct platform_desc *desc;
1420 } intel_display_ids[] = {
1421 	INTEL_I830_IDS(INTEL_DISPLAY_DEVICE, &i830_desc),
1422 	INTEL_I845G_IDS(INTEL_DISPLAY_DEVICE, &i845_desc),
1423 	INTEL_I85X_IDS(INTEL_DISPLAY_DEVICE, &i85x_desc),
1424 	INTEL_I865G_IDS(INTEL_DISPLAY_DEVICE, &i865g_desc),
1425 	INTEL_I915G_IDS(INTEL_DISPLAY_DEVICE, &i915g_desc),
1426 	INTEL_I915GM_IDS(INTEL_DISPLAY_DEVICE, &i915gm_desc),
1427 	INTEL_I945G_IDS(INTEL_DISPLAY_DEVICE, &i945g_desc),
1428 	INTEL_I945GM_IDS(INTEL_DISPLAY_DEVICE, &i945gm_desc),
1429 	INTEL_I965G_IDS(INTEL_DISPLAY_DEVICE, &i965g_desc),
1430 	INTEL_G33_IDS(INTEL_DISPLAY_DEVICE, &g33_desc),
1431 	INTEL_I965GM_IDS(INTEL_DISPLAY_DEVICE, &i965gm_desc),
1432 	INTEL_GM45_IDS(INTEL_DISPLAY_DEVICE, &gm45_desc),
1433 	INTEL_G45_IDS(INTEL_DISPLAY_DEVICE, &g45_desc),
1434 	INTEL_PNV_G_IDS(INTEL_DISPLAY_DEVICE, &pnv_g_desc),
1435 	INTEL_PNV_M_IDS(INTEL_DISPLAY_DEVICE, &pnv_m_desc),
1436 	INTEL_ILK_D_IDS(INTEL_DISPLAY_DEVICE, &ilk_d_desc),
1437 	INTEL_ILK_M_IDS(INTEL_DISPLAY_DEVICE, &ilk_m_desc),
1438 	INTEL_SNB_D_IDS(INTEL_DISPLAY_DEVICE, &snb_d_desc),
1439 	INTEL_SNB_M_IDS(INTEL_DISPLAY_DEVICE, &snb_m_desc),
1440 	INTEL_IVB_D_IDS(INTEL_DISPLAY_DEVICE, &ivb_d_desc),
1441 	INTEL_IVB_M_IDS(INTEL_DISPLAY_DEVICE, &ivb_m_desc),
1442 	INTEL_HSW_IDS(INTEL_DISPLAY_DEVICE, &hsw_desc),
1443 	INTEL_VLV_IDS(INTEL_DISPLAY_DEVICE, &vlv_desc),
1444 	INTEL_BDW_IDS(INTEL_DISPLAY_DEVICE, &bdw_desc),
1445 	INTEL_CHV_IDS(INTEL_DISPLAY_DEVICE, &chv_desc),
1446 	INTEL_SKL_IDS(INTEL_DISPLAY_DEVICE, &skl_desc),
1447 	INTEL_BXT_IDS(INTEL_DISPLAY_DEVICE, &bxt_desc),
1448 	INTEL_GLK_IDS(INTEL_DISPLAY_DEVICE, &glk_desc),
1449 	INTEL_KBL_IDS(INTEL_DISPLAY_DEVICE, &kbl_desc),
1450 	INTEL_CFL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1451 	INTEL_WHL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1452 	INTEL_CML_IDS(INTEL_DISPLAY_DEVICE, &cml_desc),
1453 	INTEL_ICL_IDS(INTEL_DISPLAY_DEVICE, &icl_desc),
1454 	INTEL_EHL_IDS(INTEL_DISPLAY_DEVICE, &ehl_desc),
1455 	INTEL_JSL_IDS(INTEL_DISPLAY_DEVICE, &jsl_desc),
1456 	INTEL_TGL_IDS(INTEL_DISPLAY_DEVICE, &tgl_desc),
1457 	INTEL_DG1_IDS(INTEL_DISPLAY_DEVICE, &dg1_desc),
1458 	INTEL_RKL_IDS(INTEL_DISPLAY_DEVICE, &rkl_desc),
1459 	INTEL_ADLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1460 	INTEL_RPLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1461 	INTEL_ADLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1462 	INTEL_ADLN_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1463 	INTEL_RPLU_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1464 	INTEL_RPLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1465 	INTEL_DG2_IDS(INTEL_DISPLAY_DEVICE, &dg2_desc),
1466 	INTEL_ARL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1467 	INTEL_MTL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1468 	INTEL_LNL_IDS(INTEL_DISPLAY_DEVICE, &lnl_desc),
1469 	INTEL_BMG_IDS(INTEL_DISPLAY_DEVICE, &bmg_desc),
1470 	INTEL_PTL_IDS(INTEL_DISPLAY_DEVICE, &ptl_desc),
1471 };
1472 
1473 static const struct {
1474 	u16 ver;
1475 	u16 rel;
1476 	const struct intel_display_device_info *display;
1477 } gmdid_display_map[] = {
1478 	{ 14,  0, &xe_lpdp_display },
1479 	{ 14,  1, &xe2_hpd_display },
1480 	{ 20,  0, &xe2_lpd_display },
1481 	{ 30,  0, &xe2_lpd_display },
1482 };
1483 
1484 static const struct intel_display_device_info *
1485 probe_gmdid_display(struct intel_display *display, struct intel_display_ip_ver *ip_ver)
1486 {
1487 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1488 	struct intel_display_ip_ver gmd_id;
1489 	void __iomem *addr;
1490 	u32 val;
1491 	int i;
1492 
1493 	addr = pci_iomap_range(pdev, 0, i915_mmio_reg_offset(GMD_ID_DISPLAY), sizeof(u32));
1494 	if (!addr) {
1495 		drm_err(display->drm,
1496 			"Cannot map MMIO BAR to read display GMD_ID\n");
1497 		return NULL;
1498 	}
1499 
1500 	val = ioread32(addr);
1501 	pci_iounmap(pdev, addr);
1502 
1503 	if (val == 0) {
1504 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1505 		return NULL;
1506 	}
1507 
1508 	gmd_id.ver = REG_FIELD_GET(GMD_ID_ARCH_MASK, val);
1509 	gmd_id.rel = REG_FIELD_GET(GMD_ID_RELEASE_MASK, val);
1510 	gmd_id.step = REG_FIELD_GET(GMD_ID_STEP, val);
1511 
1512 	for (i = 0; i < ARRAY_SIZE(gmdid_display_map); i++) {
1513 		if (gmd_id.ver == gmdid_display_map[i].ver &&
1514 		    gmd_id.rel == gmdid_display_map[i].rel) {
1515 			*ip_ver = gmd_id;
1516 			return gmdid_display_map[i].display;
1517 		}
1518 	}
1519 
1520 	drm_err(display->drm,
1521 		"Unrecognized display IP version %d.%02d; disabling display.\n",
1522 		gmd_id.ver, gmd_id.rel);
1523 	return NULL;
1524 }
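/*
 * probe_gmdid_display() runs before the driver's regular MMIO mapping is
 * available, so it temporarily maps just the GMD_ID_DISPLAY register from
 * BAR 0, decodes the ver/rel/step fields, and matches the ver.rel pair
 * against gmdid_display_map[] above.
 */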
1525 
1526 static const struct platform_desc *find_platform_desc(struct pci_dev *pdev)
1527 {
1528 	int i;
1529 
1530 	for (i = 0; i < ARRAY_SIZE(intel_display_ids); i++) {
1531 		if (intel_display_ids[i].devid == pdev->device)
1532 			return intel_display_ids[i].desc;
1533 	}
1534 
1535 	return NULL;
1536 }
1537 
1538 static const struct subplatform_desc *
1539 find_subplatform_desc(struct pci_dev *pdev, const struct platform_desc *desc)
1540 {
1541 	const struct subplatform_desc *sp;
1542 	const u16 *id;
1543 
1544 	for (sp = desc->subplatforms; sp && sp->pciidlist; sp++)
1545 		for (id = sp->pciidlist; *id; id++)
1546 			if (*id == pdev->device)
1547 				return sp;
1548 
1549 	return NULL;
1550 }
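/*
 * The subplatform array is terminated by an empty entry (NULL pciidlist) and
 * each pciidlist by a 0 device ID; the first subplatform whose list contains
 * the device ID wins.
 */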
1551 
1552 static enum intel_step get_pre_gmdid_step(struct intel_display *display,
1553 					  const struct stepping_desc *main,
1554 					  const struct stepping_desc *sub)
1555 {
1556 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1557 	const enum intel_step *map = main->map;
1558 	int size = main->size;
1559 	int revision = pdev->revision;
1560 	enum intel_step step;
1561 
1562 	/* subplatform stepping info trumps main platform info */
1563 	if (sub && sub->map && sub->size) {
1564 		map = sub->map;
1565 		size = sub->size;
1566 	}
1567 
1568 	/* not all platforms define steppings, and it's fine */
1569 	if (!map || !size)
1570 		return STEP_NONE;
1571 
1572 	if (revision < size && map[revision] != STEP_NONE) {
1573 		step = map[revision];
1574 	} else {
1575 		drm_warn(display->drm, "Unknown revision 0x%02x\n", revision);
1576 
1577 		/*
1578 		 * If we hit a gap in the revision to step map, use the information
1579 		 * for the next revision.
1580 		 *
1581 		 * This may be wrong in all sorts of ways, especially if the
1582 		 * steppings in the array are not monotonically increasing, but
1583 		 * it's better than defaulting to 0.
1584 		 */
1585 		while (revision < size && map[revision] == STEP_NONE)
1586 			revision++;
1587 
1588 		if (revision < size) {
1589 			drm_dbg_kms(display->drm, "Using display stepping for revision 0x%02x\n",
1590 				    revision);
1591 			step = map[revision];
1592 		} else {
1593 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1594 			step = STEP_FUTURE;
1595 		}
1596 	}
1597 
1598 	drm_WARN_ON(display->drm, step == STEP_NONE);
1599 
1600 	return step;
1601 }
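/*
 * Worked example of the gap handling above, using skl_steppings[]: revision
 * 0x8 is not listed (STEP_NONE), so after the warning the loop advances to
 * the next known revision, 0x9, and STEP_J0 is used; a revision past the end
 * of the map (e.g. 0xB) yields STEP_FUTURE instead.
 */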
1602 
1603 /* Size of the entire bitmap, not the number of platforms */
1604 static unsigned int display_platforms_num_bits(void)
1605 {
1606 	return sizeof(((struct intel_display_platforms *)0)->bitmap) * BITS_PER_BYTE;
1607 }
1608 
1609 /* Number of platform bits set */
1610 static unsigned int display_platforms_weight(const struct intel_display_platforms *p)
1611 {
1612 	return bitmap_weight(p->bitmap, display_platforms_num_bits());
1613 }
1614 
1615 /* Merge the subplatform information from src to dst */
1616 static void display_platforms_or(struct intel_display_platforms *dst,
1617 				 const struct intel_display_platforms *src)
1618 {
1619 	bitmap_or(dst->bitmap, dst->bitmap, src->bitmap, display_platforms_num_bits());
1620 }
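/*
 * intel_display_device_probe() below ORs the subplatform bits on top of the
 * platform bits, so e.g. a Haswell ULT device ends up with both .haswell and
 * .haswell_ult set in display->platform; the weight checks in the caller
 * ensure platform and subplatform descriptors don't share bits.
 */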
1621 
1622 struct intel_display *intel_display_device_probe(struct pci_dev *pdev)
1623 {
1624 	struct intel_display *display = to_intel_display(pdev);
1625 	const struct intel_display_device_info *info;
1626 	struct intel_display_ip_ver ip_ver = {};
1627 	const struct platform_desc *desc;
1628 	const struct subplatform_desc *subdesc;
1629 	enum intel_step step;
1630 
1631 	/* Add drm device backpointer as early as possible. */
1632 	display->drm = pci_get_drvdata(pdev);
1633 
1634 	intel_display_params_copy(&display->params);
1635 
1636 	if (has_no_display(pdev)) {
1637 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1638 		goto no_display;
1639 	}
1640 
1641 	desc = find_platform_desc(pdev);
1642 	if (!desc) {
1643 		drm_dbg_kms(display->drm,
1644 			    "Unknown device ID %04x; disabling display.\n",
1645 			    pdev->device);
1646 		goto no_display;
1647 	}
1648 
1649 	info = desc->info;
1650 	if (!info)
1651 		info = probe_gmdid_display(display, &ip_ver);
1652 	if (!info)
1653 		goto no_display;
1654 
1655 	DISPLAY_INFO(display) = info;
1656 
1657 	memcpy(DISPLAY_RUNTIME_INFO(display),
1658 	       &DISPLAY_INFO(display)->__runtime_defaults,
1659 	       sizeof(*DISPLAY_RUNTIME_INFO(display)));
1660 
1661 	drm_WARN_ON(display->drm, !desc->name ||
1662 		    !display_platforms_weight(&desc->platforms));
1663 
1664 	display->platform = desc->platforms;
1665 
1666 	subdesc = find_subplatform_desc(pdev, desc);
1667 	if (subdesc) {
1668 		drm_WARN_ON(display->drm, !subdesc->name ||
1669 			    !display_platforms_weight(&subdesc->platforms));
1670 
1671 		display_platforms_or(&display->platform, &subdesc->platforms);
1672 
1673 		/* Ensure platform and subplatform are distinct */
1674 		drm_WARN_ON(display->drm,
1675 			    display_platforms_weight(&display->platform) !=
1676 			    display_platforms_weight(&desc->platforms) +
1677 			    display_platforms_weight(&subdesc->platforms));
1678 	}
1679 
1680 	if (ip_ver.ver || ip_ver.rel || ip_ver.step) {
1681 		DISPLAY_RUNTIME_INFO(display)->ip = ip_ver;
1682 		step = STEP_A0 + ip_ver.step;
1683 		if (step > STEP_FUTURE) {
1684 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1685 			step = STEP_FUTURE;
1686 		}
1687 	} else {
1688 		step = get_pre_gmdid_step(display, &desc->step_info,
1689 					  subdesc ? &subdesc->step_info : NULL);
1690 	}
1691 
1692 	DISPLAY_RUNTIME_INFO(display)->step = step;
1693 
1694 	drm_info(display->drm, "Found %s%s%s (device ID %04x) %s display version %u.%02u stepping %s\n",
1695 		 desc->name, subdesc ? "/" : "", subdesc ? subdesc->name : "",
1696 		 pdev->device, display->platform.dgfx ? "discrete" : "integrated",
1697 		 DISPLAY_RUNTIME_INFO(display)->ip.ver,
1698 		 DISPLAY_RUNTIME_INFO(display)->ip.rel,
1699 		 step != STEP_NONE ? intel_step_name(step) : "N/A");
1700 
1701 	return display;
1702 
1703 no_display:
1704 	DISPLAY_INFO(display) = &no_display;
1705 
1706 	return display;
1707 }
1708 
1709 void intel_display_device_remove(struct intel_display *display)
1710 {
1711 	intel_display_params_free(&display->params);
1712 }
1713 
1714 static void __intel_display_device_info_runtime_init(struct intel_display *display)
1715 {
1716 	struct intel_display_runtime_info *display_runtime = DISPLAY_RUNTIME_INFO(display);
1717 	enum pipe pipe;
1718 
1719 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->pipe_mask) < I915_MAX_PIPES);
1720 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->cpu_transcoder_mask) < I915_MAX_TRANSCODERS);
1721 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->port_mask) < I915_MAX_PORTS);
1722 
1723 	/* This covers both ULT and ULX */
1724 	if (display->platform.haswell_ult || display->platform.broadwell_ult)
1725 		display_runtime->port_mask &= ~BIT(PORT_D);
1726 
1727 	if (display->platform.icelake_port_f)
1728 		display_runtime->port_mask |= BIT(PORT_F);
1729 
1730 	/* Wa_14011765242: adl-s A0,A1 */
1731 	if (display->platform.alderlake_s && IS_DISPLAY_STEP(display, STEP_A0, STEP_A2))
1732 		for_each_pipe(display, pipe)
1733 			display_runtime->num_scalers[pipe] = 0;
1734 	else if (DISPLAY_VER(display) >= 11) {
1735 		for_each_pipe(display, pipe)
1736 			display_runtime->num_scalers[pipe] = 2;
1737 	} else if (DISPLAY_VER(display) >= 9) {
1738 		display_runtime->num_scalers[PIPE_A] = 2;
1739 		display_runtime->num_scalers[PIPE_B] = 2;
1740 		display_runtime->num_scalers[PIPE_C] = 1;
1741 	}
1742 
1743 	if (DISPLAY_VER(display) >= 13 || HAS_D12_PLANE_MINIMIZATION(display))
1744 		for_each_pipe(display, pipe)
1745 			display_runtime->num_sprites[pipe] = 4;
1746 	else if (DISPLAY_VER(display) >= 11)
1747 		for_each_pipe(display, pipe)
1748 			display_runtime->num_sprites[pipe] = 6;
1749 	else if (DISPLAY_VER(display) == 10)
1750 		for_each_pipe(display, pipe)
1751 			display_runtime->num_sprites[pipe] = 3;
1752 	else if (display->platform.broxton) {
1753 		/*
1754 		 * Skylake and Broxton currently don't expose the topmost plane as its
1755 		 * use is exclusive with the legacy cursor and we only want to expose
1756 		 * one of those, not both. Until we can safely expose the topmost plane
1757 		 * as a DRM_PLANE_TYPE_CURSOR with all the features exposed/supported,
1758 		 * we don't expose the topmost plane at all to prevent ABI breakage
1759 		 * down the line.
1760 		 */
1761 
1762 		display_runtime->num_sprites[PIPE_A] = 2;
1763 		display_runtime->num_sprites[PIPE_B] = 2;
1764 		display_runtime->num_sprites[PIPE_C] = 1;
1765 	} else if (display->platform.valleyview || display->platform.cherryview) {
1766 		for_each_pipe(display, pipe)
1767 			display_runtime->num_sprites[pipe] = 2;
1768 	} else if (DISPLAY_VER(display) >= 5 || display->platform.g4x) {
1769 		for_each_pipe(display, pipe)
1770 			display_runtime->num_sprites[pipe] = 1;
1771 	}
1772 
1773 	if ((display->platform.dgfx || DISPLAY_VER(display) >= 14) &&
1774 	    !(intel_de_read(display, GU_CNTL_PROTECTED) & DEPRESENT)) {
1775 		drm_info(display->drm, "Display not present, disabling\n");
1776 		goto display_fused_off;
1777 	}
1778 
1779 	if (IS_DISPLAY_VER(display, 7, 8) && HAS_PCH_SPLIT(display)) {
1780 		u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
1781 		u32 sfuse_strap = intel_de_read(display, SFUSE_STRAP);
1782 
1783 		/*
1784 		 * SFUSE_STRAP is supposed to have a bit signalling the display
1785 		 * is fused off. Unfortunately it seems that, at least in
1786 		 * certain cases, fused off display means that PCH display
1787 		 * reads don't land anywhere. In that case, we read 0s.
1788 		 *
1789 		 * On CPT/PPT, we can detect this case as SFUSE_STRAP_FUSE_LOCK
1790 		 * should be set when taking over after the firmware.
1791 		 */
1792 		if (fuse_strap & ILK_INTERNAL_DISPLAY_DISABLE ||
1793 		    sfuse_strap & SFUSE_STRAP_DISPLAY_DISABLED ||
1794 		    (HAS_PCH_CPT(display) &&
1795 		     !(sfuse_strap & SFUSE_STRAP_FUSE_LOCK))) {
1796 			drm_info(display->drm,
1797 				 "Display fused off, disabling\n");
1798 			goto display_fused_off;
1799 		} else if (fuse_strap & IVB_PIPE_C_DISABLE) {
1800 			drm_info(display->drm, "PipeC fused off\n");
1801 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1802 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1803 		}
1804 	} else if (DISPLAY_VER(display) >= 9) {
1805 		u32 dfsm = intel_de_read(display, SKL_DFSM);
1806 
1807 		if (dfsm & SKL_DFSM_PIPE_A_DISABLE) {
1808 			display_runtime->pipe_mask &= ~BIT(PIPE_A);
1809 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_A);
1810 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_A);
1811 		}
1812 		if (dfsm & SKL_DFSM_PIPE_B_DISABLE) {
1813 			display_runtime->pipe_mask &= ~BIT(PIPE_B);
1814 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_B);
1815 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_B);
1816 		}
1817 		if (dfsm & SKL_DFSM_PIPE_C_DISABLE) {
1818 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1819 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1820 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_C);
1821 		}
1822 
1823 		if (DISPLAY_VER(display) >= 12 &&
1824 		    (dfsm & TGL_DFSM_PIPE_D_DISABLE)) {
1825 			display_runtime->pipe_mask &= ~BIT(PIPE_D);
1826 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_D);
1827 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_D);
1828 		}
1829 
1830 		if (!display_runtime->pipe_mask)
1831 			goto display_fused_off;
1832 
1833 		if (dfsm & SKL_DFSM_DISPLAY_HDCP_DISABLE)
1834 			display_runtime->has_hdcp = 0;
1835 
1836 		if (display->platform.dg2 || DISPLAY_VER(display) < 13) {
1837 			if (dfsm & SKL_DFSM_DISPLAY_PM_DISABLE)
1838 				display_runtime->fbc_mask = 0;
1839 		}
1840 
1841 		if (DISPLAY_VER(display) >= 11 && (dfsm & ICL_DFSM_DMC_DISABLE))
1842 			display_runtime->has_dmc = 0;
1843 
1844 		if (IS_DISPLAY_VER(display, 10, 12) &&
1845 		    (dfsm & GLK_DFSM_DISPLAY_DSC_DISABLE))
1846 			display_runtime->has_dsc = 0;
1847 
1848 		if (DISPLAY_VER(display) >= 20 &&
1849 		    (dfsm & XE2LPD_DFSM_DBUF_OVERLAP_DISABLE))
1850 			display_runtime->has_dbuf_overlap_detection = false;
1851 	}
1852 
1853 	if (DISPLAY_VER(display) >= 20) {
1854 		u32 cap = intel_de_read(display, XE2LPD_DE_CAP);
1855 
1856 		if (REG_FIELD_GET(XE2LPD_DE_CAP_DSC_MASK, cap) ==
1857 		    XE2LPD_DE_CAP_DSC_REMOVED)
1858 			display_runtime->has_dsc = 0;
1859 
1860 		if (REG_FIELD_GET(XE2LPD_DE_CAP_SCALER_MASK, cap) ==
1861 		    XE2LPD_DE_CAP_SCALER_SINGLE) {
1862 			for_each_pipe(display, pipe)
1863 				if (display_runtime->num_scalers[pipe])
1864 					display_runtime->num_scalers[pipe] = 1;
1865 		}
1866 	}
1867 
1868 	if (DISPLAY_VER(display) >= 30)
1869 		display_runtime->edp_typec_support =
1870 			intel_de_read(display, PICA_PHY_CONFIG_CONTROL) & EDP_ON_TYPEC;
1871 
1872 	display_runtime->rawclk_freq = intel_read_rawclk(display);
1873 	drm_dbg_kms(display->drm, "rawclk rate: %d kHz\n",
1874 		    display_runtime->rawclk_freq);
1875 
1876 	return;
1877 
1878 display_fused_off:
1879 	memset(display_runtime, 0, sizeof(*display_runtime));
1880 }
1881 
1882 void intel_display_device_info_runtime_init(struct intel_display *display)
1883 {
1884 	if (HAS_DISPLAY(display))
1885 		__intel_display_device_info_runtime_init(display);
1886 
1887 	/* Display may have been disabled by runtime init */
1888 	if (!HAS_DISPLAY(display)) {
1889 		display->drm->driver_features &= ~(DRIVER_MODESET | DRIVER_ATOMIC);
1890 		display->info.__device_info = &no_display;
1891 	}
1892 
1893 	/* Disable nuclear pageflip by default on pre-g4x */
1894 	if (!display->params.nuclear_pageflip &&
1895 	    DISPLAY_VER(display) < 5 && !display->platform.g4x)
1896 		display->drm->driver_features &= ~DRIVER_ATOMIC;
1897 }
1898 
1899 void intel_display_device_info_print(const struct intel_display_device_info *info,
1900 				     const struct intel_display_runtime_info *runtime,
1901 				     struct drm_printer *p)
1902 {
1903 	if (runtime->ip.rel)
1904 		drm_printf(p, "display version: %u.%02u\n",
1905 			   runtime->ip.ver,
1906 			   runtime->ip.rel);
1907 	else
1908 		drm_printf(p, "display version: %u\n",
1909 			   runtime->ip.ver);
1910 
1911 	drm_printf(p, "display stepping: %s\n", intel_step_name(runtime->step));
1912 
1913 #define PRINT_FLAG(name) drm_printf(p, "%s: %s\n", #name, str_yes_no(info->name))
1914 	DEV_INFO_DISPLAY_FOR_EACH_FLAG(PRINT_FLAG);
1915 #undef PRINT_FLAG
1916 
1917 	drm_printf(p, "has_hdcp: %s\n", str_yes_no(runtime->has_hdcp));
1918 	drm_printf(p, "has_dmc: %s\n", str_yes_no(runtime->has_dmc));
1919 	drm_printf(p, "has_dsc: %s\n", str_yes_no(runtime->has_dsc));
1920 
1921 	drm_printf(p, "rawclk rate: %u kHz\n", runtime->rawclk_freq);
1922 }
1923 
1924 /*
1925  * Assuming the device has display hardware, should it be enabled?
1926  *
1927  * It's an error to call this function if the device does not have display
1928  * hardware.
1929  *
1930  * Disabling display means taking over the display hardware, putting it to
1931  * sleep, and preventing connectors from being connected via any means.
1932  */
1933 bool intel_display_device_enabled(struct intel_display *display)
1934 {
1935 	/* Only valid when HAS_DISPLAY() is true */
1936 	drm_WARN_ON(display->drm, !HAS_DISPLAY(display));
1937 
1938 	return !display->params.disable_display &&
1939 		!intel_opregion_headless_sku(display);
1940 }
1941