xref: /linux/drivers/gpu/drm/i915/display/intel_dram.c (revision 29fdc6e98d3c3657c8b4874ab3bfc75f9df59bf4)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 #include <drm/drm_print.h>
10 
11 #include "i915_drv.h"
12 #include "i915_reg.h"
13 #include "intel_display_core.h"
14 #include "intel_display_utils.h"
15 #include "intel_dram.h"
16 #include "intel_mchbar_regs.h"
17 #include "intel_pcode.h"
18 #include "intel_uncore.h"
19 #include "vlv_iosf_sb.h"
20 
/* Geometry of a single DIMM, as decoded from the memory controller. */
struct dram_dimm_info {
	u16 size;		/* total DIMM size in Gb */
	u8 width, ranks;	/* DRAM device width (x8/x16/x32) and rank count */
};
25 
/* Per-channel DRAM info: both DIMM slots plus derived channel properties. */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;	/* "large" and "small" DIMM slots */
	u8 ranks;		/* effective rank count of the whole channel */
	bool is_16gb_dimm;	/* any DIMM built from 16Gb+ DRAM devices */
};
31 
32 #define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type
33 
34 const char *intel_dram_type_str(enum intel_dram_type type)
35 {
36 	static const char * const str[] = {
37 		DRAM_TYPE_STR(UNKNOWN),
38 		DRAM_TYPE_STR(DDR2),
39 		DRAM_TYPE_STR(DDR3),
40 		DRAM_TYPE_STR(DDR4),
41 		DRAM_TYPE_STR(LPDDR3),
42 		DRAM_TYPE_STR(LPDDR4),
43 		DRAM_TYPE_STR(DDR5),
44 		DRAM_TYPE_STR(LPDDR5),
45 		DRAM_TYPE_STR(GDDR),
46 		DRAM_TYPE_STR(GDDR_ECC),
47 	};
48 
49 	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);
50 
51 	if (type >= ARRAY_SIZE(str))
52 		type = INTEL_DRAM_UNKNOWN;
53 
54 	return str[type];
55 }
56 
57 #undef DRAM_TYPE_STR
58 
59 static enum intel_dram_type pnv_dram_type(struct intel_display *display)
60 {
61 	struct drm_i915_private *i915 = to_i915(display->drm);
62 
63 	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3 ?
64 		INTEL_DRAM_DDR3 : INTEL_DRAM_DDR2;
65 }
66 
67 static unsigned int pnv_mem_freq(struct intel_display *display)
68 {
69 	struct drm_i915_private *dev_priv = to_i915(display->drm);
70 	u32 tmp;
71 
72 	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);
73 
74 	switch (tmp & CLKCFG_MEM_MASK) {
75 	case CLKCFG_MEM_533:
76 		return 533333;
77 	case CLKCFG_MEM_667:
78 		return 666667;
79 	case CLKCFG_MEM_800:
80 		return 800000;
81 	}
82 
83 	return 0;
84 }
85 
86 static unsigned int ilk_mem_freq(struct intel_display *display)
87 {
88 	struct drm_i915_private *dev_priv = to_i915(display->drm);
89 	u16 ddrpll;
90 
91 	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
92 	switch (ddrpll & 0xff) {
93 	case 0xc:
94 		return 800000;
95 	case 0x10:
96 		return 1066667;
97 	case 0x14:
98 		return 1333333;
99 	case 0x18:
100 		return 1600000;
101 	default:
102 		drm_dbg_kms(display->drm, "unknown memory frequency 0x%02x\n",
103 			    ddrpll & 0xff);
104 		return 0;
105 	}
106 }
107 
108 static unsigned int chv_mem_freq(struct intel_display *display)
109 {
110 	u32 val;
111 
112 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_CCK));
113 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
114 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_CCK));
115 
116 	switch ((val >> 2) & 0x7) {
117 	case 3:
118 		return 2000000;
119 	default:
120 		return 1600000;
121 	}
122 }
123 
124 static unsigned int vlv_mem_freq(struct intel_display *display)
125 {
126 	u32 val;
127 
128 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_PUNIT));
129 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
130 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_PUNIT));
131 
132 	switch ((val >> 6) & 3) {
133 	case 0:
134 	case 1:
135 		return 800000;
136 	case 2:
137 		return 1066667;
138 	case 3:
139 		return 1333333;
140 	}
141 
142 	return 0;
143 }
144 
145 unsigned int intel_mem_freq(struct intel_display *display)
146 {
147 	if (display->platform.pineview)
148 		return pnv_mem_freq(display);
149 	else if (DISPLAY_VER(display) == 5)
150 		return ilk_mem_freq(display);
151 	else if (display->platform.cherryview)
152 		return chv_mem_freq(display);
153 	else if (display->platform.valleyview)
154 		return vlv_mem_freq(display);
155 	else
156 		return 0;
157 }
158 
159 static unsigned int i9xx_fsb_freq(struct intel_display *display)
160 {
161 	struct drm_i915_private *i915 = to_i915(display->drm);
162 	u32 fsb;
163 
164 	/*
165 	 * Note that this only reads the state of the FSB
166 	 * straps, not the actual FSB frequency. Some BIOSen
167 	 * let you configure each independently. Ideally we'd
168 	 * read out the actual FSB frequency but sadly we
169 	 * don't know which registers have that information,
170 	 * and all the relevant docs have gone to bit heaven :(
171 	 */
172 	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;
173 
174 	if (display->platform.pineview || display->platform.mobile) {
175 		switch (fsb) {
176 		case CLKCFG_FSB_400:
177 			return 400000;
178 		case CLKCFG_FSB_533:
179 			return 533333;
180 		case CLKCFG_FSB_667:
181 			return 666667;
182 		case CLKCFG_FSB_800:
183 			return 800000;
184 		case CLKCFG_FSB_1067:
185 			return 1066667;
186 		case CLKCFG_FSB_1333:
187 			return 1333333;
188 		default:
189 			MISSING_CASE(fsb);
190 			return 1333333;
191 		}
192 	} else {
193 		switch (fsb) {
194 		case CLKCFG_FSB_400_ALT:
195 			return 400000;
196 		case CLKCFG_FSB_533:
197 			return 533333;
198 		case CLKCFG_FSB_667:
199 			return 666667;
200 		case CLKCFG_FSB_800:
201 			return 800000;
202 		case CLKCFG_FSB_1067_ALT:
203 			return 1066667;
204 		case CLKCFG_FSB_1333_ALT:
205 			return 1333333;
206 		case CLKCFG_FSB_1600_ALT:
207 			return 1600000;
208 		default:
209 			MISSING_CASE(fsb);
210 			return 1333333;
211 		}
212 	}
213 }
214 
215 static unsigned int ilk_fsb_freq(struct intel_display *display)
216 {
217 	struct drm_i915_private *dev_priv = to_i915(display->drm);
218 	u16 fsb;
219 
220 	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;
221 
222 	switch (fsb) {
223 	case 0x00c:
224 		return 3200000;
225 	case 0x00e:
226 		return 3733333;
227 	case 0x010:
228 		return 4266667;
229 	case 0x012:
230 		return 4800000;
231 	case 0x014:
232 		return 5333333;
233 	case 0x016:
234 		return 5866667;
235 	case 0x018:
236 		return 6400000;
237 	default:
238 		drm_dbg_kms(display->drm, "unknown fsb frequency 0x%04x\n", fsb);
239 		return 0;
240 	}
241 }
242 
/* FSB frequency in kHz, or 0 on platforms where we can't read it out. */
unsigned int intel_fsb_freq(struct intel_display *display)
{
	if (DISPLAY_VER(display) == 5)
		return ilk_fsb_freq(display);
	if (IS_DISPLAY_VER(display, 3, 4))
		return i9xx_fsb_freq(display);

	return 0;
}
252 
253 static int i915_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
254 {
255 	dram_info->fsb_freq = intel_fsb_freq(display);
256 	if (dram_info->fsb_freq)
257 		drm_dbg_kms(display->drm, "FSB frequency: %d kHz\n", dram_info->fsb_freq);
258 
259 	dram_info->mem_freq = intel_mem_freq(display);
260 	if (dram_info->mem_freq)
261 		drm_dbg_kms(display->drm, "DDR speed: %d kHz\n", dram_info->mem_freq);
262 
263 	if (display->platform.pineview)
264 		dram_info->type = pnv_dram_type(display);
265 
266 	return 0;
267 }
268 
269 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
270 {
271 	return dimm->ranks * 64 / (dimm->width ?: 1);
272 }
273 
/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_s_size(u32 val)
{
	return REG_FIELD_GET(SKL_DIMM_S_SIZE_MASK, val) * 8;
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_l_size(u32 val)
{
	return REG_FIELD_GET(SKL_DIMM_L_SIZE_MASK, val) * 8;
}

/* DRAM device width (x8/x16/x32) of the S DIMM, 0 if not populated */
static int skl_get_dimm_s_width(u32 val)
{
	if (skl_get_dimm_s_size(val) == 0)
		return 0;

	switch (val & SKL_DIMM_S_WIDTH_MASK) {
	case SKL_DIMM_S_WIDTH_X8:
	case SKL_DIMM_S_WIDTH_X16:
	case SKL_DIMM_S_WIDTH_X32:
		/* field encodes log2(width/8) */
		return 8 << REG_FIELD_GET(SKL_DIMM_S_WIDTH_MASK, val);
	default:
		MISSING_CASE(val);
		return 0;
	}
}

/* DRAM device width (x8/x16/x32) of the L DIMM, 0 if not populated */
static int skl_get_dimm_l_width(u32 val)
{
	if (skl_get_dimm_l_size(val) == 0)
		return 0;

	switch (val & SKL_DIMM_L_WIDTH_MASK) {
	case SKL_DIMM_L_WIDTH_X8:
	case SKL_DIMM_L_WIDTH_X16:
	case SKL_DIMM_L_WIDTH_X32:
		/* field encodes log2(width/8) */
		return 8 << REG_FIELD_GET(SKL_DIMM_L_WIDTH_MASK, val);
	default:
		MISSING_CASE(val);
		return 0;
	}
}

/* Rank count of the S DIMM (rank field is zero based), 0 if not populated */
static int skl_get_dimm_s_ranks(u32 val)
{
	if (skl_get_dimm_s_size(val) == 0)
		return 0;

	return REG_FIELD_GET(SKL_DIMM_S_RANK_MASK, val) + 1;
}

/* Rank count of the L DIMM (rank field is zero based), 0 if not populated */
static int skl_get_dimm_l_ranks(u32 val)
{
	if (skl_get_dimm_l_size(val) == 0)
		return 0;

	return REG_FIELD_GET(SKL_DIMM_L_RANK_MASK, val) + 1;
}
332 
/* Returns total Gb for the whole DIMM */
static int icl_get_dimm_s_size(u32 val)
{
	/* ICL size field has half the granularity of SKL, hence the /2 */
	return REG_FIELD_GET(ICL_DIMM_S_SIZE_MASK, val) * 8 / 2;
}

/* Returns total Gb for the whole DIMM */
static int icl_get_dimm_l_size(u32 val)
{
	/* ICL size field has half the granularity of SKL, hence the /2 */
	return REG_FIELD_GET(ICL_DIMM_L_SIZE_MASK, val) * 8 / 2;
}

/* DRAM device width (x8/x16/x32) of the S DIMM, 0 if not populated */
static int icl_get_dimm_s_width(u32 val)
{
	if (icl_get_dimm_s_size(val) == 0)
		return 0;

	switch (val & ICL_DIMM_S_WIDTH_MASK) {
	case ICL_DIMM_S_WIDTH_X8:
	case ICL_DIMM_S_WIDTH_X16:
	case ICL_DIMM_S_WIDTH_X32:
		/* field encodes log2(width/8) */
		return 8 << REG_FIELD_GET(ICL_DIMM_S_WIDTH_MASK, val);
	default:
		MISSING_CASE(val);
		return 0;
	}
}

/* DRAM device width (x8/x16/x32) of the L DIMM, 0 if not populated */
static int icl_get_dimm_l_width(u32 val)
{
	if (icl_get_dimm_l_size(val) == 0)
		return 0;

	switch (val & ICL_DIMM_L_WIDTH_MASK) {
	case ICL_DIMM_L_WIDTH_X8:
	case ICL_DIMM_L_WIDTH_X16:
	case ICL_DIMM_L_WIDTH_X32:
		/* field encodes log2(width/8) */
		return 8 << REG_FIELD_GET(ICL_DIMM_L_WIDTH_MASK, val);
	default:
		MISSING_CASE(val);
		return 0;
	}
}

/* Rank count of the S DIMM (rank field is zero based), 0 if not populated */
static int icl_get_dimm_s_ranks(u32 val)
{
	if (icl_get_dimm_s_size(val) == 0)
		return 0;

	return REG_FIELD_GET(ICL_DIMM_S_RANK_MASK, val) + 1;
}

/* Rank count of the L DIMM (rank field is zero based), 0 if not populated */
static int icl_get_dimm_l_ranks(u32 val)
{
	if (icl_get_dimm_l_size(val) == 0)
		return 0;

	return REG_FIELD_GET(ICL_DIMM_L_RANK_MASK, val) + 1;
}
391 
392 static bool
393 skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
394 {
395 	/* Convert total Gb to Gb per DRAM device */
396 	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) >= 16;
397 }
398 
/* Log the decoded geometry of one DIMM ('L' or 'S') of a channel. */
static void
skl_dram_print_dimm_info(struct intel_display *display,
			 struct dram_dimm_info *dimm,
			 int channel, char dimm_name)
{
	drm_dbg_kms(display->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb+ DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
409 
410 static void
411 skl_dram_get_dimm_l_info(struct intel_display *display,
412 			 struct dram_dimm_info *dimm,
413 			 int channel, u32 val)
414 {
415 	if (DISPLAY_VER(display) >= 11) {
416 		dimm->size = icl_get_dimm_l_size(val);
417 		dimm->width = icl_get_dimm_l_width(val);
418 		dimm->ranks = icl_get_dimm_l_ranks(val);
419 	} else {
420 		dimm->size = skl_get_dimm_l_size(val);
421 		dimm->width = skl_get_dimm_l_width(val);
422 		dimm->ranks = skl_get_dimm_l_ranks(val);
423 	}
424 
425 	skl_dram_print_dimm_info(display, dimm, channel, 'L');
426 }
427 
428 static void
429 skl_dram_get_dimm_s_info(struct intel_display *display,
430 			 struct dram_dimm_info *dimm,
431 			 int channel, u32 val)
432 {
433 	if (DISPLAY_VER(display) >= 11) {
434 		dimm->size = icl_get_dimm_s_size(val);
435 		dimm->width = icl_get_dimm_s_width(val);
436 		dimm->ranks = icl_get_dimm_s_ranks(val);
437 	} else {
438 		dimm->size = skl_get_dimm_s_size(val);
439 		dimm->width = skl_get_dimm_s_width(val);
440 		dimm->ranks = skl_get_dimm_s_ranks(val);
441 	}
442 
443 	skl_dram_print_dimm_info(display, dimm, channel, 'S');
444 }
445 
446 static int
447 skl_dram_get_channel_info(struct intel_display *display,
448 			  struct dram_channel_info *ch,
449 			  int channel, u32 val)
450 {
451 	skl_dram_get_dimm_l_info(display, &ch->dimm_l, channel, val);
452 	skl_dram_get_dimm_s_info(display, &ch->dimm_s, channel, val);
453 
454 	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
455 		drm_dbg_kms(display->drm, "CH%u not populated\n", channel);
456 		return -EINVAL;
457 	}
458 
459 	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
460 		ch->ranks = 2;
461 	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
462 		ch->ranks = 2;
463 	else
464 		ch->ranks = 1;
465 
466 	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
467 		skl_is_16gb_dimm(&ch->dimm_s);
468 
469 	drm_dbg_kms(display->drm, "CH%u ranks: %u, 16Gb+ DIMMs: %s\n",
470 		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));
471 
472 	return 0;
473 }
474 
/*
 * Memory is symmetric when both channels are identical and, within a
 * channel, either only the L DIMM is populated or both DIMMs match.
 *
 * NOTE(review): the memcmp()s rely on the structs being fully
 * zero-initialized (the caller uses "= {}") — confirm if reusing.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
483 
/*
 * Read both channels' DIMM config from MCHBAR and derive the channel
 * count, rank info, 16Gb+ DIMM presence and memory symmetry.
 */
static int
skl_dram_get_channels_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct drm_i915_private *i915 = to_i915(display->drm);
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	/* Assume 16Gb+ DIMMs are present until proven otherwise */
	dram_info->has_16gb_dimms = true;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(display->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	/* Paranoia: a populated channel should always report ranks >= 1. */
	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(display->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->has_16gb_dimms = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(display->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	drm_dbg_kms(display->drm, "16Gb+ DIMMs: %s\n",
		    str_yes_no(dram_info->has_16gb_dimms));

	return 0;
}
529 
530 static enum intel_dram_type
531 skl_get_dram_type(struct intel_display *display)
532 {
533 	struct drm_i915_private *i915 = to_i915(display->drm);
534 	u32 val;
535 
536 	val = intel_uncore_read(&i915->uncore,
537 				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
538 
539 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
540 	case SKL_DRAM_DDR_TYPE_DDR3:
541 		return INTEL_DRAM_DDR3;
542 	case SKL_DRAM_DDR_TYPE_DDR4:
543 		return INTEL_DRAM_DDR4;
544 	case SKL_DRAM_DDR_TYPE_LPDDR3:
545 		return INTEL_DRAM_LPDDR3;
546 	case SKL_DRAM_DDR_TYPE_LPDDR4:
547 		return INTEL_DRAM_LPDDR4;
548 	default:
549 		MISSING_CASE(val);
550 		return INTEL_DRAM_UNKNOWN;
551 	}
552 }
553 
554 static int
555 skl_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
556 {
557 	int ret;
558 
559 	dram_info->type = skl_get_dram_type(display);
560 
561 	ret = skl_dram_get_channels_info(display, dram_info);
562 	if (ret)
563 		return ret;
564 
565 	return 0;
566 }
567 
/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}
587 
588 static int bxt_get_dimm_width(u32 val)
589 {
590 	if (!bxt_get_dimm_size(val))
591 		return 0;
592 
593 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
594 
595 	return 8 << val;
596 }
597 
598 static int bxt_get_dimm_ranks(u32 val)
599 {
600 	if (!bxt_get_dimm_size(val))
601 		return 0;
602 
603 	switch (val & BXT_DRAM_RANK_MASK) {
604 	case BXT_DRAM_RANK_SINGLE:
605 		return 1;
606 	case BXT_DRAM_RANK_DUAL:
607 		return 2;
608 	default:
609 		MISSING_CASE(val);
610 		return 0;
611 	}
612 }
613 
/* DRAM type of the DUNIT, INTEL_DRAM_UNKNOWN if it isn't populated. */
static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
633 
/* Decode the full DIMM geometry from one DUNIT register value. */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	/* Must be decoded first: intel_dimm_num_devices() reads these. */
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
645 
/*
 * BXT/GLK: walk the DUNIT registers to determine channel count, DRAM
 * type and rank info. Fails if no DUNIT yields usable information.
 */
static int bxt_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct drm_i915_private *i915 = to_i915(display->drm);
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* all ones: presumably an unpopulated DUNIT — skip it */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(display->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(display->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Remember the first non-zero rank count we see. */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(display->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
692 
/*
 * Query the memory subsystem global info from pcode.
 *
 * Response layout (as decoded below): bits 0-3 DDR type (encoding differs
 * between display ver 11 and 12+), bits 4-7 channel count, bits 8-11
 * number of QGV points, bits 12-13 number of PSF GV points.
 */
static int icl_pcode_read_mem_global_info(struct intel_display *display,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = intel_pcode_read(display->drm, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			       ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (DISPLAY_VER(display) >= 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		/* Display ver 11 uses a different, smaller type encoding. */
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}
754 
/* Display ver 11: DIMM geometry from MCHBAR, the rest from pcode. */
static int gen11_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	int ret = skl_dram_get_channels_info(display, dram_info);

	if (ret == 0)
		ret = icl_pcode_read_mem_global_info(display, dram_info);

	return ret;
}
765 
/* Display ver 12-13: everything comes from the pcode global info. */
static int gen12_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	return icl_pcode_read_mem_global_info(display, dram_info);
}
770 
/*
 * Display ver 14+ (MTL+): read the memory subsystem global info directly
 * from the MTL_MEM_SS_INFO_GLOBAL register instead of via pcode.
 */
static int xelpdp_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct drm_i915_private *i915 = to_i915(display->drm);
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		/* GDDR encodings are only expected on discrete GPUs */
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	if (DISPLAY_VER(display) >= 35)
		dram_info->ecc_impacting_de_bw = REG_FIELD_GET(XE3P_ECC_IMPACTING_DE, val);

	return 0;
}
817 
818 int intel_dram_detect(struct intel_display *display)
819 {
820 	struct dram_info *dram_info;
821 	int ret;
822 
823 	if (display->platform.dg2 || !HAS_DISPLAY(display))
824 		return 0;
825 
826 	dram_info = drmm_kzalloc(display->drm, sizeof(*dram_info), GFP_KERNEL);
827 	if (!dram_info)
828 		return -ENOMEM;
829 
830 	display->dram.info = dram_info;
831 
832 	if (DISPLAY_VER(display) >= 14)
833 		ret = xelpdp_get_dram_info(display, dram_info);
834 	else if (DISPLAY_VER(display) >= 12)
835 		ret = gen12_get_dram_info(display, dram_info);
836 	else if (DISPLAY_VER(display) >= 11)
837 		ret = gen11_get_dram_info(display, dram_info);
838 	else if (display->platform.broxton || display->platform.geminilake)
839 		ret = bxt_get_dram_info(display, dram_info);
840 	else if (DISPLAY_VER(display) >= 9)
841 		ret = skl_get_dram_info(display, dram_info);
842 	else
843 		ret = i915_get_dram_info(display, dram_info);
844 
845 	drm_dbg_kms(display->drm, "DRAM type: %s\n",
846 		    intel_dram_type_str(dram_info->type));
847 
848 	drm_dbg_kms(display->drm, "DRAM channels: %u\n", dram_info->num_channels);
849 
850 	drm_dbg_kms(display->drm, "Num QGV points %u\n", dram_info->num_qgv_points);
851 	drm_dbg_kms(display->drm, "Num PSF GV points %u\n", dram_info->num_psf_gv_points);
852 
853 	/* TODO: Do we want to abort probe on dram detection failures? */
854 	if (ret)
855 		return 0;
856 
857 	return 0;
858 }
859 
/*
 * Returns the detected dram info, or NULL for platforms that don't have
 * dram info (see intel_dram_detect()). Avoid overzealous NULL checks,
 * and prefer not dereferencing on platforms that shouldn't look at dram
 * info, to catch accidental and incorrect dram info checks.
 */
const struct dram_info *intel_dram_info(struct intel_display *display)
{
	return display->dram.info;
}
869