xref: /linux/drivers/gpu/drm/i915/soc/intel_dram.c (revision 07fdad3a93756b872da7b53647715c48d0f4a2d0)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 
10 #include "../display/intel_display_core.h" /* FIXME */
11 
12 #include "i915_drv.h"
13 #include "i915_reg.h"
14 #include "i915_utils.h"
15 #include "intel_dram.h"
16 #include "intel_mchbar_regs.h"
17 #include "intel_pcode.h"
18 #include "intel_uncore.h"
19 #include "vlv_iosf_sb.h"
20 
/* Geometry of a single DIMM as decoded from the memory controller registers. */
struct dram_dimm_info {
	u16 size;	/* total DIMM size in Gb (gigabits) */
	u8 width, ranks; /* DRAM device width (x8/x16/x32) and rank count */
};
25 
/*
 * Per-channel DRAM info: the two DIMM slots ('L' and 'S') decoded from a
 * channel's MAD_DIMM register, plus properties derived from them
 * (see skl_dram_get_channel_info()).
 */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;		/* effective rank count for the whole channel */
	bool is_16gb_dimm;	/* any DIMM in this channel uses 16Gb devices */
};
31 
/* Expands to a designated initializer entry: [INTEL_DRAM_<type>] = "<type>" */
#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

/*
 * intel_dram_type_str - return a human readable name for a DRAM type
 * @type: the DRAM type
 *
 * Out-of-range values are reported as "UNKNOWN".
 */
const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR2),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
		DRAM_TYPE_STR(DDR5),
		DRAM_TYPE_STR(LPDDR5),
		DRAM_TYPE_STR(GDDR),
		DRAM_TYPE_STR(GDDR_ECC),
	};

	/* Fail the build if the table falls out of sync with the enum */
	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);

	/* Be defensive about bogus enum values from callers */
	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR
58 
59 static enum intel_dram_type pnv_dram_type(struct drm_i915_private *i915)
60 {
61 	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3 ?
62 		INTEL_DRAM_DDR3 : INTEL_DRAM_DDR2;
63 }
64 
65 static unsigned int pnv_mem_freq(struct drm_i915_private *dev_priv)
66 {
67 	u32 tmp;
68 
69 	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);
70 
71 	switch (tmp & CLKCFG_MEM_MASK) {
72 	case CLKCFG_MEM_533:
73 		return 533333;
74 	case CLKCFG_MEM_667:
75 		return 666667;
76 	case CLKCFG_MEM_800:
77 		return 800000;
78 	}
79 
80 	return 0;
81 }
82 
83 static unsigned int ilk_mem_freq(struct drm_i915_private *dev_priv)
84 {
85 	u16 ddrpll;
86 
87 	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
88 	switch (ddrpll & 0xff) {
89 	case 0xc:
90 		return 800000;
91 	case 0x10:
92 		return 1066667;
93 	case 0x14:
94 		return 1333333;
95 	case 0x18:
96 		return 1600000;
97 	default:
98 		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
99 			ddrpll & 0xff);
100 		return 0;
101 	}
102 }
103 
104 static unsigned int chv_mem_freq(struct drm_i915_private *i915)
105 {
106 	u32 val;
107 
108 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_CCK));
109 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
110 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_CCK));
111 
112 	switch ((val >> 2) & 0x7) {
113 	case 3:
114 		return 2000000;
115 	default:
116 		return 1600000;
117 	}
118 }
119 
120 static unsigned int vlv_mem_freq(struct drm_i915_private *i915)
121 {
122 	u32 val;
123 
124 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
125 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
126 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
127 
128 	switch ((val >> 6) & 3) {
129 	case 0:
130 	case 1:
131 		return 800000;
132 	case 2:
133 		return 1066667;
134 	case 3:
135 		return 1333333;
136 	}
137 
138 	return 0;
139 }
140 
/*
 * intel_mem_freq - memory clock in kHz for platforms where it can be
 * read out, 0 otherwise.
 */
unsigned int intel_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		return pnv_mem_freq(i915);

	if (GRAPHICS_VER(i915) == 5)
		return ilk_mem_freq(i915);

	if (IS_CHERRYVIEW(i915))
		return chv_mem_freq(i915);

	if (IS_VALLEYVIEW(i915))
		return vlv_mem_freq(i915);

	return 0;
}
154 
/*
 * FSB frequency in kHz on gen3/gen4, from the CLKCFG FSB strap field.
 * Mobile parts (and Pineview) use a different strap encoding than
 * desktop parts.
 */
static unsigned int i9xx_fsb_freq(struct drm_i915_private *i915)
{
	u32 fsb;

	/*
	 * Note that this only reads the state of the FSB
	 * straps, not the actual FSB frequency. Some BIOSen
	 * let you configure each independently. Ideally we'd
	 * read out the actual FSB frequency but sadly we
	 * don't know which registers have that information,
	 * and all the relevant docs have gone to bit heaven :(
	 */
	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;

	if (IS_PINEVIEW(i915) || IS_MOBILE(i915)) {
		switch (fsb) {
		case CLKCFG_FSB_400:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067:
			return 1066667;
		case CLKCFG_FSB_1333:
			return 1333333;
		default:
			/* Unknown strap: fall back to 1333 MHz */
			MISSING_CASE(fsb);
			return 1333333;
		}
	} else {
		/* Desktop parts use the _ALT strap encodings */
		switch (fsb) {
		case CLKCFG_FSB_400_ALT:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067_ALT:
			return 1066667;
		case CLKCFG_FSB_1333_ALT:
			return 1333333;
		case CLKCFG_FSB_1600_ALT:
			return 1600000;
		default:
			/* Unknown strap: fall back to 1333 MHz */
			MISSING_CASE(fsb);
			return 1333333;
		}
	}
}
209 
210 static unsigned int ilk_fsb_freq(struct drm_i915_private *dev_priv)
211 {
212 	u16 fsb;
213 
214 	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;
215 
216 	switch (fsb) {
217 	case 0x00c:
218 		return 3200000;
219 	case 0x00e:
220 		return 3733333;
221 	case 0x010:
222 		return 4266667;
223 	case 0x012:
224 		return 4800000;
225 	case 0x014:
226 		return 5333333;
227 	case 0x016:
228 		return 5866667;
229 	case 0x018:
230 		return 6400000;
231 	default:
232 		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n", fsb);
233 		return 0;
234 	}
235 }
236 
/*
 * intel_fsb_freq - FSB frequency in kHz for platforms where it can be
 * read out (gen3-gen5), 0 otherwise.
 */
unsigned int intel_fsb_freq(struct drm_i915_private *i915)
{
	if (GRAPHICS_VER(i915) == 5)
		return ilk_fsb_freq(i915);

	if (GRAPHICS_VER(i915) == 3 || GRAPHICS_VER(i915) == 4)
		return i9xx_fsb_freq(i915);

	return 0;
}
246 
/*
 * DRAM info for pre-gen9 platforms: just the FSB/memory clocks, plus
 * the DDR2 vs. DDR3 distinction on Pineview. Always returns 0.
 */
static int i915_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	dram_info->fsb_freq = intel_fsb_freq(i915);
	if (dram_info->fsb_freq)
		drm_dbg(&i915->drm, "FSB frequency: %d kHz\n", dram_info->fsb_freq);

	dram_info->mem_freq = intel_mem_freq(i915);
	if (dram_info->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d kHz\n", dram_info->mem_freq);

	/* Only Pineview's DRAM type can be detected here */
	if (IS_PINEVIEW(i915))
		dram_info->type = pnv_dram_type(i915);

	return 0;
}
262 
263 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
264 {
265 	return dimm->ranks * 64 / (dimm->width ?: 1);
266 }
267 
268 /* Returns total Gb for the whole DIMM */
269 static int skl_get_dimm_size(u16 val)
270 {
271 	return (val & SKL_DRAM_SIZE_MASK) * 8;
272 }
273 
274 static int skl_get_dimm_width(u16 val)
275 {
276 	if (skl_get_dimm_size(val) == 0)
277 		return 0;
278 
279 	switch (val & SKL_DRAM_WIDTH_MASK) {
280 	case SKL_DRAM_WIDTH_X8:
281 	case SKL_DRAM_WIDTH_X16:
282 	case SKL_DRAM_WIDTH_X32:
283 		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
284 		return 8 << val;
285 	default:
286 		MISSING_CASE(val);
287 		return 0;
288 	}
289 }
290 
291 static int skl_get_dimm_ranks(u16 val)
292 {
293 	if (skl_get_dimm_size(val) == 0)
294 		return 0;
295 
296 	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;
297 
298 	return val + 1;
299 }
300 
301 /* Returns total Gb for the whole DIMM */
302 static int icl_get_dimm_size(u16 val)
303 {
304 	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
305 }
306 
307 static int icl_get_dimm_width(u16 val)
308 {
309 	if (icl_get_dimm_size(val) == 0)
310 		return 0;
311 
312 	switch (val & ICL_DRAM_WIDTH_MASK) {
313 	case ICL_DRAM_WIDTH_X8:
314 	case ICL_DRAM_WIDTH_X16:
315 	case ICL_DRAM_WIDTH_X32:
316 		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
317 		return 8 << val;
318 	default:
319 		MISSING_CASE(val);
320 		return 0;
321 	}
322 }
323 
324 static int icl_get_dimm_ranks(u16 val)
325 {
326 	if (icl_get_dimm_size(val) == 0)
327 		return 0;
328 
329 	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;
330 
331 	return val + 1;
332 }
333 
334 static bool
335 skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
336 {
337 	/* Convert total Gb to Gb per DRAM device */
338 	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
339 }
340 
/*
 * Decode one DIMM's geometry from its half of the MAD_DIMM register
 * value and log the result. ICL+ uses a different bitfield layout
 * than SKL-derived platforms.
 */
static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
361 
/*
 * Decode both DIMMs of a channel from its MAD_DIMM register value and
 * derive the channel-wide rank count and 16Gb-device flag.
 *
 * Returns: 0 on success, -EINVAL if the channel has no DIMMs populated.
 */
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	/* Low 16 bits describe DIMM L, high 16 bits DIMM S */
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * A channel with a dual-rank DIMM, or with two single-rank
	 * DIMMs, counts as dual rank overall.
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
392 
/*
 * Memory is considered symmetric when the two channels are identical
 * and, within a channel, either only DIMM L is populated or both
 * DIMMs are identical.
 *
 * NOTE(review): the memcmp() comparisons assume both structs were
 * zero-initialized so that any padding bytes compare equal — confirm
 * all callers do this.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
401 
/*
 * Read both channels' MAD_DIMM registers and fill in channel count,
 * symmetry and 16Gb-device info.
 *
 * Returns: 0 on success, -EINVAL when no channel is populated or no
 * rank info could be obtained.
 */
static int
skl_dram_get_channels_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	/* Assume 16Gb DIMMs are present until proven otherwise */
	dram_info->has_16gb_dimms = true;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->has_16gb_dimms = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	drm_dbg_kms(&i915->drm, "16Gb DIMMs: %s\n",
		    str_yes_no(dram_info->has_16gb_dimms));

	return 0;
}
446 
447 static enum intel_dram_type
448 skl_get_dram_type(struct drm_i915_private *i915)
449 {
450 	u32 val;
451 
452 	val = intel_uncore_read(&i915->uncore,
453 				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
454 
455 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
456 	case SKL_DRAM_DDR_TYPE_DDR3:
457 		return INTEL_DRAM_DDR3;
458 	case SKL_DRAM_DDR_TYPE_DDR4:
459 		return INTEL_DRAM_DDR4;
460 	case SKL_DRAM_DDR_TYPE_LPDDR3:
461 		return INTEL_DRAM_LPDDR3;
462 	case SKL_DRAM_DDR_TYPE_LPDDR4:
463 		return INTEL_DRAM_LPDDR4;
464 	default:
465 		MISSING_CASE(val);
466 		return INTEL_DRAM_UNKNOWN;
467 	}
468 }
469 
470 static int
471 skl_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
472 {
473 	int ret;
474 
475 	dram_info->type = skl_get_dram_type(i915);
476 
477 	ret = skl_dram_get_channels_info(i915, dram_info);
478 	if (ret)
479 		return ret;
480 
481 	return 0;
482 }
483 
484 /* Returns Gb per DRAM device */
485 static int bxt_get_dimm_size(u32 val)
486 {
487 	switch (val & BXT_DRAM_SIZE_MASK) {
488 	case BXT_DRAM_SIZE_4GBIT:
489 		return 4;
490 	case BXT_DRAM_SIZE_6GBIT:
491 		return 6;
492 	case BXT_DRAM_SIZE_8GBIT:
493 		return 8;
494 	case BXT_DRAM_SIZE_12GBIT:
495 		return 12;
496 	case BXT_DRAM_SIZE_16GBIT:
497 		return 16;
498 	default:
499 		MISSING_CASE(val);
500 		return 0;
501 	}
502 }
503 
504 static int bxt_get_dimm_width(u32 val)
505 {
506 	if (!bxt_get_dimm_size(val))
507 		return 0;
508 
509 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
510 
511 	return 8 << val;
512 }
513 
514 static int bxt_get_dimm_ranks(u32 val)
515 {
516 	if (!bxt_get_dimm_size(val))
517 		return 0;
518 
519 	switch (val & BXT_DRAM_RANK_MASK) {
520 	case BXT_DRAM_RANK_SINGLE:
521 		return 1;
522 	case BXT_DRAM_RANK_DUAL:
523 		return 2;
524 	default:
525 		MISSING_CASE(val);
526 		return 0;
527 	}
528 }
529 
530 static enum intel_dram_type bxt_get_dimm_type(u32 val)
531 {
532 	if (!bxt_get_dimm_size(val))
533 		return INTEL_DRAM_UNKNOWN;
534 
535 	switch (val & BXT_DRAM_TYPE_MASK) {
536 	case BXT_DRAM_TYPE_DDR3:
537 		return INTEL_DRAM_DDR3;
538 	case BXT_DRAM_TYPE_LPDDR3:
539 		return INTEL_DRAM_LPDDR3;
540 	case BXT_DRAM_TYPE_DDR4:
541 		return INTEL_DRAM_DDR4;
542 	case BXT_DRAM_TYPE_LPDDR4:
543 		return INTEL_DRAM_LPDDR4;
544 	default:
545 		MISSING_CASE(val);
546 		return INTEL_DRAM_UNKNOWN;
547 	}
548 }
549 
/* Fill in DIMM geometry from a BXT DUNIT register value. */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	/* width/ranks must be set first: intel_dimm_num_devices() reads them */
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
561 
/*
 * Detect DRAM configuration on Broxton/Geminilake by walking the
 * per-channel DUNIT registers.
 *
 * Returns: 0 on success, -EINVAL when neither type nor rank info
 * could be determined.
 */
static int bxt_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All-ones readback: treat the DUNIT as unpopulated */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type */
		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Remember the first valid rank count we see */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
607 
/*
 * Query the memory subsystem global info from the pcode mailbox and
 * decode it into @dram_info.
 *
 * The returned dword packs:
 *   bits [3:0]   - DRAM type (encoding differs between gen11 and gen12)
 *   bits [7:4]   - number of populated channels
 *   bits [11:8]  - number of QGV points
 *   bits [13:12] - number of PSF GV points
 *
 * Returns: 0 on success, a negative error code on pcode failure or an
 * unknown DRAM type encoding.
 */
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = intel_pcode_read(&dev_priv->drm, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			       ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		/* gen11 uses a different (smaller) type encoding */
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}
669 
/*
 * Gen11 DRAM detection: per-channel DIMM info from the MCHBAR
 * registers, global info from the pcode mailbox.
 */
static int gen11_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	int ret = skl_dram_get_channels_info(i915, dram_info);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
680 
/* Gen12 DRAM detection: everything comes from the pcode mailbox query. */
static int gen12_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	return icl_pcode_read_mem_global_info(i915, dram_info);
}
685 
/*
 * Xe_LPD+ (MTL and later) DRAM detection: decode the memory subsystem
 * global info register directly (no pcode mailbox needed). GDDR type
 * encodings (8/9) are only expected on discrete parts.
 *
 * Returns: 0 on success, -EINVAL for an unknown DRAM type encoding.
 */
static int xelpdp_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}
728 
/*
 * intel_dram_detect - detect the platform's DRAM configuration
 * @i915: i915 device
 *
 * Allocates a drmm-managed dram_info, stores it in i915->dram_info and
 * fills it via the platform-appropriate detection path. DG2 and
 * display-less parts are skipped entirely (i915->dram_info stays NULL).
 *
 * Returns: 0, or -ENOMEM on allocation failure. Detection failures are
 * deliberately swallowed (see the TODO below).
 */
int intel_dram_detect(struct drm_i915_private *i915)
{
	struct intel_display *display = i915->display;
	struct dram_info *dram_info;
	int ret;

	if (IS_DG2(i915) || !intel_display_device_present(display))
		return 0;

	dram_info = drmm_kzalloc(&i915->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	/* Published even if detection below fails (fields stay zeroed) */
	i915->dram_info = dram_info;

	if (DISPLAY_VER(display) >= 14)
		ret = xelpdp_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915, dram_info);
	else if (IS_BROXTON(i915) || IS_GEMINILAKE(i915))
		ret = bxt_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 9)
		ret = skl_get_dram_info(i915, dram_info);
	else
		ret = i915_get_dram_info(i915, dram_info);

	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Num QGV points %u\n", dram_info->num_qgv_points);
	drm_dbg_kms(&i915->drm, "Num PSF GV points %u\n", dram_info->num_psf_gv_points);

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	return 0;
}
771 
772 /*
773  * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
774  * checks, and prefer not dereferencing on platforms that shouldn't look at dram
775  * info, to catch accidental and incorrect dram info checks.
776  */
777 const struct dram_info *intel_dram_info(struct drm_device *drm)
778 {
779 	struct drm_i915_private *i915 = to_i915(drm);
780 
781 	return i915->dram_info;
782 }
783 
784 static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
785 {
786 	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
787 	static const u8 sets[4] = { 1, 1, 2, 2 };
788 
789 	return EDRAM_NUM_BANKS(cap) *
790 		ways[EDRAM_WAYS_IDX(cap)] *
791 		sets[EDRAM_SETS_IDX(cap)];
792 }
793 
/*
 * intel_dram_edram_detect - detect eDRAM presence and size
 * @i915: i915 device
 *
 * On HSW/BDW/gen9+, reads HSW_EDRAM_CAP and, when eDRAM is enabled,
 * stores its size in i915->edram_size_mb. Returns early (leaving
 * edram_size_mb untouched) on other platforms or when eDRAM is absent.
 */
void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre gen9 so return 128MB always.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}
819