xref: /linux/drivers/gpu/drm/i915/soc/intel_dram.c (revision 74ba587f402d5501af2c85e50cf1e4044263b6ca)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 #include <drm/drm_print.h>
10 
11 #include "../display/intel_display_core.h" /* FIXME */
12 
13 #include "i915_drv.h"
14 #include "i915_reg.h"
15 #include "i915_utils.h"
16 #include "intel_dram.h"
17 #include "intel_mchbar_regs.h"
18 #include "intel_pcode.h"
19 #include "intel_uncore.h"
20 #include "vlv_iosf_sb.h"
21 
/* Geometry of a single DIMM as decoded from the MCHBAR/DUNIT registers. */
struct dram_dimm_info {
	u16 size;		/* total DIMM size in Gb */
	u8 width, ranks;	/* DRAM device width (x8/x16/x32) and rank count */
};
26 
/* Per-channel DRAM layout: up to two DIMM slots ("L" and "S") per channel. */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;	/* DIMM L (low word) and DIMM S (high word) */
	u8 ranks;		/* total ranks on the channel */
	bool is_16gb_dimm;	/* any DIMM built from 16Gb DRAM devices */
};
32 
/* Designated initializer: maps INTEL_DRAM_<type> to the string "<type>". */
#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

/*
 * intel_dram_type_str - human-readable name for a DRAM type
 *
 * Out-of-range values fall back to the "UNKNOWN" string, so the
 * return is always a valid pointer.
 */
const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR2),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
		DRAM_TYPE_STR(DDR5),
		DRAM_TYPE_STR(LPDDR5),
		DRAM_TYPE_STR(GDDR),
		DRAM_TYPE_STR(GDDR_ECC),
	};

	/* Keep the table in sync with enum intel_dram_type. */
	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR
59 
60 static enum intel_dram_type pnv_dram_type(struct drm_i915_private *i915)
61 {
62 	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3 ?
63 		INTEL_DRAM_DDR3 : INTEL_DRAM_DDR2;
64 }
65 
66 static unsigned int pnv_mem_freq(struct drm_i915_private *dev_priv)
67 {
68 	u32 tmp;
69 
70 	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);
71 
72 	switch (tmp & CLKCFG_MEM_MASK) {
73 	case CLKCFG_MEM_533:
74 		return 533333;
75 	case CLKCFG_MEM_667:
76 		return 666667;
77 	case CLKCFG_MEM_800:
78 		return 800000;
79 	}
80 
81 	return 0;
82 }
83 
84 static unsigned int ilk_mem_freq(struct drm_i915_private *dev_priv)
85 {
86 	u16 ddrpll;
87 
88 	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
89 	switch (ddrpll & 0xff) {
90 	case 0xc:
91 		return 800000;
92 	case 0x10:
93 		return 1066667;
94 	case 0x14:
95 		return 1333333;
96 	case 0x18:
97 		return 1600000;
98 	default:
99 		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
100 			ddrpll & 0xff);
101 		return 0;
102 	}
103 }
104 
105 static unsigned int chv_mem_freq(struct drm_i915_private *i915)
106 {
107 	u32 val;
108 
109 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_CCK));
110 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
111 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_CCK));
112 
113 	switch ((val >> 2) & 0x7) {
114 	case 3:
115 		return 2000000;
116 	default:
117 		return 1600000;
118 	}
119 }
120 
121 static unsigned int vlv_mem_freq(struct drm_i915_private *i915)
122 {
123 	u32 val;
124 
125 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
126 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
127 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
128 
129 	switch ((val >> 6) & 3) {
130 	case 0:
131 	case 1:
132 		return 800000;
133 	case 2:
134 		return 1066667;
135 	case 3:
136 		return 1333333;
137 	}
138 
139 	return 0;
140 }
141 
/*
 * intel_mem_freq - memory clock in kHz for the platforms that expose it
 *
 * Returns 0 on platforms where the memory clock is not read out here.
 */
unsigned int intel_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		return pnv_mem_freq(i915);

	if (GRAPHICS_VER(i915) == 5)
		return ilk_mem_freq(i915);

	if (IS_CHERRYVIEW(i915))
		return chv_mem_freq(i915);

	if (IS_VALLEYVIEW(i915))
		return vlv_mem_freq(i915);

	return 0;
}
155 
/*
 * Gen3/4: FSB frequency in kHz from the CLKCFG straps.
 *
 * Mobile parts (and Pineview) use one strap encoding, the other
 * parts use the _ALT encodings, hence the two switch statements.
 */
static unsigned int i9xx_fsb_freq(struct drm_i915_private *i915)
{
	u32 fsb;

	/*
	 * Note that this only reads the state of the FSB
	 * straps, not the actual FSB frequency. Some BIOSen
	 * let you configure each independently. Ideally we'd
	 * read out the actual FSB frequency but sadly we
	 * don't know which registers have that information,
	 * and all the relevant docs have gone to bit heaven :(
	 */
	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;

	if (IS_PINEVIEW(i915) || IS_MOBILE(i915)) {
		switch (fsb) {
		case CLKCFG_FSB_400:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067:
			return 1066667;
		case CLKCFG_FSB_1333:
			return 1333333;
		default:
			/* Unknown strap: assume the fastest known speed. */
			MISSING_CASE(fsb);
			return 1333333;
		}
	} else {
		switch (fsb) {
		case CLKCFG_FSB_400_ALT:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067_ALT:
			return 1066667;
		case CLKCFG_FSB_1333_ALT:
			return 1333333;
		case CLKCFG_FSB_1600_ALT:
			return 1600000;
		default:
			/* Unknown strap: fall back to 1333 MHz. */
			MISSING_CASE(fsb);
			return 1333333;
		}
	}
}
210 
211 static unsigned int ilk_fsb_freq(struct drm_i915_private *dev_priv)
212 {
213 	u16 fsb;
214 
215 	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;
216 
217 	switch (fsb) {
218 	case 0x00c:
219 		return 3200000;
220 	case 0x00e:
221 		return 3733333;
222 	case 0x010:
223 		return 4266667;
224 	case 0x012:
225 		return 4800000;
226 	case 0x014:
227 		return 5333333;
228 	case 0x016:
229 		return 5866667;
230 	case 0x018:
231 		return 6400000;
232 	default:
233 		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n", fsb);
234 		return 0;
235 	}
236 }
237 
/*
 * intel_fsb_freq - FSB frequency in kHz on gen3-5, 0 elsewhere
 */
unsigned int intel_fsb_freq(struct drm_i915_private *i915)
{
	switch (GRAPHICS_VER(i915)) {
	case 5:
		return ilk_fsb_freq(i915);
	case 3:
	case 4:
		return i9xx_fsb_freq(i915);
	default:
		return 0;
	}
}
247 
/*
 * Legacy (pre-gen9) DRAM info: record the FSB and memory clocks, and on
 * Pineview also the DRAM type. Always succeeds; missing data just stays
 * zero/unknown.
 */
static int i915_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	dram_info->fsb_freq = intel_fsb_freq(i915);
	if (dram_info->fsb_freq)
		drm_dbg(&i915->drm, "FSB frequency: %d kHz\n", dram_info->fsb_freq);

	dram_info->mem_freq = intel_mem_freq(i915);
	if (dram_info->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d kHz\n", dram_info->mem_freq);

	/* Only Pineview has a readable DRAM type strap here. */
	if (IS_PINEVIEW(i915))
		dram_info->type = pnv_dram_type(i915);

	return 0;
}
263 
264 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
265 {
266 	return dimm->ranks * 64 / (dimm->width ?: 1);
267 }
268 
269 /* Returns total Gb for the whole DIMM */
270 static int skl_get_dimm_size(u16 val)
271 {
272 	return (val & SKL_DRAM_SIZE_MASK) * 8;
273 }
274 
275 static int skl_get_dimm_width(u16 val)
276 {
277 	if (skl_get_dimm_size(val) == 0)
278 		return 0;
279 
280 	switch (val & SKL_DRAM_WIDTH_MASK) {
281 	case SKL_DRAM_WIDTH_X8:
282 	case SKL_DRAM_WIDTH_X16:
283 	case SKL_DRAM_WIDTH_X32:
284 		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
285 		return 8 << val;
286 	default:
287 		MISSING_CASE(val);
288 		return 0;
289 	}
290 }
291 
292 static int skl_get_dimm_ranks(u16 val)
293 {
294 	if (skl_get_dimm_size(val) == 0)
295 		return 0;
296 
297 	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;
298 
299 	return val + 1;
300 }
301 
302 /* Returns total Gb for the whole DIMM */
303 static int icl_get_dimm_size(u16 val)
304 {
305 	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
306 }
307 
308 static int icl_get_dimm_width(u16 val)
309 {
310 	if (icl_get_dimm_size(val) == 0)
311 		return 0;
312 
313 	switch (val & ICL_DRAM_WIDTH_MASK) {
314 	case ICL_DRAM_WIDTH_X8:
315 	case ICL_DRAM_WIDTH_X16:
316 	case ICL_DRAM_WIDTH_X32:
317 		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
318 		return 8 << val;
319 	default:
320 		MISSING_CASE(val);
321 		return 0;
322 	}
323 }
324 
325 static int icl_get_dimm_ranks(u16 val)
326 {
327 	if (icl_get_dimm_size(val) == 0)
328 		return 0;
329 
330 	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;
331 
332 	return val + 1;
333 }
334 
/*
 * Does the DIMM use 16Gb DRAM devices? Derived by dividing the total
 * DIMM size by the device count; the ?: 1 guards against division by
 * zero for unpopulated DIMMs.
 */
static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}
341 
/*
 * Decode one DIMM's 16-bit field from the MAD_DIMM register into @dimm,
 * using the gen11+ (icl) or gen9 (skl) field layout, and log the result.
 * @channel/@dimm_name ('L' or 'S') are only used for the debug message.
 */
static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
362 
/*
 * Decode one channel's MAD_DIMM register value into @ch. DIMM L lives in
 * the low 16 bits, DIMM S in the high 16 bits.
 *
 * Returns 0 on success, -EINVAL if the channel has no populated DIMMs.
 */
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * A dual rank DIMM, or one single rank DIMM in each slot, both
	 * give the channel two ranks in total.
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
393 
/*
 * Memory is symmetric when both channels are identical and, within a
 * channel, DIMM S is either absent or identical to DIMM L.
 *
 * NOTE(review): the memcmp() comparisons assume the callers zero-
 * initialize the dram_channel_info structs, so struct padding compares
 * equal — confirm if callers change.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
402 
/*
 * Read both channels' MAD_DIMM registers and fill in the channel count,
 * 16Gb-DIMM presence and symmetry information in @dram_info.
 *
 * Returns 0 on success, -EINVAL if no channel is populated or no rank
 * information could be read.
 */
static int
skl_dram_get_channels_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	/* Assume 16Gb DIMMs are present until proven otherwise */
	dram_info->has_16gb_dimms = true;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->has_16gb_dimms = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	drm_dbg_kms(&i915->drm, "16Gb DIMMs: %s\n",
		    str_yes_no(dram_info->has_16gb_dimms));

	return 0;
}
447 
/*
 * Gen9: DRAM type from the inter-channel MAD register's DDR type field.
 * Unrecognized encodings are reported as INTEL_DRAM_UNKNOWN.
 */
static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
470 
471 static int
472 skl_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
473 {
474 	int ret;
475 
476 	dram_info->type = skl_get_dram_type(i915);
477 
478 	ret = skl_dram_get_channels_info(i915, dram_info);
479 	if (ret)
480 		return ret;
481 
482 	return 0;
483 }
484 
/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		/* Unknown encoding (incl. unpopulated) counts as size 0. */
		MISSING_CASE(val);
		return 0;
	}
}
504 
505 static int bxt_get_dimm_width(u32 val)
506 {
507 	if (!bxt_get_dimm_size(val))
508 		return 0;
509 
510 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
511 
512 	return 8 << val;
513 }
514 
/* Rank count (1 or 2) for a populated DUNIT, else 0. */
static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}
530 
/* DRAM type for a populated DUNIT, else INTEL_DRAM_UNKNOWN. */
static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
550 
/* Decode one DUNIT register into @dimm (width, ranks, total size). */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	/*
	 * width and ranks must be filled in first:
	 * intel_dimm_num_devices() below reads both.
	 */
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
562 
/*
 * Broxton/Geminilake: walk the DUNIT registers to count channels and
 * determine the DRAM type and rank configuration.
 *
 * Returns 0 on success, -EINVAL if neither a type nor rank info was found.
 */
static int bxt_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All-ones reads back for absent DUNITs; skip them. */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Remember the first non-zero rank count we see. */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
608 
/*
 * Gen11+: query the memory subsystem global info via the pcode mailbox
 * and decode DRAM type, channel count and QGV/PSF GV point counts.
 *
 * The DRAM type encoding in bits 3:0 differs between gen12 and gen11,
 * hence the two switch statements.
 *
 * Returns 0 on success, a pcode error, or -EINVAL for an unknown type.
 */
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = intel_pcode_read(&dev_priv->drm, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			       ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	/* bits 7:4 channels, 11:8 QGV points, 13:12 PSF GV points */
	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}
670 
/*
 * Gen11: per-channel details still come from the MCHBAR MAD registers,
 * global info (type, channel/QGV counts) from the pcode mailbox.
 */
static int gen11_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	int ret = skl_dram_get_channels_info(i915, dram_info);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
681 
/* Gen12: all DRAM info comes from the pcode mailbox. */
static int gen12_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	return icl_pcode_read_mem_global_info(i915, dram_info);
}
686 
/*
 * Xe_LPD+ (MTL and later): decode DRAM type, channel count and QGV point
 * count from the MEM_SS_INFO_GLOBAL register.
 *
 * Returns 0 on success, -EINVAL for an unknown DRAM type encoding.
 */
static int xelpdp_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		/* GDDR is only expected on discrete GPUs. */
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}
729 
/*
 * intel_dram_detect - detect the system DRAM configuration
 *
 * Allocates the drm-managed dram_info, dispatches to the platform
 * specific readout, and logs the result. Skipped on DG2 and when no
 * display is present. Detection failures are logged but deliberately
 * not propagated (see the TODO below); only -ENOMEM is fatal.
 */
int intel_dram_detect(struct drm_i915_private *i915)
{
	struct intel_display *display = i915->display;
	struct dram_info *dram_info;
	int ret;

	if (IS_DG2(i915) || !intel_display_device_present(display))
		return 0;

	/* Lifetime is tied to the drm device; freed automatically. */
	dram_info = drmm_kzalloc(&i915->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	i915->dram_info = dram_info;

	if (DISPLAY_VER(display) >= 14)
		ret = xelpdp_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915, dram_info);
	else if (IS_BROXTON(i915) || IS_GEMINILAKE(i915))
		ret = bxt_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 9)
		ret = skl_get_dram_info(i915, dram_info);
	else
		ret = i915_get_dram_info(i915, dram_info);

	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Num QGV points %u\n", dram_info->num_qgv_points);
	drm_dbg_kms(&i915->drm, "Num PSF GV points %u\n", dram_info->num_psf_gv_points);

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	return 0;
}
772 
773 /*
774  * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
775  * checks, and prefer not dereferencing on platforms that shouldn't look at dram
776  * info, to catch accidental and incorrect dram info checks.
777  */
778 const struct dram_info *intel_dram_info(struct drm_device *drm)
779 {
780 	struct drm_i915_private *i915 = to_i915(drm);
781 
782 	return i915->dram_info;
783 }
784 
/*
 * Gen9+: eDRAM size in MB = banks * ways-per-bank * sets, all decoded
 * from the EDRAM capability register.
 *
 * NOTE(review): @i915 is unused here — presumably kept for signature
 * symmetry with other helpers; confirm before removing.
 */
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	/* Lookup tables indexed by the ways/sets fields of the cap register. */
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}
794 
/*
 * intel_dram_edram_detect - detect embedded DRAM (eLLC) and record its size
 *
 * Only HSW/BDW and gen9+ can have eDRAM. Leaves i915->edram_size_mb at
 * zero when eDRAM is absent or disabled.
 */
void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre gen9 so return 128MB always.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}
820