xref: /linux/drivers/gpu/drm/i915/soc/intel_dram.c (revision f86ad0ed620cb3c91ec7d5468e93ac68d727539d)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 
10 #include "../display/intel_display_core.h" /* FIXME */
11 
12 #include "i915_drv.h"
13 #include "i915_reg.h"
14 #include "intel_dram.h"
15 #include "intel_mchbar_regs.h"
16 #include "intel_pcode.h"
17 #include "vlv_iosf_sb.h"
18 
/* Geometry of a single DIMM as decoded from the memory controller registers. */
struct dram_dimm_info {
	u16 size;		/* total DIMM size in Gb (not per DRAM device) */
	u8 width, ranks;	/* DRAM device width in bits (8/16/32); rank count */
};
23 
/* Per-channel DRAM layout: the 'L' and 'S' DIMM slots plus derived properties. */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;	/* 'L'arge and 'S'mall slots */
	u8 ranks;		/* effective rank count for the whole channel */
	bool is_16gb_dimm;	/* any DIMM built from 16 Gb DRAM devices */
};
29 
/* Expands to a designated initializer mapping INTEL_DRAM_<type> to "<type>". */
#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

/* Human-readable name for an enum intel_dram_type value, for log messages. */
static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
		DRAM_TYPE_STR(DDR5),
		DRAM_TYPE_STR(LPDDR5),
		DRAM_TYPE_STR(GDDR),
		DRAM_TYPE_STR(GDDR_ECC),
	};

	/* Catch at compile time any enum addition that lacks a string here. */
	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);

	/* Degrade gracefully for out-of-range values instead of overreading. */
	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR
55 
56 static bool pnv_is_ddr3(struct drm_i915_private *i915)
57 {
58 	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3;
59 }
60 
61 static unsigned int pnv_mem_freq(struct drm_i915_private *dev_priv)
62 {
63 	u32 tmp;
64 
65 	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);
66 
67 	switch (tmp & CLKCFG_MEM_MASK) {
68 	case CLKCFG_MEM_533:
69 		return 533333;
70 	case CLKCFG_MEM_667:
71 		return 666667;
72 	case CLKCFG_MEM_800:
73 		return 800000;
74 	}
75 
76 	return 0;
77 }
78 
79 static unsigned int ilk_mem_freq(struct drm_i915_private *dev_priv)
80 {
81 	u16 ddrpll;
82 
83 	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
84 	switch (ddrpll & 0xff) {
85 	case 0xc:
86 		return 800000;
87 	case 0x10:
88 		return 1066667;
89 	case 0x14:
90 		return 1333333;
91 	case 0x18:
92 		return 1600000;
93 	default:
94 		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
95 			ddrpll & 0xff);
96 		return 0;
97 	}
98 }
99 
100 static unsigned int chv_mem_freq(struct drm_i915_private *i915)
101 {
102 	u32 val;
103 
104 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_CCK));
105 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
106 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_CCK));
107 
108 	switch ((val >> 2) & 0x7) {
109 	case 3:
110 		return 2000000;
111 	default:
112 		return 1600000;
113 	}
114 }
115 
116 static unsigned int vlv_mem_freq(struct drm_i915_private *i915)
117 {
118 	u32 val;
119 
120 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
121 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
122 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
123 
124 	switch ((val >> 6) & 3) {
125 	case 0:
126 	case 1:
127 		return 800000;
128 	case 2:
129 		return 1066667;
130 	case 3:
131 		return 1333333;
132 	}
133 
134 	return 0;
135 }
136 
/*
 * Cache the raw DRAM clock (and, on Pineview, the DDR3 strap) for the
 * old platforms that expose it; newer platforms get this elsewhere.
 */
static void detect_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		i915->mem_freq = pnv_mem_freq(i915);
	else if (GRAPHICS_VER(i915) == 5)
		i915->mem_freq = ilk_mem_freq(i915);
	else if (IS_CHERRYVIEW(i915))
		i915->mem_freq = chv_mem_freq(i915);
	else if (IS_VALLEYVIEW(i915))
		i915->mem_freq = vlv_mem_freq(i915);

	if (IS_PINEVIEW(i915))
		i915->is_ddr3 = pnv_is_ddr3(i915);

	/* mem_freq == 0 means the frequency could not be determined */
	if (i915->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d kHz\n", i915->mem_freq);
}
154 
/*
 * i9xx_fsb_freq - decode the FSB strap frequency for gen3/gen4, in kHz.
 *
 * Pineview/mobile parts and the remaining (desktop) parts use different
 * CLKCFG strap encodings, hence the two switch statements.
 */
unsigned int i9xx_fsb_freq(struct drm_i915_private *i915)
{
	u32 fsb;

	/*
	 * Note that this only reads the state of the FSB
	 * straps, not the actual FSB frequency. Some BIOSen
	 * let you configure each independently. Ideally we'd
	 * read out the actual FSB frequency but sadly we
	 * don't know which registers have that information,
	 * and all the relevant docs have gone to bit heaven :(
	 */
	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;

	if (IS_PINEVIEW(i915) || IS_MOBILE(i915)) {
		switch (fsb) {
		case CLKCFG_FSB_400:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067:
			return 1066667;
		case CLKCFG_FSB_1333:
			return 1333333;
		default:
			MISSING_CASE(fsb);
			return 1333333;
		}
	} else {
		switch (fsb) {
		case CLKCFG_FSB_400_ALT:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067_ALT:
			return 1066667;
		case CLKCFG_FSB_1333_ALT:
			return 1333333;
		case CLKCFG_FSB_1600_ALT:
			return 1600000;
		default:
			MISSING_CASE(fsb);
			return 1333333;
		}
	}
}
209 
/* Ironlake: decode the FSB frequency from CSIPLL0, in kHz (0 if unknown). */
static unsigned int ilk_fsb_freq(struct drm_i915_private *dev_priv)
{
	u16 fsb;

	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;

	switch (fsb) {
	case 0x00c:
		return 3200000;
	case 0x00e:
		return 3733333;
	case 0x010:
		return 4266667;
	case 0x012:
		return 4800000;
	case 0x014:
		return 5333333;
	case 0x016:
		return 5866667;
	case 0x018:
		return 6400000;
	default:
		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n", fsb);
		return 0;
	}
}
236 
/* Cache the FSB frequency (kHz) for platforms that have one (gen3-gen5). */
static void detect_fsb_freq(struct drm_i915_private *i915)
{
	if (GRAPHICS_VER(i915) == 5)
		i915->fsb_freq = ilk_fsb_freq(i915);
	else if (GRAPHICS_VER(i915) == 3 || GRAPHICS_VER(i915) == 4)
		i915->fsb_freq = i9xx_fsb_freq(i915);

	/* fsb_freq == 0 means the frequency could not be determined */
	if (i915->fsb_freq)
		drm_dbg(&i915->drm, "FSB frequency: %d kHz\n", i915->fsb_freq);
}
247 
/*
 * Number of DRAM devices making up the DIMM, assuming the devices fill a
 * 64-bit wide rank. The "?: 1" avoids dividing by zero for an empty slot
 * (width == 0), for which the result is 0 anyway since ranks is also 0.
 */
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}
252 
253 /* Returns total Gb for the whole DIMM */
254 static int skl_get_dimm_size(u16 val)
255 {
256 	return (val & SKL_DRAM_SIZE_MASK) * 8;
257 }
258 
259 static int skl_get_dimm_width(u16 val)
260 {
261 	if (skl_get_dimm_size(val) == 0)
262 		return 0;
263 
264 	switch (val & SKL_DRAM_WIDTH_MASK) {
265 	case SKL_DRAM_WIDTH_X8:
266 	case SKL_DRAM_WIDTH_X16:
267 	case SKL_DRAM_WIDTH_X32:
268 		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
269 		return 8 << val;
270 	default:
271 		MISSING_CASE(val);
272 		return 0;
273 	}
274 }
275 
276 static int skl_get_dimm_ranks(u16 val)
277 {
278 	if (skl_get_dimm_size(val) == 0)
279 		return 0;
280 
281 	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;
282 
283 	return val + 1;
284 }
285 
286 /* Returns total Gb for the whole DIMM */
287 static int icl_get_dimm_size(u16 val)
288 {
289 	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
290 }
291 
292 static int icl_get_dimm_width(u16 val)
293 {
294 	if (icl_get_dimm_size(val) == 0)
295 		return 0;
296 
297 	switch (val & ICL_DRAM_WIDTH_MASK) {
298 	case ICL_DRAM_WIDTH_X8:
299 	case ICL_DRAM_WIDTH_X16:
300 	case ICL_DRAM_WIDTH_X32:
301 		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
302 		return 8 << val;
303 	default:
304 		MISSING_CASE(val);
305 		return 0;
306 	}
307 }
308 
309 static int icl_get_dimm_ranks(u16 val)
310 {
311 	if (icl_get_dimm_size(val) == 0)
312 		return 0;
313 
314 	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;
315 
316 	return val + 1;
317 }
318 
/*
 * Whether the DIMM is built from 16 Gb DRAM devices. Used below to decide
 * the level 0 watermark latency adjustment (wm_lv_0_adjust_needed).
 */
static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}
325 
/*
 * Decode one DIMM's size/width/ranks from its MCHBAR register value,
 * using the ICL register layout on gen11+ and the SKL layout otherwise,
 * and log the result.
 */
static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
346 
/*
 * Decode both DIMMs of a channel and derive the channel-wide rank count
 * and 16Gb-DIMM flag. Returns 0 if the channel is populated, -EINVAL if
 * both slots are empty.
 */
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	/* Low 16 bits describe the 'L' DIMM, high 16 bits the 'S' DIMM. */
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * The channel counts as dual-rank if either DIMM is dual-rank, or
	 * if both slots hold a single-rank DIMM (two ranks total).
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
377 
/*
 * Channels are symmetric if both channels are identical and, when the S
 * slot is populated, the L and S DIMMs within a channel match as well.
 * The memcmp()s rely on callers zero-initializing the structs, so struct
 * padding compares equal.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
386 
/*
 * Read both memory channels from MCHBAR and fill in num_channels,
 * wm_lv_0_adjust_needed and symmetric_memory. Returns -EINVAL when no
 * channel is populated or rank information is missing.
 */
static int
skl_dram_get_channels_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	/* 16 Gb DIMMs in either channel require the wm level 0 adjustment. */
	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	return 0;
}
425 
426 static enum intel_dram_type
427 skl_get_dram_type(struct drm_i915_private *i915)
428 {
429 	u32 val;
430 
431 	val = intel_uncore_read(&i915->uncore,
432 				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
433 
434 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
435 	case SKL_DRAM_DDR_TYPE_DDR3:
436 		return INTEL_DRAM_DDR3;
437 	case SKL_DRAM_DDR_TYPE_DDR4:
438 		return INTEL_DRAM_DDR4;
439 	case SKL_DRAM_DDR_TYPE_LPDDR3:
440 		return INTEL_DRAM_LPDDR3;
441 	case SKL_DRAM_DDR_TYPE_LPDDR4:
442 		return INTEL_DRAM_LPDDR4;
443 	default:
444 		MISSING_CASE(val);
445 		return INTEL_DRAM_UNKNOWN;
446 	}
447 }
448 
449 static int
450 skl_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
451 {
452 	int ret;
453 
454 	dram_info->type = skl_get_dram_type(i915);
455 
456 	ret = skl_dram_get_channels_info(i915, dram_info);
457 	if (ret)
458 		return ret;
459 
460 	return 0;
461 }
462 
/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}
482 
483 static int bxt_get_dimm_width(u32 val)
484 {
485 	if (!bxt_get_dimm_size(val))
486 		return 0;
487 
488 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
489 
490 	return 8 << val;
491 }
492 
493 static int bxt_get_dimm_ranks(u32 val)
494 {
495 	if (!bxt_get_dimm_size(val))
496 		return 0;
497 
498 	switch (val & BXT_DRAM_RANK_MASK) {
499 	case BXT_DRAM_RANK_SINGLE:
500 		return 1;
501 	case BXT_DRAM_RANK_DUAL:
502 		return 2;
503 	default:
504 		MISSING_CASE(val);
505 		return 0;
506 	}
507 }
508 
/* DRAM type for the DUNIT, or UNKNOWN for an empty slot or new encoding. */
static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
528 
/*
 * Fill in a dram_dimm_info from a DUNIT register value. width and ranks
 * must be computed first: dimm->size depends on them via
 * intel_dimm_num_devices().
 */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
540 
/*
 * Broxton/Geminilake: gather DRAM type and channel count from the
 * per-DUNIT DRP0 registers. Returns -EINVAL if neither the type nor any
 * rank information could be determined.
 */
static int bxt_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All ones presumably means the DUNIT is absent - TODO confirm */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Only used to verify at least one DUNIT reported ranks. */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
586 
/*
 * Query the memory subsystem global info from pcode and decode it into
 * dram_info. Bits 3:0 encode the DRAM type, with different encodings for
 * gen12 and gen11; bits 7:4 are the channel count, bits 11:8 the QGV
 * point count and bits 13:12 the PSF GV point count.
 * Returns a negative error code on pcode failure or unknown type.
 */
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = snb_pcode_read(&dev_priv->uncore, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;		/* bits 7:4 */
	dram_info->num_qgv_points = (val & 0xf00) >> 8;		/* bits 11:8 */
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;	/* bits 13:12 */

	return 0;
}
648 
/* Gen11: MCHBAR channel detection plus the pcode global info query. */
static int gen11_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	int ret;

	ret = skl_get_dram_info(i915, dram_info);
	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
658 
/*
 * Gen12: everything comes from pcode; override the pessimistic
 * wm_lv_0_adjust_needed default set by intel_dram_detect(), as the
 * 16Gb-DIMM based adjustment is not used here.
 */
static int gen12_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	dram_info->wm_lv_0_adjust_needed = false;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
665 
/*
 * Display ver 14+ (Xe_LPD+): DRAM info is read directly from the
 * MTL_MEM_SS_INFO_GLOBAL register rather than a pcode mailbox.
 * Returns -EINVAL for an unrecognized DDR type encoding.
 */
static int xelpdp_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		/* GDDR variants are only expected on discrete GPUs */
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}
708 
/*
 * intel_dram_detect - detect FSB/memory frequencies and the DRAM
 * configuration, caching the latter in i915->dram_info.
 *
 * Returns 0 on success or when DRAM info is not applicable/undetectable
 * (dram detection failures are deliberately non-fatal); -ENOMEM only on
 * allocation failure.
 */
int intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info;
	int ret;

	detect_fsb_freq(i915);
	detect_mem_freq(i915);

	/* dram_info is only gathered for gen9+ platforms with display */
	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
		return 0;

	/* Freed automatically with the drm device */
	dram_info = drmm_kzalloc(&i915->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	i915->dram_info = dram_info;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise, this w/a is not needed by bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_BROXTON(i915) && !IS_GEMINILAKE(i915);

	if (DISPLAY_VER(i915) >= 14)
		ret = xelpdp_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915, dram_info);
	else if (IS_BROXTON(i915) || IS_GEMINILAKE(i915))
		ret = bxt_get_dram_info(i915, dram_info);
	else
		ret = skl_get_dram_info(i915, dram_info);

	/* Logged even on failure: type may have been partially detected */
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	drm_dbg_kms(&i915->drm, "Num qgv points %u\n", dram_info->num_qgv_points);

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    str_yes_no(dram_info->wm_lv_0_adjust_needed));

	return 0;
}
759 
760 /*
761  * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
762  * checks, and prefer not dereferencing on platforms that shouldn't look at dram
763  * info, to catch accidental and incorrect dram info checks.
764  */
const struct dram_info *intel_dram_info(struct drm_device *drm)
{
	struct drm_i915_private *i915 = to_i915(drm);

	/* NULL when intel_dram_detect() skipped or failed the allocation */
	return i915->dram_info;
}
771 
/*
 * Convert the gen9+ HSW_EDRAM_CAP encoding (bank count plus ways/sets
 * indices) into an eDRAM size in MB. @i915 is unused but kept so the
 * signature matches the call site's style.
 */
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}
781 
/*
 * Detect eDRAM presence and size (HSW/BDW/gen9+) and cache the size in
 * i915->edram_size_mb. No-op on other platforms or when eDRAM is absent.
 */
void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	/* Only HSW, BDW and gen9+ can have eDRAM */
	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre gen9 so return 128MB always.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}
807