xref: /linux/drivers/gpu/drm/i915/soc/intel_dram.c (revision a032fe30cf09b6723ab61a05aee057311b00f9e1)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 
10 #include "../display/intel_display_core.h" /* FIXME */
11 
12 #include "i915_drv.h"
13 #include "i915_reg.h"
14 #include "intel_dram.h"
15 #include "intel_mchbar_regs.h"
16 #include "intel_pcode.h"
17 #include "intel_uncore.h"
18 #include "vlv_iosf_sb.h"
19 
/* Geometry of a single DIMM as decoded from the memory controller registers. */
struct dram_dimm_info {
	u16 size;	/* total DIMM size in Gb (gigabits) */
	u8 width, ranks; /* DRAM device width in bits (8/16/32) and rank count */
};
24 
/* Per-channel DRAM info: both DIMM slots plus values derived from them. */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s; /* "L"(arge) and "S"(mall) slots */
	u8 ranks;	/* effective rank count for the whole channel */
	bool is_16gb_dimm; /* set if any DIMM uses 16Gb DRAM devices */
};
30 
31 #define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type
32 
33 static const char *intel_dram_type_str(enum intel_dram_type type)
34 {
35 	static const char * const str[] = {
36 		DRAM_TYPE_STR(UNKNOWN),
37 		DRAM_TYPE_STR(DDR3),
38 		DRAM_TYPE_STR(DDR4),
39 		DRAM_TYPE_STR(LPDDR3),
40 		DRAM_TYPE_STR(LPDDR4),
41 		DRAM_TYPE_STR(DDR5),
42 		DRAM_TYPE_STR(LPDDR5),
43 		DRAM_TYPE_STR(GDDR),
44 		DRAM_TYPE_STR(GDDR_ECC),
45 	};
46 
47 	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);
48 
49 	if (type >= ARRAY_SIZE(str))
50 		type = INTEL_DRAM_UNKNOWN;
51 
52 	return str[type];
53 }
54 
55 #undef DRAM_TYPE_STR
56 
57 static bool pnv_is_ddr3(struct drm_i915_private *i915)
58 {
59 	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3;
60 }
61 
62 static unsigned int pnv_mem_freq(struct drm_i915_private *dev_priv)
63 {
64 	u32 tmp;
65 
66 	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);
67 
68 	switch (tmp & CLKCFG_MEM_MASK) {
69 	case CLKCFG_MEM_533:
70 		return 533333;
71 	case CLKCFG_MEM_667:
72 		return 666667;
73 	case CLKCFG_MEM_800:
74 		return 800000;
75 	}
76 
77 	return 0;
78 }
79 
80 static unsigned int ilk_mem_freq(struct drm_i915_private *dev_priv)
81 {
82 	u16 ddrpll;
83 
84 	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
85 	switch (ddrpll & 0xff) {
86 	case 0xc:
87 		return 800000;
88 	case 0x10:
89 		return 1066667;
90 	case 0x14:
91 		return 1333333;
92 	case 0x18:
93 		return 1600000;
94 	default:
95 		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
96 			ddrpll & 0xff);
97 		return 0;
98 	}
99 }
100 
101 static unsigned int chv_mem_freq(struct drm_i915_private *i915)
102 {
103 	u32 val;
104 
105 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_CCK));
106 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
107 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_CCK));
108 
109 	switch ((val >> 2) & 0x7) {
110 	case 3:
111 		return 2000000;
112 	default:
113 		return 1600000;
114 	}
115 }
116 
117 static unsigned int vlv_mem_freq(struct drm_i915_private *i915)
118 {
119 	u32 val;
120 
121 	vlv_iosf_sb_get(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
122 	val = vlv_iosf_sb_read(&i915->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
123 	vlv_iosf_sb_put(&i915->drm, BIT(VLV_IOSF_SB_PUNIT));
124 
125 	switch ((val >> 6) & 3) {
126 	case 0:
127 	case 1:
128 		return 800000;
129 	case 2:
130 		return 1066667;
131 	case 3:
132 		return 1333333;
133 	}
134 
135 	return 0;
136 }
137 
138 static void detect_mem_freq(struct drm_i915_private *i915)
139 {
140 	if (IS_PINEVIEW(i915))
141 		i915->mem_freq = pnv_mem_freq(i915);
142 	else if (GRAPHICS_VER(i915) == 5)
143 		i915->mem_freq = ilk_mem_freq(i915);
144 	else if (IS_CHERRYVIEW(i915))
145 		i915->mem_freq = chv_mem_freq(i915);
146 	else if (IS_VALLEYVIEW(i915))
147 		i915->mem_freq = vlv_mem_freq(i915);
148 
149 	if (IS_PINEVIEW(i915))
150 		i915->is_ddr3 = pnv_is_ddr3(i915);
151 
152 	if (i915->mem_freq)
153 		drm_dbg(&i915->drm, "DDR speed: %d kHz\n", i915->mem_freq);
154 }
155 
156 unsigned int i9xx_fsb_freq(struct drm_i915_private *i915)
157 {
158 	u32 fsb;
159 
160 	/*
161 	 * Note that this only reads the state of the FSB
162 	 * straps, not the actual FSB frequency. Some BIOSen
163 	 * let you configure each independently. Ideally we'd
164 	 * read out the actual FSB frequency but sadly we
165 	 * don't know which registers have that information,
166 	 * and all the relevant docs have gone to bit heaven :(
167 	 */
168 	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;
169 
170 	if (IS_PINEVIEW(i915) || IS_MOBILE(i915)) {
171 		switch (fsb) {
172 		case CLKCFG_FSB_400:
173 			return 400000;
174 		case CLKCFG_FSB_533:
175 			return 533333;
176 		case CLKCFG_FSB_667:
177 			return 666667;
178 		case CLKCFG_FSB_800:
179 			return 800000;
180 		case CLKCFG_FSB_1067:
181 			return 1066667;
182 		case CLKCFG_FSB_1333:
183 			return 1333333;
184 		default:
185 			MISSING_CASE(fsb);
186 			return 1333333;
187 		}
188 	} else {
189 		switch (fsb) {
190 		case CLKCFG_FSB_400_ALT:
191 			return 400000;
192 		case CLKCFG_FSB_533:
193 			return 533333;
194 		case CLKCFG_FSB_667:
195 			return 666667;
196 		case CLKCFG_FSB_800:
197 			return 800000;
198 		case CLKCFG_FSB_1067_ALT:
199 			return 1066667;
200 		case CLKCFG_FSB_1333_ALT:
201 			return 1333333;
202 		case CLKCFG_FSB_1600_ALT:
203 			return 1600000;
204 		default:
205 			MISSING_CASE(fsb);
206 			return 1333333;
207 		}
208 	}
209 }
210 
211 static unsigned int ilk_fsb_freq(struct drm_i915_private *dev_priv)
212 {
213 	u16 fsb;
214 
215 	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;
216 
217 	switch (fsb) {
218 	case 0x00c:
219 		return 3200000;
220 	case 0x00e:
221 		return 3733333;
222 	case 0x010:
223 		return 4266667;
224 	case 0x012:
225 		return 4800000;
226 	case 0x014:
227 		return 5333333;
228 	case 0x016:
229 		return 5866667;
230 	case 0x018:
231 		return 6400000;
232 	default:
233 		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n", fsb);
234 		return 0;
235 	}
236 }
237 
238 static void detect_fsb_freq(struct drm_i915_private *i915)
239 {
240 	if (GRAPHICS_VER(i915) == 5)
241 		i915->fsb_freq = ilk_fsb_freq(i915);
242 	else if (GRAPHICS_VER(i915) == 3 || GRAPHICS_VER(i915) == 4)
243 		i915->fsb_freq = i9xx_fsb_freq(i915);
244 
245 	if (i915->fsb_freq)
246 		drm_dbg(&i915->drm, "FSB frequency: %d kHz\n", i915->fsb_freq);
247 }
248 
/* Number of DRAM devices on the DIMM: ranks * 64 data bits / device width. */
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	/* width is 0 for an absent DIMM; the ?: guards the division. */
	return dimm->ranks * 64 / (dimm->width ?: 1);
}
253 
254 /* Returns total Gb for the whole DIMM */
255 static int skl_get_dimm_size(u16 val)
256 {
257 	return (val & SKL_DRAM_SIZE_MASK) * 8;
258 }
259 
260 static int skl_get_dimm_width(u16 val)
261 {
262 	if (skl_get_dimm_size(val) == 0)
263 		return 0;
264 
265 	switch (val & SKL_DRAM_WIDTH_MASK) {
266 	case SKL_DRAM_WIDTH_X8:
267 	case SKL_DRAM_WIDTH_X16:
268 	case SKL_DRAM_WIDTH_X32:
269 		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
270 		return 8 << val;
271 	default:
272 		MISSING_CASE(val);
273 		return 0;
274 	}
275 }
276 
277 static int skl_get_dimm_ranks(u16 val)
278 {
279 	if (skl_get_dimm_size(val) == 0)
280 		return 0;
281 
282 	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;
283 
284 	return val + 1;
285 }
286 
287 /* Returns total Gb for the whole DIMM */
288 static int icl_get_dimm_size(u16 val)
289 {
290 	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
291 }
292 
293 static int icl_get_dimm_width(u16 val)
294 {
295 	if (icl_get_dimm_size(val) == 0)
296 		return 0;
297 
298 	switch (val & ICL_DRAM_WIDTH_MASK) {
299 	case ICL_DRAM_WIDTH_X8:
300 	case ICL_DRAM_WIDTH_X16:
301 	case ICL_DRAM_WIDTH_X32:
302 		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
303 		return 8 << val;
304 	default:
305 		MISSING_CASE(val);
306 		return 0;
307 	}
308 }
309 
310 static int icl_get_dimm_ranks(u16 val)
311 {
312 	if (icl_get_dimm_size(val) == 0)
313 		return 0;
314 
315 	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;
316 
317 	return val + 1;
318 }
319 
/*
 * Check whether the DIMM is built from 16Gb DRAM devices; the result feeds
 * into dram_info->wm_lv_0_adjust_needed.
 */
static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}
326 
327 static void
328 skl_dram_get_dimm_info(struct drm_i915_private *i915,
329 		       struct dram_dimm_info *dimm,
330 		       int channel, char dimm_name, u16 val)
331 {
332 	if (GRAPHICS_VER(i915) >= 11) {
333 		dimm->size = icl_get_dimm_size(val);
334 		dimm->width = icl_get_dimm_width(val);
335 		dimm->ranks = icl_get_dimm_ranks(val);
336 	} else {
337 		dimm->size = skl_get_dimm_size(val);
338 		dimm->width = skl_get_dimm_width(val);
339 		dimm->ranks = skl_get_dimm_ranks(val);
340 	}
341 
342 	drm_dbg_kms(&i915->drm,
343 		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
344 		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
345 		    str_yes_no(skl_is_16gb_dimm(dimm)));
346 }
347 
/*
 * Decode both DIMM slots of one memory channel and derive the channel's
 * rank count and 16Gb-device flag.
 *
 * Returns 0 if the channel is populated, -EINVAL if both slots are empty.
 */
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	/* Low 16 bits describe the L DIMM, high 16 bits the S DIMM. */
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * The channel counts as dual rank if either DIMM is dual rank, and
	 * also if both slots hold a single-rank DIMM.
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	/* Any DIMM with 16Gb devices flags the whole channel. */
	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
378 
379 static bool
380 intel_is_dram_symmetric(const struct dram_channel_info *ch0,
381 			const struct dram_channel_info *ch1)
382 {
383 	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
384 		(ch0->dimm_s.size == 0 ||
385 		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
386 }
387 
/*
 * Read both channel MCHBAR registers, count populated channels and fill in
 * the watermark-adjust and symmetry flags.
 *
 * Returns 0 on success, -EINVAL if no channel or no rank information could
 * be obtained.
 */
static int
skl_dram_get_channels_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	/* 16Gb DIMMs on either channel require the level 0 watermark adjustment. */
	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	return 0;
}
426 
427 static enum intel_dram_type
428 skl_get_dram_type(struct drm_i915_private *i915)
429 {
430 	u32 val;
431 
432 	val = intel_uncore_read(&i915->uncore,
433 				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
434 
435 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
436 	case SKL_DRAM_DDR_TYPE_DDR3:
437 		return INTEL_DRAM_DDR3;
438 	case SKL_DRAM_DDR_TYPE_DDR4:
439 		return INTEL_DRAM_DDR4;
440 	case SKL_DRAM_DDR_TYPE_LPDDR3:
441 		return INTEL_DRAM_LPDDR3;
442 	case SKL_DRAM_DDR_TYPE_LPDDR4:
443 		return INTEL_DRAM_LPDDR4;
444 	default:
445 		MISSING_CASE(val);
446 		return INTEL_DRAM_UNKNOWN;
447 	}
448 }
449 
450 static int
451 skl_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
452 {
453 	int ret;
454 
455 	dram_info->type = skl_get_dram_type(i915);
456 
457 	ret = skl_dram_get_channels_info(i915, dram_info);
458 	if (ret)
459 		return ret;
460 
461 	return 0;
462 }
463 
/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		/* 0 doubles as "absent/unknown DIMM" for the callers below. */
		MISSING_CASE(val);
		return 0;
	}
}
483 
484 static int bxt_get_dimm_width(u32 val)
485 {
486 	if (!bxt_get_dimm_size(val))
487 		return 0;
488 
489 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
490 
491 	return 8 << val;
492 }
493 
494 static int bxt_get_dimm_ranks(u32 val)
495 {
496 	if (!bxt_get_dimm_size(val))
497 		return 0;
498 
499 	switch (val & BXT_DRAM_RANK_MASK) {
500 	case BXT_DRAM_RANK_SINGLE:
501 		return 1;
502 	case BXT_DRAM_RANK_DUAL:
503 		return 2;
504 	default:
505 		MISSING_CASE(val);
506 		return 0;
507 	}
508 }
509 
510 static enum intel_dram_type bxt_get_dimm_type(u32 val)
511 {
512 	if (!bxt_get_dimm_size(val))
513 		return INTEL_DRAM_UNKNOWN;
514 
515 	switch (val & BXT_DRAM_TYPE_MASK) {
516 	case BXT_DRAM_TYPE_DDR3:
517 		return INTEL_DRAM_DDR3;
518 	case BXT_DRAM_TYPE_LPDDR3:
519 		return INTEL_DRAM_LPDDR3;
520 	case BXT_DRAM_TYPE_DDR4:
521 		return INTEL_DRAM_DDR4;
522 	case BXT_DRAM_TYPE_LPDDR4:
523 		return INTEL_DRAM_LPDDR4;
524 	default:
525 		MISSING_CASE(val);
526 		return INTEL_DRAM_UNKNOWN;
527 	}
528 }
529 
/*
 * Fill in DIMM geometry from a BXT DUNIT register value.
 *
 * width and ranks must be decoded before size: intel_dimm_num_devices()
 * reads them to convert per-device Gb into total Gb.
 */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
541 
/*
 * Detect DRAM topology on BXT/GLK by scanning the per-DUNIT DRP0 registers.
 *
 * Returns 0 on success, -EINVAL if no DRAM type or valid rank could be
 * determined from any DUNIT.
 */
static int bxt_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All-ones presumably means an unpopulated DUNIT; skip it. */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Remember the first rank count we see (only checked for != 0). */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
587 
588 static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv,
589 					  struct dram_info *dram_info)
590 {
591 	u32 val = 0;
592 	int ret;
593 
594 	ret = intel_pcode_read(&dev_priv->drm, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
595 			       ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
596 	if (ret)
597 		return ret;
598 
599 	if (GRAPHICS_VER(dev_priv) == 12) {
600 		switch (val & 0xf) {
601 		case 0:
602 			dram_info->type = INTEL_DRAM_DDR4;
603 			break;
604 		case 1:
605 			dram_info->type = INTEL_DRAM_DDR5;
606 			break;
607 		case 2:
608 			dram_info->type = INTEL_DRAM_LPDDR5;
609 			break;
610 		case 3:
611 			dram_info->type = INTEL_DRAM_LPDDR4;
612 			break;
613 		case 4:
614 			dram_info->type = INTEL_DRAM_DDR3;
615 			break;
616 		case 5:
617 			dram_info->type = INTEL_DRAM_LPDDR3;
618 			break;
619 		default:
620 			MISSING_CASE(val & 0xf);
621 			return -EINVAL;
622 		}
623 	} else {
624 		switch (val & 0xf) {
625 		case 0:
626 			dram_info->type = INTEL_DRAM_DDR4;
627 			break;
628 		case 1:
629 			dram_info->type = INTEL_DRAM_DDR3;
630 			break;
631 		case 2:
632 			dram_info->type = INTEL_DRAM_LPDDR3;
633 			break;
634 		case 3:
635 			dram_info->type = INTEL_DRAM_LPDDR4;
636 			break;
637 		default:
638 			MISSING_CASE(val & 0xf);
639 			return -EINVAL;
640 		}
641 	}
642 
643 	dram_info->num_channels = (val & 0xf0) >> 4;
644 	dram_info->num_qgv_points = (val & 0xf00) >> 8;
645 	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;
646 
647 	return 0;
648 }
649 
/* Gen11: SKL-style channel/DIMM detection plus pcode global info. */
static int gen11_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	int ret;

	ret = skl_get_dram_info(i915, dram_info);
	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
659 
/* Gen12: pcode global info only; channel/DIMM MCHBAR decoding is not used. */
static int gen12_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	/* Override the pessimistic default set by intel_dram_detect(). */
	dram_info->wm_lv_0_adjust_needed = false;

	return icl_pcode_read_mem_global_info(i915, dram_info);
}
666 
/*
 * Xe_LPD+ (display ver 14+): read DRAM type, channel count and QGV points
 * from the MTL_MEM_SS_INFO_GLOBAL register.
 *
 * Returns 0 on success, -EINVAL for an unknown DDR type encoding.
 */
static int xelpdp_get_dram_info(struct drm_i915_private *i915, struct dram_info *dram_info)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		/* GDDR types are only expected on discrete GPUs. */
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}
709 
/*
 * Detect memory configuration at probe: legacy FSB/memory clock caching for
 * old platforms, and full DRAM info for gen9+ platforms with display.
 *
 * Returns 0 on success and also on non-fatal DRAM detection failure;
 * -ENOMEM only if the dram_info allocation fails.
 */
int intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info;
	int ret;

	detect_fsb_freq(i915);
	detect_mem_freq(i915);

	/* DRAM info is not tracked below gen9, on DG2, or without display. */
	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
		return 0;

	/* drmm allocation: freed automatically with the drm device. */
	dram_info = drmm_kzalloc(&i915->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	i915->dram_info = dram_info;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise, this w/a is not needed by bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_BROXTON(i915) && !IS_GEMINILAKE(i915);

	/* Pick the detection mechanism by display/graphics generation. */
	if (DISPLAY_VER(i915) >= 14)
		ret = xelpdp_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915, dram_info);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915, dram_info);
	else if (IS_BROXTON(i915) || IS_GEMINILAKE(i915))
		ret = bxt_get_dram_info(i915, dram_info);
	else
		ret = skl_get_dram_info(i915, dram_info);

	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	drm_dbg_kms(&i915->drm, "Num qgv points %u\n", dram_info->num_qgv_points);

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    str_yes_no(dram_info->wm_lv_0_adjust_needed));

	return 0;
}
760 
761 /*
762  * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
763  * checks, and prefer not dereferencing on platforms that shouldn't look at dram
764  * info, to catch accidental and incorrect dram info checks.
765  */
766 const struct dram_info *intel_dram_info(struct drm_device *drm)
767 {
768 	struct drm_i915_private *i915 = to_i915(drm);
769 
770 	return i915->dram_info;
771 }
772 
/*
 * Compute the eDRAM size in MB from the HSW_EDRAM_CAP value:
 * banks * ways * sets.  @i915 is currently unused.
 */
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	/* Lookup tables for the way/set index fields of the cap register. */
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}
782 
/*
 * Detect eDRAM presence/size (HSW, BDW, gen9+) and cache the result in
 * i915->edram_size_mb. Leaves it untouched when eDRAM is absent/disabled.
 */
void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	/* Only HSW/BDW and gen9+ can have eDRAM. */
	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre gen9 so return 128MB always.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}
808