xref: /linux/drivers/gpu/drm/i915/display/intel_dram.c (revision 939faf71cf7ca9ab3d1bd2912ac0e203d4d7156a)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 #include <drm/drm_print.h>
10 
11 #include "i915_reg.h"
12 #include "intel_display_core.h"
13 #include "intel_display_utils.h"
14 #include "intel_dram.h"
15 #include "intel_mchbar_regs.h"
16 #include "intel_pcode.h"
17 #include "intel_uncore.h"
18 #include "vlv_iosf_sb.h"
19 
20 struct dram_dimm_info {
21 	u16 size;
22 	u8 width, ranks;
23 };
24 
25 struct dram_channel_info {
26 	struct dram_dimm_info dimm_l, dimm_s;
27 	u8 ranks;
28 	bool is_16gb_dimm;
29 };
30 
31 #define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type
32 
33 const char *intel_dram_type_str(enum intel_dram_type type)
34 {
35 	static const char * const str[] = {
36 		DRAM_TYPE_STR(UNKNOWN),
37 		DRAM_TYPE_STR(DDR2),
38 		DRAM_TYPE_STR(DDR3),
39 		DRAM_TYPE_STR(DDR4),
40 		DRAM_TYPE_STR(LPDDR3),
41 		DRAM_TYPE_STR(LPDDR4),
42 		DRAM_TYPE_STR(DDR5),
43 		DRAM_TYPE_STR(LPDDR5),
44 		DRAM_TYPE_STR(GDDR),
45 		DRAM_TYPE_STR(GDDR_ECC),
46 	};
47 
48 	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);
49 
50 	if (type >= ARRAY_SIZE(str))
51 		type = INTEL_DRAM_UNKNOWN;
52 
53 	return str[type];
54 }
55 
56 #undef DRAM_TYPE_STR
57 
58 static enum intel_dram_type pnv_dram_type(struct intel_display *display)
59 {
60 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
61 
62 	return intel_uncore_read(uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3 ?
63 		INTEL_DRAM_DDR3 : INTEL_DRAM_DDR2;
64 }
65 
66 static unsigned int pnv_mem_freq(struct intel_display *display)
67 {
68 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
69 	u32 tmp;
70 
71 	tmp = intel_uncore_read(uncore, CLKCFG);
72 
73 	switch (tmp & CLKCFG_MEM_MASK) {
74 	case CLKCFG_MEM_533:
75 		return 533333;
76 	case CLKCFG_MEM_667:
77 		return 666667;
78 	case CLKCFG_MEM_800:
79 		return 800000;
80 	}
81 
82 	return 0;
83 }
84 
85 static unsigned int ilk_mem_freq(struct intel_display *display)
86 {
87 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
88 	u16 ddrpll;
89 
90 	ddrpll = intel_uncore_read16(uncore, DDRMPLL1);
91 	switch (ddrpll & 0xff) {
92 	case 0xc:
93 		return 800000;
94 	case 0x10:
95 		return 1066667;
96 	case 0x14:
97 		return 1333333;
98 	case 0x18:
99 		return 1600000;
100 	default:
101 		drm_dbg_kms(display->drm, "unknown memory frequency 0x%02x\n",
102 			    ddrpll & 0xff);
103 		return 0;
104 	}
105 }
106 
107 static unsigned int chv_mem_freq(struct intel_display *display)
108 {
109 	u32 val;
110 
111 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_CCK));
112 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
113 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_CCK));
114 
115 	switch ((val >> 2) & 0x7) {
116 	case 3:
117 		return 2000000;
118 	default:
119 		return 1600000;
120 	}
121 }
122 
123 static unsigned int vlv_mem_freq(struct intel_display *display)
124 {
125 	u32 val;
126 
127 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_PUNIT));
128 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
129 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_PUNIT));
130 
131 	switch ((val >> 6) & 3) {
132 	case 0:
133 	case 1:
134 		return 800000;
135 	case 2:
136 		return 1066667;
137 	case 3:
138 		return 1333333;
139 	}
140 
141 	return 0;
142 }
143 
144 unsigned int intel_mem_freq(struct intel_display *display)
145 {
146 	if (display->platform.pineview)
147 		return pnv_mem_freq(display);
148 	else if (DISPLAY_VER(display) == 5)
149 		return ilk_mem_freq(display);
150 	else if (display->platform.cherryview)
151 		return chv_mem_freq(display);
152 	else if (display->platform.valleyview)
153 		return vlv_mem_freq(display);
154 	else
155 		return 0;
156 }
157 
/* Decode the gen3/4 FSB strap from CLKCFG into a frequency in kHz. */
static unsigned int i9xx_fsb_freq(struct intel_display *display)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 fsb;

	/*
	 * Note that this only reads the state of the FSB
	 * straps, not the actual FSB frequency. Some BIOSen
	 * let you configure each independently. Ideally we'd
	 * read out the actual FSB frequency but sadly we
	 * don't know which registers have that information,
	 * and all the relevant docs have gone to bit heaven :(
	 */
	fsb = intel_uncore_read(uncore, CLKCFG) & CLKCFG_FSB_MASK;

	/* Mobile/Pineview parts use a different strap encoding. */
	if (display->platform.pineview || display->platform.mobile) {
		switch (fsb) {
		case CLKCFG_FSB_400:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067:
			return 1066667;
		case CLKCFG_FSB_1333:
			return 1333333;
		default:
			MISSING_CASE(fsb);
			/* Fall back to 1333 MHz on unknown straps. */
			return 1333333;
		}
	} else {
		switch (fsb) {
		case CLKCFG_FSB_400_ALT:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067_ALT:
			return 1066667;
		case CLKCFG_FSB_1333_ALT:
			return 1333333;
		case CLKCFG_FSB_1600_ALT:
			return 1600000;
		default:
			MISSING_CASE(fsb);
			/* Fall back to 1333 MHz on unknown straps. */
			return 1333333;
		}
	}
}
213 
/*
 * Decode the ILK FSB strap from CSIPLL0 into a frequency in kHz;
 * returns 0 on an unrecognized strap value.
 */
static unsigned int ilk_fsb_freq(struct intel_display *display)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u16 fsb;

	fsb = intel_uncore_read16(uncore, CSIPLL0) & 0x3ff;

	switch (fsb) {
	case 0x00c:
		return 3200000;
	case 0x00e:
		return 3733333;
	case 0x010:
		return 4266667;
	case 0x012:
		return 4800000;
	case 0x014:
		return 5333333;
	case 0x016:
		return 5866667;
	case 0x018:
		return 6400000;
	default:
		drm_dbg_kms(display->drm, "unknown fsb frequency 0x%04x\n", fsb);
		return 0;
	}
}
241 
/* FSB frequency readout is only available on display gen 3-5; 0 otherwise. */
unsigned int intel_fsb_freq(struct intel_display *display)
{
	if (DISPLAY_VER(display) == 5)
		return ilk_fsb_freq(display);

	if (IS_DISPLAY_VER(display, 3, 4))
		return i9xx_fsb_freq(display);

	return 0;
}
251 
/*
 * Pre-SKL DRAM info: only the FSB/memory clocks and, on Pineview,
 * the DRAM type can be determined. Always succeeds.
 */
static int i915_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	dram_info->fsb_freq = intel_fsb_freq(display);
	if (dram_info->fsb_freq)
		drm_dbg_kms(display->drm, "FSB frequency: %d kHz\n", dram_info->fsb_freq);

	dram_info->mem_freq = intel_mem_freq(display);
	if (dram_info->mem_freq)
		drm_dbg_kms(display->drm, "DDR speed: %d kHz\n", dram_info->mem_freq);

	/* Only Pineview can tell DDR2 from DDR3 here. */
	if (display->platform.pineview)
		dram_info->type = pnv_dram_type(display);

	return 0;
}
267 
268 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
269 {
270 	return dimm->ranks * 64 / (dimm->width ?: 1);
271 }
272 
273 /* Returns total Gb for the whole DIMM */
274 static int skl_get_dimm_s_size(u32 val)
275 {
276 	return REG_FIELD_GET(SKL_DIMM_S_SIZE_MASK, val) * 8;
277 }
278 
279 static int skl_get_dimm_l_size(u32 val)
280 {
281 	return REG_FIELD_GET(SKL_DIMM_L_SIZE_MASK, val) * 8;
282 }
283 
284 static int skl_get_dimm_s_width(u32 val)
285 {
286 	if (skl_get_dimm_s_size(val) == 0)
287 		return 0;
288 
289 	switch (val & SKL_DIMM_S_WIDTH_MASK) {
290 	case SKL_DIMM_S_WIDTH_X8:
291 	case SKL_DIMM_S_WIDTH_X16:
292 	case SKL_DIMM_S_WIDTH_X32:
293 		return 8 << REG_FIELD_GET(SKL_DIMM_S_WIDTH_MASK, val);
294 	default:
295 		MISSING_CASE(val);
296 		return 0;
297 	}
298 }
299 
300 static int skl_get_dimm_l_width(u32 val)
301 {
302 	if (skl_get_dimm_l_size(val) == 0)
303 		return 0;
304 
305 	switch (val & SKL_DIMM_L_WIDTH_MASK) {
306 	case SKL_DIMM_L_WIDTH_X8:
307 	case SKL_DIMM_L_WIDTH_X16:
308 	case SKL_DIMM_L_WIDTH_X32:
309 		return 8 << REG_FIELD_GET(SKL_DIMM_L_WIDTH_MASK, val);
310 	default:
311 		MISSING_CASE(val);
312 		return 0;
313 	}
314 }
315 
316 static int skl_get_dimm_s_ranks(u32 val)
317 {
318 	if (skl_get_dimm_s_size(val) == 0)
319 		return 0;
320 
321 	return REG_FIELD_GET(SKL_DIMM_S_RANK_MASK, val) + 1;
322 }
323 
324 static int skl_get_dimm_l_ranks(u32 val)
325 {
326 	if (skl_get_dimm_l_size(val) == 0)
327 		return 0;
328 
329 	return REG_FIELD_GET(SKL_DIMM_L_RANK_MASK, val) + 1;
330 }
331 
332 /* Returns total Gb for the whole DIMM */
333 static int icl_get_dimm_s_size(u32 val)
334 {
335 	return REG_FIELD_GET(ICL_DIMM_S_SIZE_MASK, val) * 8 / 2;
336 }
337 
338 static int icl_get_dimm_l_size(u32 val)
339 {
340 	return REG_FIELD_GET(ICL_DIMM_L_SIZE_MASK, val) * 8 / 2;
341 }
342 
343 static int icl_get_dimm_s_width(u32 val)
344 {
345 	if (icl_get_dimm_s_size(val) == 0)
346 		return 0;
347 
348 	switch (val & ICL_DIMM_S_WIDTH_MASK) {
349 	case ICL_DIMM_S_WIDTH_X8:
350 	case ICL_DIMM_S_WIDTH_X16:
351 	case ICL_DIMM_S_WIDTH_X32:
352 		return 8 << REG_FIELD_GET(ICL_DIMM_S_WIDTH_MASK, val);
353 	default:
354 		MISSING_CASE(val);
355 		return 0;
356 	}
357 }
358 
359 static int icl_get_dimm_l_width(u32 val)
360 {
361 	if (icl_get_dimm_l_size(val) == 0)
362 		return 0;
363 
364 	switch (val & ICL_DIMM_L_WIDTH_MASK) {
365 	case ICL_DIMM_L_WIDTH_X8:
366 	case ICL_DIMM_L_WIDTH_X16:
367 	case ICL_DIMM_L_WIDTH_X32:
368 		return 8 << REG_FIELD_GET(ICL_DIMM_L_WIDTH_MASK, val);
369 	default:
370 		MISSING_CASE(val);
371 		return 0;
372 	}
373 }
374 
375 static int icl_get_dimm_s_ranks(u32 val)
376 {
377 	if (icl_get_dimm_s_size(val) == 0)
378 		return 0;
379 
380 	return REG_FIELD_GET(ICL_DIMM_S_RANK_MASK, val) + 1;
381 }
382 
383 static int icl_get_dimm_l_ranks(u32 val)
384 {
385 	if (icl_get_dimm_l_size(val) == 0)
386 		return 0;
387 
388 	return REG_FIELD_GET(ICL_DIMM_L_RANK_MASK, val) + 1;
389 }
390 
391 static bool
392 skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
393 {
394 	/* Convert total Gb to Gb per DRAM device */
395 	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) >= 16;
396 }
397 
/* Debug-log the decoded geometry of one DIMM slot within a channel. */
static void
skl_dram_print_dimm_info(struct intel_display *display,
			 struct dram_dimm_info *dimm,
			 int channel, char dimm_name)
{
	drm_dbg_kms(display->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb+ DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
408 
409 static void
410 skl_dram_get_dimm_l_info(struct intel_display *display,
411 			 struct dram_dimm_info *dimm,
412 			 int channel, u32 val)
413 {
414 	if (DISPLAY_VER(display) >= 11) {
415 		dimm->size = icl_get_dimm_l_size(val);
416 		dimm->width = icl_get_dimm_l_width(val);
417 		dimm->ranks = icl_get_dimm_l_ranks(val);
418 	} else {
419 		dimm->size = skl_get_dimm_l_size(val);
420 		dimm->width = skl_get_dimm_l_width(val);
421 		dimm->ranks = skl_get_dimm_l_ranks(val);
422 	}
423 
424 	skl_dram_print_dimm_info(display, dimm, channel, 'L');
425 }
426 
427 static void
428 skl_dram_get_dimm_s_info(struct intel_display *display,
429 			 struct dram_dimm_info *dimm,
430 			 int channel, u32 val)
431 {
432 	if (DISPLAY_VER(display) >= 11) {
433 		dimm->size = icl_get_dimm_s_size(val);
434 		dimm->width = icl_get_dimm_s_width(val);
435 		dimm->ranks = icl_get_dimm_s_ranks(val);
436 	} else {
437 		dimm->size = skl_get_dimm_s_size(val);
438 		dimm->width = skl_get_dimm_s_width(val);
439 		dimm->ranks = skl_get_dimm_s_ranks(val);
440 	}
441 
442 	skl_dram_print_dimm_info(display, dimm, channel, 'S');
443 }
444 
/*
 * Decode one channel's MAD_DIMM register value into a dram_channel_info.
 * Returns -EINVAL if neither DIMM slot is populated, 0 otherwise.
 */
static int
skl_dram_get_channel_info(struct intel_display *display,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_l_info(display, &ch->dimm_l, channel, val);
	skl_dram_get_dimm_s_info(display, &ch->dimm_s, channel, val);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(display->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * The channel counts as dual rank if either DIMM is dual rank,
	 * or if both slots hold a single-rank DIMM (two ranks total).
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	/* The channel counts as 16Gb+ if either DIMM uses 16Gb+ devices. */
	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(display->drm, "CH%u ranks: %u, 16Gb+ DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
473 
/*
 * Memory is symmetric when both channels are identical and, within a
 * channel, either only the L slot is populated or both DIMMs match.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
482 
/*
 * Read both MCHBAR channel DIMM registers and fill in the channel count,
 * 16Gb+ DIMM presence and memory symmetry. Returns -EINVAL when no
 * channel is populated or no rank information could be determined.
 */
static int
skl_dram_get_channels_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	/* Assume 16Gb+ DIMMs are present until proven otherwise */
	dram_info->has_16gb_dimms = true;

	val = intel_uncore_read(uncore, SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(uncore, SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(display->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(display->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	/* Now proven: only true if some channel actually has 16Gb+ DIMMs. */
	dram_info->has_16gb_dimms = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(display->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	drm_dbg_kms(display->drm, "16Gb+ DIMMs: %s\n",
		    str_yes_no(dram_info->has_16gb_dimms));

	return 0;
}
526 
/* Read the DRAM type from the SKL+ inter-channel MCHBAR register. */
static enum intel_dram_type
skl_get_dram_type(struct intel_display *display)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 val;

	val = intel_uncore_read(uncore, SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
549 
550 static int
551 skl_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
552 {
553 	int ret;
554 
555 	dram_info->type = skl_get_dram_type(display);
556 
557 	ret = skl_dram_get_channels_info(display, dram_info);
558 	if (ret)
559 		return ret;
560 
561 	return 0;
562 }
563 
/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		/* Unknown encoding: report 0 so callers treat it as absent. */
		MISSING_CASE(val);
		return 0;
	}
}
583 
584 static int bxt_get_dimm_width(u32 val)
585 {
586 	if (!bxt_get_dimm_size(val))
587 		return 0;
588 
589 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
590 
591 	return 8 << val;
592 }
593 
/* Rank count for a BXT DUNIT; 0 if no DIMM is present. */
static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}
609 
/* DRAM type for a BXT DUNIT; UNKNOWN if no DIMM is present. */
static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
629 
/* Fill in a dram_dimm_info from a BXT DUNIT register value. */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
641 
/*
 * BXT/GLK: walk the DUNIT registers to determine channel count, DRAM
 * type and ranks. Returns -EINVAL if neither type nor ranks could be
 * determined from any populated DUNIT.
 */
static int bxt_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All-ones read: this DUNIT is not populated, skip it. */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(display->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(display->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Remember the first non-zero rank count we see. */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(display->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
688 
/*
 * Query the pcode memory subsystem global info mailbox for DRAM type,
 * channel count and the number of QGV/PSF GV points. Returns a negative
 * error code on a mailbox failure or unrecognized DRAM type encoding.
 */
static int icl_pcode_read_mem_global_info(struct intel_display *display,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = intel_pcode_read(display->drm, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			       ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	/* Bits 3:0 encode the DRAM type; the encoding changed in gen12. */
	if (DISPLAY_VER(display) >= 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	/* Bits 7:4 channels, 11:8 QGV points, 13:12 PSF GV points. */
	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}
750 
/* ICL: DIMM geometry still comes from MCHBAR, the rest from pcode. */
static int gen11_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	int ret = skl_dram_get_channels_info(display, dram_info);

	if (ret == 0)
		ret = icl_pcode_read_mem_global_info(display, dram_info);

	return ret;
}
761 
/* Gen12+: all DRAM information comes from the pcode mailbox. */
static int gen12_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	return icl_pcode_read_mem_global_info(display, dram_info);
}
766 
/*
 * MTL+: decode DRAM type, channel count and QGV point count from the
 * MEM_SS_INFO_GLOBAL register. Returns -EINVAL on an unrecognized
 * DRAM type encoding.
 */
static int xelpdp_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 val = intel_uncore_read(uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		/* GDDR encodings are only expected on discrete GPUs. */
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	if (DISPLAY_VER(display) >= 35)
		dram_info->ecc_impacting_de_bw = REG_FIELD_GET(XE3P_ECC_IMPACTING_DE, val);

	return 0;
}
813 
/*
 * Detect the DRAM configuration (type, channels, QGV points, ...) and
 * publish it via display->dram.info. DG2 and display-less devices are
 * skipped. Only allocation failure is fatal; detection failures are
 * currently tolerated (see TODO below).
 */
int intel_dram_detect(struct intel_display *display)
{
	struct dram_info *dram_info;
	int ret;

	if (display->platform.dg2 || !HAS_DISPLAY(display))
		return 0;

	/* drmm allocation: freed automatically with the drm device. */
	dram_info = drmm_kzalloc(display->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	display->dram.info = dram_info;

	if (DISPLAY_VER(display) >= 14)
		ret = xelpdp_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 12)
		ret = gen12_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 11)
		ret = gen11_get_dram_info(display, dram_info);
	else if (display->platform.broxton || display->platform.geminilake)
		ret = bxt_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 9)
		ret = skl_get_dram_info(display, dram_info);
	else
		ret = i915_get_dram_info(display, dram_info);

	drm_dbg_kms(display->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	drm_dbg_kms(display->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(display->drm, "Num QGV points %u\n", dram_info->num_qgv_points);
	drm_dbg_kms(display->drm, "Num PSF GV points %u\n", dram_info->num_psf_gv_points);

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	return 0;
}
855 
/*
 * Return the dram_info populated by intel_dram_detect().
 *
 * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
 * checks, and prefer not dereferencing on platforms that shouldn't look at dram
 * info, to catch accidental and incorrect dram info checks.
 */
const struct dram_info *intel_dram_info(struct intel_display *display)
{
	return display->dram.info;
}
865