xref: /linux/drivers/gpu/drm/i915/display/intel_dram.c (revision 5ea5880764cbb164afb17a62e76ca75dc371409d)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include <linux/string_helpers.h>
7 
8 #include <drm/drm_managed.h>
9 #include <drm/drm_print.h>
10 #include <drm/intel/intel_pcode_regs.h>
11 
12 #include "intel_display_core.h"
13 #include "intel_display_utils.h"
14 #include "intel_display_regs.h"
15 #include "intel_dram.h"
16 #include "intel_mchbar_regs.h"
17 #include "intel_parent.h"
18 #include "intel_uncore.h"
19 #include "vlv_iosf_sb.h"
20 
/* Geometry of a single DIMM as decoded from the memory controller. */
struct dram_dimm_info {
	u16 size;		/* total DIMM size in Gb; 0 if the slot is empty */
	u8 width, ranks;	/* DRAM device width in bits (8/16/32) and rank count */
};
25 
/* One memory channel: its two DIMM slots plus derived per-channel totals. */
struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;	/* "large" and "small" DIMM slots */
	u8 ranks;		/* effective rank count of the channel (1 or 2) */
	bool is_16gb_dimm;	/* true if either DIMM uses 16Gb+ DRAM devices */
};
31 
/* Expands to a designated initializer: [INTEL_DRAM_<type>] = "<type>" */
#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

/* Return the human-readable name for a DRAM type; out-of-range values map to "UNKNOWN". */
const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR2),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
		DRAM_TYPE_STR(DDR5),
		DRAM_TYPE_STR(LPDDR5),
		DRAM_TYPE_STR(GDDR),
		DRAM_TYPE_STR(GDDR_ECC),
	};

	/* Catch enum additions that were not reflected in the table. */
	BUILD_BUG_ON(ARRAY_SIZE(str) != __INTEL_DRAM_TYPE_MAX);

	/* Clamp instead of indexing out of bounds. */
	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR
58 
59 static enum intel_dram_type pnv_dram_type(struct intel_display *display)
60 {
61 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
62 
63 	return intel_uncore_read(uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3 ?
64 		INTEL_DRAM_DDR3 : INTEL_DRAM_DDR2;
65 }
66 
67 static unsigned int pnv_mem_freq(struct intel_display *display)
68 {
69 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
70 	u32 tmp;
71 
72 	tmp = intel_uncore_read(uncore, CLKCFG);
73 
74 	switch (tmp & CLKCFG_MEM_MASK) {
75 	case CLKCFG_MEM_533:
76 		return 533333;
77 	case CLKCFG_MEM_667:
78 		return 666667;
79 	case CLKCFG_MEM_800:
80 		return 800000;
81 	}
82 
83 	return 0;
84 }
85 
86 static unsigned int ilk_mem_freq(struct intel_display *display)
87 {
88 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
89 	u16 ddrpll;
90 
91 	ddrpll = intel_uncore_read16(uncore, DDRMPLL1);
92 	switch (ddrpll & 0xff) {
93 	case 0xc:
94 		return 800000;
95 	case 0x10:
96 		return 1066667;
97 	case 0x14:
98 		return 1333333;
99 	case 0x18:
100 		return 1600000;
101 	default:
102 		drm_dbg_kms(display->drm, "unknown memory frequency 0x%02x\n",
103 			    ddrpll & 0xff);
104 		return 0;
105 	}
106 }
107 
108 static unsigned int chv_mem_freq(struct intel_display *display)
109 {
110 	u32 val;
111 
112 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_CCK));
113 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_CCK, CCK_FUSE_REG);
114 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_CCK));
115 
116 	switch ((val >> 2) & 0x7) {
117 	case 3:
118 		return 2000000;
119 	default:
120 		return 1600000;
121 	}
122 }
123 
124 static unsigned int vlv_mem_freq(struct intel_display *display)
125 {
126 	u32 val;
127 
128 	vlv_iosf_sb_get(display->drm, BIT(VLV_IOSF_SB_PUNIT));
129 	val = vlv_iosf_sb_read(display->drm, VLV_IOSF_SB_PUNIT, PUNIT_REG_GPU_FREQ_STS);
130 	vlv_iosf_sb_put(display->drm, BIT(VLV_IOSF_SB_PUNIT));
131 
132 	switch ((val >> 6) & 3) {
133 	case 0:
134 	case 1:
135 		return 800000;
136 	case 2:
137 		return 1066667;
138 	case 3:
139 		return 1333333;
140 	}
141 
142 	return 0;
143 }
144 
145 unsigned int intel_mem_freq(struct intel_display *display)
146 {
147 	if (display->platform.pineview)
148 		return pnv_mem_freq(display);
149 	else if (DISPLAY_VER(display) == 5)
150 		return ilk_mem_freq(display);
151 	else if (display->platform.cherryview)
152 		return chv_mem_freq(display);
153 	else if (display->platform.valleyview)
154 		return vlv_mem_freq(display);
155 	else
156 		return 0;
157 }
158 
159 static unsigned int i9xx_fsb_freq(struct intel_display *display)
160 {
161 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
162 	u32 fsb;
163 
164 	/*
165 	 * Note that this only reads the state of the FSB
166 	 * straps, not the actual FSB frequency. Some BIOSen
167 	 * let you configure each independently. Ideally we'd
168 	 * read out the actual FSB frequency but sadly we
169 	 * don't know which registers have that information,
170 	 * and all the relevant docs have gone to bit heaven :(
171 	 */
172 	fsb = intel_uncore_read(uncore, CLKCFG) & CLKCFG_FSB_MASK;
173 
174 	if (display->platform.pineview || display->platform.mobile) {
175 		switch (fsb) {
176 		case CLKCFG_FSB_400:
177 			return 400000;
178 		case CLKCFG_FSB_533:
179 			return 533333;
180 		case CLKCFG_FSB_667:
181 			return 666667;
182 		case CLKCFG_FSB_800:
183 			return 800000;
184 		case CLKCFG_FSB_1067:
185 			return 1066667;
186 		case CLKCFG_FSB_1333:
187 			return 1333333;
188 		default:
189 			MISSING_CASE(fsb);
190 			return 1333333;
191 		}
192 	} else {
193 		switch (fsb) {
194 		case CLKCFG_FSB_400_ALT:
195 			return 400000;
196 		case CLKCFG_FSB_533:
197 			return 533333;
198 		case CLKCFG_FSB_667:
199 			return 666667;
200 		case CLKCFG_FSB_800:
201 			return 800000;
202 		case CLKCFG_FSB_1067_ALT:
203 			return 1066667;
204 		case CLKCFG_FSB_1333_ALT:
205 			return 1333333;
206 		case CLKCFG_FSB_1600_ALT:
207 			return 1600000;
208 		default:
209 			MISSING_CASE(fsb);
210 			return 1333333;
211 		}
212 	}
213 }
214 
215 static unsigned int ilk_fsb_freq(struct intel_display *display)
216 {
217 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
218 	u16 fsb;
219 
220 	fsb = intel_uncore_read16(uncore, CSIPLL0) & 0x3ff;
221 
222 	switch (fsb) {
223 	case 0x00c:
224 		return 3200000;
225 	case 0x00e:
226 		return 3733333;
227 	case 0x010:
228 		return 4266667;
229 	case 0x012:
230 		return 4800000;
231 	case 0x014:
232 		return 5333333;
233 	case 0x016:
234 		return 5866667;
235 	case 0x018:
236 		return 6400000;
237 	default:
238 		drm_dbg_kms(display->drm, "unknown fsb frequency 0x%04x\n", fsb);
239 		return 0;
240 	}
241 }
242 
/* FSB frequency in kHz for platforms that can report it; 0 otherwise. */
unsigned int intel_fsb_freq(struct intel_display *display)
{
	if (DISPLAY_VER(display) == 5)
		return ilk_fsb_freq(display);

	if (IS_DISPLAY_VER(display, 3, 4))
		return i9xx_fsb_freq(display);

	return 0;
}
252 
/*
 * Fill in dram_info for old (pre-SKL/BXT) platforms: FSB and memory
 * clocks, plus the DRAM type on Pineview. Always returns 0; anything
 * that can't be determined is left at its zeroed default.
 */
static int i915_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	dram_info->fsb_freq = intel_fsb_freq(display);
	if (dram_info->fsb_freq)
		drm_dbg_kms(display->drm, "FSB frequency: %d kHz\n", dram_info->fsb_freq);

	dram_info->mem_freq = intel_mem_freq(display);
	if (dram_info->mem_freq)
		drm_dbg_kms(display->drm, "DDR speed: %d kHz\n", dram_info->mem_freq);

	/* Only Pineview has a register read here to tell DDR2 from DDR3. */
	if (display->platform.pineview)
		dram_info->type = pnv_dram_type(display);

	return 0;
}
268 
269 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
270 {
271 	return dimm->ranks * 64 / (dimm->width ?: 1);
272 }
273 
274 /* Returns total Gb for the whole DIMM */
275 static int skl_get_dimm_s_size(u32 val)
276 {
277 	return REG_FIELD_GET(SKL_DIMM_S_SIZE_MASK, val) * 8;
278 }
279 
280 static int skl_get_dimm_l_size(u32 val)
281 {
282 	return REG_FIELD_GET(SKL_DIMM_L_SIZE_MASK, val) * 8;
283 }
284 
285 static int skl_get_dimm_s_width(u32 val)
286 {
287 	if (skl_get_dimm_s_size(val) == 0)
288 		return 0;
289 
290 	switch (val & SKL_DIMM_S_WIDTH_MASK) {
291 	case SKL_DIMM_S_WIDTH_X8:
292 	case SKL_DIMM_S_WIDTH_X16:
293 	case SKL_DIMM_S_WIDTH_X32:
294 		return 8 << REG_FIELD_GET(SKL_DIMM_S_WIDTH_MASK, val);
295 	default:
296 		MISSING_CASE(val);
297 		return 0;
298 	}
299 }
300 
301 static int skl_get_dimm_l_width(u32 val)
302 {
303 	if (skl_get_dimm_l_size(val) == 0)
304 		return 0;
305 
306 	switch (val & SKL_DIMM_L_WIDTH_MASK) {
307 	case SKL_DIMM_L_WIDTH_X8:
308 	case SKL_DIMM_L_WIDTH_X16:
309 	case SKL_DIMM_L_WIDTH_X32:
310 		return 8 << REG_FIELD_GET(SKL_DIMM_L_WIDTH_MASK, val);
311 	default:
312 		MISSING_CASE(val);
313 		return 0;
314 	}
315 }
316 
317 static int skl_get_dimm_s_ranks(u32 val)
318 {
319 	if (skl_get_dimm_s_size(val) == 0)
320 		return 0;
321 
322 	return REG_FIELD_GET(SKL_DIMM_S_RANK_MASK, val) + 1;
323 }
324 
325 static int skl_get_dimm_l_ranks(u32 val)
326 {
327 	if (skl_get_dimm_l_size(val) == 0)
328 		return 0;
329 
330 	return REG_FIELD_GET(SKL_DIMM_L_RANK_MASK, val) + 1;
331 }
332 
333 /* Returns total Gb for the whole DIMM */
334 static int icl_get_dimm_s_size(u32 val)
335 {
336 	return REG_FIELD_GET(ICL_DIMM_S_SIZE_MASK, val) * 8 / 2;
337 }
338 
339 static int icl_get_dimm_l_size(u32 val)
340 {
341 	return REG_FIELD_GET(ICL_DIMM_L_SIZE_MASK, val) * 8 / 2;
342 }
343 
344 static int icl_get_dimm_s_width(u32 val)
345 {
346 	if (icl_get_dimm_s_size(val) == 0)
347 		return 0;
348 
349 	switch (val & ICL_DIMM_S_WIDTH_MASK) {
350 	case ICL_DIMM_S_WIDTH_X8:
351 	case ICL_DIMM_S_WIDTH_X16:
352 	case ICL_DIMM_S_WIDTH_X32:
353 		return 8 << REG_FIELD_GET(ICL_DIMM_S_WIDTH_MASK, val);
354 	default:
355 		MISSING_CASE(val);
356 		return 0;
357 	}
358 }
359 
360 static int icl_get_dimm_l_width(u32 val)
361 {
362 	if (icl_get_dimm_l_size(val) == 0)
363 		return 0;
364 
365 	switch (val & ICL_DIMM_L_WIDTH_MASK) {
366 	case ICL_DIMM_L_WIDTH_X8:
367 	case ICL_DIMM_L_WIDTH_X16:
368 	case ICL_DIMM_L_WIDTH_X32:
369 		return 8 << REG_FIELD_GET(ICL_DIMM_L_WIDTH_MASK, val);
370 	default:
371 		MISSING_CASE(val);
372 		return 0;
373 	}
374 }
375 
376 static int icl_get_dimm_s_ranks(u32 val)
377 {
378 	if (icl_get_dimm_s_size(val) == 0)
379 		return 0;
380 
381 	return REG_FIELD_GET(ICL_DIMM_S_RANK_MASK, val) + 1;
382 }
383 
384 static int icl_get_dimm_l_ranks(u32 val)
385 {
386 	if (icl_get_dimm_l_size(val) == 0)
387 		return 0;
388 
389 	return REG_FIELD_GET(ICL_DIMM_L_RANK_MASK, val) + 1;
390 }
391 
392 static bool
393 skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
394 {
395 	/* Convert total Gb to Gb per DRAM device */
396 	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) >= 16;
397 }
398 
/* Log one DIMM's decoded geometry; @dimm_name is the slot letter ('L'/'S'). */
static void
skl_dram_print_dimm_info(struct intel_display *display,
			 struct dram_dimm_info *dimm,
			 int channel, char dimm_name)
{
	drm_dbg_kms(display->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb+ DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}
409 
410 static void
411 skl_dram_get_dimm_l_info(struct intel_display *display,
412 			 struct dram_dimm_info *dimm,
413 			 int channel, u32 val)
414 {
415 	if (DISPLAY_VER(display) >= 11) {
416 		dimm->size = icl_get_dimm_l_size(val);
417 		dimm->width = icl_get_dimm_l_width(val);
418 		dimm->ranks = icl_get_dimm_l_ranks(val);
419 	} else {
420 		dimm->size = skl_get_dimm_l_size(val);
421 		dimm->width = skl_get_dimm_l_width(val);
422 		dimm->ranks = skl_get_dimm_l_ranks(val);
423 	}
424 
425 	skl_dram_print_dimm_info(display, dimm, channel, 'L');
426 }
427 
428 static void
429 skl_dram_get_dimm_s_info(struct intel_display *display,
430 			 struct dram_dimm_info *dimm,
431 			 int channel, u32 val)
432 {
433 	if (DISPLAY_VER(display) >= 11) {
434 		dimm->size = icl_get_dimm_s_size(val);
435 		dimm->width = icl_get_dimm_s_width(val);
436 		dimm->ranks = icl_get_dimm_s_ranks(val);
437 	} else {
438 		dimm->size = skl_get_dimm_s_size(val);
439 		dimm->width = skl_get_dimm_s_width(val);
440 		dimm->ranks = skl_get_dimm_s_ranks(val);
441 	}
442 
443 	skl_dram_print_dimm_info(display, dimm, channel, 'S');
444 }
445 
/*
 * Decode both DIMM slots of one channel from a MAD_DIMM register value.
 * Returns 0 on success, -EINVAL if the channel has no DIMMs at all.
 */
static int
skl_dram_get_channel_info(struct intel_display *display,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_l_info(display, &ch->dimm_l, channel, val);
	skl_dram_get_dimm_s_info(display, &ch->dimm_s, channel, val);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(display->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/*
	 * The channel is dual rank if either DIMM is dual rank, or if both
	 * slots hold a single rank DIMM (two single rank DIMMs present two
	 * ranks to the controller as well).
	 */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(display->drm, "CH%u ranks: %u, 16Gb+ DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}
474 
/*
 * Memory is symmetric when both channels are identical and, within a
 * channel, either only the L slot is populated or both DIMMs match.
 * The memcmp() over whole structs is meaningful because the caller
 * zero-initializes them (padding included).
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}
483 
/*
 * Read both MAD_DIMM channel registers and derive the channel count,
 * 16Gb+ DIMM presence and memory symmetry. Returns -EINVAL if no
 * channel is populated or no rank information could be obtained.
 */
static int
skl_dram_get_channels_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	/* Assume 16Gb+ DIMMs are present until proven otherwise */
	dram_info->has_16gb_dimms = true;

	val = intel_uncore_read(uncore, SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(uncore, SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(display, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(display->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(display->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	/* Both channels decoded; replace the pessimistic default. */
	dram_info->has_16gb_dimms = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(display->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	drm_dbg_kms(display->drm, "16Gb+ DIMMs: %s\n",
		    str_yes_no(dram_info->has_16gb_dimms));

	return 0;
}
527 
528 static enum intel_dram_type
529 skl_get_dram_type(struct intel_display *display)
530 {
531 	struct intel_uncore *uncore = to_intel_uncore(display->drm);
532 	u32 val;
533 
534 	val = intel_uncore_read(uncore, SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
535 
536 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
537 	case SKL_DRAM_DDR_TYPE_DDR3:
538 		return INTEL_DRAM_DDR3;
539 	case SKL_DRAM_DDR_TYPE_DDR4:
540 		return INTEL_DRAM_DDR4;
541 	case SKL_DRAM_DDR_TYPE_LPDDR3:
542 		return INTEL_DRAM_LPDDR3;
543 	case SKL_DRAM_DDR_TYPE_LPDDR4:
544 		return INTEL_DRAM_LPDDR4;
545 	default:
546 		MISSING_CASE(val);
547 		return INTEL_DRAM_UNKNOWN;
548 	}
549 }
550 
551 static int
552 skl_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
553 {
554 	int ret;
555 
556 	dram_info->type = skl_get_dram_type(display);
557 
558 	ret = skl_dram_get_channels_info(display, dram_info);
559 	if (ret)
560 		return ret;
561 
562 	return 0;
563 }
564 
565 /* Returns Gb per DRAM device */
566 static int bxt_get_dimm_size(u32 val)
567 {
568 	switch (val & BXT_DRAM_SIZE_MASK) {
569 	case BXT_DRAM_SIZE_4GBIT:
570 		return 4;
571 	case BXT_DRAM_SIZE_6GBIT:
572 		return 6;
573 	case BXT_DRAM_SIZE_8GBIT:
574 		return 8;
575 	case BXT_DRAM_SIZE_12GBIT:
576 		return 12;
577 	case BXT_DRAM_SIZE_16GBIT:
578 		return 16;
579 	default:
580 		MISSING_CASE(val);
581 		return 0;
582 	}
583 }
584 
585 static int bxt_get_dimm_width(u32 val)
586 {
587 	if (!bxt_get_dimm_size(val))
588 		return 0;
589 
590 	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;
591 
592 	return 8 << val;
593 }
594 
595 static int bxt_get_dimm_ranks(u32 val)
596 {
597 	if (!bxt_get_dimm_size(val))
598 		return 0;
599 
600 	switch (val & BXT_DRAM_RANK_MASK) {
601 	case BXT_DRAM_RANK_SINGLE:
602 		return 1;
603 	case BXT_DRAM_RANK_DUAL:
604 		return 2;
605 	default:
606 		MISSING_CASE(val);
607 		return 0;
608 	}
609 }
610 
611 static enum intel_dram_type bxt_get_dimm_type(u32 val)
612 {
613 	if (!bxt_get_dimm_size(val))
614 		return INTEL_DRAM_UNKNOWN;
615 
616 	switch (val & BXT_DRAM_TYPE_MASK) {
617 	case BXT_DRAM_TYPE_DDR3:
618 		return INTEL_DRAM_DDR3;
619 	case BXT_DRAM_TYPE_LPDDR3:
620 		return INTEL_DRAM_LPDDR3;
621 	case BXT_DRAM_TYPE_DDR4:
622 		return INTEL_DRAM_DDR4;
623 	case BXT_DRAM_TYPE_LPDDR4:
624 		return INTEL_DRAM_LPDDR4;
625 	default:
626 		MISSING_CASE(val);
627 		return INTEL_DRAM_UNKNOWN;
628 	}
629 }
630 
/*
 * Decode one DUNIT register into DIMM geometry. Width and ranks must be
 * decoded first: the size conversion calls intel_dimm_num_devices(),
 * which reads both.
 */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
642 
/*
 * BXT/GLK: probe the DUNIT registers, count populated channels and
 * record the DRAM type (warning if the DUNITs disagree). Returns
 * -EINVAL if neither a type nor any rank information was found.
 */
static int bxt_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(uncore, BXT_D_CR_DRP0_DUNIT(i));
		/* All-ones reads are skipped as unpopulated/unreadable DUNITs. */
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		/* All populated DUNITs are expected to report the same type. */
		drm_WARN_ON(display->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(display->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks);

		/* Latch the first non-zero rank count we come across. */
		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(display->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}
689 
/*
 * Query pcode for the global memory subsystem info and decode it into
 * @dram_info. In the reply, bits 3:0 encode the DRAM type (the encoding
 * changed at display ver 12), bits 7:4 the channel count, bits 11:8 the
 * number of QGV points and bits 13:12 the number of PSF GV points.
 * Returns 0 on success, the pcode error, or -EINVAL for an unknown
 * DRAM type encoding.
 */
static int icl_pcode_read_mem_global_info(struct intel_display *display,
					  struct dram_info *dram_info)
{
	u32 val = 0;
	int ret;

	ret = intel_parent_pcode_read(display, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
				      ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (DISPLAY_VER(display) >= 12) {
		/* Display ver 12+ DRAM type encoding. */
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		/* ICL (display ver 11) DRAM type encoding. */
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}
751 
/* ICL: channel/DIMM details from MCHBAR, the global info from pcode. */
static int gen11_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	int ret = skl_dram_get_channels_info(display, dram_info);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(display, dram_info);
}
762 
/* Display ver 12/13: everything needed comes from the pcode global info. */
static int gen12_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	return icl_pcode_read_mem_global_info(display, dram_info);
}
767 
/*
 * Display ver 14+ (Xe_LPD+): decode MTL_MEM_SS_INFO_GLOBAL into DRAM
 * type, populated channel count and enabled QGV points. GDDR types are
 * only expected on discrete GPUs. Returns -EINVAL for an unknown type.
 */
static int xelpdp_get_dram_info(struct intel_display *display, struct dram_info *dram_info)
{
	struct intel_uncore *uncore = to_intel_uncore(display->drm);
	u32 val = intel_uncore_read(uncore, MTL_MEM_SS_INFO_GLOBAL);

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	case 9:
		drm_WARN_ON(display->drm, !display->platform.dgfx);
		dram_info->type = INTEL_DRAM_GDDR_ECC;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	if (DISPLAY_VER(display) >= 35)
		dram_info->ecc_impacting_de_bw = REG_FIELD_GET(XE3P_ECC_IMPACTING_DE, val);

	return 0;
}
814 
/*
 * intel_dram_detect - detect the DRAM configuration for the display code
 * @display: display device
 *
 * Allocate the drmm-managed dram_info, dispatch to the platform specific
 * probe routine and log the result. DG2 and display-less devices are
 * skipped entirely.
 *
 * Returns 0, or -ENOMEM on allocation failure. Detection failures are
 * deliberately not propagated (see the TODO below).
 */
int intel_dram_detect(struct intel_display *display)
{
	struct dram_info *dram_info;
	int ret;

	if (display->platform.dg2 || !HAS_DISPLAY(display))
		return 0;

	dram_info = drmm_kzalloc(display->drm, sizeof(*dram_info), GFP_KERNEL);
	if (!dram_info)
		return -ENOMEM;

	display->dram.info = dram_info;

	if (DISPLAY_VER(display) >= 14)
		ret = xelpdp_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 12)
		ret = gen12_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 11)
		ret = gen11_get_dram_info(display, dram_info);
	else if (display->platform.broxton || display->platform.geminilake)
		ret = bxt_get_dram_info(display, dram_info);
	else if (DISPLAY_VER(display) >= 9)
		ret = skl_get_dram_info(display, dram_info);
	else
		ret = i915_get_dram_info(display, dram_info);

	/* Log whatever was gathered, even on a partial failure. */
	drm_dbg_kms(display->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	drm_dbg_kms(display->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(display->drm, "Num QGV points %u\n", dram_info->num_qgv_points);
	drm_dbg_kms(display->drm, "Num PSF GV points %u\n", dram_info->num_psf_gv_points);

	/* TODO: Do we want to abort probe on dram detection failures? */
	if (ret)
		return 0;

	return 0;
}
856 
/*
 * Returns NULL for platforms that don't have dram info. Avoid overzealous NULL
 * checks, and prefer not dereferencing on platforms that shouldn't look at dram
 * info, to catch accidental and incorrect dram info checks.
 *
 * The returned pointer is drmm-managed (allocated in intel_dram_detect())
 * and remains valid for the lifetime of the drm_device.
 */
const struct dram_info *intel_dram_info(struct intel_display *display)
{
	return display->dram.info;
}
866