Lines Matching full:pvt
19 static inline u32 get_umc_reg(struct amd64_pvt *pvt, u32 reg) in get_umc_reg() argument
21 if (!pvt->flags.zn_regs_v2) in get_umc_reg()
104 static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct) in f15h_select_dct() argument
108 amd64_read_pci_cfg(pvt->F1, DCT_CFG_SEL, &reg); in f15h_select_dct()
109 reg &= (pvt->model == 0x30) ? ~3 : ~1; in f15h_select_dct()
111 amd64_write_pci_cfg(pvt->F1, DCT_CFG_SEL, reg); in f15h_select_dct()
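
The f15h_select_dct() matches above show a read-modify-write of DCT_CFG_SEL: read the register, clear the select field (two bits on model 0x30, one bit otherwise), then write back the new DCT number. A minimal user-space sketch of that pattern, with a mock register standing in for the PCI config access (all names below are hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t fake_dct_cfg_sel;            /* stands in for the PCI config register */

    static void read_reg(uint32_t *val)  { *val = fake_dct_cfg_sel; }
    static void write_reg(uint32_t val)  { fake_dct_cfg_sel = val; }

    /* Read-modify-write: clear the select field, then OR in the new DCT. */
    static void select_dct(unsigned model, uint8_t dct)
    {
        uint32_t reg;

        read_reg(&reg);
        reg &= (model == 0x30) ? ~3u : ~1u;   /* model 0x30 has a 2-bit field */
        reg |= dct;
        write_reg(reg);
    }

    int main(void)
    {
        select_dct(0x30, 3);
        printf("DCT_CFG_SEL = 0x%x\n", fake_dct_cfg_sel);  /* prints 0x3 */
        return 0;
    }
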
128 static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct, in amd64_read_dct_pci_cfg() argument
131 switch (pvt->fam) { in amd64_read_dct_pci_cfg()
144 if (dct_ganging_enabled(pvt)) in amd64_read_dct_pci_cfg()
156 dct = (dct && pvt->model == 0x30) ? 3 : dct; in amd64_read_dct_pci_cfg()
157 f15h_select_dct(pvt, dct); in amd64_read_dct_pci_cfg()
168 return amd64_read_pci_cfg(pvt->F2, offset, val); in amd64_read_dct_pci_cfg()
189 static int __set_scrub_rate(struct amd64_pvt *pvt, u32 new_bw, u32 min_rate) in __set_scrub_rate() argument
217 if (pvt->fam == 0x15 && pvt->model == 0x60) { in __set_scrub_rate()
218 f15h_select_dct(pvt, 0); in __set_scrub_rate()
219 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
220 f15h_select_dct(pvt, 1); in __set_scrub_rate()
221 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
223 pci_write_bits32(pvt->F3, SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
234 struct amd64_pvt *pvt = mci->pvt_info; in set_scrub_rate() local
237 if (pvt->fam == 0xf) in set_scrub_rate()
240 if (pvt->fam == 0x15) { in set_scrub_rate()
242 if (pvt->model < 0x10) in set_scrub_rate()
243 f15h_select_dct(pvt, 0); in set_scrub_rate()
245 if (pvt->model == 0x60) in set_scrub_rate()
248 return __set_scrub_rate(pvt, bw, min_scrubrate); in set_scrub_rate()
253 struct amd64_pvt *pvt = mci->pvt_info; in get_scrub_rate() local
257 if (pvt->fam == 0x15) { in get_scrub_rate()
259 if (pvt->model < 0x10) in get_scrub_rate()
260 f15h_select_dct(pvt, 0); in get_scrub_rate()
262 if (pvt->model == 0x60) in get_scrub_rate()
263 amd64_read_pci_cfg(pvt->F2, F15H_M60H_SCRCTRL, &scrubval); in get_scrub_rate()
265 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
267 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
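
The scrub-rate matches write a 5-bit scrubval under mask 0x001F, selecting DCT 0 and 1 first on F15h M60h. A sketch of the masked-field update, assuming pci_write_bits32() preserves the bits outside the mask (my reading of that helper; mock register below):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t scrub_ctrl;  /* mock SCRCTRL register */

    /* Update only the bits covered by @mask, preserving the rest --
     * the contract pci_write_bits32() appears to provide above. */
    static void write_bits32(uint32_t *reg, uint32_t value, uint32_t mask)
    {
        *reg = (*reg & ~mask) | (value & mask);
    }

    int main(void)
    {
        scrub_ctrl = 0xABCD0000;
        write_bits32(&scrub_ctrl, 0x12, 0x001F);   /* 5-bit scrubval field */
        printf("SCRCTRL = 0x%08x\n", scrub_ctrl);  /* prints 0xABCD0012 */
        return 0;
    }
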
285 static bool base_limit_match(struct amd64_pvt *pvt, u64 sys_addr, u8 nid) in base_limit_match() argument
297 return ((addr >= get_dram_base(pvt, nid)) && in base_limit_match()
298 (addr <= get_dram_limit(pvt, nid))); in base_limit_match()
310 struct amd64_pvt *pvt; in find_mc_by_sys_addr() local
318 pvt = mci->pvt_info; in find_mc_by_sys_addr()
325 intlv_en = dram_intlv_en(pvt, 0); in find_mc_by_sys_addr()
329 if (base_limit_match(pvt, sys_addr, node_id)) in find_mc_by_sys_addr()
345 if ((dram_intlv_sel(pvt, node_id) & intlv_en) == bits) in find_mc_by_sys_addr()
353 if (unlikely(!base_limit_match(pvt, sys_addr, node_id))) { in find_mc_by_sys_addr()
374 static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct, in get_cs_base_and_mask() argument
380 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in get_cs_base_and_mask()
381 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
382 csmask = pvt->csels[dct].csmasks[csrow]; in get_cs_base_and_mask()
391 } else if (pvt->fam == 0x16 || in get_cs_base_and_mask()
392 (pvt->fam == 0x15 && pvt->model >= 0x30)) { in get_cs_base_and_mask()
393 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
394 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
409 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
410 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
413 if (pvt->fam == 0x15) in get_cs_base_and_mask()
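
Note how csmasks is indexed with csrow >> 1 on the newer families above: two chip selects share one mask register. A tiny sketch of that shared indexing (mask values invented):

    #include <stdio.h>

    int main(void)
    {
        /* Four mask registers cover eight chip selects, pairwise. */
        unsigned masks[4] = { 0xff, 0x7f, 0x3f, 0x1f };

        for (int csrow = 0; csrow < 8; csrow++)
            printf("csrow %d -> csmask[%d] = 0x%x\n",
                   csrow, csrow >> 1, masks[csrow >> 1]);
        return 0;
    }
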
430 #define for_each_chip_select(i, dct, pvt) \ argument
431 for (i = 0; i < pvt->csels[dct].b_cnt; i++)
433 #define chip_select_base(i, dct, pvt) \ argument
434 pvt->csels[dct].csbases[i]
436 #define for_each_chip_select_mask(i, dct, pvt) \ argument
437 for (i = 0; i < pvt->csels[dct].m_cnt; i++)
440 for (i = 0; i < pvt->max_mcs; i++)
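
These iterators are plain counted loops over the per-controller chip-select arrays. A self-contained sketch of how a for_each_chip_select()-style macro reads at a call site, with the pvt structure reduced to the fields the macro touches:

    #include <stdio.h>

    struct chip_select { unsigned csbases[8]; int b_cnt; };
    struct mock_pvt    { struct chip_select csels[2]; };

    /* Same shape as the for_each_chip_select() macro in the matches above. */
    #define for_each_chip_select(i, dct, pvt) \
        for (i = 0; i < (pvt)->csels[dct].b_cnt; i++)

    int main(void)
    {
        struct mock_pvt pvt = {
            .csels[0] = { .csbases = { 0x1, 0x0, 0x5 }, .b_cnt = 3 },
        };
        int cs;

        for_each_chip_select(cs, 0, &pvt)
            printf("cs %d base 0x%x\n", cs, pvt.csels[0].csbases[cs]);
        return 0;
    }
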
448 struct amd64_pvt *pvt; in input_addr_to_csrow() local
452 pvt = mci->pvt_info; in input_addr_to_csrow()
454 for_each_chip_select(csrow, 0, pvt) { in input_addr_to_csrow()
455 if (!csrow_enabled(csrow, 0, pvt)) in input_addr_to_csrow()
458 get_cs_base_and_mask(pvt, csrow, 0, &base, &mask); in input_addr_to_csrow()
465 pvt->mc_node_id); in input_addr_to_csrow()
471 (unsigned long)input_addr, pvt->mc_node_id); in input_addr_to_csrow()
495 struct amd64_pvt *pvt = mci->pvt_info; in get_dram_hole_info() local
498 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_E) { in get_dram_hole_info()
500 pvt->ext_model, pvt->mc_node_id); in get_dram_hole_info()
505 if (pvt->fam >= 0x10 && !dhar_mem_hoist_valid(pvt)) { in get_dram_hole_info()
510 if (!dhar_valid(pvt)) { in get_dram_hole_info()
512 pvt->mc_node_id); in get_dram_hole_info()
534 *hole_base = dhar_base(pvt); in get_dram_hole_info()
537 *hole_offset = (pvt->fam > 0xf) ? f10_dhar_offset(pvt) in get_dram_hole_info()
538 : k8_dhar_offset(pvt); in get_dram_hole_info()
541 pvt->mc_node_id, (unsigned long)*hole_base, in get_dram_hole_info()
553 struct amd64_pvt *pvt = mci->pvt_info; \
555 return sprintf(data, "0x%016llx\n", (u64)pvt->reg); \
604 struct amd64_pvt *pvt = mci->pvt_info; in inject_section_show() local
605 return sprintf(buf, "0x%x\n", pvt->injection.section); in inject_section_show()
619 struct amd64_pvt *pvt = mci->pvt_info; in inject_section_store() local
632 pvt->injection.section = (u32) value; in inject_section_store()
640 struct amd64_pvt *pvt = mci->pvt_info; in inject_word_show() local
641 return sprintf(buf, "0x%x\n", pvt->injection.word); in inject_word_show()
655 struct amd64_pvt *pvt = mci->pvt_info; in inject_word_store() local
668 pvt->injection.word = (u32) value; in inject_word_store()
677 struct amd64_pvt *pvt = mci->pvt_info; in inject_ecc_vector_show() local
678 return sprintf(buf, "0x%x\n", pvt->injection.bit_map); in inject_ecc_vector_show()
691 struct amd64_pvt *pvt = mci->pvt_info; in inject_ecc_vector_store() local
704 pvt->injection.bit_map = (u32) value; in inject_ecc_vector_store()
709 * Do a DRAM ECC read. Assemble staged values in the pvt area, format into
717 struct amd64_pvt *pvt = mci->pvt_info; in inject_read_store() local
727 section = F10_NB_ARRAY_DRAM | SET_NB_ARRAY_ADDR(pvt->injection.section); in inject_read_store()
729 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_ADDR, section); in inject_read_store()
731 word_bits = SET_NB_DRAM_INJECTION_READ(pvt->injection); in inject_read_store()
734 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, word_bits); in inject_read_store()
742 * Do a DRAM ECC write. Assemble staged values in the pvt area and format into
750 struct amd64_pvt *pvt = mci->pvt_info; in inject_write_store() local
760 section = F10_NB_ARRAY_DRAM | SET_NB_ARRAY_ADDR(pvt->injection.section); in inject_write_store()
762 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_ADDR, section); in inject_write_store()
764 word_bits = SET_NB_DRAM_INJECTION_WRITE(pvt->injection); in inject_write_store()
773 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, word_bits); in inject_write_store()
777 amd64_read_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, &tmp); in inject_write_store()
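
The inject_read_store()/inject_write_store() matches follow the flow the comments describe: stage section/word/bit_map in pvt->injection via sysfs, then commit by programming F10_NB_ARRAY_ADDR followed by F10_NB_ARRAY_DATA. A sketch of that stage-then-commit shape; the register field layout below is invented for illustration and does not match the real SET_NB_* macros:

    #include <stdint.h>
    #include <stdio.h>

    /* Staged injection values, as in pvt->injection in the listing. */
    struct injection { uint32_t section, word, bit_map; };

    static uint32_t nb_array_addr, nb_array_data;  /* mock F10_NB_ARRAY_* registers */

    /* Commit staged values: program the address register, then the data
     * register. Field placement here is purely illustrative. */
    static void inject_write(const struct injection *inj)
    {
        nb_array_addr = (1u << 31) | (inj->section & 0x3);  /* hypothetical DRAM select + section */
        nb_array_data = (inj->word << 20) | (inj->bit_map & 0xFFFF);
        printf("addr=0x%08x data=0x%08x\n", nb_array_addr, nb_array_data);
    }

    int main(void)
    {
        struct injection inj = { .section = 2, .word = 5, .bit_map = 0x9 };
        inject_write(&inj);
        return 0;
    }
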
813 struct amd64_pvt *pvt = mci->pvt_info; in inj_is_visible() local
816 if (pvt->fam >= 0x10 && pvt->fam <= 0x16) in inj_is_visible()
859 struct amd64_pvt *pvt = mci->pvt_info; in sys_addr_to_dram_addr() local
863 dram_base = get_dram_base(pvt, pvt->mc_node_id); in sys_addr_to_dram_addr()
914 struct amd64_pvt *pvt; in dram_addr_to_input_addr() local
918 pvt = mci->pvt_info; in dram_addr_to_input_addr()
924 intlv_shift = num_node_interleave_bits(dram_intlv_en(pvt, 0)); in dram_addr_to_input_addr()
1001 static int gpu_get_node_map(struct amd64_pvt *pvt) in gpu_get_node_map() argument
1012 if (pvt->F3->device != PCI_DEVICE_ID_AMD_MI200_DF_F3) in gpu_get_node_map()
1064 static unsigned long dct_determine_edac_cap(struct amd64_pvt *pvt) in dct_determine_edac_cap() argument
1069 bit = (pvt->fam > 0xf || pvt->ext_model >= K8_REV_F) in dct_determine_edac_cap()
1073 if (pvt->dclr0 & BIT(bit)) in dct_determine_edac_cap()
1079 static unsigned long umc_determine_edac_cap(struct amd64_pvt *pvt) in umc_determine_edac_cap() argument
1085 if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT)) in umc_determine_edac_cap()
1091 if (pvt->umc[i].umc_cfg & BIT(12)) in umc_determine_edac_cap()
1105 static void dct_debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in dct_debug_display_dimm_sizes() argument
1107 u32 *dcsb = ctrl ? pvt->csels[1].csbases : pvt->csels[0].csbases; in dct_debug_display_dimm_sizes()
1108 u32 dbam = ctrl ? pvt->dbam1 : pvt->dbam0; in dct_debug_display_dimm_sizes()
1111 if (pvt->fam == 0xf) { in dct_debug_display_dimm_sizes()
1113 if (pvt->ext_model < K8_REV_F) in dct_debug_display_dimm_sizes()
1119 if (pvt->fam == 0x10) { in dct_debug_display_dimm_sizes()
1120 dbam = (ctrl && !dct_ganging_enabled(pvt)) ? pvt->dbam1 in dct_debug_display_dimm_sizes()
1121 : pvt->dbam0; in dct_debug_display_dimm_sizes()
1122 dcsb = (ctrl && !dct_ganging_enabled(pvt)) ? in dct_debug_display_dimm_sizes()
1123 pvt->csels[1].csbases : in dct_debug_display_dimm_sizes()
1124 pvt->csels[0].csbases; in dct_debug_display_dimm_sizes()
1126 dbam = pvt->dbam0; in dct_debug_display_dimm_sizes()
1127 dcsb = pvt->csels[1].csbases; in dct_debug_display_dimm_sizes()
1144 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, in dct_debug_display_dimm_sizes()
1150 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, in dct_debug_display_dimm_sizes()
1161 static void debug_dump_dramcfg_low(struct amd64_pvt *pvt, u32 dclr, int chan) in debug_dump_dramcfg_low() argument
1165 if (pvt->dram_type == MEM_LRDDR3) { in debug_dump_dramcfg_low()
1166 u32 dcsm = pvt->csels[chan].csmasks[0]; in debug_dump_dramcfg_low()
1181 if (pvt->fam == 0x10) in debug_dump_dramcfg_low()
1201 static int umc_get_cs_mode(int dimm, u8 ctrl, struct amd64_pvt *pvt) in umc_get_cs_mode() argument
1206 if (csrow_enabled(2 * dimm, ctrl, pvt)) in umc_get_cs_mode()
1209 if (csrow_enabled(2 * dimm + 1, ctrl, pvt)) in umc_get_cs_mode()
1212 if (csrow_sec_enabled(2 * dimm, ctrl, pvt)) in umc_get_cs_mode()
1215 if (csrow_sec_enabled(2 * dimm + 1, ctrl, pvt)) in umc_get_cs_mode()
1223 for_each_chip_select(base, ctrl, pvt) in umc_get_cs_mode()
1224 count += csrow_enabled(base, ctrl, pvt); in umc_get_cs_mode()
1227 pvt->csels[ctrl].csmasks[0] == pvt->csels[ctrl].csmasks[1]) { in umc_get_cs_mode()
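
umc_get_cs_mode() accumulates a bitmask describing which primary and secondary chip selects of a DIMM are enabled. A sketch of that accumulation, with flags approximating the driver's CS_EVEN_PRIMARY-style constants (values illustrative):

    #include <stdio.h>

    /* Hypothetical cs_mode flags mirroring the driver's constants. */
    #define EVEN_PRI (1 << 0)
    #define ODD_PRI  (1 << 1)
    #define EVEN_SEC (1 << 2)
    #define ODD_SEC  (1 << 3)

    static int get_cs_mode(int dimm, const int en[4], const int sec_en[4])
    {
        int mode = 0;

        if (en[2 * dimm])         mode |= EVEN_PRI;  /* primary CS, even rank */
        if (en[2 * dimm + 1])     mode |= ODD_PRI;
        if (sec_en[2 * dimm])     mode |= EVEN_SEC;  /* secondary CS, even rank */
        if (sec_en[2 * dimm + 1]) mode |= ODD_SEC;
        return mode;
    }

    int main(void)
    {
        int en[4] = { 1, 1, 0, 0 }, sec[4] = { 1, 0, 0, 0 };
        printf("cs_mode(dimm0) = 0x%x\n", get_cs_mode(0, en, sec));  /* 0x7 */
        return 0;
    }
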
1283 static int umc_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, in umc_addr_mask_to_cs_size() argument
1322 if (!pvt->flags.zn_regs_v2) in umc_addr_mask_to_cs_size()
1326 addr_mask = pvt->csels[umc].csmasks[cs_mask_nr]; in umc_addr_mask_to_cs_size()
1329 addr_mask_sec = pvt->csels[umc].csmasks_sec[cs_mask_nr]; in umc_addr_mask_to_cs_size()
1334 static void umc_debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in umc_debug_display_dimm_sizes() argument
1344 cs_mode = umc_get_cs_mode(dimm, ctrl, pvt); in umc_debug_display_dimm_sizes()
1346 size0 = umc_addr_mask_to_cs_size(pvt, ctrl, cs_mode, cs0); in umc_debug_display_dimm_sizes()
1347 size1 = umc_addr_mask_to_cs_size(pvt, ctrl, cs_mode, cs1); in umc_debug_display_dimm_sizes()
1355 static void umc_dump_misc_regs(struct amd64_pvt *pvt) in umc_dump_misc_regs() argument
1361 umc = &pvt->umc[i]; in umc_dump_misc_regs()
1379 umc_debug_display_dimm_sizes(pvt, i); in umc_dump_misc_regs()
1383 static void dct_dump_misc_regs(struct amd64_pvt *pvt) in dct_dump_misc_regs() argument
1385 edac_dbg(1, "F3xE8 (NB Cap): 0x%08x\n", pvt->nbcap); in dct_dump_misc_regs()
1388 str_yes_no(pvt->nbcap & NBCAP_DCT_DUAL)); in dct_dump_misc_regs()
1391 str_yes_no(pvt->nbcap & NBCAP_SECDED), in dct_dump_misc_regs()
1392 str_yes_no(pvt->nbcap & NBCAP_CHIPKILL)); in dct_dump_misc_regs()
1394 debug_dump_dramcfg_low(pvt, pvt->dclr0, 0); in dct_dump_misc_regs()
1396 edac_dbg(1, "F3xB0 (Online Spare): 0x%08x\n", pvt->online_spare); in dct_dump_misc_regs()
1399 pvt->dhar, dhar_base(pvt), in dct_dump_misc_regs()
1400 (pvt->fam == 0xf) ? k8_dhar_offset(pvt) in dct_dump_misc_regs()
1401 : f10_dhar_offset(pvt)); in dct_dump_misc_regs()
1403 dct_debug_display_dimm_sizes(pvt, 0); in dct_dump_misc_regs()
1406 if (pvt->fam == 0xf) in dct_dump_misc_regs()
1409 dct_debug_display_dimm_sizes(pvt, 1); in dct_dump_misc_regs()
1412 if (!dct_ganging_enabled(pvt)) in dct_dump_misc_regs()
1413 debug_dump_dramcfg_low(pvt, pvt->dclr1, 1); in dct_dump_misc_regs()
1415 edac_dbg(1, " DramHoleValid: %s\n", str_yes_no(dhar_valid(pvt))); in dct_dump_misc_regs()
1417 amd64_info("using x%u syndromes.\n", pvt->ecc_sym_sz); in dct_dump_misc_regs()
1423 static void dct_prep_chip_selects(struct amd64_pvt *pvt) in dct_prep_chip_selects() argument
1425 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in dct_prep_chip_selects()
1426 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in dct_prep_chip_selects()
1427 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 8; in dct_prep_chip_selects()
1428 } else if (pvt->fam == 0x15 && pvt->model == 0x30) { in dct_prep_chip_selects()
1429 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 4; in dct_prep_chip_selects()
1430 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 2; in dct_prep_chip_selects()
1432 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in dct_prep_chip_selects()
1433 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 4; in dct_prep_chip_selects()
1437 static void umc_prep_chip_selects(struct amd64_pvt *pvt) in umc_prep_chip_selects() argument
1442 pvt->csels[umc].b_cnt = 4; in umc_prep_chip_selects()
1443 pvt->csels[umc].m_cnt = pvt->flags.zn_regs_v2 ? 4 : 2; in umc_prep_chip_selects()
1447 static void umc_read_base_mask(struct amd64_pvt *pvt) in umc_read_base_mask() argument
1462 for_each_chip_select(cs, umc, pvt) { in umc_read_base_mask()
1463 base = &pvt->csels[umc].csbases[cs]; in umc_read_base_mask()
1464 base_sec = &pvt->csels[umc].csbases_sec[cs]; in umc_read_base_mask()
1469 if (!amd_smn_read(pvt->mc_node_id, base_reg, &tmp)) { in umc_read_base_mask()
1475 if (!amd_smn_read(pvt->mc_node_id, base_reg_sec, &tmp)) { in umc_read_base_mask()
1483 umc_mask_reg_sec = get_umc_base(umc) + get_umc_reg(pvt, UMCCH_ADDR_MASK_SEC); in umc_read_base_mask()
1485 for_each_chip_select_mask(cs, umc, pvt) { in umc_read_base_mask()
1486 mask = &pvt->csels[umc].csmasks[cs]; in umc_read_base_mask()
1487 mask_sec = &pvt->csels[umc].csmasks_sec[cs]; in umc_read_base_mask()
1492 if (!amd_smn_read(pvt->mc_node_id, mask_reg, &tmp)) { in umc_read_base_mask()
1498 if (!amd_smn_read(pvt->mc_node_id, mask_reg_sec, &tmp)) { in umc_read_base_mask()
1510 static void dct_read_base_mask(struct amd64_pvt *pvt) in dct_read_base_mask() argument
1514 for_each_chip_select(cs, 0, pvt) { in dct_read_base_mask()
1517 u32 *base0 = &pvt->csels[0].csbases[cs]; in dct_read_base_mask()
1518 u32 *base1 = &pvt->csels[1].csbases[cs]; in dct_read_base_mask()
1520 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, base0)) in dct_read_base_mask()
1524 if (pvt->fam == 0xf) in dct_read_base_mask()
1527 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, base1)) in dct_read_base_mask()
1529 cs, *base1, (pvt->fam == 0x10) ? reg1 in dct_read_base_mask()
1533 for_each_chip_select_mask(cs, 0, pvt) { in dct_read_base_mask()
1536 u32 *mask0 = &pvt->csels[0].csmasks[cs]; in dct_read_base_mask()
1537 u32 *mask1 = &pvt->csels[1].csmasks[cs]; in dct_read_base_mask()
1539 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, mask0)) in dct_read_base_mask()
1543 if (pvt->fam == 0xf) in dct_read_base_mask()
1546 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, mask1)) in dct_read_base_mask()
1548 cs, *mask1, (pvt->fam == 0x10) ? reg1 in dct_read_base_mask()
1553 static void umc_determine_memory_type(struct amd64_pvt *pvt) in umc_determine_memory_type() argument
1559 umc = &pvt->umc[i]; in umc_determine_memory_type()
1570 if (pvt->flags.zn_regs_v2 && ((umc->umc_cfg & GENMASK(2, 0)) == 0x1)) { in umc_determine_memory_type()
1590 static void dct_determine_memory_type(struct amd64_pvt *pvt) in dct_determine_memory_type() argument
1594 switch (pvt->fam) { in dct_determine_memory_type()
1596 if (pvt->ext_model >= K8_REV_F) in dct_determine_memory_type()
1599 pvt->dram_type = (pvt->dclr0 & BIT(18)) ? MEM_DDR : MEM_RDDR; in dct_determine_memory_type()
1603 if (pvt->dchr0 & DDR3_MODE) in dct_determine_memory_type()
1606 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR2 : MEM_RDDR2; in dct_determine_memory_type()
1610 if (pvt->model < 0x60) in dct_determine_memory_type()
1622 amd64_read_dct_pci_cfg(pvt, 0, DRAM_CONTROL, &dram_ctrl); in dct_determine_memory_type()
1623 dcsm = pvt->csels[0].csmasks[0]; in dct_determine_memory_type()
1626 pvt->dram_type = MEM_DDR4; in dct_determine_memory_type()
1627 else if (pvt->dclr0 & BIT(16)) in dct_determine_memory_type()
1628 pvt->dram_type = MEM_DDR3; in dct_determine_memory_type()
1630 pvt->dram_type = MEM_LRDDR3; in dct_determine_memory_type()
1632 pvt->dram_type = MEM_RDDR3; in dct_determine_memory_type()
1640 WARN(1, KERN_ERR "%s: Family??? 0x%x\n", __func__, pvt->fam); in dct_determine_memory_type()
1641 pvt->dram_type = MEM_EMPTY; in dct_determine_memory_type()
1644 edac_dbg(1, " DIMM type: %s\n", edac_mem_types[pvt->dram_type]); in dct_determine_memory_type()
1648 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3; in dct_determine_memory_type()
1652 static u64 get_error_address(struct amd64_pvt *pvt, struct mce *m) in get_error_address() argument
1664 pvt = mci->pvt_info; in get_error_address()
1666 if (pvt->fam == 0xf) { in get_error_address()
1676 if (pvt->fam == 0x15) { in get_error_address()
1685 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_LIM, &tmp); in get_error_address()
1700 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_BASE, &tmp); in get_error_address()
1733 static void read_dram_base_limit_regs(struct amd64_pvt *pvt, unsigned range) in read_dram_base_limit_regs() argument
1741 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_LO + off, &pvt->ranges[range].base.lo); in read_dram_base_limit_regs()
1742 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_LO + off, &pvt->ranges[range].lim.lo); in read_dram_base_limit_regs()
1744 if (pvt->fam == 0xf) in read_dram_base_limit_regs()
1747 if (!dram_rw(pvt, range)) in read_dram_base_limit_regs()
1750 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_HI + off, &pvt->ranges[range].base.hi); in read_dram_base_limit_regs()
1751 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_HI + off, &pvt->ranges[range].lim.hi); in read_dram_base_limit_regs()
1754 if (pvt->fam != 0x15) in read_dram_base_limit_regs()
1757 nb = node_to_amd_nb(dram_dst_node(pvt, range)); in read_dram_base_limit_regs()
1761 if (pvt->model == 0x60) in read_dram_base_limit_regs()
1763 else if (pvt->model == 0x30) in read_dram_base_limit_regs()
1774 pvt->ranges[range].lim.lo &= GENMASK_ULL(15, 0); in read_dram_base_limit_regs()
1777 pvt->ranges[range].lim.lo |= ((llim & 0x1fff) << 3 | 0x7) << 16; in read_dram_base_limit_regs()
1779 pvt->ranges[range].lim.hi &= GENMASK_ULL(7, 0); in read_dram_base_limit_regs()
1782 pvt->ranges[range].lim.hi |= llim >> 13; in read_dram_base_limit_regs()
1790 struct amd64_pvt *pvt = mci->pvt_info; in k8_map_sysaddr_to_csrow() local
1814 if (pvt->nbcfg & NBCFG_CHIPKILL) { in k8_map_sysaddr_to_csrow()
1855 static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in k8_dbam_to_chip_select() argument
1858 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in k8_dbam_to_chip_select()
1860 if (pvt->ext_model >= K8_REV_F) { in k8_dbam_to_chip_select()
1864 else if (pvt->ext_model >= K8_REV_D) { in k8_dbam_to_chip_select()
1959 static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f10_dbam_to_chip_select() argument
1962 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in f10_dbam_to_chip_select()
1966 if (pvt->dchr0 & DDR3_MODE || pvt->dchr1 & DDR3_MODE) in f10_dbam_to_chip_select()
1975 static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_dbam_to_chip_select() argument
1984 static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_m60h_dbam_to_chip_select() argument
1988 u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr]; in f15_m60h_dbam_to_chip_select()
1992 if (pvt->dram_type == MEM_DDR4) { in f15_m60h_dbam_to_chip_select()
1997 } else if (pvt->dram_type == MEM_LRDDR3) { in f15_m60h_dbam_to_chip_select()
2017 static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f16_dbam_to_chip_select() argument
2029 static void read_dram_ctl_register(struct amd64_pvt *pvt) in read_dram_ctl_register() argument
2032 if (pvt->fam == 0xf) in read_dram_ctl_register()
2035 if (!amd64_read_pci_cfg(pvt->F2, DCT_SEL_LO, &pvt->dct_sel_lo)) { in read_dram_ctl_register()
2037 pvt->dct_sel_lo, dct_sel_baseaddr(pvt)); in read_dram_ctl_register()
2040 (dct_ganging_enabled(pvt) ? "ganged" : "unganged")); in read_dram_ctl_register()
2042 if (!dct_ganging_enabled(pvt)) in read_dram_ctl_register()
2044 str_yes_no(dct_high_range_enabled(pvt))); in read_dram_ctl_register()
2047 str_enabled_disabled(dct_data_intlv_enabled(pvt)), in read_dram_ctl_register()
2048 str_yes_no(dct_memory_cleared(pvt))); in read_dram_ctl_register()
2052 str_enabled_disabled(dct_interleave_enabled(pvt)), in read_dram_ctl_register()
2053 dct_sel_interleave_addr(pvt)); in read_dram_ctl_register()
2056 amd64_read_pci_cfg(pvt->F2, DCT_SEL_HI, &pvt->dct_sel_hi); in read_dram_ctl_register()
2063 static u8 f15_m30h_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f15_m30h_determine_channel() argument
2077 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_determine_channel()
2094 static u8 f1x_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f1x_determine_channel() argument
2097 u8 dct_sel_high = (pvt->dct_sel_lo >> 1) & 1; in f1x_determine_channel()
2099 if (dct_ganging_enabled(pvt)) in f1x_determine_channel()
2108 if (dct_interleave_enabled(pvt)) { in f1x_determine_channel()
2109 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f1x_determine_channel()
2131 if (dct_high_range_enabled(pvt)) in f1x_determine_channel()
2138 static u64 f1x_get_norm_dct_addr(struct amd64_pvt *pvt, u8 range, in f1x_get_norm_dct_addr() argument
2143 u64 dram_base = get_dram_base(pvt, range); in f1x_get_norm_dct_addr()
2144 u64 hole_off = f10_dhar_offset(pvt); in f1x_get_norm_dct_addr()
2145 u64 dct_sel_base_off = (u64)(pvt->dct_sel_hi & 0xFFFFFC00) << 16; in f1x_get_norm_dct_addr()
2160 dct_sel_base_addr < dhar_base(pvt)) && in f1x_get_norm_dct_addr()
2161 dhar_valid(pvt) && in f1x_get_norm_dct_addr()
2176 if (dhar_valid(pvt) && (sys_addr >= BIT_64(32))) in f1x_get_norm_dct_addr()
2189 static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow) in f10_process_possible_spare() argument
2193 if (online_spare_swap_done(pvt, dct) && in f10_process_possible_spare()
2194 csrow == online_spare_bad_dramcs(pvt, dct)) { in f10_process_possible_spare()
2196 for_each_chip_select(tmp_cs, dct, pvt) { in f10_process_possible_spare()
2197 if (chip_select_base(tmp_cs, dct, pvt) & 0x2) { in f10_process_possible_spare()
2217 struct amd64_pvt *pvt; in f1x_lookup_addr_in_dct() local
2226 pvt = mci->pvt_info; in f1x_lookup_addr_in_dct()
2230 for_each_chip_select(csrow, dct, pvt) { in f1x_lookup_addr_in_dct()
2231 if (!csrow_enabled(csrow, dct, pvt)) in f1x_lookup_addr_in_dct()
2234 get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask); in f1x_lookup_addr_in_dct()
2245 if (pvt->fam == 0x15 && pvt->model >= 0x30) { in f1x_lookup_addr_in_dct()
2249 cs_found = f10_process_possible_spare(pvt, dct, csrow); in f1x_lookup_addr_in_dct()
2263 static u64 f1x_swap_interleaved_region(struct amd64_pvt *pvt, u64 sys_addr) in f1x_swap_interleaved_region() argument
2267 if (pvt->fam == 0x10) { in f1x_swap_interleaved_region()
2269 if (pvt->model < 4 || (pvt->model < 0xa && pvt->stepping < 3)) in f1x_swap_interleaved_region()
2273 amd64_read_pci_cfg(pvt->F2, SWAP_INTLV_REG, &swap_reg); in f1x_swap_interleaved_region()
2293 static int f1x_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f1x_match_to_this_node() argument
2302 u8 node_id = dram_dst_node(pvt, range); in f1x_match_to_this_node()
2303 u8 intlv_en = dram_intlv_en(pvt, range); in f1x_match_to_this_node()
2304 u32 intlv_sel = dram_intlv_sel(pvt, range); in f1x_match_to_this_node()
2307 range, sys_addr, get_dram_limit(pvt, range)); in f1x_match_to_this_node()
2309 if (dhar_valid(pvt) && in f1x_match_to_this_node()
2310 dhar_base(pvt) <= sys_addr && in f1x_match_to_this_node()
2320 sys_addr = f1x_swap_interleaved_region(pvt, sys_addr); in f1x_match_to_this_node()
2322 dct_sel_base = dct_sel_baseaddr(pvt); in f1x_match_to_this_node()
2328 if (dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
2329 !dct_ganging_enabled(pvt) && in f1x_match_to_this_node()
2333 channel = f1x_determine_channel(pvt, sys_addr, high_range, intlv_en); in f1x_match_to_this_node()
2335 chan_addr = f1x_get_norm_dct_addr(pvt, range, sys_addr, in f1x_match_to_this_node()
2344 if (dct_interleave_enabled(pvt) && in f1x_match_to_this_node()
2345 !dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
2346 !dct_ganging_enabled(pvt)) { in f1x_match_to_this_node()
2348 if (dct_sel_interleave_addr(pvt) != 1) { in f1x_match_to_this_node()
2349 if (dct_sel_interleave_addr(pvt) == 0x3) in f1x_match_to_this_node()
2373 static int f15_m30h_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f15_m30h_match_to_this_node() argument
2383 u64 dhar_offset = f10_dhar_offset(pvt); in f15_m30h_match_to_this_node()
2384 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_match_to_this_node()
2385 u8 node_id = dram_dst_node(pvt, range); in f15_m30h_match_to_this_node()
2386 u8 intlv_en = dram_intlv_en(pvt, range); in f15_m30h_match_to_this_node()
2388 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_BASE, &dct_cont_base_reg); in f15_m30h_match_to_this_node()
2389 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_LIMIT, &dct_cont_limit_reg); in f15_m30h_match_to_this_node()
2395 range, sys_addr, get_dram_limit(pvt, range)); in f15_m30h_match_to_this_node()
2397 if (!(get_dram_base(pvt, range) <= sys_addr) && in f15_m30h_match_to_this_node()
2398 !(get_dram_limit(pvt, range) >= sys_addr)) in f15_m30h_match_to_this_node()
2401 if (dhar_valid(pvt) && in f15_m30h_match_to_this_node()
2402 dhar_base(pvt) <= sys_addr && in f15_m30h_match_to_this_node()
2410 dct_base = (u64) dct_sel_baseaddr(pvt); in f15_m30h_match_to_this_node()
2424 if (pvt->model >= 0x60) in f15_m30h_match_to_this_node()
2425 channel = f1x_determine_channel(pvt, sys_addr, false, intlv_en); in f15_m30h_match_to_this_node()
2427 channel = f15_m30h_determine_channel(pvt, sys_addr, intlv_en, in f15_m30h_match_to_this_node()
2467 amd64_read_pci_cfg(pvt->F1, in f15_m30h_match_to_this_node()
2473 f15h_select_dct(pvt, channel); in f15_m30h_match_to_this_node()
2482 * pvt->csels[1]. So we need to use '1' here to get correct info. in f15_m30h_match_to_this_node()
2495 static int f1x_translate_sysaddr_to_cs(struct amd64_pvt *pvt, in f1x_translate_sysaddr_to_cs() argument
2503 if (!dram_rw(pvt, range)) in f1x_translate_sysaddr_to_cs()
2506 if (pvt->fam == 0x15 && pvt->model >= 0x30) in f1x_translate_sysaddr_to_cs()
2507 cs_found = f15_m30h_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2511 else if ((get_dram_base(pvt, range) <= sys_addr) && in f1x_translate_sysaddr_to_cs()
2512 (get_dram_limit(pvt, range) >= sys_addr)) { in f1x_translate_sysaddr_to_cs()
2513 cs_found = f1x_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2532 struct amd64_pvt *pvt = mci->pvt_info; in f1x_map_sysaddr_to_csrow() local
2536 err->csrow = f1x_translate_sysaddr_to_cs(pvt, sys_addr, &err->channel); in f1x_map_sysaddr_to_csrow()
2547 if (dct_ganging_enabled(pvt)) in f1x_map_sysaddr_to_csrow()
2688 struct amd64_pvt *pvt = mci->pvt_info; in get_channel_from_ecc_syndrome() local
2691 if (pvt->ecc_sym_sz == 8) in get_channel_from_ecc_syndrome()
2694 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2695 else if (pvt->ecc_sym_sz == 4) in get_channel_from_ecc_syndrome()
2698 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2700 amd64_warn("Illegal syndrome type: %u\n", pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2704 return map_err_sym_to_channel(err_sym, pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2757 struct amd64_pvt *pvt; in decode_bus_error() local
2768 pvt = mci->pvt_info; in decode_bus_error()
2780 sys_addr = get_error_address(pvt, m); in decode_bus_error()
2785 pvt->ops->map_sysaddr_to_csrow(mci, sys_addr, &err); in decode_bus_error()
2813 struct amd64_pvt *pvt; in decode_umc_error() local
2823 pvt = mci->pvt_info; in decode_umc_error()
2844 pvt->ops->get_err_info(m, &err); in decode_umc_error()
2863 * Use pvt->F3 which contains the F3 CPU PCI device to get the related
2867 reserve_mc_sibling_devs(struct amd64_pvt *pvt, u16 pci_id1, u16 pci_id2) in reserve_mc_sibling_devs() argument
2870 pvt->F1 = pci_get_related_function(pvt->F3->vendor, pci_id1, pvt->F3); in reserve_mc_sibling_devs()
2871 if (!pvt->F1) { in reserve_mc_sibling_devs()
2877 pvt->F2 = pci_get_related_function(pvt->F3->vendor, pci_id2, pvt->F3); in reserve_mc_sibling_devs()
2878 if (!pvt->F2) { in reserve_mc_sibling_devs()
2879 pci_dev_put(pvt->F1); in reserve_mc_sibling_devs()
2880 pvt->F1 = NULL; in reserve_mc_sibling_devs()
2887 pci_ctl_dev = &pvt->F2->dev; in reserve_mc_sibling_devs()
2889 edac_dbg(1, "F1: %s\n", pci_name(pvt->F1)); in reserve_mc_sibling_devs()
2890 edac_dbg(1, "F2: %s\n", pci_name(pvt->F2)); in reserve_mc_sibling_devs()
2891 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
2896 static void determine_ecc_sym_sz(struct amd64_pvt *pvt) in determine_ecc_sym_sz() argument
2898 pvt->ecc_sym_sz = 4; in determine_ecc_sym_sz()
2900 if (pvt->fam >= 0x10) { in determine_ecc_sym_sz()
2903 amd64_read_pci_cfg(pvt->F3, EXT_NB_MCA_CFG, &tmp); in determine_ecc_sym_sz()
2905 if (pvt->fam != 0x16) in determine_ecc_sym_sz()
2906 amd64_read_dct_pci_cfg(pvt, 1, DBAM0, &pvt->dbam1); in determine_ecc_sym_sz()
2909 if ((pvt->fam > 0x10 || pvt->model > 7) && tmp & BIT(25)) in determine_ecc_sym_sz()
2910 pvt->ecc_sym_sz = 8; in determine_ecc_sym_sz()
2917 static void umc_read_mc_regs(struct amd64_pvt *pvt) in umc_read_mc_regs() argument
2919 u8 nid = pvt->mc_node_id; in umc_read_mc_regs()
2927 umc = &pvt->umc[i]; in umc_read_mc_regs()
2929 if (!amd_smn_read(nid, umc_base + get_umc_reg(pvt, UMCCH_DIMM_CFG), &tmp)) in umc_read_mc_regs()
2950 static void dct_read_mc_regs(struct amd64_pvt *pvt) in dct_read_mc_regs() argument
2959 rdmsrq(MSR_K8_TOP_MEM1, pvt->top_mem); in dct_read_mc_regs()
2960 edac_dbg(0, " TOP_MEM: 0x%016llx\n", pvt->top_mem); in dct_read_mc_regs()
2965 rdmsrq(MSR_K8_TOP_MEM2, pvt->top_mem2); in dct_read_mc_regs()
2966 edac_dbg(0, " TOP_MEM2: 0x%016llx\n", pvt->top_mem2); in dct_read_mc_regs()
2971 amd64_read_pci_cfg(pvt->F3, NBCAP, &pvt->nbcap); in dct_read_mc_regs()
2973 read_dram_ctl_register(pvt); in dct_read_mc_regs()
2979 read_dram_base_limit_regs(pvt, range); in dct_read_mc_regs()
2981 rw = dram_rw(pvt, range); in dct_read_mc_regs()
2987 get_dram_base(pvt, range), in dct_read_mc_regs()
2988 get_dram_limit(pvt, range)); in dct_read_mc_regs()
2991 dram_intlv_en(pvt, range) ? "Enabled" : "Disabled", in dct_read_mc_regs()
2994 dram_intlv_sel(pvt, range), in dct_read_mc_regs()
2995 dram_dst_node(pvt, range)); in dct_read_mc_regs()
2998 amd64_read_pci_cfg(pvt->F1, DHAR, &pvt->dhar); in dct_read_mc_regs()
2999 amd64_read_dct_pci_cfg(pvt, 0, DBAM0, &pvt->dbam0); in dct_read_mc_regs()
3001 amd64_read_pci_cfg(pvt->F3, F10_ONLINE_SPARE, &pvt->online_spare); in dct_read_mc_regs()
3003 amd64_read_dct_pci_cfg(pvt, 0, DCLR0, &pvt->dclr0); in dct_read_mc_regs()
3004 amd64_read_dct_pci_cfg(pvt, 0, DCHR0, &pvt->dchr0); in dct_read_mc_regs()
3006 if (!dct_ganging_enabled(pvt)) { in dct_read_mc_regs()
3007 amd64_read_dct_pci_cfg(pvt, 1, DCLR0, &pvt->dclr1); in dct_read_mc_regs()
3008 amd64_read_dct_pci_cfg(pvt, 1, DCHR0, &pvt->dchr1); in dct_read_mc_regs()
3011 determine_ecc_sym_sz(pvt); in dct_read_mc_regs()
3048 static u32 dct_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr) in dct_get_csrow_nr_pages() argument
3050 u32 dbam = dct ? pvt->dbam1 : pvt->dbam0; in dct_get_csrow_nr_pages()
3056 nr_pages = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, csrow_nr); in dct_get_csrow_nr_pages()
3066 static u32 umc_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr_orig) in umc_get_csrow_nr_pages() argument
3071 cs_mode = umc_get_cs_mode(csrow_nr >> 1, dct, pvt); in umc_get_csrow_nr_pages()
3073 nr_pages = umc_addr_mask_to_cs_size(pvt, dct, cs_mode, csrow_nr); in umc_get_csrow_nr_pages()
3085 struct amd64_pvt *pvt = mci->pvt_info; in umc_init_csrows() local
3105 for_each_chip_select(cs, umc, pvt) { in umc_init_csrows()
3106 if (!csrow_enabled(cs, umc, pvt)) in umc_init_csrows()
3112 pvt->mc_node_id, cs); in umc_init_csrows()
3114 dimm->nr_pages = umc_get_csrow_nr_pages(pvt, umc, cs); in umc_init_csrows()
3115 dimm->mtype = pvt->umc[umc].dram_type; in umc_init_csrows()
3129 struct amd64_pvt *pvt = mci->pvt_info; in dct_init_csrows() local
3137 amd64_read_pci_cfg(pvt->F3, NBCFG, &val); in dct_init_csrows()
3139 pvt->nbcfg = val; in dct_init_csrows()
3142 pvt->mc_node_id, val, in dct_init_csrows()
3148 for_each_chip_select(i, 0, pvt) { in dct_init_csrows()
3149 bool row_dct0 = !!csrow_enabled(i, 0, pvt); in dct_init_csrows()
3152 if (pvt->fam != 0xf) in dct_init_csrows()
3153 row_dct1 = !!csrow_enabled(i, 1, pvt); in dct_init_csrows()
3161 pvt->mc_node_id, i); in dct_init_csrows()
3164 nr_pages = dct_get_csrow_nr_pages(pvt, 0, i); in dct_init_csrows()
3169 if (pvt->fam != 0xf && row_dct1) { in dct_init_csrows()
3170 int row_dct1_pages = dct_get_csrow_nr_pages(pvt, 1, i); in dct_init_csrows()
3179 if (pvt->nbcfg & NBCFG_ECC_ENABLE) { in dct_init_csrows()
3180 edac_mode = (pvt->nbcfg & NBCFG_CHIPKILL) in dct_init_csrows()
3185 for (j = 0; j < pvt->max_mcs; j++) { in dct_init_csrows()
3187 dimm->mtype = pvt->dram_type; in dct_init_csrows()
3353 static bool dct_ecc_enabled(struct amd64_pvt *pvt) in dct_ecc_enabled() argument
3355 u16 nid = pvt->mc_node_id; in dct_ecc_enabled()
3360 amd64_read_pci_cfg(pvt->F3, NBCFG, &value); in dct_ecc_enabled()
3374 static bool umc_ecc_enabled(struct amd64_pvt *pvt) in umc_ecc_enabled() argument
3382 umc = &pvt->umc[i]; in umc_ecc_enabled()
3391 edac_dbg(3, "Node %d: DRAM ECC %s.\n", pvt->mc_node_id, str_enabled_disabled(ecc_en)); in umc_ecc_enabled()
3397 umc_determine_edac_ctl_cap(struct mem_ctl_info *mci, struct amd64_pvt *pvt) in umc_determine_edac_ctl_cap() argument
3402 if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) { in umc_determine_edac_ctl_cap()
3403 ecc_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_ENABLED); in umc_determine_edac_ctl_cap()
3404 cpk_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_CHIPKILL_CAP); in umc_determine_edac_ctl_cap()
3406 dev_x4 &= !!(pvt->umc[i].dimm_cfg & BIT(6)); in umc_determine_edac_ctl_cap()
3407 dev_x16 &= !!(pvt->umc[i].dimm_cfg & BIT(7)); in umc_determine_edac_ctl_cap()
3429 struct amd64_pvt *pvt = mci->pvt_info; in dct_setup_mci_misc_attrs() local
3434 if (pvt->nbcap & NBCAP_SECDED) in dct_setup_mci_misc_attrs()
3437 if (pvt->nbcap & NBCAP_CHIPKILL) in dct_setup_mci_misc_attrs()
3440 mci->edac_cap = dct_determine_edac_cap(pvt); in dct_setup_mci_misc_attrs()
3442 mci->ctl_name = pvt->ctl_name; in dct_setup_mci_misc_attrs()
3443 mci->dev_name = pci_name(pvt->F3); in dct_setup_mci_misc_attrs()
3455 struct amd64_pvt *pvt = mci->pvt_info; in umc_setup_mci_misc_attrs() local
3460 umc_determine_edac_ctl_cap(mci, pvt); in umc_setup_mci_misc_attrs()
3462 mci->edac_cap = umc_determine_edac_cap(pvt); in umc_setup_mci_misc_attrs()
3464 mci->ctl_name = pvt->ctl_name; in umc_setup_mci_misc_attrs()
3465 mci->dev_name = pci_name(pvt->F3); in umc_setup_mci_misc_attrs()
3471 static int dct_hw_info_get(struct amd64_pvt *pvt) in dct_hw_info_get() argument
3473 int ret = reserve_mc_sibling_devs(pvt, pvt->f1_id, pvt->f2_id); in dct_hw_info_get()
3478 dct_prep_chip_selects(pvt); in dct_hw_info_get()
3479 dct_read_base_mask(pvt); in dct_hw_info_get()
3480 dct_read_mc_regs(pvt); in dct_hw_info_get()
3481 dct_determine_memory_type(pvt); in dct_hw_info_get()
3486 static int umc_hw_info_get(struct amd64_pvt *pvt) in umc_hw_info_get() argument
3488 pvt->umc = kcalloc(pvt->max_mcs, sizeof(struct amd64_umc), GFP_KERNEL); in umc_hw_info_get()
3489 if (!pvt->umc) in umc_hw_info_get()
3492 umc_prep_chip_selects(pvt); in umc_hw_info_get()
3493 umc_read_base_mask(pvt); in umc_hw_info_get()
3494 umc_read_mc_regs(pvt); in umc_hw_info_get()
3495 umc_determine_memory_type(pvt); in umc_hw_info_get()
3526 static int gpu_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, in gpu_addr_mask_to_cs_size() argument
3529 u32 addr_mask = pvt->csels[umc].csmasks[csrow_nr]; in gpu_addr_mask_to_cs_size()
3530 u32 addr_mask_sec = pvt->csels[umc].csmasks_sec[csrow_nr]; in gpu_addr_mask_to_cs_size()
3535 static void gpu_debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in gpu_debug_display_dimm_sizes() argument
3543 for_each_chip_select(cs, ctrl, pvt) { in gpu_debug_display_dimm_sizes()
3544 size = gpu_addr_mask_to_cs_size(pvt, ctrl, cs_mode, cs); in gpu_debug_display_dimm_sizes()
3549 static void gpu_dump_misc_regs(struct amd64_pvt *pvt) in gpu_dump_misc_regs() argument
3555 umc = &pvt->umc[i]; in gpu_dump_misc_regs()
3562 gpu_debug_display_dimm_sizes(pvt, i); in gpu_dump_misc_regs()
3566 static u32 gpu_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr) in gpu_get_csrow_nr_pages() argument
3571 nr_pages = gpu_addr_mask_to_cs_size(pvt, dct, cs_mode, csrow_nr); in gpu_get_csrow_nr_pages()
3582 struct amd64_pvt *pvt = mci->pvt_info; in gpu_init_csrows() local
3587 for_each_chip_select(cs, umc, pvt) { in gpu_init_csrows()
3588 if (!csrow_enabled(cs, umc, pvt)) in gpu_init_csrows()
3594 pvt->mc_node_id, cs); in gpu_init_csrows()
3596 dimm->nr_pages = gpu_get_csrow_nr_pages(pvt, umc, cs); in gpu_init_csrows()
3598 dimm->mtype = pvt->dram_type; in gpu_init_csrows()
3607 struct amd64_pvt *pvt = mci->pvt_info; in gpu_setup_mci_misc_attrs() local
3614 mci->ctl_name = pvt->ctl_name; in gpu_setup_mci_misc_attrs()
3615 mci->dev_name = pci_name(pvt->F3); in gpu_setup_mci_misc_attrs()
3622 static bool gpu_ecc_enabled(struct amd64_pvt *pvt) in gpu_ecc_enabled() argument
3627 static inline u32 gpu_get_umc_base(struct amd64_pvt *pvt, u8 umc, u8 channel) in gpu_get_umc_base() argument
3649 return pvt->gpu_umc_base + (umc << 20) + ((channel % 4) << 12); in gpu_get_umc_base()
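
gpu_get_umc_base() is pure address arithmetic: each UMC occupies a 1 MiB stride (umc << 20) and each of four channels a 4 KiB stride (channel << 12) above gpu_umc_base. A worked example using the 0x50000 base that the MI200 branch of per_family_init() sets further down:

    #include <stdint.h>
    #include <stdio.h>

    /* Same arithmetic as gpu_get_umc_base() in the match above. */
    static uint32_t gpu_umc_base(uint32_t base, uint8_t umc, uint8_t channel)
    {
        return base + ((uint32_t)umc << 20) + ((uint32_t)(channel % 4) << 12);
    }

    int main(void)
    {
        /* umc=2, channel=5 -> 0x50000 + 0x200000 + 0x1000 = 0x251000 */
        printf("0x%x\n", gpu_umc_base(0x50000, 2, 5));
        return 0;
    }
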
3652 static void gpu_read_mc_regs(struct amd64_pvt *pvt) in gpu_read_mc_regs() argument
3654 u8 nid = pvt->mc_node_id; in gpu_read_mc_regs()
3660 umc_base = gpu_get_umc_base(pvt, i, 0); in gpu_read_mc_regs()
3661 umc = &pvt->umc[i]; in gpu_read_mc_regs()
3674 static void gpu_read_base_mask(struct amd64_pvt *pvt) in gpu_read_base_mask() argument
3681 for_each_chip_select(cs, umc, pvt) { in gpu_read_base_mask()
3682 base_reg = gpu_get_umc_base(pvt, umc, cs) + UMCCH_BASE_ADDR; in gpu_read_base_mask()
3683 base = &pvt->csels[umc].csbases[cs]; in gpu_read_base_mask()
3685 if (!amd_smn_read(pvt->mc_node_id, base_reg, base)) { in gpu_read_base_mask()
3690 mask_reg = gpu_get_umc_base(pvt, umc, cs) + UMCCH_ADDR_MASK; in gpu_read_base_mask()
3691 mask = &pvt->csels[umc].csmasks[cs]; in gpu_read_base_mask()
3693 if (!amd_smn_read(pvt->mc_node_id, mask_reg, mask)) { in gpu_read_base_mask()
3701 static void gpu_prep_chip_selects(struct amd64_pvt *pvt) in gpu_prep_chip_selects() argument
3706 pvt->csels[umc].b_cnt = 8; in gpu_prep_chip_selects()
3707 pvt->csels[umc].m_cnt = 8; in gpu_prep_chip_selects()
3711 static int gpu_hw_info_get(struct amd64_pvt *pvt) in gpu_hw_info_get() argument
3715 ret = gpu_get_node_map(pvt); in gpu_hw_info_get()
3719 pvt->umc = kcalloc(pvt->max_mcs, sizeof(struct amd64_umc), GFP_KERNEL); in gpu_hw_info_get()
3720 if (!pvt->umc) in gpu_hw_info_get()
3723 gpu_prep_chip_selects(pvt); in gpu_hw_info_get()
3724 gpu_read_base_mask(pvt); in gpu_hw_info_get()
3725 gpu_read_mc_regs(pvt); in gpu_hw_info_get()
3730 static void hw_info_put(struct amd64_pvt *pvt) in hw_info_put() argument
3732 pci_dev_put(pvt->F1); in hw_info_put()
3733 pci_dev_put(pvt->F2); in hw_info_put()
3734 kfree(pvt->umc); in hw_info_put()
3763 static int per_family_init(struct amd64_pvt *pvt) in per_family_init() argument
3765 pvt->ext_model = boot_cpu_data.x86_model >> 4; in per_family_init()
3766 pvt->stepping = boot_cpu_data.x86_stepping; in per_family_init()
3767 pvt->model = boot_cpu_data.x86_model; in per_family_init()
3768 pvt->fam = boot_cpu_data.x86; in per_family_init()
3769 pvt->max_mcs = 2; in per_family_init()
3775 if (pvt->fam >= 0x17) in per_family_init()
3776 pvt->ops = &umc_ops; in per_family_init()
3778 pvt->ops = &dct_ops; in per_family_init()
3780 switch (pvt->fam) { in per_family_init()
3782 pvt->ctl_name = (pvt->ext_model >= K8_REV_F) ? in per_family_init()
3784 pvt->f1_id = PCI_DEVICE_ID_AMD_K8_NB_ADDRMAP; in per_family_init()
3785 pvt->f2_id = PCI_DEVICE_ID_AMD_K8_NB_MEMCTL; in per_family_init()
3786 pvt->ops->map_sysaddr_to_csrow = k8_map_sysaddr_to_csrow; in per_family_init()
3787 pvt->ops->dbam_to_cs = k8_dbam_to_chip_select; in per_family_init()
3791 pvt->ctl_name = "F10h"; in per_family_init()
3792 pvt->f1_id = PCI_DEVICE_ID_AMD_10H_NB_MAP; in per_family_init()
3793 pvt->f2_id = PCI_DEVICE_ID_AMD_10H_NB_DRAM; in per_family_init()
3794 pvt->ops->dbam_to_cs = f10_dbam_to_chip_select; in per_family_init()
3798 switch (pvt->model) { in per_family_init()
3800 pvt->ctl_name = "F15h_M30h"; in per_family_init()
3801 pvt->f1_id = PCI_DEVICE_ID_AMD_15H_M30H_NB_F1; in per_family_init()
3802 pvt->f2_id = PCI_DEVICE_ID_AMD_15H_M30H_NB_F2; in per_family_init()
3805 pvt->ctl_name = "F15h_M60h"; in per_family_init()
3806 pvt->f1_id = PCI_DEVICE_ID_AMD_15H_M60H_NB_F1; in per_family_init()
3807 pvt->f2_id = PCI_DEVICE_ID_AMD_15H_M60H_NB_F2; in per_family_init()
3808 pvt->ops->dbam_to_cs = f15_m60h_dbam_to_chip_select; in per_family_init()
3814 pvt->ctl_name = "F15h"; in per_family_init()
3815 pvt->f1_id = PCI_DEVICE_ID_AMD_15H_NB_F1; in per_family_init()
3816 pvt->f2_id = PCI_DEVICE_ID_AMD_15H_NB_F2; in per_family_init()
3817 pvt->ops->dbam_to_cs = f15_dbam_to_chip_select; in per_family_init()
3823 switch (pvt->model) { in per_family_init()
3825 pvt->ctl_name = "F16h_M30h"; in per_family_init()
3826 pvt->f1_id = PCI_DEVICE_ID_AMD_16H_M30H_NB_F1; in per_family_init()
3827 pvt->f2_id = PCI_DEVICE_ID_AMD_16H_M30H_NB_F2; in per_family_init()
3830 pvt->ctl_name = "F16h"; in per_family_init()
3831 pvt->f1_id = PCI_DEVICE_ID_AMD_16H_NB_F1; in per_family_init()
3832 pvt->f2_id = PCI_DEVICE_ID_AMD_16H_NB_F2; in per_family_init()
3838 switch (pvt->model) { in per_family_init()
3840 pvt->ctl_name = "F17h_M10h"; in per_family_init()
3843 pvt->ctl_name = "F17h_M30h"; in per_family_init()
3844 pvt->max_mcs = 8; in per_family_init()
3847 pvt->ctl_name = "F17h_M60h"; in per_family_init()
3850 pvt->ctl_name = "F17h_M70h"; in per_family_init()
3853 pvt->ctl_name = "F17h"; in per_family_init()
3859 pvt->ctl_name = "F18h"; in per_family_init()
3863 switch (pvt->model) { in per_family_init()
3865 pvt->ctl_name = "F19h"; in per_family_init()
3866 pvt->max_mcs = 8; in per_family_init()
3869 pvt->ctl_name = "F19h_M10h"; in per_family_init()
3870 pvt->max_mcs = 12; in per_family_init()
3871 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3874 pvt->ctl_name = "F19h_M20h"; in per_family_init()
3877 if (pvt->F3->device == PCI_DEVICE_ID_AMD_MI200_DF_F3) { in per_family_init()
3878 pvt->ctl_name = "MI200"; in per_family_init()
3879 pvt->max_mcs = 4; in per_family_init()
3880 pvt->dram_type = MEM_HBM2; in per_family_init()
3881 pvt->gpu_umc_base = 0x50000; in per_family_init()
3882 pvt->ops = &gpu_ops; in per_family_init()
3884 pvt->ctl_name = "F19h_M30h"; in per_family_init()
3885 pvt->max_mcs = 8; in per_family_init()
3889 pvt->ctl_name = "F19h_M50h"; in per_family_init()
3892 pvt->ctl_name = "F19h_M60h"; in per_family_init()
3893 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3896 pvt->ctl_name = "F19h_M70h"; in per_family_init()
3897 pvt->max_mcs = 4; in per_family_init()
3898 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3901 pvt->ctl_name = "F19h_M90h"; in per_family_init()
3902 pvt->max_mcs = 4; in per_family_init()
3903 pvt->dram_type = MEM_HBM3; in per_family_init()
3904 pvt->gpu_umc_base = 0x90000; in per_family_init()
3905 pvt->ops = &gpu_ops; in per_family_init()
3908 pvt->ctl_name = "F19h_MA0h"; in per_family_init()
3909 pvt->max_mcs = 12; in per_family_init()
3910 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3916 switch (pvt->model) { in per_family_init()
3918 pvt->ctl_name = "F1Ah"; in per_family_init()
3919 pvt->max_mcs = 12; in per_family_init()
3920 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3923 pvt->ctl_name = "F1Ah_M40h"; in per_family_init()
3924 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3927 pvt->ctl_name = "F1Ah_M50h"; in per_family_init()
3928 pvt->max_mcs = 16; in per_family_init()
3929 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3932 pvt->ctl_name = "F1Ah_M90h"; in per_family_init()
3933 pvt->max_mcs = 8; in per_family_init()
3934 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3937 pvt->ctl_name = "F1Ah_MA0h"; in per_family_init()
3938 pvt->max_mcs = 8; in per_family_init()
3939 pvt->flags.zn_regs_v2 = 1; in per_family_init()
3942 pvt->ctl_name = "F1Ah_MC0h"; in per_family_init()
3943 pvt->max_mcs = 16; in per_family_init()
3944 pvt->flags.zn_regs_v2 = 1; in per_family_init()
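
per_family_init() above is a dispatch table in switch form: family/model picks the ctl_name, PCI IDs, controller count, and the ops vector (umc_ops for family 0x17 and later, dct_ops before that, gpu_ops for the MI accelerators). A sketch reduced to just the ops selection:

    #include <stdio.h>

    struct ops { const char *name; };
    static const struct ops dct_ops = { "dct" }, umc_ops = { "umc" };

    /* Family >= 0x17 selects UMC ops, older parts DCT ops -- the same
     * split per_family_init() makes above, details omitted. */
    static const struct ops *pick_ops(unsigned fam)
    {
        return fam >= 0x17 ? &umc_ops : &dct_ops;
    }

    int main(void)
    {
        printf("fam 0x15 -> %s, fam 0x19 -> %s\n",
               pick_ops(0x15)->name, pick_ops(0x19)->name);
        return 0;
    }
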
3969 static unsigned int get_layer_size(struct amd64_pvt *pvt, u8 layer) in get_layer_size() argument
3971 bool is_gpu = (pvt->ops == &gpu_ops); in get_layer_size()
3974 return is_gpu ? pvt->max_mcs in get_layer_size()
3975 : pvt->csels[0].b_cnt; in get_layer_size()
3977 return is_gpu ? pvt->csels[0].b_cnt in get_layer_size()
3978 : pvt->max_mcs; in get_layer_size()
3981 static int init_one_instance(struct amd64_pvt *pvt) in init_one_instance() argument
3988 layers[0].size = get_layer_size(pvt, 0); in init_one_instance()
3991 layers[1].size = get_layer_size(pvt, 1); in init_one_instance()
3994 mci = edac_mc_alloc(pvt->mc_node_id, ARRAY_SIZE(layers), layers, 0); in init_one_instance()
3998 mci->pvt_info = pvt; in init_one_instance()
3999 mci->pdev = &pvt->F3->dev; in init_one_instance()
4001 pvt->ops->setup_mci_misc_attrs(mci); in init_one_instance()
4013 static bool instance_has_memory(struct amd64_pvt *pvt) in instance_has_memory() argument
4018 for (dct = 0; dct < pvt->max_mcs; dct++) { in instance_has_memory()
4019 for_each_chip_select(cs, dct, pvt) in instance_has_memory()
4020 cs_enabled |= csrow_enabled(cs, dct, pvt); in instance_has_memory()
4029 struct amd64_pvt *pvt = NULL; in probe_one_instance() local
4040 pvt = kzalloc(sizeof(struct amd64_pvt), GFP_KERNEL); in probe_one_instance()
4041 if (!pvt) in probe_one_instance()
4044 pvt->mc_node_id = nid; in probe_one_instance()
4045 pvt->F3 = F3; in probe_one_instance()
4047 ret = per_family_init(pvt); in probe_one_instance()
4051 ret = pvt->ops->hw_info_get(pvt); in probe_one_instance()
4056 if (!instance_has_memory(pvt)) { in probe_one_instance()
4061 if (!pvt->ops->ecc_enabled(pvt)) { in probe_one_instance()
4077 ret = init_one_instance(pvt); in probe_one_instance()
4087 amd64_info("%s detected (node %d).\n", pvt->ctl_name, pvt->mc_node_id); in probe_one_instance()
4090 pvt->ops->dump_misc_regs(pvt); in probe_one_instance()
4095 hw_info_put(pvt); in probe_one_instance()
4096 kfree(pvt); in probe_one_instance()
4111 struct amd64_pvt *pvt; in remove_one_instance() local
4118 pvt = mci->pvt_info; in remove_one_instance()
4128 hw_info_put(pvt); in remove_one_instance()
4129 kfree(pvt); in remove_one_instance()