Lines matching refs: pvt

This is a cross-reference listing over the AMD64 EDAC driver (drivers/edac/amd64_edac.c): each entry gives the source line number, the matching code, the enclosing function, and whether pvt is a function argument or a local variable at that site.

104 static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct)  in f15h_select_dct()  argument
108 amd64_read_pci_cfg(pvt->F1, DCT_CFG_SEL, &reg); in f15h_select_dct()
109 reg &= (pvt->model == 0x30) ? ~3 : ~1; in f15h_select_dct()
111 amd64_write_pci_cfg(pvt->F1, DCT_CFG_SEL, reg); in f15h_select_dct()
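
The whole function is the read-modify-write visible above: line 109 picks the mask, so model 0x30 parts keep a 2-bit DCT select field in DCT_CFG_SEL while the other parts keep 1 bit. A minimal userspace sketch of that sequence, with the PCI accessors mocked as a plain variable:

#include <stdint.h>
#include <stdio.h>

static uint32_t dct_cfg_sel;    /* stand-in for the F1 DCT_CFG_SEL register */

static void select_dct(uint8_t model, uint8_t dct)
{
        uint32_t reg = dct_cfg_sel;             /* amd64_read_pci_cfg() */

        /* model 0x30 has a 2-bit select field, other models 1 bit */
        reg &= (model == 0x30) ? ~3u : ~1u;
        reg |= dct;
        dct_cfg_sel = reg;                      /* amd64_write_pci_cfg() */
}

int main(void)
{
        dct_cfg_sel = 0xf0;
        select_dct(0x30, 3);
        printf("DCT_CFG_SEL = 0x%x\n", dct_cfg_sel);    /* prints 0xf3 */
        return 0;
}
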
128 static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct, in amd64_read_dct_pci_cfg() argument
131 switch (pvt->fam) { in amd64_read_dct_pci_cfg()
144 if (dct_ganging_enabled(pvt)) in amd64_read_dct_pci_cfg()
156 dct = (dct && pvt->model == 0x30) ? 3 : dct; in amd64_read_dct_pci_cfg()
157 f15h_select_dct(pvt, dct); in amd64_read_dct_pci_cfg()
168 return amd64_read_pci_cfg(pvt->F2, offset, val); in amd64_read_dct_pci_cfg()
185 static inline void __f17h_set_scrubval(struct amd64_pvt *pvt, u32 scrubval) in __f17h_set_scrubval() argument
194 pci_write_bits32(pvt->F6, F17H_SCR_LIMIT_ADDR, scrubval, 0xF); in __f17h_set_scrubval()
195 pci_write_bits32(pvt->F6, F17H_SCR_BASE_ADDR, 1, 0x1); in __f17h_set_scrubval()
197 pci_write_bits32(pvt->F6, F17H_SCR_BASE_ADDR, 0, 0x1); in __f17h_set_scrubval()
204 static int __set_scrub_rate(struct amd64_pvt *pvt, u32 new_bw, u32 min_rate) in __set_scrub_rate() argument
232 if (pvt->umc) { in __set_scrub_rate()
233 __f17h_set_scrubval(pvt, scrubval); in __set_scrub_rate()
234 } else if (pvt->fam == 0x15 && pvt->model == 0x60) { in __set_scrub_rate()
235 f15h_select_dct(pvt, 0); in __set_scrub_rate()
236 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
237 f15h_select_dct(pvt, 1); in __set_scrub_rate()
238 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
240 pci_write_bits32(pvt->F3, SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
251 struct amd64_pvt *pvt = mci->pvt_info; in set_scrub_rate() local
254 if (pvt->fam == 0xf) in set_scrub_rate()
257 if (pvt->fam == 0x15) { in set_scrub_rate()
259 if (pvt->model < 0x10) in set_scrub_rate()
260 f15h_select_dct(pvt, 0); in set_scrub_rate()
262 if (pvt->model == 0x60) in set_scrub_rate()
265 return __set_scrub_rate(pvt, bw, min_scrubrate); in set_scrub_rate()
270 struct amd64_pvt *pvt = mci->pvt_info; in get_scrub_rate() local
274 if (pvt->umc) { in get_scrub_rate()
275 amd64_read_pci_cfg(pvt->F6, F17H_SCR_BASE_ADDR, &scrubval); in get_scrub_rate()
277 amd64_read_pci_cfg(pvt->F6, F17H_SCR_LIMIT_ADDR, &scrubval); in get_scrub_rate()
283 } else if (pvt->fam == 0x15) { in get_scrub_rate()
285 if (pvt->model < 0x10) in get_scrub_rate()
286 f15h_select_dct(pvt, 0); in get_scrub_rate()
288 if (pvt->model == 0x60) in get_scrub_rate()
289 amd64_read_pci_cfg(pvt->F2, F15H_M60H_SCRCTRL, &scrubval); in get_scrub_rate()
291 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
293 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
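
The scrub-rate entries above split into three write paths (UMC via __f17h_set_scrubval, family 0x15 model 0x60 once per DCT, legacy F3 SCRCTRL) plus the matching read paths. The F17h pair is compact enough to model standalone. Note that the 0x5-0x14 validity window and the -0x5 bias are assumptions about lines the listing elides between 185 and 194; only the masked register writes are quoted above:

#include <stdint.h>
#include <stdio.h>

static uint32_t scr_base, scr_limit;    /* mocked F6 config registers */

/* masked read-modify-write, standing in for pci_write_bits32() */
static void write_bits32(uint32_t *reg, uint32_t value, uint32_t mask)
{
        *reg = (*reg & ~mask) | (value & mask);
}

/* write side, cf. lines 194-197 */
static void f17h_set_scrubval(uint32_t scrubval)
{
        if (scrubval >= 0x5 && scrubval <= 0x14) {      /* assumed valid range */
                write_bits32(&scr_limit, scrubval - 0x5, 0xF);
                write_bits32(&scr_base, 1, 0x1);        /* enable */
        } else {
                write_bits32(&scr_base, 0, 0x1);        /* disable */
        }
}

/* read side, cf. lines 274-283 */
static uint32_t f17h_get_scrubval(void)
{
        if (!(scr_base & 0x1))
                return 0;                       /* scrubber off */
        return (scr_limit & 0xF) + 0x5;         /* undo the write-side bias */
}

int main(void)
{
        f17h_set_scrubval(0xA);
        printf("scrubval = 0x%x\n", f17h_get_scrubval());       /* 0xa */
        return 0;
}
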
311 static bool base_limit_match(struct amd64_pvt *pvt, u64 sys_addr, u8 nid) in base_limit_match() argument
323 return ((addr >= get_dram_base(pvt, nid)) && in base_limit_match()
324 (addr <= get_dram_limit(pvt, nid))); in base_limit_match()
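
Lines 323-324 are an inclusive range check against the node's DRAM base/limit. A standalone sketch; the 40-bit truncation of sys_addr is an assumption about the elided lines 311-322 (these registers describe a 40-bit address space on K8):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool base_limit_match(uint64_t sys_addr, uint64_t base, uint64_t limit)
{
        /* assumed: compare only the low 40 address bits */
        uint64_t addr = sys_addr & 0x000000ffffffffffULL;

        return addr >= base && addr <= limit;   /* inclusive on both ends */
}

int main(void)
{
        printf("%d\n", base_limit_match(0x140000000ULL,
                                        0x100000000ULL, 0x1ffffffffULL));
        return 0;
}
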
336 struct amd64_pvt *pvt; in find_mc_by_sys_addr() local
344 pvt = mci->pvt_info; in find_mc_by_sys_addr()
351 intlv_en = dram_intlv_en(pvt, 0); in find_mc_by_sys_addr()
355 if (base_limit_match(pvt, sys_addr, node_id)) in find_mc_by_sys_addr()
371 if ((dram_intlv_sel(pvt, node_id) & intlv_en) == bits) in find_mc_by_sys_addr()
379 if (unlikely(!base_limit_match(pvt, sys_addr, node_id))) { in find_mc_by_sys_addr()
400 static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct, in get_cs_base_and_mask() argument
406 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in get_cs_base_and_mask()
407 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
408 csmask = pvt->csels[dct].csmasks[csrow]; in get_cs_base_and_mask()
417 } else if (pvt->fam == 0x16 || in get_cs_base_and_mask()
418 (pvt->fam == 0x15 && pvt->model >= 0x30)) { in get_cs_base_and_mask()
419 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
420 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
435 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
436 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
439 if (pvt->fam == 0x15) in get_cs_base_and_mask()
456 #define for_each_chip_select(i, dct, pvt) \ argument
457 for (i = 0; i < pvt->csels[dct].b_cnt; i++)
459 #define chip_select_base(i, dct, pvt) \ argument
460 pvt->csels[dct].csbases[i]
462 #define for_each_chip_select_mask(i, dct, pvt) \ argument
463 for (i = 0; i < pvt->csels[dct].m_cnt; i++)
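
The iterator macros and accessor on lines 456-463 are self-contained enough to lift into a toy program. The enable test in the loop body mirrors the driver's csrow_enabled(), assuming the enable flag sits in bit 0 of each base register, as it does on pre-Zen parts:

#include <stdint.h>
#include <stdio.h>

/* minimal stand-ins for the driver structures the macros walk */
struct chip_select {
        uint32_t csbases[8];
        uint8_t b_cnt;
        uint32_t csmasks[8];
        uint8_t m_cnt;
};

struct amd64_pvt {
        struct chip_select csels[2];
};

#define for_each_chip_select(i, dct, pvt) \
        for (i = 0; i < (pvt)->csels[dct].b_cnt; i++)

#define chip_select_base(i, dct, pvt) \
        ((pvt)->csels[dct].csbases[i])

int main(void)
{
        struct amd64_pvt pvt = {
                .csels[0] = {
                        .b_cnt = 4,
                        .csbases = { 0x00400001, 0, 0x00800001, 0 },
                },
        };
        int cs;

        for_each_chip_select(cs, 0, &pvt)
                printf("cs%d base 0x%08x %s\n", cs,
                       chip_select_base(cs, 0, &pvt),
                       (chip_select_base(cs, 0, &pvt) & 1) ? "enabled"
                                                           : "empty");
        return 0;
}
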
474 struct amd64_pvt *pvt; in input_addr_to_csrow() local
478 pvt = mci->pvt_info; in input_addr_to_csrow()
480 for_each_chip_select(csrow, 0, pvt) { in input_addr_to_csrow()
481 if (!csrow_enabled(csrow, 0, pvt)) in input_addr_to_csrow()
484 get_cs_base_and_mask(pvt, csrow, 0, &base, &mask); in input_addr_to_csrow()
491 pvt->mc_node_id); in input_addr_to_csrow()
497 (unsigned long)input_addr, pvt->mc_node_id); in input_addr_to_csrow()
521 struct amd64_pvt *pvt = mci->pvt_info; in get_dram_hole_info() local
524 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_E) { in get_dram_hole_info()
526 pvt->ext_model, pvt->mc_node_id); in get_dram_hole_info()
531 if (pvt->fam >= 0x10 && !dhar_mem_hoist_valid(pvt)) { in get_dram_hole_info()
536 if (!dhar_valid(pvt)) { in get_dram_hole_info()
538 pvt->mc_node_id); in get_dram_hole_info()
560 *hole_base = dhar_base(pvt); in get_dram_hole_info()
563 *hole_offset = (pvt->fam > 0xf) ? f10_dhar_offset(pvt) in get_dram_hole_info()
564 : k8_dhar_offset(pvt); in get_dram_hole_info()
567 pvt->mc_node_id, (unsigned long)*hole_base, in get_dram_hole_info()
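
Lines 560-564 give the hole's base (from DHAR) and the family-dependent offset. The hole size never shows up in the listing, but the DRAM hole ends at 4GB by construction, so under that assumption:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t hole_base = 0xd0000000;                /* from dhar_base() */
        uint64_t hole_size = (1ULL << 32) - hole_base;  /* hole runs to 4GB */

        printf("hole: base 0x%llx size 0x%llx\n",
               (unsigned long long)hole_base,
               (unsigned long long)hole_size);  /* 0x30000000 = 768 MiB */
        return 0;
}
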
579 struct amd64_pvt *pvt = mci->pvt_info; \
581 return sprintf(data, "0x%016llx\n", (u64)pvt->reg); \
630 struct amd64_pvt *pvt = mci->pvt_info; in inject_section_show() local
631 return sprintf(buf, "0x%x\n", pvt->injection.section); in inject_section_show()
645 struct amd64_pvt *pvt = mci->pvt_info; in inject_section_store() local
658 pvt->injection.section = (u32) value; in inject_section_store()
666 struct amd64_pvt *pvt = mci->pvt_info; in inject_word_show() local
667 return sprintf(buf, "0x%x\n", pvt->injection.word); in inject_word_show()
681 struct amd64_pvt *pvt = mci->pvt_info; in inject_word_store() local
694 pvt->injection.word = (u32) value; in inject_word_store()
703 struct amd64_pvt *pvt = mci->pvt_info; in inject_ecc_vector_show() local
704 return sprintf(buf, "0x%x\n", pvt->injection.bit_map); in inject_ecc_vector_show()
717 struct amd64_pvt *pvt = mci->pvt_info; in inject_ecc_vector_store() local
730 pvt->injection.bit_map = (u32) value; in inject_ecc_vector_store()
743 struct amd64_pvt *pvt = mci->pvt_info; in inject_read_store() local
753 section = F10_NB_ARRAY_DRAM | SET_NB_ARRAY_ADDR(pvt->injection.section); in inject_read_store()
755 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_ADDR, section); in inject_read_store()
757 word_bits = SET_NB_DRAM_INJECTION_READ(pvt->injection); in inject_read_store()
760 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, word_bits); in inject_read_store()
776 struct amd64_pvt *pvt = mci->pvt_info; in inject_write_store() local
786 section = F10_NB_ARRAY_DRAM | SET_NB_ARRAY_ADDR(pvt->injection.section); in inject_write_store()
788 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_ADDR, section); in inject_write_store()
790 word_bits = SET_NB_DRAM_INJECTION_WRITE(pvt->injection); in inject_write_store()
799 amd64_write_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, word_bits); in inject_write_store()
803 amd64_read_pci_cfg(pvt->F3, F10_NB_ARRAY_DATA, &tmp); in inject_write_store()
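
Lines 786-799 show the two-step arming sequence for error injection: a write to the NB array address register selects the DRAM section, then a write to the array data register plants the word/bit-map payload built by SET_NB_DRAM_INJECTION_WRITE(). A mock of just that ordering; the register offsets are assumptions (the listing never shows them) and the payload value is purely illustrative:

#include <stdint.h>
#include <stdio.h>

/* stand-in for amd64_write_pci_cfg(pvt->F3, off, val) */
static void f3_write(uint32_t off, uint32_t val)
{
        printf("F3 cfg[0x%02x] <- 0x%08x\n", off, val);
}

int main(void)
{
        uint32_t section = 0x2;                 /* pvt->injection.section */
        uint32_t word_bits = 0x00020001;        /* illustrative payload */

        f3_write(0xb8, section);        /* F10_NB_ARRAY_ADDR, assumed offset */
        f3_write(0xbc, word_bits);      /* F10_NB_ARRAY_DATA, assumed offset */
        return 0;
}
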
839 struct amd64_pvt *pvt = mci->pvt_info; in inj_is_visible() local
842 if (pvt->fam >= 0x10 && pvt->fam <= 0x16) in inj_is_visible()
885 struct amd64_pvt *pvt = mci->pvt_info; in sys_addr_to_dram_addr() local
889 dram_base = get_dram_base(pvt, pvt->mc_node_id); in sys_addr_to_dram_addr()
940 struct amd64_pvt *pvt; in dram_addr_to_input_addr() local
944 pvt = mci->pvt_info; in dram_addr_to_input_addr()
950 intlv_shift = num_node_interleave_bits(dram_intlv_en(pvt, 0)); in dram_addr_to_input_addr()
1287 static unsigned long determine_edac_cap(struct amd64_pvt *pvt) in determine_edac_cap() argument
1292 if (pvt->umc) { in determine_edac_cap()
1296 if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT)) in determine_edac_cap()
1302 if (pvt->umc[i].umc_cfg & BIT(12)) in determine_edac_cap()
1309 bit = (pvt->fam > 0xf || pvt->ext_model >= K8_REV_F) in determine_edac_cap()
1313 if (pvt->dclr0 & BIT(bit)) in determine_edac_cap()
1322 static void debug_dump_dramcfg_low(struct amd64_pvt *pvt, u32 dclr, int chan) in debug_dump_dramcfg_low() argument
1326 if (pvt->dram_type == MEM_LRDDR3) { in debug_dump_dramcfg_low()
1327 u32 dcsm = pvt->csels[chan].csmasks[0]; in debug_dump_dramcfg_low()
1343 if (pvt->fam == 0x10) in debug_dump_dramcfg_low()
1363 static int f17_get_cs_mode(int dimm, u8 ctrl, struct amd64_pvt *pvt) in f17_get_cs_mode() argument
1368 if (csrow_enabled(2 * dimm, ctrl, pvt)) in f17_get_cs_mode()
1371 if (csrow_enabled(2 * dimm + 1, ctrl, pvt)) in f17_get_cs_mode()
1375 if (csrow_sec_enabled(2 * dimm + 1, ctrl, pvt)) in f17_get_cs_mode()
1383 for_each_chip_select(base, ctrl, pvt) in f17_get_cs_mode()
1384 count += csrow_enabled(base, ctrl, pvt); in f17_get_cs_mode()
1387 pvt->csels[ctrl].csmasks[0] == pvt->csels[ctrl].csmasks[1]) { in f17_get_cs_mode()
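
f17_get_cs_mode() composes a bit mask out of which chip selects of a DIMM pair are enabled: even primary, odd primary, and an odd secondary select for asymmetric dual-rank parts (lines 1368-1375). A sketch of that composition; the flag names follow the driver's CS_* convention, but the bit values here are illustrative:

#include <stdio.h>

#define CS_EVEN_PRIMARY         (1 << 0)        /* illustrative values */
#define CS_ODD_PRIMARY          (1 << 1)
#define CS_ODD_SECONDARY        (1 << 2)

static int get_cs_mode(int even_en, int odd_en, int odd_sec_en)
{
        int cs_mode = 0;

        if (even_en)            /* csrow_enabled(2 * dimm, ...) */
                cs_mode |= CS_EVEN_PRIMARY;
        if (odd_en)             /* csrow_enabled(2 * dimm + 1, ...) */
                cs_mode |= CS_ODD_PRIMARY;
        if (odd_sec_en)         /* csrow_sec_enabled(2 * dimm + 1, ...) */
                cs_mode |= CS_ODD_SECONDARY;

        return cs_mode;
}

int main(void)
{
        printf("dual-rank DIMM -> cs_mode 0x%x\n", get_cs_mode(1, 1, 0));
        return 0;
}
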
1395 static void debug_display_dimm_sizes_df(struct amd64_pvt *pvt, u8 ctrl) in debug_display_dimm_sizes_df() argument
1405 cs_mode = f17_get_cs_mode(dimm, ctrl, pvt); in debug_display_dimm_sizes_df()
1407 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, cs_mode, cs0); in debug_display_dimm_sizes_df()
1408 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, cs_mode, cs1); in debug_display_dimm_sizes_df()
1416 static void __dump_misc_regs_df(struct amd64_pvt *pvt) in __dump_misc_regs_df() argument
1423 umc = &pvt->umc[i]; in __dump_misc_regs_df()
1430 amd_smn_read(pvt->mc_node_id, umc_base + UMCCH_ECC_BAD_SYMBOL, &tmp); in __dump_misc_regs_df()
1433 amd_smn_read(pvt->mc_node_id, umc_base + UMCCH_UMC_CAP, &tmp); in __dump_misc_regs_df()
1448 amd_smn_read(pvt->mc_node_id, in __dump_misc_regs_df()
1455 debug_display_dimm_sizes_df(pvt, i); in __dump_misc_regs_df()
1459 pvt->dhar, dhar_base(pvt)); in __dump_misc_regs_df()
1463 static void __dump_misc_regs(struct amd64_pvt *pvt) in __dump_misc_regs() argument
1465 edac_dbg(1, "F3xE8 (NB Cap): 0x%08x\n", pvt->nbcap); in __dump_misc_regs()
1468 (pvt->nbcap & NBCAP_DCT_DUAL) ? "yes" : "no"); in __dump_misc_regs()
1471 (pvt->nbcap & NBCAP_SECDED) ? "yes" : "no", in __dump_misc_regs()
1472 (pvt->nbcap & NBCAP_CHIPKILL) ? "yes" : "no"); in __dump_misc_regs()
1474 debug_dump_dramcfg_low(pvt, pvt->dclr0, 0); in __dump_misc_regs()
1476 edac_dbg(1, "F3xB0 (Online Spare): 0x%08x\n", pvt->online_spare); in __dump_misc_regs()
1479 pvt->dhar, dhar_base(pvt), in __dump_misc_regs()
1480 (pvt->fam == 0xf) ? k8_dhar_offset(pvt) in __dump_misc_regs()
1481 : f10_dhar_offset(pvt)); in __dump_misc_regs()
1483 debug_display_dimm_sizes(pvt, 0); in __dump_misc_regs()
1486 if (pvt->fam == 0xf) in __dump_misc_regs()
1489 debug_display_dimm_sizes(pvt, 1); in __dump_misc_regs()
1492 if (!dct_ganging_enabled(pvt)) in __dump_misc_regs()
1493 debug_dump_dramcfg_low(pvt, pvt->dclr1, 1); in __dump_misc_regs()
1497 static void dump_misc_regs(struct amd64_pvt *pvt) in dump_misc_regs() argument
1499 if (pvt->umc) in dump_misc_regs()
1500 __dump_misc_regs_df(pvt); in dump_misc_regs()
1502 __dump_misc_regs(pvt); in dump_misc_regs()
1504 edac_dbg(1, " DramHoleValid: %s\n", dhar_valid(pvt) ? "yes" : "no"); in dump_misc_regs()
1506 amd64_info("using x%u syndromes.\n", pvt->ecc_sym_sz); in dump_misc_regs()
1512 static void prep_chip_selects(struct amd64_pvt *pvt) in prep_chip_selects() argument
1514 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in prep_chip_selects()
1515 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
1516 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 8; in prep_chip_selects()
1517 } else if (pvt->fam == 0x15 && pvt->model == 0x30) { in prep_chip_selects()
1518 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 4; in prep_chip_selects()
1519 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 2; in prep_chip_selects()
1520 } else if (pvt->fam >= 0x17) { in prep_chip_selects()
1524 pvt->csels[umc].b_cnt = 4; in prep_chip_selects()
1525 pvt->csels[umc].m_cnt = fam_type->flags.zn_regs_v2 ? 4 : 2; in prep_chip_selects()
1529 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
1530 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 4; in prep_chip_selects()
1534 static void read_umc_base_mask(struct amd64_pvt *pvt) in read_umc_base_mask() argument
1548 for_each_chip_select(cs, umc, pvt) { in read_umc_base_mask()
1549 base = &pvt->csels[umc].csbases[cs]; in read_umc_base_mask()
1550 base_sec = &pvt->csels[umc].csbases_sec[cs]; in read_umc_base_mask()
1555 if (!amd_smn_read(pvt->mc_node_id, base_reg, base)) in read_umc_base_mask()
1559 if (!amd_smn_read(pvt->mc_node_id, base_reg_sec, base_sec)) in read_umc_base_mask()
1567 for_each_chip_select_mask(cs, umc, pvt) { in read_umc_base_mask()
1568 mask = &pvt->csels[umc].csmasks[cs]; in read_umc_base_mask()
1569 mask_sec = &pvt->csels[umc].csmasks_sec[cs]; in read_umc_base_mask()
1574 if (!amd_smn_read(pvt->mc_node_id, mask_reg, mask)) in read_umc_base_mask()
1578 if (!amd_smn_read(pvt->mc_node_id, mask_reg_sec, mask_sec)) in read_umc_base_mask()
1588 static void read_dct_base_mask(struct amd64_pvt *pvt) in read_dct_base_mask() argument
1592 prep_chip_selects(pvt); in read_dct_base_mask()
1594 if (pvt->umc) in read_dct_base_mask()
1595 return read_umc_base_mask(pvt); in read_dct_base_mask()
1597 for_each_chip_select(cs, 0, pvt) { in read_dct_base_mask()
1600 u32 *base0 = &pvt->csels[0].csbases[cs]; in read_dct_base_mask()
1601 u32 *base1 = &pvt->csels[1].csbases[cs]; in read_dct_base_mask()
1603 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, base0)) in read_dct_base_mask()
1607 if (pvt->fam == 0xf) in read_dct_base_mask()
1610 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, base1)) in read_dct_base_mask()
1612 cs, *base1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
1616 for_each_chip_select_mask(cs, 0, pvt) { in read_dct_base_mask()
1619 u32 *mask0 = &pvt->csels[0].csmasks[cs]; in read_dct_base_mask()
1620 u32 *mask1 = &pvt->csels[1].csmasks[cs]; in read_dct_base_mask()
1622 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, mask0)) in read_dct_base_mask()
1626 if (pvt->fam == 0xf) in read_dct_base_mask()
1629 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, mask1)) in read_dct_base_mask()
1631 cs, *mask1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
1636 static void determine_memory_type_df(struct amd64_pvt *pvt) in determine_memory_type_df() argument
1642 umc = &pvt->umc[i]; in determine_memory_type_df()
1673 static void determine_memory_type(struct amd64_pvt *pvt) in determine_memory_type() argument
1677 if (pvt->umc) in determine_memory_type()
1678 return determine_memory_type_df(pvt); in determine_memory_type()
1680 switch (pvt->fam) { in determine_memory_type()
1682 if (pvt->ext_model >= K8_REV_F) in determine_memory_type()
1685 pvt->dram_type = (pvt->dclr0 & BIT(18)) ? MEM_DDR : MEM_RDDR; in determine_memory_type()
1689 if (pvt->dchr0 & DDR3_MODE) in determine_memory_type()
1692 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR2 : MEM_RDDR2; in determine_memory_type()
1696 if (pvt->model < 0x60) in determine_memory_type()
1708 amd64_read_dct_pci_cfg(pvt, 0, DRAM_CONTROL, &dram_ctrl); in determine_memory_type()
1709 dcsm = pvt->csels[0].csmasks[0]; in determine_memory_type()
1712 pvt->dram_type = MEM_DDR4; in determine_memory_type()
1713 else if (pvt->dclr0 & BIT(16)) in determine_memory_type()
1714 pvt->dram_type = MEM_DDR3; in determine_memory_type()
1716 pvt->dram_type = MEM_LRDDR3; in determine_memory_type()
1718 pvt->dram_type = MEM_RDDR3; in determine_memory_type()
1726 WARN(1, KERN_ERR "%s: Family??? 0x%x\n", __func__, pvt->fam); in determine_memory_type()
1727 pvt->dram_type = MEM_EMPTY; in determine_memory_type()
1732 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3; in determine_memory_type()
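
The family 0x15 model 0x60 leg (lines 1708-1718) has the most to untangle, telling DDR4, DDR3, LRDDR3 and RDDR3 apart from register state. The listing shows the four outcomes but elides two of the predicates; the DRAM_CONTROL field test and the dcsm low-bits test below are assumptions filling those gaps:

#include <stdint.h>
#include <stdio.h>

enum mem_type { MEM_RDDR3, MEM_LRDDR3, MEM_DDR3, MEM_DDR4 };

static enum mem_type f15_m60h_dram_type(uint32_t dram_ctrl, uint32_t dclr0,
                                        uint32_t dcsm)
{
        if (((dram_ctrl >> 8) & 0x7) == 0x4)    /* assumed DDR4 mode field */
                return MEM_DDR4;
        if (dclr0 & (1u << 16))                 /* cf. line 1713 */
                return MEM_DDR3;
        if (dcsm & 0x3)                         /* assumed rank-multiply test */
                return MEM_LRDDR3;
        return MEM_RDDR3;
}

int main(void)
{
        printf("type = %d (MEM_DDR4 = %d)\n",
               f15_m60h_dram_type(0x400, 0, 0), MEM_DDR4);
        return 0;
}
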
1736 static int k8_early_channel_count(struct amd64_pvt *pvt) in k8_early_channel_count() argument
1740 if (pvt->ext_model >= K8_REV_F) in k8_early_channel_count()
1742 flag = pvt->dclr0 & WIDTH_128; in k8_early_channel_count()
1745 flag = pvt->dclr0 & REVE_WIDTH_128; in k8_early_channel_count()
1748 pvt->dclr1 = 0; in k8_early_channel_count()
1754 static u64 get_error_address(struct amd64_pvt *pvt, struct mce *m) in get_error_address() argument
1766 pvt = mci->pvt_info; in get_error_address()
1768 if (pvt->fam == 0xf) { in get_error_address()
1778 if (pvt->fam == 0x15) { in get_error_address()
1787 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_LIM, &tmp); in get_error_address()
1802 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_BASE, &tmp); in get_error_address()
1835 static void read_dram_base_limit_regs(struct amd64_pvt *pvt, unsigned range) in read_dram_base_limit_regs() argument
1843 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_LO + off, &pvt->ranges[range].base.lo); in read_dram_base_limit_regs()
1844 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_LO + off, &pvt->ranges[range].lim.lo); in read_dram_base_limit_regs()
1846 if (pvt->fam == 0xf) in read_dram_base_limit_regs()
1849 if (!dram_rw(pvt, range)) in read_dram_base_limit_regs()
1852 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_HI + off, &pvt->ranges[range].base.hi); in read_dram_base_limit_regs()
1853 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_HI + off, &pvt->ranges[range].lim.hi); in read_dram_base_limit_regs()
1856 if (pvt->fam != 0x15) in read_dram_base_limit_regs()
1859 nb = node_to_amd_nb(dram_dst_node(pvt, range)); in read_dram_base_limit_regs()
1863 if (pvt->model == 0x60) in read_dram_base_limit_regs()
1865 else if (pvt->model == 0x30) in read_dram_base_limit_regs()
1876 pvt->ranges[range].lim.lo &= GENMASK_ULL(15, 0); in read_dram_base_limit_regs()
1879 pvt->ranges[range].lim.lo |= ((llim & 0x1fff) << 3 | 0x7) << 16; in read_dram_base_limit_regs()
1881 pvt->ranges[range].lim.hi &= GENMASK_ULL(7, 0); in read_dram_base_limit_regs()
1884 pvt->ranges[range].lim.hi |= llim >> 13; in read_dram_base_limit_regs()
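
Lines 1876-1884 splice a DramLocalNodeLim value (llim, read from F1 space on elided lines) into the cached range limit: llim[12:0] becomes lim.lo[31:19] with bits [18:16] forced high, and llim[20:13] lands in lim.hi[7:0]. The same bit surgery as a standalone demo:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t llim = 0x0fffff;       /* illustrative 21-bit limit value */
        uint32_t lim_lo = 0xdeadbeef, lim_hi = 0x12345678;

        lim_lo &= 0xffff;                               /* GENMASK_ULL(15, 0) */
        lim_lo |= ((llim & 0x1fff) << 3 | 0x7) << 16;   /* {llim[12:0],111b} */

        lim_hi &= 0xff;                                 /* GENMASK_ULL(7, 0) */
        lim_hi |= llim >> 13;                           /* llim[20:13] */

        printf("lim.lo 0x%08x lim.hi 0x%08x\n", lim_lo, lim_hi);
        return 0;
}
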
1892 struct amd64_pvt *pvt = mci->pvt_info; in k8_map_sysaddr_to_csrow() local
1916 if (pvt->nbcfg & NBCFG_CHIPKILL) { in k8_map_sysaddr_to_csrow()
1957 static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in k8_dbam_to_chip_select() argument
1960 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in k8_dbam_to_chip_select()
1962 if (pvt->ext_model >= K8_REV_F) { in k8_dbam_to_chip_select()
1966 else if (pvt->ext_model >= K8_REV_D) { in k8_dbam_to_chip_select()
2012 static int f1x_early_channel_count(struct amd64_pvt *pvt) in f1x_early_channel_count() argument
2017 if (pvt->fam == 0x10 && (pvt->dclr0 & WIDTH_128)) in f1x_early_channel_count()
2036 u32 dbam = (i ? pvt->dbam1 : pvt->dbam0); in f1x_early_channel_count()
2054 static int f17_early_channel_count(struct amd64_pvt *pvt) in f17_early_channel_count() argument
2060 channels += !!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT); in f17_early_channel_count()
2124 static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f10_dbam_to_chip_select() argument
2127 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in f10_dbam_to_chip_select()
2131 if (pvt->dchr0 & DDR3_MODE || pvt->dchr1 & DDR3_MODE) in f10_dbam_to_chip_select()
2140 static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_dbam_to_chip_select() argument
2149 static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_m60h_dbam_to_chip_select() argument
2153 u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr]; in f15_m60h_dbam_to_chip_select()
2157 if (pvt->dram_type == MEM_DDR4) { in f15_m60h_dbam_to_chip_select()
2162 } else if (pvt->dram_type == MEM_LRDDR3) { in f15_m60h_dbam_to_chip_select()
2182 static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f16_dbam_to_chip_select() argument
2194 static int f17_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, in f17_addr_mask_to_cs_size() argument
2239 addr_mask_orig = pvt->csels[umc].csmasks_sec[cs_mask_nr]; in f17_addr_mask_to_cs_size()
2241 addr_mask_orig = pvt->csels[umc].csmasks[cs_mask_nr]; in f17_addr_mask_to_cs_size()
2272 static void read_dram_ctl_register(struct amd64_pvt *pvt) in read_dram_ctl_register() argument
2275 if (pvt->fam == 0xf) in read_dram_ctl_register()
2278 if (!amd64_read_pci_cfg(pvt->F2, DCT_SEL_LO, &pvt->dct_sel_lo)) { in read_dram_ctl_register()
2280 pvt->dct_sel_lo, dct_sel_baseaddr(pvt)); in read_dram_ctl_register()
2283 (dct_ganging_enabled(pvt) ? "ganged" : "unganged")); in read_dram_ctl_register()
2285 if (!dct_ganging_enabled(pvt)) in read_dram_ctl_register()
2287 (dct_high_range_enabled(pvt) ? "yes" : "no")); in read_dram_ctl_register()
2290 (dct_data_intlv_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
2291 (dct_memory_cleared(pvt) ? "yes" : "no")); in read_dram_ctl_register()
2295 (dct_interleave_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
2296 dct_sel_interleave_addr(pvt)); in read_dram_ctl_register()
2299 amd64_read_pci_cfg(pvt->F2, DCT_SEL_HI, &pvt->dct_sel_hi); in read_dram_ctl_register()
2306 static u8 f15_m30h_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f15_m30h_determine_channel() argument
2320 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_determine_channel()
2337 static u8 f1x_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f1x_determine_channel() argument
2340 u8 dct_sel_high = (pvt->dct_sel_lo >> 1) & 1; in f1x_determine_channel()
2342 if (dct_ganging_enabled(pvt)) in f1x_determine_channel()
2351 if (dct_interleave_enabled(pvt)) { in f1x_determine_channel()
2352 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f1x_determine_channel()
2374 if (dct_high_range_enabled(pvt)) in f1x_determine_channel()
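
Most of f1x_determine_channel()'s body falls between the quoted lines (2342-2374). In the interleaved case the channel comes from XORing one address bit with the parity of a short window of higher address bits. A sketch of that pattern; the [20:16] window and the bit-9 select are assumptions, since the real positions depend on F2x110[DctSelIntLvAddr]:

#include <stdint.h>
#include <stdio.h>

static unsigned int channel_from_addr(uint64_t sys_addr)
{
        /* parity of addr[20:16], standing in for hweight_long() & 1 */
        unsigned int parity =
                __builtin_popcountll((sys_addr >> 16) & 0x1F) & 1;

        return ((sys_addr >> 9) & 1) ^ parity;  /* 0 = DCT0, 1 = DCT1 */
}

int main(void)
{
        printf("channel = %u\n", channel_from_addr(0x12345600ULL));
        return 0;
}
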
2381 static u64 f1x_get_norm_dct_addr(struct amd64_pvt *pvt, u8 range, in f1x_get_norm_dct_addr() argument
2386 u64 dram_base = get_dram_base(pvt, range); in f1x_get_norm_dct_addr()
2387 u64 hole_off = f10_dhar_offset(pvt); in f1x_get_norm_dct_addr()
2388 u64 dct_sel_base_off = (u64)(pvt->dct_sel_hi & 0xFFFFFC00) << 16; in f1x_get_norm_dct_addr()
2403 dct_sel_base_addr < dhar_base(pvt)) && in f1x_get_norm_dct_addr()
2404 dhar_valid(pvt) && in f1x_get_norm_dct_addr()
2419 if (dhar_valid(pvt) && (sys_addr >= BIT_64(32))) in f1x_get_norm_dct_addr()
2432 static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow) in f10_process_possible_spare() argument
2436 if (online_spare_swap_done(pvt, dct) && in f10_process_possible_spare()
2437 csrow == online_spare_bad_dramcs(pvt, dct)) { in f10_process_possible_spare()
2439 for_each_chip_select(tmp_cs, dct, pvt) { in f10_process_possible_spare()
2440 if (chip_select_base(tmp_cs, dct, pvt) & 0x2) { in f10_process_possible_spare()
2460 struct amd64_pvt *pvt; in f1x_lookup_addr_in_dct() local
2469 pvt = mci->pvt_info; in f1x_lookup_addr_in_dct()
2473 for_each_chip_select(csrow, dct, pvt) { in f1x_lookup_addr_in_dct()
2474 if (!csrow_enabled(csrow, dct, pvt)) in f1x_lookup_addr_in_dct()
2477 get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask); in f1x_lookup_addr_in_dct()
2488 if (pvt->fam == 0x15 && pvt->model >= 0x30) { in f1x_lookup_addr_in_dct()
2492 cs_found = f10_process_possible_spare(pvt, dct, csrow); in f1x_lookup_addr_in_dct()
2506 static u64 f1x_swap_interleaved_region(struct amd64_pvt *pvt, u64 sys_addr) in f1x_swap_interleaved_region() argument
2510 if (pvt->fam == 0x10) { in f1x_swap_interleaved_region()
2512 if (pvt->model < 4 || (pvt->model < 0xa && pvt->stepping < 3)) in f1x_swap_interleaved_region()
2516 amd64_read_pci_cfg(pvt->F2, SWAP_INTLV_REG, &swap_reg); in f1x_swap_interleaved_region()
2536 static int f1x_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f1x_match_to_this_node() argument
2545 u8 node_id = dram_dst_node(pvt, range); in f1x_match_to_this_node()
2546 u8 intlv_en = dram_intlv_en(pvt, range); in f1x_match_to_this_node()
2547 u32 intlv_sel = dram_intlv_sel(pvt, range); in f1x_match_to_this_node()
2550 range, sys_addr, get_dram_limit(pvt, range)); in f1x_match_to_this_node()
2552 if (dhar_valid(pvt) && in f1x_match_to_this_node()
2553 dhar_base(pvt) <= sys_addr && in f1x_match_to_this_node()
2563 sys_addr = f1x_swap_interleaved_region(pvt, sys_addr); in f1x_match_to_this_node()
2565 dct_sel_base = dct_sel_baseaddr(pvt); in f1x_match_to_this_node()
2571 if (dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
2572 !dct_ganging_enabled(pvt) && in f1x_match_to_this_node()
2576 channel = f1x_determine_channel(pvt, sys_addr, high_range, intlv_en); in f1x_match_to_this_node()
2578 chan_addr = f1x_get_norm_dct_addr(pvt, range, sys_addr, in f1x_match_to_this_node()
2587 if (dct_interleave_enabled(pvt) && in f1x_match_to_this_node()
2588 !dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
2589 !dct_ganging_enabled(pvt)) { in f1x_match_to_this_node()
2591 if (dct_sel_interleave_addr(pvt) != 1) { in f1x_match_to_this_node()
2592 if (dct_sel_interleave_addr(pvt) == 0x3) in f1x_match_to_this_node()
2616 static int f15_m30h_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f15_m30h_match_to_this_node() argument
2626 u64 dhar_offset = f10_dhar_offset(pvt); in f15_m30h_match_to_this_node()
2627 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_match_to_this_node()
2628 u8 node_id = dram_dst_node(pvt, range); in f15_m30h_match_to_this_node()
2629 u8 intlv_en = dram_intlv_en(pvt, range); in f15_m30h_match_to_this_node()
2631 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_BASE, &dct_cont_base_reg); in f15_m30h_match_to_this_node()
2632 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_LIMIT, &dct_cont_limit_reg); in f15_m30h_match_to_this_node()
2638 range, sys_addr, get_dram_limit(pvt, range)); in f15_m30h_match_to_this_node()
2640 if (!(get_dram_base(pvt, range) <= sys_addr) && in f15_m30h_match_to_this_node()
2641 !(get_dram_limit(pvt, range) >= sys_addr)) in f15_m30h_match_to_this_node()
2644 if (dhar_valid(pvt) && in f15_m30h_match_to_this_node()
2645 dhar_base(pvt) <= sys_addr && in f15_m30h_match_to_this_node()
2653 dct_base = (u64) dct_sel_baseaddr(pvt); in f15_m30h_match_to_this_node()
2667 if (pvt->model >= 0x60) in f15_m30h_match_to_this_node()
2668 channel = f1x_determine_channel(pvt, sys_addr, false, intlv_en); in f15_m30h_match_to_this_node()
2670 channel = f15_m30h_determine_channel(pvt, sys_addr, intlv_en, in f15_m30h_match_to_this_node()
2710 amd64_read_pci_cfg(pvt->F1, in f15_m30h_match_to_this_node()
2716 f15h_select_dct(pvt, channel); in f15_m30h_match_to_this_node()
2738 static int f1x_translate_sysaddr_to_cs(struct amd64_pvt *pvt, in f1x_translate_sysaddr_to_cs() argument
2746 if (!dram_rw(pvt, range)) in f1x_translate_sysaddr_to_cs()
2749 if (pvt->fam == 0x15 && pvt->model >= 0x30) in f1x_translate_sysaddr_to_cs()
2750 cs_found = f15_m30h_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2754 else if ((get_dram_base(pvt, range) <= sys_addr) && in f1x_translate_sysaddr_to_cs()
2755 (get_dram_limit(pvt, range) >= sys_addr)) { in f1x_translate_sysaddr_to_cs()
2756 cs_found = f1x_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2775 struct amd64_pvt *pvt = mci->pvt_info; in f1x_map_sysaddr_to_csrow() local
2779 err->csrow = f1x_translate_sysaddr_to_cs(pvt, sys_addr, &err->channel); in f1x_map_sysaddr_to_csrow()
2790 if (dct_ganging_enabled(pvt)) in f1x_map_sysaddr_to_csrow()
2798 static void debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in debug_display_dimm_sizes() argument
2801 u32 *dcsb = ctrl ? pvt->csels[1].csbases : pvt->csels[0].csbases; in debug_display_dimm_sizes()
2802 u32 dbam = ctrl ? pvt->dbam1 : pvt->dbam0; in debug_display_dimm_sizes()
2804 if (pvt->fam == 0xf) { in debug_display_dimm_sizes()
2806 if (pvt->ext_model < K8_REV_F) in debug_display_dimm_sizes()
2812 if (pvt->fam == 0x10) { in debug_display_dimm_sizes()
2813 dbam = (ctrl && !dct_ganging_enabled(pvt)) ? pvt->dbam1 in debug_display_dimm_sizes()
2814 : pvt->dbam0; in debug_display_dimm_sizes()
2815 dcsb = (ctrl && !dct_ganging_enabled(pvt)) ? in debug_display_dimm_sizes()
2816 pvt->csels[1].csbases : in debug_display_dimm_sizes()
2817 pvt->csels[0].csbases; in debug_display_dimm_sizes()
2819 dbam = pvt->dbam0; in debug_display_dimm_sizes()
2820 dcsb = pvt->csels[1].csbases; in debug_display_dimm_sizes()
2838 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
2844 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
3152 struct amd64_pvt *pvt = mci->pvt_info; in get_channel_from_ecc_syndrome() local
3155 if (pvt->ecc_sym_sz == 8) in get_channel_from_ecc_syndrome()
3158 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
3159 else if (pvt->ecc_sym_sz == 4) in get_channel_from_ecc_syndrome()
3162 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
3164 amd64_warn("Illegal syndrome type: %u\n", pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
3168 return map_err_sym_to_channel(err_sym, pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
3221 struct amd64_pvt *pvt; in decode_bus_error() local
3232 pvt = mci->pvt_info; in decode_bus_error()
3244 sys_addr = get_error_address(pvt, m); in decode_bus_error()
3249 pvt->ops->map_sysaddr_to_csrow(mci, sys_addr, &err); in decode_bus_error()
3272 struct amd64_pvt *pvt; in decode_umc_error() local
3280 pvt = mci->pvt_info; in decode_umc_error()
3305 if (umc_normaddr_to_sysaddr(m->addr, pvt->mc_node_id, err.channel, &sys_addr)) { in decode_umc_error()
3322 reserve_mc_sibling_devs(struct amd64_pvt *pvt, u16 pci_id1, u16 pci_id2) in reserve_mc_sibling_devs() argument
3324 if (pvt->umc) { in reserve_mc_sibling_devs()
3325 pvt->F0 = pci_get_related_function(pvt->F3->vendor, pci_id1, pvt->F3); in reserve_mc_sibling_devs()
3326 if (!pvt->F0) { in reserve_mc_sibling_devs()
3331 pvt->F6 = pci_get_related_function(pvt->F3->vendor, pci_id2, pvt->F3); in reserve_mc_sibling_devs()
3332 if (!pvt->F6) { in reserve_mc_sibling_devs()
3333 pci_dev_put(pvt->F0); in reserve_mc_sibling_devs()
3334 pvt->F0 = NULL; in reserve_mc_sibling_devs()
3341 pci_ctl_dev = &pvt->F0->dev; in reserve_mc_sibling_devs()
3343 edac_dbg(1, "F0: %s\n", pci_name(pvt->F0)); in reserve_mc_sibling_devs()
3344 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
3345 edac_dbg(1, "F6: %s\n", pci_name(pvt->F6)); in reserve_mc_sibling_devs()
3351 pvt->F1 = pci_get_related_function(pvt->F3->vendor, pci_id1, pvt->F3); in reserve_mc_sibling_devs()
3352 if (!pvt->F1) { in reserve_mc_sibling_devs()
3358 pvt->F2 = pci_get_related_function(pvt->F3->vendor, pci_id2, pvt->F3); in reserve_mc_sibling_devs()
3359 if (!pvt->F2) { in reserve_mc_sibling_devs()
3360 pci_dev_put(pvt->F1); in reserve_mc_sibling_devs()
3361 pvt->F1 = NULL; in reserve_mc_sibling_devs()
3368 pci_ctl_dev = &pvt->F2->dev; in reserve_mc_sibling_devs()
3370 edac_dbg(1, "F1: %s\n", pci_name(pvt->F1)); in reserve_mc_sibling_devs()
3371 edac_dbg(1, "F2: %s\n", pci_name(pvt->F2)); in reserve_mc_sibling_devs()
3372 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
3377 static void free_mc_sibling_devs(struct amd64_pvt *pvt) in free_mc_sibling_devs() argument
3379 if (pvt->umc) { in free_mc_sibling_devs()
3380 pci_dev_put(pvt->F0); in free_mc_sibling_devs()
3381 pci_dev_put(pvt->F6); in free_mc_sibling_devs()
3383 pci_dev_put(pvt->F1); in free_mc_sibling_devs()
3384 pci_dev_put(pvt->F2); in free_mc_sibling_devs()
3388 static void determine_ecc_sym_sz(struct amd64_pvt *pvt) in determine_ecc_sym_sz() argument
3390 pvt->ecc_sym_sz = 4; in determine_ecc_sym_sz()
3392 if (pvt->umc) { in determine_ecc_sym_sz()
3397 if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) { in determine_ecc_sym_sz()
3398 if (pvt->umc[i].ecc_ctrl & BIT(9)) { in determine_ecc_sym_sz()
3399 pvt->ecc_sym_sz = 16; in determine_ecc_sym_sz()
3401 } else if (pvt->umc[i].ecc_ctrl & BIT(7)) { in determine_ecc_sym_sz()
3402 pvt->ecc_sym_sz = 8; in determine_ecc_sym_sz()
3407 } else if (pvt->fam >= 0x10) { in determine_ecc_sym_sz()
3410 amd64_read_pci_cfg(pvt->F3, EXT_NB_MCA_CFG, &tmp); in determine_ecc_sym_sz()
3412 if (pvt->fam != 0x16) in determine_ecc_sym_sz()
3413 amd64_read_dct_pci_cfg(pvt, 1, DBAM0, &pvt->dbam1); in determine_ecc_sym_sz()
3416 if ((pvt->fam > 0x10 || pvt->model > 7) && tmp & BIT(25)) in determine_ecc_sym_sz()
3417 pvt->ecc_sym_sz = 8; in determine_ecc_sym_sz()
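
The UMC leg on lines 3397-3402 reads straight off the listing: bit 9 of ecc_ctrl selects x16 symbols, bit 7 selects x8, otherwise the x4 default set on line 3390 stands. As a tiny decoder:

#include <stdint.h>
#include <stdio.h>

static unsigned int umc_ecc_sym_sz(uint32_t ecc_ctrl)
{
        if (ecc_ctrl & (1u << 9))
                return 16;
        if (ecc_ctrl & (1u << 7))
                return 8;
        return 4;       /* default, cf. line 3390 */
}

int main(void)
{
        printf("x%u syndromes\n", umc_ecc_sym_sz(1u << 7));     /* x8 */
        return 0;
}
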
3424 static void __read_mc_regs_df(struct amd64_pvt *pvt) in __read_mc_regs_df() argument
3426 u8 nid = pvt->mc_node_id; in __read_mc_regs_df()
3434 umc = &pvt->umc[i]; in __read_mc_regs_df()
3448 static void read_mc_regs(struct amd64_pvt *pvt) in read_mc_regs() argument
3457 rdmsrl(MSR_K8_TOP_MEM1, pvt->top_mem); in read_mc_regs()
3458 edac_dbg(0, " TOP_MEM: 0x%016llx\n", pvt->top_mem); in read_mc_regs()
3463 rdmsrl(MSR_K8_TOP_MEM2, pvt->top_mem2); in read_mc_regs()
3464 edac_dbg(0, " TOP_MEM2: 0x%016llx\n", pvt->top_mem2); in read_mc_regs()
3469 if (pvt->umc) { in read_mc_regs()
3470 __read_mc_regs_df(pvt); in read_mc_regs()
3471 amd64_read_pci_cfg(pvt->F0, DF_DHAR, &pvt->dhar); in read_mc_regs()
3476 amd64_read_pci_cfg(pvt->F3, NBCAP, &pvt->nbcap); in read_mc_regs()
3478 read_dram_ctl_register(pvt); in read_mc_regs()
3484 read_dram_base_limit_regs(pvt, range); in read_mc_regs()
3486 rw = dram_rw(pvt, range); in read_mc_regs()
3492 get_dram_base(pvt, range), in read_mc_regs()
3493 get_dram_limit(pvt, range)); in read_mc_regs()
3496 dram_intlv_en(pvt, range) ? "Enabled" : "Disabled", in read_mc_regs()
3499 dram_intlv_sel(pvt, range), in read_mc_regs()
3500 dram_dst_node(pvt, range)); in read_mc_regs()
3503 amd64_read_pci_cfg(pvt->F1, DHAR, &pvt->dhar); in read_mc_regs()
3504 amd64_read_dct_pci_cfg(pvt, 0, DBAM0, &pvt->dbam0); in read_mc_regs()
3506 amd64_read_pci_cfg(pvt->F3, F10_ONLINE_SPARE, &pvt->online_spare); in read_mc_regs()
3508 amd64_read_dct_pci_cfg(pvt, 0, DCLR0, &pvt->dclr0); in read_mc_regs()
3509 amd64_read_dct_pci_cfg(pvt, 0, DCHR0, &pvt->dchr0); in read_mc_regs()
3511 if (!dct_ganging_enabled(pvt)) { in read_mc_regs()
3512 amd64_read_dct_pci_cfg(pvt, 1, DCLR0, &pvt->dclr1); in read_mc_regs()
3513 amd64_read_dct_pci_cfg(pvt, 1, DCHR0, &pvt->dchr1); in read_mc_regs()
3517 read_dct_base_mask(pvt); in read_mc_regs()
3519 determine_memory_type(pvt); in read_mc_regs()
3521 if (!pvt->umc) in read_mc_regs()
3522 edac_dbg(1, " DIMM type: %s\n", edac_mem_types[pvt->dram_type]); in read_mc_regs()
3524 determine_ecc_sym_sz(pvt); in read_mc_regs()
3561 static u32 get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr_orig) in get_csrow_nr_pages() argument
3563 u32 dbam = dct ? pvt->dbam1 : pvt->dbam0; in get_csrow_nr_pages()
3567 if (!pvt->umc) { in get_csrow_nr_pages()
3571 cs_mode = f17_get_cs_mode(csrow_nr >> 1, dct, pvt); in get_csrow_nr_pages()
3574 nr_pages = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, csrow_nr); in get_csrow_nr_pages()
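
dbam_to_cs() hands back a chip-select size in MiB, and the page count EDAC wants is one shift away. That shift sits in the elided tail of get_csrow_nr_pages(), so treat this as the usual EDAC idiom rather than a quote:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12   /* 4 KiB pages, the common x86 case */

int main(void)
{
        uint32_t size_mb = 2048;                                /* 2 GiB csrow */
        uint32_t nr_pages = size_mb << (20 - PAGE_SHIFT);       /* MiB -> pages */

        printf("%u MiB = %u pages\n", size_mb, nr_pages);       /* 524288 */
        return 0;
}
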
3586 struct amd64_pvt *pvt = mci->pvt_info; in init_csrows_df() local
3607 for_each_chip_select(cs, umc, pvt) { in init_csrows_df()
3608 if (!csrow_enabled(cs, umc, pvt)) in init_csrows_df()
3615 pvt->mc_node_id, cs); in init_csrows_df()
3617 dimm->nr_pages = get_csrow_nr_pages(pvt, umc, cs); in init_csrows_df()
3618 dimm->mtype = pvt->umc[umc].dram_type; in init_csrows_df()
3634 struct amd64_pvt *pvt = mci->pvt_info; in init_csrows() local
3642 if (pvt->umc) in init_csrows()
3645 amd64_read_pci_cfg(pvt->F3, NBCFG, &val); in init_csrows()
3647 pvt->nbcfg = val; in init_csrows()
3650 pvt->mc_node_id, val, in init_csrows()
3656 for_each_chip_select(i, 0, pvt) { in init_csrows()
3657 bool row_dct0 = !!csrow_enabled(i, 0, pvt); in init_csrows()
3660 if (pvt->fam != 0xf) in init_csrows()
3661 row_dct1 = !!csrow_enabled(i, 1, pvt); in init_csrows()
3670 pvt->mc_node_id, i); in init_csrows()
3673 nr_pages = get_csrow_nr_pages(pvt, 0, i); in init_csrows()
3678 if (pvt->fam != 0xf && row_dct1) { in init_csrows()
3679 int row_dct1_pages = get_csrow_nr_pages(pvt, 1, i); in init_csrows()
3688 if (pvt->nbcfg & NBCFG_ECC_ENABLE) { in init_csrows()
3689 edac_mode = (pvt->nbcfg & NBCFG_CHIPKILL) in init_csrows()
3694 for (j = 0; j < pvt->channel_count; j++) { in init_csrows()
3696 dimm->mtype = pvt->dram_type; in init_csrows()
3865 static bool ecc_enabled(struct amd64_pvt *pvt) in ecc_enabled() argument
3867 u16 nid = pvt->mc_node_id; in ecc_enabled()
3877 umc = &pvt->umc[i]; in ecc_enabled()
3898 amd64_read_pci_cfg(pvt->F3, NBCFG, &value); in ecc_enabled()
3917 f17h_determine_edac_ctl_cap(struct mem_ctl_info *mci, struct amd64_pvt *pvt) in f17h_determine_edac_ctl_cap() argument
3922 if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) { in f17h_determine_edac_ctl_cap()
3923 ecc_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_ENABLED); in f17h_determine_edac_ctl_cap()
3924 cpk_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_CHIPKILL_CAP); in f17h_determine_edac_ctl_cap()
3926 dev_x4 &= !!(pvt->umc[i].dimm_cfg & BIT(6)); in f17h_determine_edac_ctl_cap()
3927 dev_x16 &= !!(pvt->umc[i].dimm_cfg & BIT(7)); in f17h_determine_edac_ctl_cap()
3949 struct amd64_pvt *pvt = mci->pvt_info; in setup_mci_misc_attrs() local
3954 if (pvt->umc) { in setup_mci_misc_attrs()
3955 f17h_determine_edac_ctl_cap(mci, pvt); in setup_mci_misc_attrs()
3957 if (pvt->nbcap & NBCAP_SECDED) in setup_mci_misc_attrs()
3960 if (pvt->nbcap & NBCAP_CHIPKILL) in setup_mci_misc_attrs()
3964 mci->edac_cap = determine_edac_cap(pvt); in setup_mci_misc_attrs()
3967 mci->dev_name = pci_name(pvt->F3); in setup_mci_misc_attrs()
3978 static struct amd64_family_type *per_family_init(struct amd64_pvt *pvt) in per_family_init() argument
3980 pvt->ext_model = boot_cpu_data.x86_model >> 4; in per_family_init()
3981 pvt->stepping = boot_cpu_data.x86_stepping; in per_family_init()
3982 pvt->model = boot_cpu_data.x86_model; in per_family_init()
3983 pvt->fam = boot_cpu_data.x86; in per_family_init()
3985 switch (pvt->fam) { in per_family_init()
3988 pvt->ops = &family_types[K8_CPUS].ops; in per_family_init()
3993 pvt->ops = &family_types[F10_CPUS].ops; in per_family_init()
3997 if (pvt->model == 0x30) { in per_family_init()
3999 pvt->ops = &family_types[F15_M30H_CPUS].ops; in per_family_init()
4001 } else if (pvt->model == 0x60) { in per_family_init()
4003 pvt->ops = &family_types[F15_M60H_CPUS].ops; in per_family_init()
4006 } else if (pvt->model == 0x13) { in per_family_init()
4010 pvt->ops = &family_types[F15_CPUS].ops; in per_family_init()
4015 if (pvt->model == 0x30) { in per_family_init()
4017 pvt->ops = &family_types[F16_M30H_CPUS].ops; in per_family_init()
4021 pvt->ops = &family_types[F16_CPUS].ops; in per_family_init()
4025 if (pvt->model >= 0x10 && pvt->model <= 0x2f) { in per_family_init()
4027 pvt->ops = &family_types[F17_M10H_CPUS].ops; in per_family_init()
4029 } else if (pvt->model >= 0x30 && pvt->model <= 0x3f) { in per_family_init()
4031 pvt->ops = &family_types[F17_M30H_CPUS].ops; in per_family_init()
4033 } else if (pvt->model >= 0x60 && pvt->model <= 0x6f) { in per_family_init()
4035 pvt->ops = &family_types[F17_M60H_CPUS].ops; in per_family_init()
4037 } else if (pvt->model >= 0x70 && pvt->model <= 0x7f) { in per_family_init()
4039 pvt->ops = &family_types[F17_M70H_CPUS].ops; in per_family_init()
4045 pvt->ops = &family_types[F17_CPUS].ops; in per_family_init()
4047 if (pvt->fam == 0x18) in per_family_init()
4052 if (pvt->model >= 0x10 && pvt->model <= 0x1f) { in per_family_init()
4054 pvt->ops = &family_types[F19_M10H_CPUS].ops; in per_family_init()
4056 } else if (pvt->model >= 0x20 && pvt->model <= 0x2f) { in per_family_init()
4058 pvt->ops = &family_types[F17_M70H_CPUS].ops; in per_family_init()
4061 } else if (pvt->model >= 0x50 && pvt->model <= 0x5f) { in per_family_init()
4063 pvt->ops = &family_types[F19_M50H_CPUS].ops; in per_family_init()
4066 } else if (pvt->model >= 0xa0 && pvt->model <= 0xaf) { in per_family_init()
4068 pvt->ops = &family_types[F19_M10H_CPUS].ops; in per_family_init()
4073 pvt->ops = &family_types[F19_CPUS].ops; in per_family_init()
4093 static int hw_info_get(struct amd64_pvt *pvt) in hw_info_get() argument
4098 if (pvt->fam >= 0x17) { in hw_info_get()
4099 pvt->umc = kcalloc(fam_type->max_mcs, sizeof(struct amd64_umc), GFP_KERNEL); in hw_info_get()
4100 if (!pvt->umc) in hw_info_get()
4110 ret = reserve_mc_sibling_devs(pvt, pci_id1, pci_id2); in hw_info_get()
4114 read_mc_regs(pvt); in hw_info_get()
4119 static void hw_info_put(struct amd64_pvt *pvt) in hw_info_put() argument
4121 if (pvt->F0 || pvt->F1) in hw_info_put()
4122 free_mc_sibling_devs(pvt); in hw_info_put()
4124 kfree(pvt->umc); in hw_info_put()
4127 static int init_one_instance(struct amd64_pvt *pvt) in init_one_instance() argument
4138 pvt->channel_count = pvt->ops->early_channel_count(pvt); in init_one_instance()
4139 if (pvt->channel_count < 0) in init_one_instance()
4144 layers[0].size = pvt->csels[0].b_cnt; in init_one_instance()
4156 mci = edac_mc_alloc(pvt->mc_node_id, ARRAY_SIZE(layers), layers, 0); in init_one_instance()
4160 mci->pvt_info = pvt; in init_one_instance()
4161 mci->pdev = &pvt->F3->dev; in init_one_instance()
4178 static bool instance_has_memory(struct amd64_pvt *pvt) in instance_has_memory() argument
4184 for_each_chip_select(cs, dct, pvt) in instance_has_memory()
4185 cs_enabled |= csrow_enabled(cs, dct, pvt); in instance_has_memory()
4194 struct amd64_pvt *pvt = NULL; in probe_one_instance() local
4205 pvt = kzalloc(sizeof(struct amd64_pvt), GFP_KERNEL); in probe_one_instance()
4206 if (!pvt) in probe_one_instance()
4209 pvt->mc_node_id = nid; in probe_one_instance()
4210 pvt->F3 = F3; in probe_one_instance()
4213 fam_type = per_family_init(pvt); in probe_one_instance()
4217 ret = hw_info_get(pvt); in probe_one_instance()
4222 if (!instance_has_memory(pvt)) { in probe_one_instance()
4227 if (!ecc_enabled(pvt)) { in probe_one_instance()
4243 ret = init_one_instance(pvt); in probe_one_instance()
4254 (pvt->fam == 0xf ? in probe_one_instance()
4255 (pvt->ext_model >= K8_REV_F ? "revF or later " in probe_one_instance()
4257 : ""), pvt->mc_node_id); in probe_one_instance()
4259 dump_misc_regs(pvt); in probe_one_instance()
4264 hw_info_put(pvt); in probe_one_instance()
4265 kfree(pvt); in probe_one_instance()
4280 struct amd64_pvt *pvt; in remove_one_instance() local
4287 pvt = mci->pvt_info; in remove_one_instance()
4297 hw_info_put(pvt); in remove_one_instance()
4298 kfree(pvt); in remove_one_instance()