Lines matching refs: pvt
318 u64 (*get_tolm)(struct sbridge_pvt *pvt);
319 u64 (*get_tohm)(struct sbridge_pvt *pvt);
328 u8 (*get_node_id)(struct sbridge_pvt *pvt);
330 enum mem_type (*get_memory_type)(struct sbridge_pvt *pvt);
331 enum dev_type (*get_width)(struct sbridge_pvt *pvt, u32 mtr);
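
These five entries are per-generation hooks stored in pvt->info; the common paths later in this listing (e.g. lines 1598, 1669, 1694, 1779, 1787) call them indirectly rather than switching on the CPU type. A minimal sketch of that dispatch pattern follows; the hook names match the listing, while the surrounding struct layout is condensed for illustration.

	/* Illustrative only: per-generation hooks gathered in pvt->info,
	 * invoked by common code without knowing which CPU they serve. */
	struct sbridge_info {
		u64 (*get_tolm)(struct sbridge_pvt *pvt);
		u64 (*get_tohm)(struct sbridge_pvt *pvt);
		u8 (*get_node_id)(struct sbridge_pvt *pvt);
		enum mem_type (*get_memory_type)(struct sbridge_pvt *pvt);
		enum dev_type (*get_width)(struct sbridge_pvt *pvt, u32 mtr);
		/* ... other hooks and per-generation constants ... */
	};

	/* get_memory_layout() (lines 1779/1787) then simply does: */
	pvt->tolm = pvt->info.get_tolm(pvt);
	pvt->tohm = pvt->info.get_tohm(pvt);
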
800 static u64 sbridge_get_tolm(struct sbridge_pvt *pvt) in sbridge_get_tolm() argument
805 pci_read_config_dword(pvt->pci_sad1, TOLM, &reg); in sbridge_get_tolm()
809 static u64 sbridge_get_tohm(struct sbridge_pvt *pvt) in sbridge_get_tohm() argument
813 pci_read_config_dword(pvt->pci_sad1, TOHM, &reg); in sbridge_get_tohm()
817 static u64 ibridge_get_tolm(struct sbridge_pvt *pvt) in ibridge_get_tolm() argument
821 pci_read_config_dword(pvt->pci_br1, TOLM, &reg); in ibridge_get_tolm()
826 static u64 ibridge_get_tohm(struct sbridge_pvt *pvt) in ibridge_get_tohm() argument
830 pci_read_config_dword(pvt->pci_br1, TOHM, &reg); in ibridge_get_tohm()
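
The four helpers above share the same shape: read one 32-bit limit register from the appropriate bridge device and widen it into a u64 physical-address boundary. A hedged sketch, assuming a TOLM whose low field supplies address bits 31:28 (the exact field layout is defined by the driver's GET_TOLM/GET_TOHM macros, not reproduced in this listing):

	/* Sketch, not the driver's exact macro expansion. */
	static u64 example_get_tolm(struct sbridge_pvt *pvt)
	{
		u32 reg;

		pci_read_config_dword(pvt->pci_sad1, TOLM, &reg);
		/* Assumed layout: register bits 3:0 hold address bits
		 * 31:28 of the limit; all bits below are set. */
		return ((u64)GET_BITFIELD(reg, 0, 3) << 28) | 0x3ffffffULL;
	}
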
883 static enum mem_type get_memory_type(struct sbridge_pvt *pvt) in get_memory_type() argument
888 if (pvt->pci_ddrio) { in get_memory_type()
889 pci_read_config_dword(pvt->pci_ddrio, pvt->info.rankcfgr, in get_memory_type()
902 static enum mem_type haswell_get_memory_type(struct sbridge_pvt *pvt) in haswell_get_memory_type() argument
908 if (!pvt->pci_ddrio) in haswell_get_memory_type()
911 pci_read_config_dword(pvt->pci_ddrio, in haswell_get_memory_type()
917 pci_read_config_dword(pvt->pci_ta, MCMTR, &reg); in haswell_get_memory_type()
934 static enum dev_type knl_get_width(struct sbridge_pvt *pvt, u32 mtr) in knl_get_width() argument
940 static enum dev_type sbridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in sbridge_get_width() argument
965 static enum dev_type ibridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in ibridge_get_width() argument
974 static enum dev_type broadwell_get_width(struct sbridge_pvt *pvt, u32 mtr) in broadwell_get_width() argument
980 static enum mem_type knl_get_memory_type(struct sbridge_pvt *pvt) in knl_get_memory_type() argument
986 static u8 get_node_id(struct sbridge_pvt *pvt) in get_node_id() argument
989 pci_read_config_dword(pvt->pci_br0, SAD_CONTROL, &reg); in get_node_id()
993 static u8 haswell_get_node_id(struct sbridge_pvt *pvt) in haswell_get_node_id() argument
997 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in haswell_get_node_id()
1001 static u8 knl_get_node_id(struct sbridge_pvt *pvt) in knl_get_node_id() argument
1005 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in knl_get_node_id()
1043 static u64 haswell_get_tolm(struct sbridge_pvt *pvt) in haswell_get_tolm() argument
1047 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOLM, &reg); in haswell_get_tolm()
1051 static u64 haswell_get_tohm(struct sbridge_pvt *pvt) in haswell_get_tohm() argument
1056 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_0, &reg); in haswell_get_tohm()
1058 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_1, &reg); in haswell_get_tohm()
1064 static u64 knl_get_tolm(struct sbridge_pvt *pvt) in knl_get_tolm() argument
1068 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOLM, &reg); in knl_get_tolm()
1072 static u64 knl_get_tohm(struct sbridge_pvt *pvt) in knl_get_tohm() argument
1077 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo); in knl_get_tohm()
1078 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi); in knl_get_tohm()
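
Unlike the helpers above, the KNL top-of-high-memory limit spans two 32-bit config registers, so the two reads are combined into one 64-bit value. A sketch under that assumption:

	/* Sketch: combine the split KNL_TOHM register pair. */
	static u64 example_knl_get_tohm(struct sbridge_pvt *pvt)
	{
		u32 reg_lo, reg_hi;

		pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo);
		pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi);
		return ((u64)reg_hi << 32) | reg_lo;
	}
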
1152 static int knl_get_tad(const struct sbridge_pvt *pvt, in knl_get_tad() argument
1165 pci_mc = pvt->knl.pci_mc0; in knl_get_tad()
1168 pci_mc = pvt->knl.pci_mc1; in knl_get_tad()
1346 static int knl_get_dimm_capacity(struct sbridge_pvt *pvt, u64 *mc_sizes) in knl_get_dimm_capacity() argument
1373 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1399 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1422 for (sad_rule = 0; sad_rule < pvt->info.max_sad; sad_rule++) { in knl_get_dimm_capacity()
1426 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1427 pvt->info.dram_rule[sad_rule], &dram_rule); in knl_get_dimm_capacity()
1434 sad_limit = pvt->info.sad_limit(dram_rule)+1; in knl_get_dimm_capacity()
1436 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1437 pvt->info.interleave_list[sad_rule], &interleave_reg); in knl_get_dimm_capacity()
1443 first_pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1446 pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1490 if (knl_get_tad(pvt, in knl_get_dimm_capacity()
1571 struct sbridge_pvt *pvt = mci->pvt_info; in get_source_id() local
1574 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL || in get_source_id()
1575 pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1576 pci_read_config_dword(pvt->pci_sad1, SAD_TARGET, &reg); in get_source_id()
1578 pci_read_config_dword(pvt->pci_br0, SAD_TARGET, &reg); in get_source_id()
1580 if (pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1581 pvt->sbridge_dev->source_id = SOURCE_ID_KNL(reg); in get_source_id()
1583 pvt->sbridge_dev->source_id = SOURCE_ID(reg); in get_source_id()
1590 struct sbridge_pvt *pvt = mci->pvt_info; in __populate_dimms() local
1591 int channels = pvt->info.type == KNIGHTS_LANDING ? KNL_MAX_CHANNELS in __populate_dimms()
1598 mtype = pvt->info.get_memory_type(pvt); in __populate_dimms()
1616 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1618 if (!pvt->knl.pci_channel[i]) in __populate_dimms()
1622 if (!pvt->pci_tad[i]) in __populate_dimms()
1624 pci_read_config_dword(pvt->pci_tad[i], 0x8c, &amap); in __populate_dimms()
1629 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1630 pci_read_config_dword(pvt->knl.pci_channel[i], in __populate_dimms()
1633 pci_read_config_dword(pvt->pci_tad[i], in __populate_dimms()
1639 if (!IS_ECC_ENABLED(pvt->info.mcmtr)) { in __populate_dimms()
1641 pvt->sbridge_dev->source_id, in __populate_dimms()
1642 pvt->sbridge_dev->dom, i); in __populate_dimms()
1645 pvt->channel[i].dimms++; in __populate_dimms()
1647 ranks = numrank(pvt->info.type, mtr); in __populate_dimms()
1649 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1663 pvt->sbridge_dev->mc, pvt->sbridge_dev->dom, i, j, in __populate_dimms()
1669 dimm->dtype = pvt->info.get_width(pvt, mtr); in __populate_dimms()
1672 pvt->channel[i].dimm[j].rowbits = order_base_2(rows); in __populate_dimms()
1673 pvt->channel[i].dimm[j].colbits = order_base_2(cols); in __populate_dimms()
1674 pvt->channel[i].dimm[j].bank_xor_enable = in __populate_dimms()
1675 GET_BITFIELD(pvt->info.mcmtr, 9, 9); in __populate_dimms()
1676 pvt->channel[i].dimm[j].amap_fine = GET_BITFIELD(amap, 0, 0); in __populate_dimms()
1679 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom, i, j); in __populate_dimms()
1689 struct sbridge_pvt *pvt = mci->pvt_info; in get_dimm_config() local
1694 pvt->sbridge_dev->node_id = pvt->info.get_node_id(pvt); in get_dimm_config()
1696 pvt->sbridge_dev->mc, in get_dimm_config()
1697 pvt->sbridge_dev->node_id, in get_dimm_config()
1698 pvt->sbridge_dev->source_id); in get_dimm_config()
1703 if (pvt->info.type == KNIGHTS_LANDING) { in get_dimm_config()
1705 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1706 pvt->is_cur_addr_mirrored = false; in get_dimm_config()
1708 if (knl_get_dimm_capacity(pvt, knl_mc_sizes) != 0) in get_dimm_config()
1710 if (pci_read_config_dword(pvt->pci_ta, KNL_MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1715 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_dimm_config()
1716 if (pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg)) { in get_dimm_config()
1720 pvt->is_chan_hash = GET_BITFIELD(reg, 21, 21); in get_dimm_config()
1722 pvt->mirror_mode = ADDR_RANGE_MIRRORING; in get_dimm_config()
1727 if (pci_read_config_dword(pvt->pci_ras, RASENABLES, &reg)) { in get_dimm_config()
1732 pvt->mirror_mode = FULL_MIRRORING; in get_dimm_config()
1735 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1740 if (pci_read_config_dword(pvt->pci_ta, MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1744 if (IS_LOCKSTEP_ENABLED(pvt->info.mcmtr)) { in get_dimm_config()
1747 pvt->is_lockstep = true; in get_dimm_config()
1751 pvt->is_lockstep = false; in get_dimm_config()
1753 if (IS_CLOSE_PG(pvt->info.mcmtr)) { in get_dimm_config()
1755 pvt->is_close_pg = true; in get_dimm_config()
1758 pvt->is_close_pg = false; in get_dimm_config()
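
The lockstep/close-page decisions above (and the ECC check at line 1639) all decode single-bit flags from the cached MCMTR value using the driver's GET_BITFIELD helper. A sketch of that pattern; the EX_ macro names and bit positions here are assumptions for illustration, not the driver's definitions:

	/* Illustration only: bit positions assumed, not quoted. */
	#define EX_IS_CLOSE_PG(mcmtr)		GET_BITFIELD(mcmtr, 0, 0)
	#define EX_IS_LOCKSTEP_ENABLED(mcmtr)	GET_BITFIELD(mcmtr, 1, 1)
	#define EX_IS_ECC_ENABLED(mcmtr)	GET_BITFIELD(mcmtr, 2, 2)

	pvt->is_lockstep = EX_IS_LOCKSTEP_ENABLED(pvt->info.mcmtr);
	pvt->is_close_pg = EX_IS_CLOSE_PG(pvt->info.mcmtr);
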
1767 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_layout() local
1779 pvt->tolm = pvt->info.get_tolm(pvt); in get_memory_layout()
1780 tmp_mb = (1 + pvt->tolm) >> 20; in get_memory_layout()
1784 gb, (mb*1000)/1024, (u64)pvt->tolm); in get_memory_layout()
1787 pvt->tohm = pvt->info.get_tohm(pvt); in get_memory_layout()
1788 tmp_mb = (1 + pvt->tohm) >> 20; in get_memory_layout()
1792 gb, (mb*1000)/1024, (u64)pvt->tohm); in get_memory_layout()
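
The TOLM/TOHM debug lines convert a byte limit into "GB.mmm" form: shift to MiB, split into whole GiB plus a MiB remainder, and scale the remainder to three decimal places. A worked sketch of that arithmetic (the div_u64_rem split is an assumption; only the shift and the (mb*1000)/1024 scaling appear in this listing):

	/* Sketch of the size formatting used for TOLM/TOHM. */
	u64 tmp_mb = (1 + pvt->tolm) >> 20;		/* bytes -> MiB */
	u32 mb;
	u64 gb = div_u64_rem(tmp_mb, 1024, &mb);	/* MiB -> GiB + rem */

	/* e.g. tolm = 0x7fffffff -> tmp_mb = 2048 -> "2.000 GB" */
	pr_debug("TOLM: %llu.%03u GB (0x%016llx)\n",
		 gb, (mb * 1000) / 1024, (u64)pvt->tolm);
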
1801 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_layout()
1803 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_layout()
1805 limit = pvt->info.sad_limit(reg); in get_memory_layout()
1817 show_dram_attr(pvt->info.dram_attr(reg)), in get_memory_layout()
1820 get_intlv_mode_str(reg, pvt->info.type), in get_memory_layout()
1824 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_layout()
1826 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_layout()
1828 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, j); in get_memory_layout()
1837 if (pvt->info.type == KNIGHTS_LANDING) in get_memory_layout()
1845 pci_read_config_dword(pvt->pci_ha, tad_dram_rule[n_tads], &reg); in get_memory_layout()
1869 if (!pvt->channel[i].dimms) in get_memory_layout()
1872 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1889 if (!pvt->channel[i].dimms) in get_memory_layout()
1892 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1899 tmp_mb = pvt->info.rir_limit(reg) >> 20; in get_memory_layout()
1910 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1913 tmp_mb = RIR_OFFSET(pvt->info.type, reg) << 6; in get_memory_layout()
1920 (u32)RIR_RNK_TGT(pvt->info.type, reg), in get_memory_layout()
1982 struct sbridge_pvt *pvt; in sb_decode_ddr4() local
1987 pvt = mci->pvt_info; in sb_decode_ddr4()
1988 amap_fine = pvt->channel[ch].dimm[dimmno].amap_fine; in sb_decode_ddr4()
1990 rowbits = pvt->channel[ch].dimm[dimmno].rowbits; in sb_decode_ddr4()
1991 colbits = pvt->channel[ch].dimm[dimmno].colbits; in sb_decode_ddr4()
1992 bank_xor_enable = pvt->channel[ch].dimm[dimmno].bank_xor_enable; in sb_decode_ddr4()
1994 if (pvt->is_lockstep) { in sb_decode_ddr4()
2000 if (pvt->is_close_pg) { in sb_decode_ddr4()
2039 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_error_data() local
2061 if ((addr > (u64) pvt->tolm) && (addr < (1LL << 32))) { in get_memory_error_data()
2065 if (addr >= (u64)pvt->tohm) { in get_memory_error_data()
2073 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_error_data()
2074 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_error_data()
2080 limit = pvt->info.sad_limit(reg); in get_memory_error_data()
2089 if (n_sads == pvt->info.max_sad) { in get_memory_error_data()
2094 *area_type = show_dram_attr(pvt->info.dram_attr(dram_rule)); in get_memory_error_data()
2095 interleave_mode = pvt->info.interleave_mode(dram_rule); in get_memory_error_data()
2097 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_error_data()
2100 if (pvt->info.type == SANDY_BRIDGE) { in get_memory_error_data()
2101 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_error_data()
2103 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, sad_way); in get_memory_error_data()
2111 pvt->sbridge_dev->mc, in get_memory_error_data()
2140 } else if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_memory_error_data()
2157 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2163 pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg); in get_memory_error_data()
2172 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2192 pvt = mci->pvt_info; in get_memory_error_data()
2198 pci_ha = pvt->pci_ha; in get_memory_error_data()
2222 if (pvt->is_chan_hash) in get_memory_error_data()
2249 pci_read_config_dword(pvt->pci_tad[base_ch], tad_ch_nilv_offset[n_tads], &tad_offset); in get_memory_error_data()
2251 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data()
2252 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && n_tads == 0)) { in get_memory_error_data()
2264 pvt->is_cur_addr_mirrored = true; in get_memory_error_data()
2267 pvt->is_cur_addr_mirrored = false; in get_memory_error_data()
2270 if (pvt->is_lockstep) in get_memory_error_data()
2305 pci_read_config_dword(pvt->pci_tad[base_ch], rir_way_limit[n_rir], &reg); in get_memory_error_data()
2310 limit = pvt->info.rir_limit(reg); in get_memory_error_data()
2327 if (pvt->is_close_pg) in get_memory_error_data()
2333 pci_read_config_dword(pvt->pci_tad[base_ch], rir_offset[n_rir][idx], &reg); in get_memory_error_data()
2334 *rank = RIR_RNK_TGT(pvt->info.type, reg); in get_memory_error_data()
2336 if (pvt->info.type == BROADWELL) { in get_memory_error_data()
2337 if (pvt->is_close_pg) in get_memory_error_data()
2346 rank_addr -= RIR_OFFSET(pvt->info.type, reg); in get_memory_error_data()
2348 mtype = pvt->info.get_memory_type(pvt); in get_memory_error_data()
2375 struct sbridge_pvt *pvt; in get_memory_error_data_from_mce() local
2384 pvt = mci->pvt_info; in get_memory_error_data_from_mce()
2385 if (!pvt->info.get_ha) { in get_memory_error_data_from_mce()
2389 *ha = pvt->info.get_ha(m->bank); in get_memory_error_data_from_mce()
2402 pvt = new_mci->pvt_info; in get_memory_error_data_from_mce()
2403 pci_ha = pvt->pci_ha; in get_memory_error_data_from_mce()
2408 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data_from_mce()
2409 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && tad0)) { in get_memory_error_data_from_mce()
2411 pvt->is_cur_addr_mirrored = true; in get_memory_error_data_from_mce()
2413 pvt->is_cur_addr_mirrored = false; in get_memory_error_data_from_mce()
2416 if (pvt->is_lockstep) in get_memory_error_data_from_mce()
2618 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mci_bind_devs() local
2630 pvt->pci_sad0 = pdev; in sbridge_mci_bind_devs()
2633 pvt->pci_sad1 = pdev; in sbridge_mci_bind_devs()
2636 pvt->pci_br0 = pdev; in sbridge_mci_bind_devs()
2639 pvt->pci_ha = pdev; in sbridge_mci_bind_devs()
2642 pvt->pci_ta = pdev; in sbridge_mci_bind_devs()
2645 pvt->pci_ras = pdev; in sbridge_mci_bind_devs()
2653 pvt->pci_tad[id] = pdev; in sbridge_mci_bind_devs()
2658 pvt->pci_ddrio = pdev; in sbridge_mci_bind_devs()
2671 if (!pvt->pci_sad0 || !pvt->pci_sad1 || !pvt->pci_ha || in sbridge_mci_bind_devs()
2672 !pvt->pci_ras || !pvt->pci_ta) in sbridge_mci_bind_devs()
2692 struct sbridge_pvt *pvt = mci->pvt_info; in ibridge_mci_bind_devs() local
2705 pvt->pci_ha = pdev; in ibridge_mci_bind_devs()
2709 pvt->pci_ta = pdev; in ibridge_mci_bind_devs()
2713 pvt->pci_ras = pdev; in ibridge_mci_bind_devs()
2725 pvt->pci_tad[id] = pdev; in ibridge_mci_bind_devs()
2730 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2733 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2736 pvt->pci_sad0 = pdev; in ibridge_mci_bind_devs()
2739 pvt->pci_br0 = pdev; in ibridge_mci_bind_devs()
2742 pvt->pci_br1 = pdev; in ibridge_mci_bind_devs()
2755 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_br0 || in ibridge_mci_bind_devs()
2756 !pvt->pci_br1 || !pvt->pci_ras || !pvt->pci_ta) in ibridge_mci_bind_devs()
2778 struct sbridge_pvt *pvt = mci->pvt_info; in haswell_mci_bind_devs() local
2784 if (pvt->info.pci_vtd == NULL) in haswell_mci_bind_devs()
2786 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in haswell_mci_bind_devs()
2797 pvt->pci_sad0 = pdev; in haswell_mci_bind_devs()
2800 pvt->pci_sad1 = pdev; in haswell_mci_bind_devs()
2804 pvt->pci_ha = pdev; in haswell_mci_bind_devs()
2808 pvt->pci_ta = pdev; in haswell_mci_bind_devs()
2812 pvt->pci_ras = pdev; in haswell_mci_bind_devs()
2824 pvt->pci_tad[id] = pdev; in haswell_mci_bind_devs()
2832 if (!pvt->pci_ddrio) in haswell_mci_bind_devs()
2833 pvt->pci_ddrio = pdev; in haswell_mci_bind_devs()
2846 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in haswell_mci_bind_devs()
2847 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in haswell_mci_bind_devs()
2863 struct sbridge_pvt *pvt = mci->pvt_info; in broadwell_mci_bind_devs() local
2869 if (pvt->info.pci_vtd == NULL) in broadwell_mci_bind_devs()
2871 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in broadwell_mci_bind_devs()
2882 pvt->pci_sad0 = pdev; in broadwell_mci_bind_devs()
2885 pvt->pci_sad1 = pdev; in broadwell_mci_bind_devs()
2889 pvt->pci_ha = pdev; in broadwell_mci_bind_devs()
2893 pvt->pci_ta = pdev; in broadwell_mci_bind_devs()
2897 pvt->pci_ras = pdev; in broadwell_mci_bind_devs()
2909 pvt->pci_tad[id] = pdev; in broadwell_mci_bind_devs()
2914 pvt->pci_ddrio = pdev; in broadwell_mci_bind_devs()
2927 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in broadwell_mci_bind_devs()
2928 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in broadwell_mci_bind_devs()
2944 struct sbridge_pvt *pvt = mci->pvt_info; in knl_mci_bind_devs() local
2963 pvt->knl.pci_mc0 = pdev; in knl_mci_bind_devs()
2965 pvt->knl.pci_mc1 = pdev; in knl_mci_bind_devs()
2975 pvt->pci_sad0 = pdev; in knl_mci_bind_devs()
2979 pvt->pci_sad1 = pdev; in knl_mci_bind_devs()
2995 WARN_ON(pvt->knl.pci_cha[devidx] != NULL); in knl_mci_bind_devs()
2997 pvt->knl.pci_cha[devidx] = pdev; in knl_mci_bind_devs()
3020 WARN_ON(pvt->knl.pci_channel[devidx] != NULL); in knl_mci_bind_devs()
3021 pvt->knl.pci_channel[devidx] = pdev; in knl_mci_bind_devs()
3025 pvt->knl.pci_mc_info = pdev; in knl_mci_bind_devs()
3029 pvt->pci_ta = pdev; in knl_mci_bind_devs()
3039 if (!pvt->knl.pci_mc0 || !pvt->knl.pci_mc1 || in knl_mci_bind_devs()
3040 !pvt->pci_sad0 || !pvt->pci_sad1 || in knl_mci_bind_devs()
3041 !pvt->pci_ta) { in knl_mci_bind_devs()
3046 if (!pvt->knl.pci_channel[i]) { in knl_mci_bind_devs()
3053 if (!pvt->knl.pci_cha[i]) { in knl_mci_bind_devs()
3080 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mce_output_error() local
3103 if (pvt->info.type != SANDY_BRIDGE) in sbridge_mce_output_error()
3151 if (pvt->info.type == KNIGHTS_LANDING) { in sbridge_mce_output_error()
3200 pvt = mci->pvt_info; in sbridge_mce_output_error()
3219 if (!pvt->is_lockstep && !pvt->is_cur_addr_mirrored && !pvt->is_close_pg) in sbridge_mce_output_error()
3351 struct sbridge_pvt *pvt; in sbridge_register_mci() local
3364 sizeof(*pvt)); in sbridge_register_mci()
3372 pvt = mci->pvt_info; in sbridge_register_mci()
3373 memset(pvt, 0, sizeof(*pvt)); in sbridge_register_mci()
3376 pvt->sbridge_dev = sbridge_dev; in sbridge_register_mci()
3387 pvt->info.type = type; in sbridge_register_mci()
3390 pvt->info.rankcfgr = IB_RANK_CFG_A; in sbridge_register_mci()
3391 pvt->info.get_tolm = ibridge_get_tolm; in sbridge_register_mci()
3392 pvt->info.get_tohm = ibridge_get_tohm; in sbridge_register_mci()
3393 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3394 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3395 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3396 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3397 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3398 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3399 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3400 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3401 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3402 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3403 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3404 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3412 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3415 pvt->info.rankcfgr = SB_RANK_CFG_A; in sbridge_register_mci()
3416 pvt->info.get_tolm = sbridge_get_tolm; in sbridge_register_mci()
3417 pvt->info.get_tohm = sbridge_get_tohm; in sbridge_register_mci()
3418 pvt->info.dram_rule = sbridge_dram_rule; in sbridge_register_mci()
3419 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3420 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3421 pvt->info.get_ha = sbridge_get_ha; in sbridge_register_mci()
3422 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3423 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3424 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3425 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3426 pvt->info.max_sad = ARRAY_SIZE(sbridge_dram_rule); in sbridge_register_mci()
3427 pvt->info.interleave_list = sbridge_interleave_list; in sbridge_register_mci()
3428 pvt->info.interleave_pkg = sbridge_interleave_pkg; in sbridge_register_mci()
3429 pvt->info.get_width = sbridge_get_width; in sbridge_register_mci()
3437 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3441 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3442 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3443 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3444 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3445 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3446 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3447 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3448 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3449 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3450 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3451 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3452 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3453 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3454 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3462 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3466 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3467 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3468 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3469 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3470 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3471 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3472 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3473 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3474 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3475 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3476 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3477 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3478 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3479 pvt->info.get_width = broadwell_get_width; in sbridge_register_mci()
3487 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3491 pvt->info.get_tolm = knl_get_tolm; in sbridge_register_mci()
3492 pvt->info.get_tohm = knl_get_tohm; in sbridge_register_mci()
3493 pvt->info.dram_rule = knl_dram_rule; in sbridge_register_mci()
3494 pvt->info.get_memory_type = knl_get_memory_type; in sbridge_register_mci()
3495 pvt->info.get_node_id = knl_get_node_id; in sbridge_register_mci()
3496 pvt->info.get_ha = knl_get_ha; in sbridge_register_mci()
3497 pvt->info.rir_limit = NULL; in sbridge_register_mci()
3498 pvt->info.sad_limit = knl_sad_limit; in sbridge_register_mci()
3499 pvt->info.interleave_mode = knl_interleave_mode; in sbridge_register_mci()
3500 pvt->info.dram_attr = dram_attr_knl; in sbridge_register_mci()
3501 pvt->info.max_sad = ARRAY_SIZE(knl_dram_rule); in sbridge_register_mci()
3502 pvt->info.interleave_list = knl_interleave_list; in sbridge_register_mci()
3503 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3504 pvt->info.get_width = knl_get_width; in sbridge_register_mci()
3511 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
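
Taken together, the blocks above show sbridge_register_mci() populating pvt->info once per detected generation; everything earlier in the listing then runs generation-agnostically through those hooks. A condensed sketch of the selection (case labels inferred from which helper families each block uses; only a few hooks shown per case):

	/* Condensed illustration of the per-generation hook selection
	 * in sbridge_register_mci(); only a few hooks shown per case. */
	pvt->info.type = type;
	switch (type) {
	case SANDY_BRIDGE:
		pvt->info.get_tolm = sbridge_get_tolm;
		pvt->info.get_tohm = sbridge_get_tohm;
		pvt->info.get_width = sbridge_get_width;
		break;
	case IVY_BRIDGE:
		pvt->info.get_tolm = ibridge_get_tolm;
		pvt->info.get_tohm = ibridge_get_tohm;
		pvt->info.get_width = ibridge_get_width;
		break;
	case KNIGHTS_LANDING:
		pvt->info.get_tolm = knl_get_tolm;
		pvt->info.get_tohm = knl_get_tohm;
		pvt->info.get_width = knl_get_width;
		break;
	/* HASWELL and BROADWELL follow the same pattern: they reuse the
	 * ibridge_* rule tables, take haswell_get_tolm/tohm and
	 * haswell_get_memory_type, and Broadwell swaps in
	 * broadwell_get_width (lines 3441-3487 above). */
	}
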