Lines matching refs: pvt (uses of struct amd64_pvt in the AMD64 EDAC driver)

86 static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct)  in f15h_select_dct()  argument
90 amd64_read_pci_cfg(pvt->F1, DCT_CFG_SEL, &reg); in f15h_select_dct()
91 reg &= (pvt->model == 0x30) ? ~3 : ~1; in f15h_select_dct()
93 amd64_write_pci_cfg(pvt->F1, DCT_CFG_SEL, reg); in f15h_select_dct()
110 static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct, in amd64_read_dct_pci_cfg() argument
113 switch (pvt->fam) { in amd64_read_dct_pci_cfg()
126 if (dct_ganging_enabled(pvt)) in amd64_read_dct_pci_cfg()
138 dct = (dct && pvt->model == 0x30) ? 3 : dct; in amd64_read_dct_pci_cfg()
139 f15h_select_dct(pvt, dct); in amd64_read_dct_pci_cfg()
150 return amd64_read_pci_cfg(pvt->F2, offset, val); in amd64_read_dct_pci_cfg()
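
The f15h_select_dct() and amd64_read_dct_pci_cfg() references above show the Family 15h pattern of programming DCT_CFG_SEL in F1 before touching an F2 register, with model 0x30 mapping DCT 1 to select value 3. A minimal standalone sketch of that select-then-access flow; pvt_sketch, read_cfg() and write_cfg() are illustrative stand-ins, not the driver's real helpers:

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the driver's private state and PCI config accessors. */
struct pvt_sketch {
        uint8_t  model;
        uint32_t dct_cfg_sel;   /* models the F1 DCT_CFG_SEL register */
};

static uint32_t read_cfg(struct pvt_sketch *p)              { return p->dct_cfg_sel; }
static void     write_cfg(struct pvt_sketch *p, uint32_t v) { p->dct_cfg_sel = v; }

/* Mirrors the select logic: model 0x30 keeps two select bits, others keep one. */
static void select_dct(struct pvt_sketch *p, uint8_t dct)
{
        uint32_t reg = read_cfg(p);

        reg &= (p->model == 0x30) ? ~3u : ~1u;
        reg |= dct;
        write_cfg(p, reg);
}

int main(void)
{
        struct pvt_sketch p = { .model = 0x30 };
        uint8_t dct = 1;

        /* As in amd64_read_dct_pci_cfg(): DCT 1 maps to select value 3 on model 0x30. */
        dct = (dct && p.model == 0x30) ? 3 : dct;
        select_dct(&p, dct);

        printf("DCT_CFG_SEL now 0x%x\n", (unsigned)read_cfg(&p));
        return 0;
}
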
167 static inline void __f17h_set_scrubval(struct amd64_pvt *pvt, u32 scrubval) in __f17h_set_scrubval() argument
176 pci_write_bits32(pvt->F6, F17H_SCR_LIMIT_ADDR, scrubval, 0xF); in __f17h_set_scrubval()
177 pci_write_bits32(pvt->F6, F17H_SCR_BASE_ADDR, 1, 0x1); in __f17h_set_scrubval()
179 pci_write_bits32(pvt->F6, F17H_SCR_BASE_ADDR, 0, 0x1); in __f17h_set_scrubval()
186 static int __set_scrub_rate(struct amd64_pvt *pvt, u32 new_bw, u32 min_rate) in __set_scrub_rate() argument
214 if (pvt->fam == 0x17) { in __set_scrub_rate()
215 __f17h_set_scrubval(pvt, scrubval); in __set_scrub_rate()
216 } else if (pvt->fam == 0x15 && pvt->model == 0x60) { in __set_scrub_rate()
217 f15h_select_dct(pvt, 0); in __set_scrub_rate()
218 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
219 f15h_select_dct(pvt, 1); in __set_scrub_rate()
220 pci_write_bits32(pvt->F2, F15H_M60H_SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
222 pci_write_bits32(pvt->F3, SCRCTRL, scrubval, 0x001F); in __set_scrub_rate()
233 struct amd64_pvt *pvt = mci->pvt_info; in set_scrub_rate() local
236 if (pvt->fam == 0xf) in set_scrub_rate()
239 if (pvt->fam == 0x15) { in set_scrub_rate()
241 if (pvt->model < 0x10) in set_scrub_rate()
242 f15h_select_dct(pvt, 0); in set_scrub_rate()
244 if (pvt->model == 0x60) in set_scrub_rate()
247 return __set_scrub_rate(pvt, bw, min_scrubrate); in set_scrub_rate()
252 struct amd64_pvt *pvt = mci->pvt_info; in get_scrub_rate() local
256 switch (pvt->fam) { in get_scrub_rate()
259 if (pvt->model < 0x10) in get_scrub_rate()
260 f15h_select_dct(pvt, 0); in get_scrub_rate()
262 if (pvt->model == 0x60) in get_scrub_rate()
263 amd64_read_pci_cfg(pvt->F2, F15H_M60H_SCRCTRL, &scrubval); in get_scrub_rate()
267 amd64_read_pci_cfg(pvt->F6, F17H_SCR_BASE_ADDR, &scrubval); in get_scrub_rate()
269 amd64_read_pci_cfg(pvt->F6, F17H_SCR_LIMIT_ADDR, &scrubval); in get_scrub_rate()
278 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
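
Taken together, the __set_scrub_rate(), set_scrub_rate() and get_scrub_rate() references show the scrub-rate path forking by CPU family: Family 17h programs the F6 scrub base/limit registers, Family 15h model 0x60 writes F15H_M60H_SCRCTRL on both DCTs, and everything else uses the classic F3 SCRCTRL register. A hedged sketch of just that dispatch shape; the scrubval encoding and register offsets are omitted, and the apply_*() helpers are hypothetical stand-ins:

#include <stdint.h>
#include <stdio.h>

struct pvt_sketch {
        uint8_t fam, model;
};

/* Hypothetical stand-ins for the per-target register writes. */
static void apply_f17h(unsigned scrubval)               { printf("F6 SCR_LIMIT/SCR_BASE <- %u\n", scrubval); }
static void apply_f15h_m60h(int dct, unsigned scrubval) { printf("DCT%d F15H_M60H_SCRCTRL <- %u\n", dct, scrubval); }
static void apply_legacy(unsigned scrubval)             { printf("F3 SCRCTRL <- %u\n", scrubval); }

static void set_scrubval(struct pvt_sketch *p, unsigned scrubval)
{
        if (p->fam == 0x17) {
                apply_f17h(scrubval);
        } else if (p->fam == 0x15 && p->model == 0x60) {
                /* Model 0x60 has per-DCT scrub control: select and program both DCTs. */
                apply_f15h_m60h(0, scrubval);
                apply_f15h_m60h(1, scrubval);
        } else {
                apply_legacy(scrubval);
        }
}

int main(void)
{
        struct pvt_sketch p = { .fam = 0x15, .model = 0x60 };

        set_scrubval(&p, 0x5);
        return 0;
}
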
297 static bool base_limit_match(struct amd64_pvt *pvt, u64 sys_addr, u8 nid) in base_limit_match() argument
309 return ((addr >= get_dram_base(pvt, nid)) && in base_limit_match()
310 (addr <= get_dram_limit(pvt, nid))); in base_limit_match()
322 struct amd64_pvt *pvt; in find_mc_by_sys_addr() local
330 pvt = mci->pvt_info; in find_mc_by_sys_addr()
337 intlv_en = dram_intlv_en(pvt, 0); in find_mc_by_sys_addr()
341 if (base_limit_match(pvt, sys_addr, node_id)) in find_mc_by_sys_addr()
357 if ((dram_intlv_sel(pvt, node_id) & intlv_en) == bits) in find_mc_by_sys_addr()
365 if (unlikely(!base_limit_match(pvt, sys_addr, node_id))) { in find_mc_by_sys_addr()
386 static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct, in get_cs_base_and_mask() argument
392 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in get_cs_base_and_mask()
393 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
394 csmask = pvt->csels[dct].csmasks[csrow]; in get_cs_base_and_mask()
403 } else if (pvt->fam == 0x16 || in get_cs_base_and_mask()
404 (pvt->fam == 0x15 && pvt->model >= 0x30)) { in get_cs_base_and_mask()
405 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
406 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
421 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
422 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
425 if (pvt->fam == 0x15) in get_cs_base_and_mask()
442 #define for_each_chip_select(i, dct, pvt) \ argument
443 for (i = 0; i < pvt->csels[dct].b_cnt; i++)
445 #define chip_select_base(i, dct, pvt) \ argument
446 pvt->csels[dct].csbases[i]
448 #define for_each_chip_select_mask(i, dct, pvt) \ argument
449 for (i = 0; i < pvt->csels[dct].m_cnt; i++)
457 struct amd64_pvt *pvt; in input_addr_to_csrow() local
461 pvt = mci->pvt_info; in input_addr_to_csrow()
463 for_each_chip_select(csrow, 0, pvt) { in input_addr_to_csrow()
464 if (!csrow_enabled(csrow, 0, pvt)) in input_addr_to_csrow()
467 get_cs_base_and_mask(pvt, csrow, 0, &base, &mask); in input_addr_to_csrow()
474 pvt->mc_node_id); in input_addr_to_csrow()
480 (unsigned long)input_addr, pvt->mc_node_id); in input_addr_to_csrow()
504 struct amd64_pvt *pvt = mci->pvt_info; in amd64_get_dram_hole_info() local
507 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_E) { in amd64_get_dram_hole_info()
509 pvt->ext_model, pvt->mc_node_id); in amd64_get_dram_hole_info()
514 if (pvt->fam >= 0x10 && !dhar_mem_hoist_valid(pvt)) { in amd64_get_dram_hole_info()
519 if (!dhar_valid(pvt)) { in amd64_get_dram_hole_info()
521 pvt->mc_node_id); in amd64_get_dram_hole_info()
543 *hole_base = dhar_base(pvt); in amd64_get_dram_hole_info()
546 *hole_offset = (pvt->fam > 0xf) ? f10_dhar_offset(pvt) in amd64_get_dram_hole_info()
547 : k8_dhar_offset(pvt); in amd64_get_dram_hole_info()
550 pvt->mc_node_id, (unsigned long)*hole_base, in amd64_get_dram_hole_info()
588 struct amd64_pvt *pvt = mci->pvt_info; in sys_addr_to_dram_addr() local
592 dram_base = get_dram_base(pvt, pvt->mc_node_id); in sys_addr_to_dram_addr()
644 struct amd64_pvt *pvt; in dram_addr_to_input_addr() local
648 pvt = mci->pvt_info; in dram_addr_to_input_addr()
654 intlv_shift = num_node_interleave_bits(dram_intlv_en(pvt, 0)); in dram_addr_to_input_addr()
716 static unsigned long determine_edac_cap(struct amd64_pvt *pvt) in determine_edac_cap() argument
721 if (pvt->umc) { in determine_edac_cap()
725 if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT)) in determine_edac_cap()
731 if (pvt->umc[i].umc_cfg & BIT(12)) in determine_edac_cap()
738 bit = (pvt->fam > 0xf || pvt->ext_model >= K8_REV_F) in determine_edac_cap()
742 if (pvt->dclr0 & BIT(bit)) in determine_edac_cap()
751 static void debug_dump_dramcfg_low(struct amd64_pvt *pvt, u32 dclr, int chan) in debug_dump_dramcfg_low() argument
755 if (pvt->dram_type == MEM_LRDDR3) { in debug_dump_dramcfg_low()
756 u32 dcsm = pvt->csels[chan].csmasks[0]; in debug_dump_dramcfg_low()
772 if (pvt->fam == 0x10) in debug_dump_dramcfg_low()
783 static void debug_display_dimm_sizes_df(struct amd64_pvt *pvt, u8 ctrl) in debug_display_dimm_sizes_df() argument
793 if (csrow_enabled(cs0, ctrl, pvt)) in debug_display_dimm_sizes_df()
794 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, 0, cs0); in debug_display_dimm_sizes_df()
799 if (csrow_enabled(cs1, ctrl, pvt)) in debug_display_dimm_sizes_df()
800 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, 0, cs1); in debug_display_dimm_sizes_df()
808 static void __dump_misc_regs_df(struct amd64_pvt *pvt) in __dump_misc_regs_df() argument
815 umc = &pvt->umc[i]; in __dump_misc_regs_df()
822 amd_smn_read(pvt->mc_node_id, umc_base + UMCCH_ECC_BAD_SYMBOL, &tmp); in __dump_misc_regs_df()
825 amd_smn_read(pvt->mc_node_id, umc_base + UMCCH_UMC_CAP, &tmp); in __dump_misc_regs_df()
839 if (pvt->dram_type == MEM_LRDDR4) { in __dump_misc_regs_df()
840 amd_smn_read(pvt->mc_node_id, umc_base + UMCCH_ADDR_CFG, &tmp); in __dump_misc_regs_df()
845 debug_display_dimm_sizes_df(pvt, i); in __dump_misc_regs_df()
849 pvt->dhar, dhar_base(pvt)); in __dump_misc_regs_df()
853 static void __dump_misc_regs(struct amd64_pvt *pvt) in __dump_misc_regs() argument
855 edac_dbg(1, "F3xE8 (NB Cap): 0x%08x\n", pvt->nbcap); in __dump_misc_regs()
858 (pvt->nbcap & NBCAP_DCT_DUAL) ? "yes" : "no"); in __dump_misc_regs()
861 (pvt->nbcap & NBCAP_SECDED) ? "yes" : "no", in __dump_misc_regs()
862 (pvt->nbcap & NBCAP_CHIPKILL) ? "yes" : "no"); in __dump_misc_regs()
864 debug_dump_dramcfg_low(pvt, pvt->dclr0, 0); in __dump_misc_regs()
866 edac_dbg(1, "F3xB0 (Online Spare): 0x%08x\n", pvt->online_spare); in __dump_misc_regs()
869 pvt->dhar, dhar_base(pvt), in __dump_misc_regs()
870 (pvt->fam == 0xf) ? k8_dhar_offset(pvt) in __dump_misc_regs()
871 : f10_dhar_offset(pvt)); in __dump_misc_regs()
873 debug_display_dimm_sizes(pvt, 0); in __dump_misc_regs()
876 if (pvt->fam == 0xf) in __dump_misc_regs()
879 debug_display_dimm_sizes(pvt, 1); in __dump_misc_regs()
882 if (!dct_ganging_enabled(pvt)) in __dump_misc_regs()
883 debug_dump_dramcfg_low(pvt, pvt->dclr1, 1); in __dump_misc_regs()
887 static void dump_misc_regs(struct amd64_pvt *pvt) in dump_misc_regs() argument
889 if (pvt->umc) in dump_misc_regs()
890 __dump_misc_regs_df(pvt); in dump_misc_regs()
892 __dump_misc_regs(pvt); in dump_misc_regs()
894 edac_dbg(1, " DramHoleValid: %s\n", dhar_valid(pvt) ? "yes" : "no"); in dump_misc_regs()
897 ((pvt->ecc_sym_sz == 8) ? "x8" : "x4")); in dump_misc_regs()
903 static void prep_chip_selects(struct amd64_pvt *pvt) in prep_chip_selects() argument
905 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in prep_chip_selects()
906 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
907 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 8; in prep_chip_selects()
908 } else if (pvt->fam == 0x15 && pvt->model == 0x30) { in prep_chip_selects()
909 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 4; in prep_chip_selects()
910 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 2; in prep_chip_selects()
912 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
913 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 4; in prep_chip_selects()
920 static void read_dct_base_mask(struct amd64_pvt *pvt) in read_dct_base_mask() argument
924 prep_chip_selects(pvt); in read_dct_base_mask()
926 if (pvt->umc) { in read_dct_base_mask()
938 for_each_chip_select(cs, 0, pvt) { in read_dct_base_mask()
941 u32 *base0 = &pvt->csels[0].csbases[cs]; in read_dct_base_mask()
942 u32 *base1 = &pvt->csels[1].csbases[cs]; in read_dct_base_mask()
944 if (pvt->umc) { in read_dct_base_mask()
945 if (!amd_smn_read(pvt->mc_node_id, reg0, base0)) in read_dct_base_mask()
949 if (!amd_smn_read(pvt->mc_node_id, reg1, base1)) in read_dct_base_mask()
953 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, base0)) in read_dct_base_mask()
957 if (pvt->fam == 0xf) in read_dct_base_mask()
960 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, base1)) in read_dct_base_mask()
962 cs, *base1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
967 for_each_chip_select_mask(cs, 0, pvt) { in read_dct_base_mask()
970 u32 *mask0 = &pvt->csels[0].csmasks[cs]; in read_dct_base_mask()
971 u32 *mask1 = &pvt->csels[1].csmasks[cs]; in read_dct_base_mask()
973 if (pvt->umc) { in read_dct_base_mask()
974 if (!amd_smn_read(pvt->mc_node_id, reg0, mask0)) in read_dct_base_mask()
978 if (!amd_smn_read(pvt->mc_node_id, reg1, mask1)) in read_dct_base_mask()
982 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, mask0)) in read_dct_base_mask()
986 if (pvt->fam == 0xf) in read_dct_base_mask()
989 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, mask1)) in read_dct_base_mask()
991 cs, *mask1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
997 static void determine_memory_type(struct amd64_pvt *pvt) in determine_memory_type() argument
1001 switch (pvt->fam) { in determine_memory_type()
1003 if (pvt->ext_model >= K8_REV_F) in determine_memory_type()
1006 pvt->dram_type = (pvt->dclr0 & BIT(18)) ? MEM_DDR : MEM_RDDR; in determine_memory_type()
1010 if (pvt->dchr0 & DDR3_MODE) in determine_memory_type()
1013 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR2 : MEM_RDDR2; in determine_memory_type()
1017 if (pvt->model < 0x60) in determine_memory_type()
1029 amd64_read_dct_pci_cfg(pvt, 0, DRAM_CONTROL, &dram_ctrl); in determine_memory_type()
1030 dcsm = pvt->csels[0].csmasks[0]; in determine_memory_type()
1033 pvt->dram_type = MEM_DDR4; in determine_memory_type()
1034 else if (pvt->dclr0 & BIT(16)) in determine_memory_type()
1035 pvt->dram_type = MEM_DDR3; in determine_memory_type()
1037 pvt->dram_type = MEM_LRDDR3; in determine_memory_type()
1039 pvt->dram_type = MEM_RDDR3; in determine_memory_type()
1047 if ((pvt->umc[0].dimm_cfg | pvt->umc[1].dimm_cfg) & BIT(5)) in determine_memory_type()
1048 pvt->dram_type = MEM_LRDDR4; in determine_memory_type()
1049 else if ((pvt->umc[0].dimm_cfg | pvt->umc[1].dimm_cfg) & BIT(4)) in determine_memory_type()
1050 pvt->dram_type = MEM_RDDR4; in determine_memory_type()
1052 pvt->dram_type = MEM_DDR4; in determine_memory_type()
1056 WARN(1, KERN_ERR "%s: Family??? 0x%x\n", __func__, pvt->fam); in determine_memory_type()
1057 pvt->dram_type = MEM_EMPTY; in determine_memory_type()
1062 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3; in determine_memory_type()
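
For Family 17h, the determine_memory_type() references show the DIMM type being derived from the OR of both UMCs' dimm_cfg registers: bit 5 selects load-reduced, bit 4 registered, otherwise unbuffered DDR4. A small self-contained sketch of that decision, with an illustrative mem_type enum standing in for the kernel's MEM_* constants:

#include <stdint.h>
#include <stdio.h>

/* Illustrative subset of the kernel's MEM_* memory types. */
enum mem_type { DDR4, RDDR4, LRDDR4 };

static enum mem_type f17h_dram_type(uint32_t dimm_cfg0, uint32_t dimm_cfg1)
{
        uint32_t cfg = dimm_cfg0 | dimm_cfg1;   /* either UMC decides the node's type */

        if (cfg & (1u << 5))
                return LRDDR4;          /* load-reduced DIMMs present */
        if (cfg & (1u << 4))
                return RDDR4;           /* registered DIMMs present */
        return DDR4;                    /* plain unbuffered DDR4 */
}

int main(void)
{
        printf("type = %d\n", f17h_dram_type(1u << 4, 0));      /* registered DDR4 */
        return 0;
}
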
1066 static int k8_early_channel_count(struct amd64_pvt *pvt) in k8_early_channel_count() argument
1070 if (pvt->ext_model >= K8_REV_F) in k8_early_channel_count()
1072 flag = pvt->dclr0 & WIDTH_128; in k8_early_channel_count()
1075 flag = pvt->dclr0 & REVE_WIDTH_128; in k8_early_channel_count()
1078 pvt->dclr1 = 0; in k8_early_channel_count()
1084 static u64 get_error_address(struct amd64_pvt *pvt, struct mce *m) in get_error_address() argument
1096 pvt = mci->pvt_info; in get_error_address()
1098 if (pvt->fam == 0xf) { in get_error_address()
1108 if (pvt->fam == 0x15) { in get_error_address()
1117 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_LIM, &tmp); in get_error_address()
1132 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_BASE, &tmp); in get_error_address()
1165 static void read_dram_base_limit_regs(struct amd64_pvt *pvt, unsigned range) in read_dram_base_limit_regs() argument
1173 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_LO + off, &pvt->ranges[range].base.lo); in read_dram_base_limit_regs()
1174 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_LO + off, &pvt->ranges[range].lim.lo); in read_dram_base_limit_regs()
1176 if (pvt->fam == 0xf) in read_dram_base_limit_regs()
1179 if (!dram_rw(pvt, range)) in read_dram_base_limit_regs()
1182 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_HI + off, &pvt->ranges[range].base.hi); in read_dram_base_limit_regs()
1183 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_HI + off, &pvt->ranges[range].lim.hi); in read_dram_base_limit_regs()
1186 if (pvt->fam != 0x15) in read_dram_base_limit_regs()
1189 nb = node_to_amd_nb(dram_dst_node(pvt, range)); in read_dram_base_limit_regs()
1193 if (pvt->model == 0x60) in read_dram_base_limit_regs()
1195 else if (pvt->model == 0x30) in read_dram_base_limit_regs()
1206 pvt->ranges[range].lim.lo &= GENMASK_ULL(15, 0); in read_dram_base_limit_regs()
1209 pvt->ranges[range].lim.lo |= ((llim & 0x1fff) << 3 | 0x7) << 16; in read_dram_base_limit_regs()
1211 pvt->ranges[range].lim.hi &= GENMASK_ULL(7, 0); in read_dram_base_limit_regs()
1214 pvt->ranges[range].lim.hi |= llim >> 13; in read_dram_base_limit_regs()
1222 struct amd64_pvt *pvt = mci->pvt_info; in k8_map_sysaddr_to_csrow() local
1246 if (pvt->nbcfg & NBCFG_CHIPKILL) { in k8_map_sysaddr_to_csrow()
1287 static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in k8_dbam_to_chip_select() argument
1290 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in k8_dbam_to_chip_select()
1292 if (pvt->ext_model >= K8_REV_F) { in k8_dbam_to_chip_select()
1296 else if (pvt->ext_model >= K8_REV_D) { in k8_dbam_to_chip_select()
1342 static int f1x_early_channel_count(struct amd64_pvt *pvt) in f1x_early_channel_count() argument
1347 if (pvt->fam == 0x10 && (pvt->dclr0 & WIDTH_128)) in f1x_early_channel_count()
1366 u32 dbam = (i ? pvt->dbam1 : pvt->dbam0); in f1x_early_channel_count()
1384 static int f17_early_channel_count(struct amd64_pvt *pvt) in f17_early_channel_count() argument
1390 channels += !!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT); in f17_early_channel_count()
1454 static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f10_dbam_to_chip_select() argument
1457 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in f10_dbam_to_chip_select()
1461 if (pvt->dchr0 & DDR3_MODE || pvt->dchr1 & DDR3_MODE) in f10_dbam_to_chip_select()
1470 static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_dbam_to_chip_select() argument
1479 static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_m60h_dbam_to_chip_select() argument
1483 u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr]; in f15_m60h_dbam_to_chip_select()
1487 if (pvt->dram_type == MEM_DDR4) { in f15_m60h_dbam_to_chip_select()
1492 } else if (pvt->dram_type == MEM_LRDDR3) { in f15_m60h_dbam_to_chip_select()
1512 static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f16_dbam_to_chip_select() argument
1524 static int f17_base_addr_to_cs_size(struct amd64_pvt *pvt, u8 umc, in f17_base_addr_to_cs_size() argument
1527 u32 base_addr = pvt->csels[umc].csbases[csrow_nr]; in f17_base_addr_to_cs_size()
1530 u32 addr_mask = pvt->csels[umc].csmasks[csrow_nr >> 1]; in f17_base_addr_to_cs_size()
1541 static void read_dram_ctl_register(struct amd64_pvt *pvt) in read_dram_ctl_register() argument
1544 if (pvt->fam == 0xf) in read_dram_ctl_register()
1547 if (!amd64_read_pci_cfg(pvt->F2, DCT_SEL_LO, &pvt->dct_sel_lo)) { in read_dram_ctl_register()
1549 pvt->dct_sel_lo, dct_sel_baseaddr(pvt)); in read_dram_ctl_register()
1552 (dct_ganging_enabled(pvt) ? "ganged" : "unganged")); in read_dram_ctl_register()
1554 if (!dct_ganging_enabled(pvt)) in read_dram_ctl_register()
1556 (dct_high_range_enabled(pvt) ? "yes" : "no")); in read_dram_ctl_register()
1559 (dct_data_intlv_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
1560 (dct_memory_cleared(pvt) ? "yes" : "no")); in read_dram_ctl_register()
1564 (dct_interleave_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
1565 dct_sel_interleave_addr(pvt)); in read_dram_ctl_register()
1568 amd64_read_pci_cfg(pvt->F2, DCT_SEL_HI, &pvt->dct_sel_hi); in read_dram_ctl_register()
1575 static u8 f15_m30h_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f15_m30h_determine_channel() argument
1589 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_determine_channel()
1606 static u8 f1x_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f1x_determine_channel() argument
1609 u8 dct_sel_high = (pvt->dct_sel_lo >> 1) & 1; in f1x_determine_channel()
1611 if (dct_ganging_enabled(pvt)) in f1x_determine_channel()
1620 if (dct_interleave_enabled(pvt)) { in f1x_determine_channel()
1621 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f1x_determine_channel()
1643 if (dct_high_range_enabled(pvt)) in f1x_determine_channel()
1650 static u64 f1x_get_norm_dct_addr(struct amd64_pvt *pvt, u8 range, in f1x_get_norm_dct_addr() argument
1655 u64 dram_base = get_dram_base(pvt, range); in f1x_get_norm_dct_addr()
1656 u64 hole_off = f10_dhar_offset(pvt); in f1x_get_norm_dct_addr()
1657 u64 dct_sel_base_off = (u64)(pvt->dct_sel_hi & 0xFFFFFC00) << 16; in f1x_get_norm_dct_addr()
1672 dct_sel_base_addr < dhar_base(pvt)) && in f1x_get_norm_dct_addr()
1673 dhar_valid(pvt) && in f1x_get_norm_dct_addr()
1688 if (dhar_valid(pvt) && (sys_addr >= BIT_64(32))) in f1x_get_norm_dct_addr()
1701 static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow) in f10_process_possible_spare() argument
1705 if (online_spare_swap_done(pvt, dct) && in f10_process_possible_spare()
1706 csrow == online_spare_bad_dramcs(pvt, dct)) { in f10_process_possible_spare()
1708 for_each_chip_select(tmp_cs, dct, pvt) { in f10_process_possible_spare()
1709 if (chip_select_base(tmp_cs, dct, pvt) & 0x2) { in f10_process_possible_spare()
1729 struct amd64_pvt *pvt; in f1x_lookup_addr_in_dct() local
1738 pvt = mci->pvt_info; in f1x_lookup_addr_in_dct()
1742 for_each_chip_select(csrow, dct, pvt) { in f1x_lookup_addr_in_dct()
1743 if (!csrow_enabled(csrow, dct, pvt)) in f1x_lookup_addr_in_dct()
1746 get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask); in f1x_lookup_addr_in_dct()
1757 if (pvt->fam == 0x15 && pvt->model >= 0x30) { in f1x_lookup_addr_in_dct()
1761 cs_found = f10_process_possible_spare(pvt, dct, csrow); in f1x_lookup_addr_in_dct()
1775 static u64 f1x_swap_interleaved_region(struct amd64_pvt *pvt, u64 sys_addr) in f1x_swap_interleaved_region() argument
1779 if (pvt->fam == 0x10) { in f1x_swap_interleaved_region()
1781 if (pvt->model < 4 || (pvt->model < 0xa && pvt->stepping < 3)) in f1x_swap_interleaved_region()
1785 amd64_read_pci_cfg(pvt->F2, SWAP_INTLV_REG, &swap_reg); in f1x_swap_interleaved_region()
1805 static int f1x_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f1x_match_to_this_node() argument
1814 u8 node_id = dram_dst_node(pvt, range); in f1x_match_to_this_node()
1815 u8 intlv_en = dram_intlv_en(pvt, range); in f1x_match_to_this_node()
1816 u32 intlv_sel = dram_intlv_sel(pvt, range); in f1x_match_to_this_node()
1819 range, sys_addr, get_dram_limit(pvt, range)); in f1x_match_to_this_node()
1821 if (dhar_valid(pvt) && in f1x_match_to_this_node()
1822 dhar_base(pvt) <= sys_addr && in f1x_match_to_this_node()
1832 sys_addr = f1x_swap_interleaved_region(pvt, sys_addr); in f1x_match_to_this_node()
1834 dct_sel_base = dct_sel_baseaddr(pvt); in f1x_match_to_this_node()
1840 if (dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
1841 !dct_ganging_enabled(pvt) && in f1x_match_to_this_node()
1845 channel = f1x_determine_channel(pvt, sys_addr, high_range, intlv_en); in f1x_match_to_this_node()
1847 chan_addr = f1x_get_norm_dct_addr(pvt, range, sys_addr, in f1x_match_to_this_node()
1856 if (dct_interleave_enabled(pvt) && in f1x_match_to_this_node()
1857 !dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
1858 !dct_ganging_enabled(pvt)) { in f1x_match_to_this_node()
1860 if (dct_sel_interleave_addr(pvt) != 1) { in f1x_match_to_this_node()
1861 if (dct_sel_interleave_addr(pvt) == 0x3) in f1x_match_to_this_node()
1885 static int f15_m30h_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f15_m30h_match_to_this_node() argument
1895 u64 dhar_offset = f10_dhar_offset(pvt); in f15_m30h_match_to_this_node()
1896 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_match_to_this_node()
1897 u8 node_id = dram_dst_node(pvt, range); in f15_m30h_match_to_this_node()
1898 u8 intlv_en = dram_intlv_en(pvt, range); in f15_m30h_match_to_this_node()
1900 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_BASE, &dct_cont_base_reg); in f15_m30h_match_to_this_node()
1901 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_LIMIT, &dct_cont_limit_reg); in f15_m30h_match_to_this_node()
1907 range, sys_addr, get_dram_limit(pvt, range)); in f15_m30h_match_to_this_node()
1909 if (!(get_dram_base(pvt, range) <= sys_addr) && in f15_m30h_match_to_this_node()
1910 !(get_dram_limit(pvt, range) >= sys_addr)) in f15_m30h_match_to_this_node()
1913 if (dhar_valid(pvt) && in f15_m30h_match_to_this_node()
1914 dhar_base(pvt) <= sys_addr && in f15_m30h_match_to_this_node()
1922 dct_base = (u64) dct_sel_baseaddr(pvt); in f15_m30h_match_to_this_node()
1936 if (pvt->model >= 0x60) in f15_m30h_match_to_this_node()
1937 channel = f1x_determine_channel(pvt, sys_addr, false, intlv_en); in f15_m30h_match_to_this_node()
1939 channel = f15_m30h_determine_channel(pvt, sys_addr, intlv_en, in f15_m30h_match_to_this_node()
1979 amd64_read_pci_cfg(pvt->F1, in f15_m30h_match_to_this_node()
1985 f15h_select_dct(pvt, channel); in f15_m30h_match_to_this_node()
2007 static int f1x_translate_sysaddr_to_cs(struct amd64_pvt *pvt, in f1x_translate_sysaddr_to_cs() argument
2015 if (!dram_rw(pvt, range)) in f1x_translate_sysaddr_to_cs()
2018 if (pvt->fam == 0x15 && pvt->model >= 0x30) in f1x_translate_sysaddr_to_cs()
2019 cs_found = f15_m30h_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2023 else if ((get_dram_base(pvt, range) <= sys_addr) && in f1x_translate_sysaddr_to_cs()
2024 (get_dram_limit(pvt, range) >= sys_addr)) { in f1x_translate_sysaddr_to_cs()
2025 cs_found = f1x_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
2044 struct amd64_pvt *pvt = mci->pvt_info; in f1x_map_sysaddr_to_csrow() local
2048 err->csrow = f1x_translate_sysaddr_to_cs(pvt, sys_addr, &err->channel); in f1x_map_sysaddr_to_csrow()
2059 if (dct_ganging_enabled(pvt)) in f1x_map_sysaddr_to_csrow()
2067 static void debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in debug_display_dimm_sizes() argument
2070 u32 *dcsb = ctrl ? pvt->csels[1].csbases : pvt->csels[0].csbases; in debug_display_dimm_sizes()
2071 u32 dbam = ctrl ? pvt->dbam1 : pvt->dbam0; in debug_display_dimm_sizes()
2073 if (pvt->fam == 0xf) { in debug_display_dimm_sizes()
2075 if (pvt->ext_model < K8_REV_F) in debug_display_dimm_sizes()
2081 if (pvt->fam == 0x10) { in debug_display_dimm_sizes()
2082 dbam = (ctrl && !dct_ganging_enabled(pvt)) ? pvt->dbam1 in debug_display_dimm_sizes()
2083 : pvt->dbam0; in debug_display_dimm_sizes()
2084 dcsb = (ctrl && !dct_ganging_enabled(pvt)) ? in debug_display_dimm_sizes()
2085 pvt->csels[1].csbases : in debug_display_dimm_sizes()
2086 pvt->csels[0].csbases; in debug_display_dimm_sizes()
2088 dbam = pvt->dbam0; in debug_display_dimm_sizes()
2089 dcsb = pvt->csels[1].csbases; in debug_display_dimm_sizes()
2107 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
2113 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
2350 struct amd64_pvt *pvt = mci->pvt_info; in get_channel_from_ecc_syndrome() local
2353 if (pvt->ecc_sym_sz == 8) in get_channel_from_ecc_syndrome()
2356 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2357 else if (pvt->ecc_sym_sz == 4) in get_channel_from_ecc_syndrome()
2360 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2362 amd64_warn("Illegal syndrome type: %u\n", pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2366 return map_err_sym_to_channel(err_sym, pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2419 struct amd64_pvt *pvt; in decode_bus_error() local
2430 pvt = mci->pvt_info; in decode_bus_error()
2442 sys_addr = get_error_address(pvt, m); in decode_bus_error()
2447 pvt->ops->map_sysaddr_to_csrow(mci, sys_addr, &err); in decode_bus_error()
2457 static int find_umc_channel(struct amd64_pvt *pvt, struct mce *m) in find_umc_channel() argument
2474 struct amd64_pvt *pvt; in decode_umc_error() local
2482 pvt = mci->pvt_info; in decode_umc_error()
2489 err.channel = find_umc_channel(pvt, m); in decode_umc_error()
2495 if (umc_normaddr_to_sysaddr(m->addr, pvt->mc_node_id, err.channel, &sys_addr)) { in decode_umc_error()
2528 reserve_mc_sibling_devs(struct amd64_pvt *pvt, u16 pci_id1, u16 pci_id2) in reserve_mc_sibling_devs() argument
2530 if (pvt->umc) { in reserve_mc_sibling_devs()
2531 pvt->F0 = pci_get_related_function(pvt->F3->vendor, pci_id1, pvt->F3); in reserve_mc_sibling_devs()
2532 if (!pvt->F0) { in reserve_mc_sibling_devs()
2537 pvt->F6 = pci_get_related_function(pvt->F3->vendor, pci_id2, pvt->F3); in reserve_mc_sibling_devs()
2538 if (!pvt->F6) { in reserve_mc_sibling_devs()
2539 pci_dev_put(pvt->F0); in reserve_mc_sibling_devs()
2540 pvt->F0 = NULL; in reserve_mc_sibling_devs()
2546 edac_dbg(1, "F0: %s\n", pci_name(pvt->F0)); in reserve_mc_sibling_devs()
2547 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
2548 edac_dbg(1, "F6: %s\n", pci_name(pvt->F6)); in reserve_mc_sibling_devs()
2554 pvt->F1 = pci_get_related_function(pvt->F3->vendor, pci_id1, pvt->F3); in reserve_mc_sibling_devs()
2555 if (!pvt->F1) { in reserve_mc_sibling_devs()
2561 pvt->F2 = pci_get_related_function(pvt->F3->vendor, pci_id2, pvt->F3); in reserve_mc_sibling_devs()
2562 if (!pvt->F2) { in reserve_mc_sibling_devs()
2563 pci_dev_put(pvt->F1); in reserve_mc_sibling_devs()
2564 pvt->F1 = NULL; in reserve_mc_sibling_devs()
2570 edac_dbg(1, "F1: %s\n", pci_name(pvt->F1)); in reserve_mc_sibling_devs()
2571 edac_dbg(1, "F2: %s\n", pci_name(pvt->F2)); in reserve_mc_sibling_devs()
2572 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
2577 static void free_mc_sibling_devs(struct amd64_pvt *pvt) in free_mc_sibling_devs() argument
2579 if (pvt->umc) { in free_mc_sibling_devs()
2580 pci_dev_put(pvt->F0); in free_mc_sibling_devs()
2581 pci_dev_put(pvt->F6); in free_mc_sibling_devs()
2583 pci_dev_put(pvt->F1); in free_mc_sibling_devs()
2584 pci_dev_put(pvt->F2); in free_mc_sibling_devs()
2588 static void determine_ecc_sym_sz(struct amd64_pvt *pvt) in determine_ecc_sym_sz() argument
2590 pvt->ecc_sym_sz = 4; in determine_ecc_sym_sz()
2592 if (pvt->umc) { in determine_ecc_sym_sz()
2597 if ((pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) && in determine_ecc_sym_sz()
2598 (pvt->umc[i].ecc_ctrl & BIT(7))) { in determine_ecc_sym_sz()
2599 pvt->ecc_sym_sz = 8; in determine_ecc_sym_sz()
2607 if (pvt->fam >= 0x10) { in determine_ecc_sym_sz()
2610 amd64_read_pci_cfg(pvt->F3, EXT_NB_MCA_CFG, &tmp); in determine_ecc_sym_sz()
2612 if (pvt->fam != 0x16) in determine_ecc_sym_sz()
2613 amd64_read_dct_pci_cfg(pvt, 1, DBAM0, &pvt->dbam1); in determine_ecc_sym_sz()
2616 if ((pvt->fam > 0x10 || pvt->model > 7) && tmp & BIT(25)) in determine_ecc_sym_sz()
2617 pvt->ecc_sym_sz = 8; in determine_ecc_sym_sz()
2624 static void __read_mc_regs_df(struct amd64_pvt *pvt) in __read_mc_regs_df() argument
2626 u8 nid = pvt->mc_node_id; in __read_mc_regs_df()
2634 umc = &pvt->umc[i]; in __read_mc_regs_df()
2648 static void read_mc_regs(struct amd64_pvt *pvt) in read_mc_regs() argument
2657 rdmsrl(MSR_K8_TOP_MEM1, pvt->top_mem); in read_mc_regs()
2658 edac_dbg(0, " TOP_MEM: 0x%016llx\n", pvt->top_mem); in read_mc_regs()
2663 rdmsrl(MSR_K8_TOP_MEM2, pvt->top_mem2); in read_mc_regs()
2664 edac_dbg(0, " TOP_MEM2: 0x%016llx\n", pvt->top_mem2); in read_mc_regs()
2669 if (pvt->umc) { in read_mc_regs()
2670 __read_mc_regs_df(pvt); in read_mc_regs()
2671 amd64_read_pci_cfg(pvt->F0, DF_DHAR, &pvt->dhar); in read_mc_regs()
2676 amd64_read_pci_cfg(pvt->F3, NBCAP, &pvt->nbcap); in read_mc_regs()
2678 read_dram_ctl_register(pvt); in read_mc_regs()
2684 read_dram_base_limit_regs(pvt, range); in read_mc_regs()
2686 rw = dram_rw(pvt, range); in read_mc_regs()
2692 get_dram_base(pvt, range), in read_mc_regs()
2693 get_dram_limit(pvt, range)); in read_mc_regs()
2696 dram_intlv_en(pvt, range) ? "Enabled" : "Disabled", in read_mc_regs()
2699 dram_intlv_sel(pvt, range), in read_mc_regs()
2700 dram_dst_node(pvt, range)); in read_mc_regs()
2703 amd64_read_pci_cfg(pvt->F1, DHAR, &pvt->dhar); in read_mc_regs()
2704 amd64_read_dct_pci_cfg(pvt, 0, DBAM0, &pvt->dbam0); in read_mc_regs()
2706 amd64_read_pci_cfg(pvt->F3, F10_ONLINE_SPARE, &pvt->online_spare); in read_mc_regs()
2708 amd64_read_dct_pci_cfg(pvt, 0, DCLR0, &pvt->dclr0); in read_mc_regs()
2709 amd64_read_dct_pci_cfg(pvt, 0, DCHR0, &pvt->dchr0); in read_mc_regs()
2711 if (!dct_ganging_enabled(pvt)) { in read_mc_regs()
2712 amd64_read_dct_pci_cfg(pvt, 1, DCLR0, &pvt->dclr1); in read_mc_regs()
2713 amd64_read_dct_pci_cfg(pvt, 1, DCHR0, &pvt->dchr1); in read_mc_regs()
2717 read_dct_base_mask(pvt); in read_mc_regs()
2719 determine_memory_type(pvt); in read_mc_regs()
2720 edac_dbg(1, " DIMM type: %s\n", edac_mem_types[pvt->dram_type]); in read_mc_regs()
2722 determine_ecc_sym_sz(pvt); in read_mc_regs()
2724 dump_misc_regs(pvt); in read_mc_regs()
2761 static u32 get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr_orig) in get_csrow_nr_pages() argument
2763 u32 dbam = dct ? pvt->dbam1 : pvt->dbam0; in get_csrow_nr_pages()
2767 if (!pvt->umc) in get_csrow_nr_pages()
2772 nr_pages = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, csrow_nr); in get_csrow_nr_pages()
2788 struct amd64_pvt *pvt = mci->pvt_info; in init_csrows() local
2796 if (!pvt->umc) { in init_csrows()
2797 amd64_read_pci_cfg(pvt->F3, NBCFG, &val); in init_csrows()
2799 pvt->nbcfg = val; in init_csrows()
2802 pvt->mc_node_id, val, in init_csrows()
2809 for_each_chip_select(i, 0, pvt) { in init_csrows()
2810 bool row_dct0 = !!csrow_enabled(i, 0, pvt); in init_csrows()
2813 if (pvt->fam != 0xf) in init_csrows()
2814 row_dct1 = !!csrow_enabled(i, 1, pvt); in init_csrows()
2823 pvt->mc_node_id, i); in init_csrows()
2826 nr_pages = get_csrow_nr_pages(pvt, 0, i); in init_csrows()
2831 if (pvt->fam != 0xf && row_dct1) { in init_csrows()
2832 int row_dct1_pages = get_csrow_nr_pages(pvt, 1, i); in init_csrows()
2841 if (pvt->umc) { in init_csrows()
2847 } else if (pvt->nbcfg & NBCFG_ECC_ENABLE) { in init_csrows()
2848 edac_mode = (pvt->nbcfg & NBCFG_CHIPKILL) in init_csrows()
2853 for (j = 0; j < pvt->channel_count; j++) { in init_csrows()
2855 dimm->mtype = pvt->dram_type; in init_csrows()
3093 f17h_determine_edac_ctl_cap(struct mem_ctl_info *mci, struct amd64_pvt *pvt) in f17h_determine_edac_ctl_cap() argument
3098 if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) { in f17h_determine_edac_ctl_cap()
3099 ecc_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_ENABLED); in f17h_determine_edac_ctl_cap()
3100 cpk_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_CHIPKILL_CAP); in f17h_determine_edac_ctl_cap()
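
The f17h_determine_edac_ctl_cap() references show ECC and chipkill capability being AND-accumulated across every initialized UMC, so a single non-capable channel clears the node-wide flag. A standalone sketch of that accumulation; struct umc_sketch, the bit positions and the NUM_UMCS value here are illustrative stand-ins:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_UMCS        2               /* illustrative; the driver sizes this per family */
#define SDP_INIT        (1u << 31)      /* stand-in for UMC_SDP_INIT */
#define ECC_ENABLED     (1u << 30)      /* stand-in for UMC_ECC_ENABLED */
#define CHIPKILL_CAP    (1u << 31)      /* stand-in for UMC_ECC_CHIPKILL_CAP */

struct umc_sketch {
        uint32_t sdp_ctrl;
        uint32_t umc_cap_hi;
};

int main(void)
{
        struct umc_sketch umc[NUM_UMCS] = {
                { .sdp_ctrl = SDP_INIT, .umc_cap_hi = ECC_ENABLED | CHIPKILL_CAP },
                { .sdp_ctrl = SDP_INIT, .umc_cap_hi = ECC_ENABLED },    /* no chipkill here */
        };
        bool ecc_en = true, cpk_en = true;

        /* Only UMCs that finished SDP init count; one miss clears the node-wide flag. */
        for (int i = 0; i < NUM_UMCS; i++) {
                if (umc[i].sdp_ctrl & SDP_INIT) {
                        ecc_en &= !!(umc[i].umc_cap_hi & ECC_ENABLED);
                        cpk_en &= !!(umc[i].umc_cap_hi & CHIPKILL_CAP);
                }
        }

        printf("ecc_en=%d cpk_en=%d\n", ecc_en, cpk_en);
        return 0;
}
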
3116 struct amd64_pvt *pvt = mci->pvt_info; in setup_mci_misc_attrs() local
3121 if (pvt->umc) { in setup_mci_misc_attrs()
3122 f17h_determine_edac_ctl_cap(mci, pvt); in setup_mci_misc_attrs()
3124 if (pvt->nbcap & NBCAP_SECDED) in setup_mci_misc_attrs()
3127 if (pvt->nbcap & NBCAP_CHIPKILL) in setup_mci_misc_attrs()
3131 mci->edac_cap = determine_edac_cap(pvt); in setup_mci_misc_attrs()
3134 mci->dev_name = pci_name(pvt->F3); in setup_mci_misc_attrs()
3145 static struct amd64_family_type *per_family_init(struct amd64_pvt *pvt) in per_family_init() argument
3149 pvt->ext_model = boot_cpu_data.x86_model >> 4; in per_family_init()
3150 pvt->stepping = boot_cpu_data.x86_stepping; in per_family_init()
3151 pvt->model = boot_cpu_data.x86_model; in per_family_init()
3152 pvt->fam = boot_cpu_data.x86; in per_family_init()
3154 switch (pvt->fam) { in per_family_init()
3157 pvt->ops = &family_types[K8_CPUS].ops; in per_family_init()
3162 pvt->ops = &family_types[F10_CPUS].ops; in per_family_init()
3166 if (pvt->model == 0x30) { in per_family_init()
3168 pvt->ops = &family_types[F15_M30H_CPUS].ops; in per_family_init()
3170 } else if (pvt->model == 0x60) { in per_family_init()
3172 pvt->ops = &family_types[F15_M60H_CPUS].ops; in per_family_init()
3177 pvt->ops = &family_types[F15_CPUS].ops; in per_family_init()
3181 if (pvt->model == 0x30) { in per_family_init()
3183 pvt->ops = &family_types[F16_M30H_CPUS].ops; in per_family_init()
3187 pvt->ops = &family_types[F16_CPUS].ops; in per_family_init()
3192 pvt->ops = &family_types[F17_CPUS].ops; in per_family_init()
3201 (pvt->fam == 0xf ? in per_family_init()
3202 (pvt->ext_model >= K8_REV_F ? "revF or later " in per_family_init()
3204 : ""), pvt->mc_node_id); in per_family_init()
3224 struct amd64_pvt *pvt = NULL; in init_one_instance() local
3229 pvt = kzalloc(sizeof(struct amd64_pvt), GFP_KERNEL); in init_one_instance()
3230 if (!pvt) in init_one_instance()
3233 pvt->mc_node_id = nid; in init_one_instance()
3234 pvt->F3 = F3; in init_one_instance()
3237 fam_type = per_family_init(pvt); in init_one_instance()
3241 if (pvt->fam >= 0x17) { in init_one_instance()
3242 pvt->umc = kcalloc(NUM_UMCS, sizeof(struct amd64_umc), GFP_KERNEL); in init_one_instance()
3243 if (!pvt->umc) { in init_one_instance()
3255 err = reserve_mc_sibling_devs(pvt, pci_id1, pci_id2); in init_one_instance()
3259 read_mc_regs(pvt); in init_one_instance()
3267 pvt->channel_count = pvt->ops->early_channel_count(pvt); in init_one_instance()
3268 if (pvt->channel_count < 0) in init_one_instance()
3273 layers[0].size = pvt->csels[0].b_cnt; in init_one_instance()
3289 mci->pvt_info = pvt; in init_one_instance()
3290 mci->pdev = &pvt->F3->dev; in init_one_instance()
3309 free_mc_sibling_devs(pvt); in init_one_instance()
3312 if (pvt->fam >= 0x17) in init_one_instance()
3313 kfree(pvt->umc); in init_one_instance()
3316 kfree(pvt); in init_one_instance()
3376 struct amd64_pvt *pvt; in remove_one_instance() local
3386 pvt = mci->pvt_info; in remove_one_instance()
3390 free_mc_sibling_devs(pvt); in remove_one_instance()
3398 kfree(pvt); in remove_one_instance()
3405 struct amd64_pvt *pvt; in setup_pci_device() local
3414 pvt = mci->pvt_info; in setup_pci_device()
3415 if (pvt->umc) in setup_pci_device()
3416 pci_ctl = edac_pci_create_generic_ctl(&pvt->F0->dev, EDAC_MOD_STR); in setup_pci_device()
3418 pci_ctl = edac_pci_create_generic_ctl(&pvt->F2->dev, EDAC_MOD_STR); in setup_pci_device()