Lines Matching refs:imc

90 static void __enable_retry_rd_err_log(struct skx_imc *imc, int chan, bool enable,  in __enable_retry_rd_err_log()  argument
96 s = I10NM_GET_REG32(imc, chan, offsets_scrub[0]); in __enable_retry_rd_err_log()
97 d = I10NM_GET_REG32(imc, chan, offsets_demand[0]); in __enable_retry_rd_err_log()
99 d2 = I10NM_GET_REG32(imc, chan, offsets_demand2[0]); in __enable_retry_rd_err_log()
103 imc->chan[chan].retry_rd_err_log_s = s; in __enable_retry_rd_err_log()
104 imc->chan[chan].retry_rd_err_log_d = d; in __enable_retry_rd_err_log()
106 imc->chan[chan].retry_rd_err_log_d2 = d2; in __enable_retry_rd_err_log()
120 if (imc->chan[chan].retry_rd_err_log_s & RETRY_RD_ERR_LOG_UC) in __enable_retry_rd_err_log()
122 if (imc->chan[chan].retry_rd_err_log_s & RETRY_RD_ERR_LOG_NOOVER) in __enable_retry_rd_err_log()
124 if (!(imc->chan[chan].retry_rd_err_log_s & RETRY_RD_ERR_LOG_EN)) in __enable_retry_rd_err_log()
126 if (imc->chan[chan].retry_rd_err_log_d & RETRY_RD_ERR_LOG_UC) in __enable_retry_rd_err_log()
128 if (imc->chan[chan].retry_rd_err_log_d & RETRY_RD_ERR_LOG_NOOVER) in __enable_retry_rd_err_log()
130 if (!(imc->chan[chan].retry_rd_err_log_d & RETRY_RD_ERR_LOG_EN)) in __enable_retry_rd_err_log()
134 if (imc->chan[chan].retry_rd_err_log_d2 & RETRY_RD_ERR_LOG_UC) in __enable_retry_rd_err_log()
136 if (!(imc->chan[chan].retry_rd_err_log_d2 & RETRY_RD_ERR_LOG_NOOVER)) in __enable_retry_rd_err_log()
138 if (!(imc->chan[chan].retry_rd_err_log_d2 & RETRY_RD_ERR_LOG_EN)) in __enable_retry_rd_err_log()
143 I10NM_SET_REG32(imc, chan, offsets_scrub[0], s); in __enable_retry_rd_err_log()
144 I10NM_SET_REG32(imc, chan, offsets_demand[0], d); in __enable_retry_rd_err_log()
146 I10NM_SET_REG32(imc, chan, offsets_demand2[0], d2); in __enable_retry_rd_err_log()
151 struct skx_imc *imc; in enable_retry_rd_err_log() local
159 imc = &d->imc[i]; in enable_retry_rd_err_log()
160 if (!imc->mbase) in enable_retry_rd_err_log()
164 if (imc->hbm_mc) { in enable_retry_rd_err_log()
165 __enable_retry_rd_err_log(imc, j, enable, in enable_retry_rd_err_log()
169 __enable_retry_rd_err_log(imc, j, enable, in enable_retry_rd_err_log()
174 __enable_retry_rd_err_log(imc, j, enable, in enable_retry_rd_err_log()
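
Note: the two functions referenced above form an enable/restore pair. __enable_retry_rd_err_log() latches the firmware's RETRY_RD_ERR_LOG control bits per channel when logging is switched on, so that switching it off can put each bit back; enable_retry_rd_err_log() walks d->imc[i] and its channels, skips controllers with no mapped mbase, and hands either the HBM or the DDR offset tables to the helper. A minimal standalone C sketch of the save/restore idea, inferred from the matches (the register helpers, the bit positions, and what the enable path sets are assumptions, not the driver source):

#include <stdbool.h>
#include <stdint.h>

/* Placeholder bit positions; the driver's RETRY_RD_ERR_LOG_* values may differ. */
#define LOG_UC     (1u << 1)
#define LOG_NOOVER (1u << 14)
#define LOG_EN     (1u << 15)

/* Stand-ins for the I10NM_GET_REG32()/I10NM_SET_REG32() accessors; the real
 * driver does readl()/writel() on per-channel MMIO. */
static uint32_t reg_read32(volatile uint32_t *reg)              { return *reg; }
static void     reg_write32(volatile uint32_t *reg, uint32_t v) { *reg = v; }

struct chan_ctl {
    uint32_t saved;    /* register value latched when logging was switched on */
};

static void toggle_retry_rd_err_log(struct chan_ctl *ctl,
                                    volatile uint32_t *reg, bool enable)
{
    uint32_t v = reg_read32(reg);

    if (enable) {
        ctl->saved = v;    /* remember the firmware defaults */
        v |= LOG_EN;       /* what else the enable path sets is not visible in the matches */
    } else {
        /* Put every control bit back to the state saved at enable time. */
        if (ctl->saved & LOG_UC)
            v |= LOG_UC;
        if (ctl->saved & LOG_NOOVER)
            v |= LOG_NOOVER;
        if (!(ctl->saved & LOG_EN))
            v &= ~LOG_EN;
    }
    reg_write32(reg, v);
}
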
186 struct skx_imc *imc = &res->dev->imc[res->imc]; in show_retry_rd_err_log() local
196 if (!imc->mbase) in show_retry_rd_err_log()
199 if (imc->hbm_mc) { in show_retry_rd_err_log()
217 log0 = I10NM_GET_REG32(imc, res->channel, offsets[0]); in show_retry_rd_err_log()
218 log1 = I10NM_GET_REG32(imc, res->channel, offsets[1]); in show_retry_rd_err_log()
219 log3 = I10NM_GET_REG32(imc, res->channel, offsets[3]); in show_retry_rd_err_log()
220 log4 = I10NM_GET_REG32(imc, res->channel, offsets[4]); in show_retry_rd_err_log()
221 log5 = I10NM_GET_REG64(imc, res->channel, offsets[5]); in show_retry_rd_err_log()
224 lxg0 = I10NM_GET_REG32(imc, res->channel, xffsets[0]); in show_retry_rd_err_log()
225 lxg1 = I10NM_GET_REG32(imc, res->channel, xffsets[1]); in show_retry_rd_err_log()
226 lxg3 = I10NM_GET_REG32(imc, res->channel, xffsets[3]); in show_retry_rd_err_log()
227 lxg4 = I10NM_GET_REG32(imc, res->channel, xffsets[4]); in show_retry_rd_err_log()
228 lxg5 = I10NM_GET_REG64(imc, res->channel, xffsets[5]); in show_retry_rd_err_log()
232 log2a = I10NM_GET_REG64(imc, res->channel, offsets[2]); in show_retry_rd_err_log()
238 lxg2a = I10NM_GET_REG64(imc, res->channel, xffsets[2]); in show_retry_rd_err_log()
246 log2 = I10NM_GET_REG32(imc, res->channel, offsets[2]); in show_retry_rd_err_log()
251 if (imc->hbm_mc) { in show_retry_rd_err_log()
253 corr0 = I10NM_GET_REG32(imc, res->channel, 0x2c18); in show_retry_rd_err_log()
254 corr1 = I10NM_GET_REG32(imc, res->channel, 0x2c1c); in show_retry_rd_err_log()
255 corr2 = I10NM_GET_REG32(imc, res->channel, 0x2c20); in show_retry_rd_err_log()
256 corr3 = I10NM_GET_REG32(imc, res->channel, 0x2c24); in show_retry_rd_err_log()
258 corr0 = I10NM_GET_REG32(imc, res->channel, 0x2818); in show_retry_rd_err_log()
259 corr1 = I10NM_GET_REG32(imc, res->channel, 0x281c); in show_retry_rd_err_log()
260 corr2 = I10NM_GET_REG32(imc, res->channel, 0x2820); in show_retry_rd_err_log()
261 corr3 = I10NM_GET_REG32(imc, res->channel, 0x2824); in show_retry_rd_err_log()
264 corr0 = I10NM_GET_REG32(imc, res->channel, 0x22c18); in show_retry_rd_err_log()
265 corr1 = I10NM_GET_REG32(imc, res->channel, 0x22c1c); in show_retry_rd_err_log()
266 corr2 = I10NM_GET_REG32(imc, res->channel, 0x22c20); in show_retry_rd_err_log()
267 corr3 = I10NM_GET_REG32(imc, res->channel, 0x22c24); in show_retry_rd_err_log()
282 I10NM_SET_REG32(imc, res->channel, offsets[0], log0); in show_retry_rd_err_log()
287 I10NM_SET_REG32(imc, res->channel, xffsets[0], lxg0); in show_retry_rd_err_log()
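
Note: almost every match in show_retry_rd_err_log() goes through the I10NM_GET_REG32()/I10NM_GET_REG64()/I10NM_SET_REG32() accessors, which resolve a (channel, offset) pair inside the controller's mapped register block. A standalone sketch of that addressing, assuming the usual mbase + chan * chan_mmio_sz + offset layout (the struct and helper names here are illustrative; the kernel does this with readl()/readq()/writel() on __iomem pointers):

#include <stdint.h>

struct imc_model {
    uint8_t *mbase;             /* mapped register block of one iMC */
    unsigned long chan_mmio_sz; /* size of one channel's register window */
};

static uint32_t imc_get_reg32(const struct imc_model *imc, int chan, unsigned long off)
{
    return *(volatile uint32_t *)(imc->mbase + chan * imc->chan_mmio_sz + off);
}

static uint64_t imc_get_reg64(const struct imc_model *imc, int chan, unsigned long off)
{
    return *(volatile uint64_t *)(imc->mbase + chan * imc->chan_mmio_sz + off);
}

static void imc_set_reg32(const struct imc_model *imc, int chan, unsigned long off, uint32_t v)
{
    *(volatile uint32_t *)(imc->mbase + chan * imc->chan_mmio_sz + off) = v;
}

This is why one routine can serve both controller types: the log reads differ only in which offsets[]/xffsets[] table is passed, and the correction-count reads switch between literal offsets (the 0x2c18... and 0x2818... groups inside the imc->hbm_mc branch, 0x22c18... otherwise).
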
402 if (d->imc[0].src_id == m->socketid) { in i10nm_mc_decode()
412 res->imc = bank / 4; in i10nm_mc_decode()
421 m->socketid, res->imc); in i10nm_mc_decode()
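
Note: the decode matches show two steps worth calling out: the socket is found by comparing d->imc[0].src_id with the machine check's socketid, and the controller index is then derived from the MCA bank number (res->imc = bank / 4). A small sketch of just those two steps; the surrounding list iteration, the bank base subtracted beforehand, and any field names beyond the matches are assumptions:

#include <stdint.h>
#include <stddef.h>

struct mce_model { uint8_t socketid; uint8_t bank; };
struct dev_model { uint8_t src_id; };

static int decode_imc(const struct dev_model *devs, size_t ndevs,
                      const struct mce_model *m, size_t *dev_idx)
{
    size_t i;

    /* d->imc[0].src_id == m->socketid in the driver */
    for (i = 0; i < ndevs; i++) {
        if (devs[i].src_id == m->socketid) {
            *dev_idx = i;
            break;
        }
    }
    if (i == ndevs)
        return -1;    /* corresponds to the "No device for src_id %d imc %d" error */

    /* Four MCA banks appear to map onto one memory controller; the bank
     * base subtracted before the division is not visible in the matches. */
    return m->bank / 4;
}
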
475 d->imc[i].mdev = mdev; in i10nm_get_ddr_munits()
494 d->imc[i].mbase = mbase; in i10nm_get_ddr_munits()
556 d->imc[lmc].mdev = mdev; in i10nm_get_hbm_munits()
564 pci_dev_put(d->imc[lmc].mdev); in i10nm_get_hbm_munits()
565 d->imc[lmc].mdev = NULL; in i10nm_get_hbm_munits()
572 d->imc[lmc].mbase = mbase; in i10nm_get_hbm_munits()
573 d->imc[lmc].hbm_mc = true; in i10nm_get_hbm_munits()
575 mcmtr = I10NM_GET_MCMTR(&d->imc[lmc], 0); in i10nm_get_hbm_munits()
577 iounmap(d->imc[lmc].mbase); in i10nm_get_hbm_munits()
578 d->imc[lmc].mbase = NULL; in i10nm_get_hbm_munits()
579 d->imc[lmc].hbm_mc = false; in i10nm_get_hbm_munits()
580 pci_dev_put(d->imc[lmc].mdev); in i10nm_get_hbm_munits()
581 d->imc[lmc].mdev = NULL; in i10nm_get_hbm_munits()
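
Note: the HBM probe matches trace an acquire/validate/unwind pattern: remember the PCI device, map the register block, mark the controller as HBM, read MCMTR once as a sanity check, and if it does not look like an HBM iMC undo everything in reverse order (iounmap, clear mbase and hbm_mc, pci_dev_put, clear mdev). A self-contained sketch of that shape (the helper names are hypothetical stand-ins for ioremap()/iounmap(), pci_dev_put() and the I10NM_GET_MCMTR() check, and the MCMTR validation is stubbed):

#include <stdbool.h>
#include <stdint.h>
#include <stddef.h>

struct hbm_mc_model {
    void *mdev;      /* device that provides the controller registers */
    uint8_t *mbase;  /* mapped register block */
    bool hbm_mc;
};

static uint8_t fake_regs[0x100];
static uint8_t *map_regs(void *mdev)   { (void)mdev; return fake_regs; }
static void unmap_regs(uint8_t *mbase) { (void)mbase; }
static void put_dev(void *mdev)        { (void)mdev; }
static bool mcmtr_looks_valid(const uint8_t *mbase) { return mbase != NULL; } /* stub */

static int probe_hbm_mc(struct hbm_mc_model *mc, void *mdev)
{
    mc->mdev  = mdev;
    mc->mbase = map_regs(mdev);
    if (!mc->mbase) {
        put_dev(mc->mdev);   /* nothing mapped yet: only drop the device */
        mc->mdev = NULL;
        return -1;
    }
    mc->hbm_mc = true;

    if (!mcmtr_looks_valid(mc->mbase)) {
        unmap_regs(mc->mbase);   /* undo everything in reverse order */
        mc->mbase  = NULL;
        mc->hbm_mc = false;
        put_dev(mc->mdev);
        mc->mdev   = NULL;
        return -1;
    }
    return 0;
}
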
645 static bool i10nm_check_ecc(struct skx_imc *imc, int chan) in i10nm_check_ecc() argument
649 mcmtr = I10NM_GET_MCMTR(imc, chan); in i10nm_check_ecc()
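
Note: i10nm_check_ecc() reduces to reading the channel's MCMTR register and testing its ECC-enabled flag. A tiny sketch; the bit position used below is an assumption (the driver extracts the field from the real MCMTR layout):

#include <stdbool.h>
#include <stdint.h>

#define MCMTR_ECC_EN (1u << 2)   /* assumed position of the ECC-enabled bit */

static bool check_ecc(uint32_t mcmtr)
{
    return (mcmtr & MCMTR_ECC_EN) != 0;
}
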
659 struct skx_imc *imc = pvt->imc; in i10nm_get_dimm_config() local
664 for (i = 0; i < imc->num_channels; i++) { in i10nm_get_dimm_config()
665 if (!imc->mbase) in i10nm_get_dimm_config()
669 amap = I10NM_GET_AMAP(imc, i); in i10nm_get_dimm_config()
670 mcddrtcfg = I10NM_GET_MCDDRTCFG(imc, i); in i10nm_get_dimm_config()
671 for (j = 0; j < imc->num_dimms; j++) { in i10nm_get_dimm_config()
673 mtr = I10NM_GET_DIMMMTR(imc, i, j); in i10nm_get_dimm_config()
675 mtr, mcddrtcfg, imc->mc, i, j); in i10nm_get_dimm_config()
679 imc, i, j, cfg); in i10nm_get_dimm_config()
681 ndimms += skx_get_nvdimm_info(dimm, imc, i, j, in i10nm_get_dimm_config()
684 if (ndimms && !i10nm_check_ecc(imc, i)) { in i10nm_get_dimm_config()
686 imc->mc, i); in i10nm_get_dimm_config()
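
Note: the enumeration loop in i10nm_get_dimm_config() nests per-channel and per-DIMM passes: skip channels of an unmapped controller, read AMAP and MCDDRTCFG once per channel, read DIMMMTR per slot and hand the raw values to the skx_common helpers, then refuse a controller whose populated channel reports ECC disabled (the check at 684-686). A sketch of that control flow; the reader callbacks and the "DIMM present" test are placeholders, and the AMAP/MCDDRTCFG reads are elided:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct imc_geom {
    bool mapped;       /* stands for imc->mbase != NULL */
    int mc;            /* controller number used in the error message */
    int num_channels;
    int num_dimms;
};

typedef uint32_t (*dimm_reg_reader)(int chan, int dimm);
typedef bool     (*ecc_checker)(int chan);

static int enumerate_dimms(const struct imc_geom *imc,
                           dimm_reg_reader read_dimmmtr, ecc_checker ecc_enabled)
{
    int i, j, ndimms;

    for (i = 0; i < imc->num_channels; i++) {
        if (!imc->mapped)    /* controller registers were never mapped */
            continue;

        ndimms = 0;
        for (j = 0; j < imc->num_dimms; j++) {
            uint32_t mtr = read_dimmmtr(i, j);   /* per-slot DIMMMTR */

            if (mtr)    /* placeholder for the real "DIMM present" decode */
                ndimms++;
        }

        /* A populated channel without ECC makes the controller unusable for EDAC. */
        if (ndimms && !ecc_enabled(i)) {
            fprintf(stderr, "ECC is disabled on imc %d channel %d\n", imc->mc, i);
            return -1;
        }
    }
    return 0;
}
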
804 if (!d->imc[i].mdev) in i10nm_init()
807 d->imc[i].mc = mc++; in i10nm_init()
808 d->imc[i].lmc = i; in i10nm_init()
809 d->imc[i].src_id = src_id; in i10nm_init()
810 d->imc[i].node_id = node_id; in i10nm_init()
811 if (d->imc[i].hbm_mc) { in i10nm_init()
812 d->imc[i].chan_mmio_sz = cfg->hbm_chan_mmio_sz; in i10nm_init()
813 d->imc[i].num_channels = I10NM_NUM_HBM_CHANNELS; in i10nm_init()
814 d->imc[i].num_dimms = I10NM_NUM_HBM_DIMMS; in i10nm_init()
816 d->imc[i].chan_mmio_sz = cfg->ddr_chan_mmio_sz; in i10nm_init()
817 d->imc[i].num_channels = I10NM_NUM_DDR_CHANNELS; in i10nm_init()
818 d->imc[i].num_dimms = I10NM_NUM_DDR_DIMMS; in i10nm_init()
821 rc = skx_register_mci(&d->imc[i], d->imc[i].mdev, in i10nm_init()
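
Note: the i10nm_init() matches show the per-iMC setup before registration: slots without a discovered memory-controller device are skipped, each remaining iMC gets its global MC number, local index, src_id and node_id, and its channel geometry is chosen by the hbm_mc flag before skx_register_mci() is called. A standalone sketch of that loop; the geometry constants and the register_mci() stub are placeholders for the driver's I10NM_NUM_* constants, cfg->*_chan_mmio_sz and skx_register_mci():

#include <stdbool.h>
#include <stddef.h>

#define NUM_DDR_CHANNELS 2   /* placeholder geometry values */
#define NUM_DDR_DIMMS    2
#define NUM_HBM_CHANNELS 2
#define NUM_HBM_DIMMS    1

struct imc_setup {
    void *mdev;      /* NULL if no memory-controller device was found */
    bool hbm_mc;
    int mc, lmc, src_id, node_id;
    unsigned long chan_mmio_sz;
    int num_channels, num_dimms;
};

static int register_mci(struct imc_setup *imc) { (void)imc; return 0; }  /* stub */

static int setup_imcs(struct imc_setup *imc, size_t n, int src_id, int node_id,
                      unsigned long ddr_sz, unsigned long hbm_sz)
{
    int mc = 0;
    size_t i;

    for (i = 0; i < n; i++) {
        if (!imc[i].mdev)          /* slot has no controller: skip */
            continue;

        imc[i].mc = mc++;          /* global MC number vs. local index */
        imc[i].lmc = (int)i;
        imc[i].src_id = src_id;
        imc[i].node_id = node_id;

        if (imc[i].hbm_mc) {       /* HBM and DDR iMCs differ in geometry */
            imc[i].chan_mmio_sz = hbm_sz;
            imc[i].num_channels = NUM_HBM_CHANNELS;
            imc[i].num_dimms    = NUM_HBM_DIMMS;
        } else {
            imc[i].chan_mmio_sz = ddr_sz;
            imc[i].num_channels = NUM_DDR_CHANNELS;
            imc[i].num_dimms    = NUM_DDR_DIMMS;
        }

        if (register_mci(&imc[i]))
            return -1;
    }
    return 0;
}
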