Lines Matching full:pmu

75 struct pmu *pmu; /* for custom pmu ops */ member
97 struct pmu pmu; member
127 struct intel_uncore_pmu *pmu; member
201 return box->pmu->type->box_ctl + in uncore_mmio_box_ctl()
202 box->pmu->type->mmio_offset * box->pmu->pmu_idx; in uncore_mmio_box_ctl()
207 return box->pmu->type->box_ctl; in uncore_pci_box_ctl()
212 return box->pmu->type->fixed_ctl; in uncore_pci_fixed_ctl()
217 return box->pmu->type->fixed_ctr; in uncore_pci_fixed_ctr()
224 return idx * 8 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
226 return idx * 4 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
232 return idx * 8 + box->pmu->type->perf_ctr; in uncore_pci_perf_ctr()
237 struct intel_uncore_pmu *pmu = box->pmu; in uncore_msr_box_offset() local
238 return pmu->type->msr_offsets ? in uncore_msr_box_offset()
239 pmu->type->msr_offsets[pmu->pmu_idx] : in uncore_msr_box_offset()
240 pmu->type->msr_offset * pmu->pmu_idx; in uncore_msr_box_offset()
245 if (!box->pmu->type->box_ctl) in uncore_msr_box_ctl()
247 return box->pmu->type->box_ctl + uncore_msr_box_offset(box); in uncore_msr_box_ctl()
252 if (!box->pmu->type->fixed_ctl) in uncore_msr_fixed_ctl()
254 return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box); in uncore_msr_fixed_ctl()
259 return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box); in uncore_msr_fixed_ctr()
309 struct intel_uncore_pmu *pmu = box->pmu; in uncore_freerunning_counter() local
311 return pmu->type->freerunning[type].counter_base + in uncore_freerunning_counter()
312 pmu->type->freerunning[type].counter_offset * idx + in uncore_freerunning_counter()
313 pmu->type->freerunning[type].box_offset * pmu->pmu_idx; in uncore_freerunning_counter()
321 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_event_ctl()
323 return box->pmu->type->event_ctl + in uncore_msr_event_ctl()
324 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_event_ctl()
334 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_perf_ctr()
336 return box->pmu->type->perf_ctr + in uncore_msr_perf_ctr()
337 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_perf_ctr()
380 return box->pmu->type->perf_ctr_bits; in uncore_perf_ctr_bits()
385 return box->pmu->type->fixed_ctr_bits; in uncore_fixed_ctr_bits()
394 return box->pmu->type->freerunning[type].bits; in uncore_freerunning_bits()
402 return box->pmu->type->freerunning[type].num_counters; in uncore_num_freerunning()
408 return box->pmu->type->num_freerunning_types; in uncore_num_freerunning_types()
423 return box->pmu->type->num_counters; in uncore_num_counters()
447 box->pmu->type->ops->disable_event(box, event); in uncore_disable_event()
453 box->pmu->type->ops->enable_event(box, event); in uncore_enable_event()
459 return box->pmu->type->ops->read_counter(box, event); in uncore_read_counter()
465 if (box->pmu->type->ops->init_box) in uncore_box_init()
466 box->pmu->type->ops->init_box(box); in uncore_box_init()
473 if (box->pmu->type->ops->exit_box) in uncore_box_exit()
474 box->pmu->type->ops->exit_box(box); in uncore_box_exit()
485 return container_of(event->pmu, struct intel_uncore_pmu, pmu); in uncore_event_to_pmu()
493 struct intel_uncore_box *uncore_pmu_to_box(struct intel_uncore_pmu *pmu, int cpu);
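The matches above are mostly accessor helpers that turn a per-type register base (box_ctl, fixed_ctl, event_ctl, perf_ctr) plus a per-instance offset (msr_offset * pmu_idx, or an explicit msr_offsets[] table) into the register address for one uncore box. Below is a minimal, self-contained user-space sketch of that address math, assuming simplified stand-in structs and invented example values; the real definitions and helpers live in arch/x86/events/intel/uncore.h, and only the fields touched by the listed lines are kept here.

/*
 * Sketch of the MSR address computation shown in the matches above.
 * The structs are simplified stand-ins for the kernel's
 * intel_uncore_type / intel_uncore_pmu / intel_uncore_box; all
 * numeric values in main() are made up for illustration.
 */
#include <stdio.h>

struct intel_uncore_type {
	unsigned int box_ctl;		/* per-type box control base */
	unsigned int msr_offset;	/* stride between box instances */
	const unsigned int *msr_offsets;/* optional explicit per-box table */
};

struct intel_uncore_pmu {
	struct intel_uncore_type *type;
	int pmu_idx;			/* which box instance this PMU is */
};

struct intel_uncore_box {
	struct intel_uncore_pmu *pmu;
};

/* Mirrors uncore_msr_box_offset(): table lookup if present, else stride. */
static unsigned int uncore_msr_box_offset(struct intel_uncore_box *box)
{
	struct intel_uncore_pmu *pmu = box->pmu;

	return pmu->type->msr_offsets ?
	       pmu->type->msr_offsets[pmu->pmu_idx] :
	       pmu->type->msr_offset * pmu->pmu_idx;
}

/* Mirrors uncore_msr_box_ctl(): per-type base plus per-instance offset. */
static unsigned int uncore_msr_box_ctl(struct intel_uncore_box *box)
{
	if (!box->pmu->type->box_ctl)
		return 0;
	return box->pmu->type->box_ctl + uncore_msr_box_offset(box);
}

int main(void)
{
	/* Invented example: control base 0x700, boxes 0x10 apart. */
	struct intel_uncore_type type = {
		.box_ctl = 0x700,
		.msr_offset = 0x10,
		.msr_offsets = NULL,
	};
	struct intel_uncore_pmu pmu0 = { .type = &type, .pmu_idx = 0 };
	struct intel_uncore_pmu pmu2 = { .type = &type, .pmu_idx = 2 };
	struct intel_uncore_box box0 = { .pmu = &pmu0 };
	struct intel_uncore_box box2 = { .pmu = &pmu2 };

	printf("box 0 ctl MSR: 0x%x\n", uncore_msr_box_ctl(&box0)); /* 0x700 */
	printf("box 2 ctl MSR: 0x%x\n", uncore_msr_box_ctl(&box2)); /* 0x720 */
	return 0;
}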