
Searched refs:eventsel (Results 1 – 15 of 15) sorted by relevance

/Linux-v6.1/arch/x86/kvm/
pmu.c
276 key = pmc->eventsel & AMD64_RAW_EVENT_MASK_NB; in check_pmu_event_filter()
299 u64 eventsel = pmc->eventsel; in reprogram_counter() local
300 u64 new_config = eventsel; in reprogram_counter()
311 if (eventsel & ARCH_PERFMON_EVENTSEL_PIN_CONTROL) in reprogram_counter()
318 eventsel |= ARCH_PERFMON_EVENTSEL_OS; in reprogram_counter()
320 eventsel |= ARCH_PERFMON_EVENTSEL_USR; in reprogram_counter()
322 eventsel |= ARCH_PERFMON_EVENTSEL_INT; in reprogram_counter()
333 (eventsel & pmu->raw_event_mask), in reprogram_counter()
334 !(eventsel & ARCH_PERFMON_EVENTSEL_USR), in reprogram_counter()
335 !(eventsel & ARCH_PERFMON_EVENTSEL_OS), in reprogram_counter()
[all …]
pmu.h
22 u8 eventsel; member
158 return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE; in pmc_speculative_in_use()
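The reprogram_counter() and pmc_speculative_in_use() hits above all manipulate the architectural x86 event-select layout: event code in bits 7:0, unit mask in bits 15:8, and control flags (USR, OS, INT, pin control, enable) above them. The standalone sketch below decodes a value with that layout; the bit positions follow the architectural PERFEVTSELx format rather than anything quoted in this listing, and the helper names are made up for illustration.

/*
 * Standalone sketch: decode an x86 architectural event-select value.
 * Bit positions follow the architectural PERFEVTSELx layout; the names
 * mirror the kernel's ARCH_PERFMON_EVENTSEL_* defines.
 */
#include <stdint.h>
#include <stdio.h>

#define EVENTSEL_EVENT        0x000000ffULL  /* event code, bits 7:0  */
#define EVENTSEL_UMASK        0x0000ff00ULL  /* unit mask, bits 15:8  */
#define EVENTSEL_USR          (1ULL << 16)   /* count in user mode    */
#define EVENTSEL_OS           (1ULL << 17)   /* count in kernel mode  */
#define EVENTSEL_PIN_CONTROL  (1ULL << 19)
#define EVENTSEL_INT          (1ULL << 20)   /* interrupt on overflow */
#define EVENTSEL_ENABLE       (1ULL << 22)   /* counter enable        */

/*
 * Rough analogue of pmc_speculative_in_use(): the guest has written a
 * selector with the enable bit set. (Hypothetical helper name.)
 */
static int eventsel_enabled(uint64_t eventsel)
{
        return !!(eventsel & EVENTSEL_ENABLE);
}

static void decode_eventsel(uint64_t eventsel)
{
        printf("event  = 0x%02llx\n",
               (unsigned long long)(eventsel & EVENTSEL_EVENT));
        printf("umask  = 0x%02llx\n",
               (unsigned long long)((eventsel & EVENTSEL_UMASK) >> 8));
        printf("usr=%d os=%d int=%d pin=%d enable=%d\n",
               !!(eventsel & EVENTSEL_USR), !!(eventsel & EVENTSEL_OS),
               !!(eventsel & EVENTSEL_INT),
               !!(eventsel & EVENTSEL_PIN_CONTROL),
               eventsel_enabled(eventsel));
}

int main(void)
{
        /* 0x41003c: event 0x3c (core cycles), user-mode only, enabled. */
        decode_eventsel(0x41003cULL);
        return 0;
}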
/Linux-v6.1/arch/x86/kvm/svm/
pmu.c
135 msr_info->data = pmc->eventsel; in amd_pmu_get_msr()
160 if (data != pmc->eventsel) { in amd_pmu_set_msr()
161 pmc->eventsel = data; in amd_pmu_set_msr()
215 pmc->counter = pmc->eventsel = 0; in amd_pmu_reset()
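The svm/pmu.c hits show a small but consistent pattern: a guest MSR write only triggers counter reprogramming when the event selector actually changes, and a PMU reset clears both the counter and its selector. A minimal sketch of that pattern, using a simplified stand-in struct rather than KVM's kvm_pmc:

/*
 * Sketch of the write-if-changed pattern above: a guest write to the
 * event-select MSR only reprograms the counter when the value changes,
 * and a PMU reset clears both counter and selector.
 */
#include <stdint.h>
#include <stdio.h>

struct pmc_sketch {
        uint64_t counter;
        uint64_t eventsel;
};

static void reprogram_counter_stub(struct pmc_sketch *pmc)
{
        /* In KVM this recreates the backing perf event; elided here. */
        printf("reprogram: eventsel=%#llx\n",
               (unsigned long long)pmc->eventsel);
}

static void pmc_write_eventsel(struct pmc_sketch *pmc, uint64_t data)
{
        if (data == pmc->eventsel)
                return;                      /* unchanged: keep perf event */
        pmc->eventsel = data;
        reprogram_counter_stub(pmc);         /* selector changed */
}

static void pmc_reset(struct pmc_sketch *pmc)
{
        pmc->counter = pmc->eventsel = 0;
}

int main(void)
{
        struct pmc_sketch pmc = { 0 };

        pmc_write_eventsel(&pmc, 0x5100c0);  /* first write: reprogram */
        pmc_write_eventsel(&pmc, 0x5100c0);  /* same value: no-op      */
        pmc_reset(&pmc);
        return 0;
}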
/Linux-v6.1/arch/x86/kvm/vmx/
pmu_intel.c
86 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in intel_hw_event_available()
87 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in intel_hw_event_available()
91 if (intel_arch_events[i].eventsel != event_select || in intel_hw_event_available()
381 msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
472 if (data == pmc->eventsel) in intel_pmu_set_msr()
479 pmc->eventsel = data; in intel_pmu_set_msr()
500 pmc->eventsel = (intel_arch_events[event].unit_mask << 8) | in setup_fixed_pmc_eventsel()
501 intel_arch_events[event].eventsel; in setup_fixed_pmc_eventsel()
650 pmc->counter = pmc->eventsel = 0; in intel_pmu_reset()
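intel_hw_event_available() splits the selector into event code and unit mask and looks it up in a table of architectural events, while setup_fixed_pmc_eventsel() composes a selector from the same table. Below is a self-contained sketch of that lookup and composition; the table values follow the Intel architectural event encodings, and the names and helpers are illustrative rather than KVM's.

/*
 * Sketch of the lookup in intel_hw_event_available() and the
 * composition in setup_fixed_pmc_eventsel().
 */
#include <stdint.h>
#include <stdio.h>

struct arch_event {
        uint8_t eventsel;   /* event code */
        uint8_t unit_mask;  /* unit mask  */
        const char *name;
};

static const struct arch_event arch_events[] = {
        { 0x3c, 0x00, "core cycles" },
        { 0xc0, 0x00, "instructions retired" },
        { 0x3c, 0x01, "reference cycles" },
        { 0x2e, 0x4f, "LLC references" },
        { 0x2e, 0x41, "LLC misses" },
        { 0xc4, 0x00, "branches retired" },
        { 0xc5, 0x00, "branch misses retired" },
};

/* Find the architectural event matching a raw event-select value. */
static int find_arch_event(uint64_t eventsel)
{
        uint8_t event = eventsel & 0xff;
        uint8_t umask = (eventsel >> 8) & 0xff;

        for (unsigned int i = 0;
             i < sizeof(arch_events) / sizeof(arch_events[0]); i++) {
                if (arch_events[i].eventsel == event &&
                    arch_events[i].unit_mask == umask)
                        return (int)i;
        }
        return -1;
}

/* Compose an eventsel value from a table entry, as done for fixed PMCs. */
static uint64_t compose_eventsel(const struct arch_event *e)
{
        return ((uint64_t)e->unit_mask << 8) | e->eventsel;
}

int main(void)
{
        uint64_t sel = compose_eventsel(&arch_events[1]); /* 0x00c0 */
        int idx = find_arch_event(sel);

        printf("eventsel 0x%04llx -> %s\n", (unsigned long long)sel,
               idx >= 0 ? arch_events[idx].name : "not architectural");
        return 0;
}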
/Linux-v6.1/arch/arm64/kvm/
pmu-emul.c
121 u64 eventsel, reg; in kvm_pmu_idx_has_chain_evtype() local
129 eventsel = __vcpu_sys_reg(vcpu, reg) & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_idx_has_chain_evtype()
131 return eventsel == ARMV8_PMUV3_PERFCTR_CHAIN; in kvm_pmu_idx_has_chain_evtype()
631 u64 eventsel, counter, reg, data; in kvm_pmu_create_perf_event() local
646 eventsel = ARMV8_PMUV3_PERFCTR_CPU_CYCLES; in kvm_pmu_create_perf_event()
648 eventsel = data & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_create_perf_event()
651 if (eventsel == ARMV8_PMUV3_PERFCTR_SW_INCR) in kvm_pmu_create_perf_event()
659 !test_bit(eventsel, vcpu->kvm->arch.pmu_filter)) in kvm_pmu_create_perf_event()
671 attr.config = eventsel; in kvm_pmu_create_perf_event()
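In kvm_pmu_create_perf_event() the eventsel is taken from the counter's event-type register masked to the implemented event range, forced to CPU_CYCLES for the cycle counter, dropped entirely for software-increment events, optionally checked against a per-VM filter bitmap, and finally placed in attr.config. A simplified sketch of that selection, assuming a 16-bit event space and hypothetical helper names:

/*
 * Sketch of the event selection in kvm_pmu_create_perf_event().
 * PERFCTR_* values follow the PMUv3 common event numbers; the types,
 * helper names, and 16-bit mask are simplifying assumptions.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PERFCTR_SW_INCR     0x0000U
#define PERFCTR_CPU_CYCLES  0x0011U
#define NR_EVENTS           0x10000U   /* assume a 16-bit event space */

struct vm_pmu_sketch {
        bool have_filter;
        unsigned char filter[NR_EVENTS / 8];   /* allowed-event bitmap */
};

static bool filter_allows(const struct vm_pmu_sketch *vm, unsigned int ev)
{
        if (!vm->have_filter)
                return true;
        return vm->filter[ev / 8] & (1U << (ev % 8));
}

/*
 * Returns the event number to put in attr.config, or -1 if no perf
 * event should be created for this counter.
 */
static int select_event(const struct vm_pmu_sketch *vm, bool cycle_counter,
                        uint64_t evtyper, uint64_t event_mask)
{
        unsigned int eventsel;

        if (cycle_counter)
                eventsel = PERFCTR_CPU_CYCLES;
        else
                eventsel = evtyper & event_mask;

        /* Software increment is emulated directly, never backed by perf. */
        if (eventsel == PERFCTR_SW_INCR)
                return -1;

        if (!filter_allows(vm, eventsel))
                return -1;

        return (int)eventsel;
}

int main(void)
{
        struct vm_pmu_sketch vm = { .have_filter = false };

        printf("config = %d\n", select_event(&vm, false, 0x8011, 0xffff));
        printf("config = %d\n", select_event(&vm, true, 0, 0xffff));
        return 0;
}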
/Linux-v6.1/arch/x86/events/amd/
core.c
286 static inline int amd_pmu_addr_offset(int index, bool eventsel) in amd_pmu_addr_offset() argument
293 if (eventsel) in amd_pmu_addr_offset()
306 if (eventsel) in amd_pmu_addr_offset()
1245 .eventsel = MSR_K7_EVNTSEL0,
1347 x86_pmu.eventsel = MSR_F15H_PERF_CTL; in amd_core_pmu_init()
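The amd/core.c hits show two related things: the eventsel MSR base switches from MSR_K7_EVNTSEL0 to MSR_F15H_PERF_CTL when the core extensions are in use, and amd_pmu_addr_offset() supplies the per-index offset for either layout. The sketch below reproduces that addressing scheme with the well-known AMD MSR numbers; the functions themselves are simplified stand-ins, not the kernel's.

/*
 * Sketch of the addressing behind amd_pmu_addr_offset(): legacy K7
 * counters use contiguous MSRs (EVNTSEL0..3, then PERFCTR0..3), while
 * the core-extension layout interleaves control/counter pairs, so the
 * per-index offset doubles.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MSR_K7_EVNTSEL0     0xc0010000U
#define MSR_K7_PERFCTR0     0xc0010004U
#define MSR_F15H_PERF_CTL   0xc0010200U
#define MSR_F15H_PERF_CTR   0xc0010201U

static uint32_t config_msr(int index, bool core_ext)
{
        if (core_ext)
                return MSR_F15H_PERF_CTL + (index << 1); /* CTL/CTR pairs */
        return MSR_K7_EVNTSEL0 + index;                  /* contiguous    */
}

static uint32_t counter_msr(int index, bool core_ext)
{
        if (core_ext)
                return MSR_F15H_PERF_CTR + (index << 1);
        return MSR_K7_PERFCTR0 + index;
}

int main(void)
{
        for (int i = 0; i < 3; i++)
                printf("idx %d: legacy ctl %#x, core-ext ctl %#x\n",
                       i, config_msr(i, false), config_msr(i, true));
        printf("idx 1 counter: %#x / %#x\n",
               counter_msr(1, false), counter_msr(1, true));
        return 0;
}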
/Linux-v6.1/drivers/gpu/drm/amd/amdgpu/
df_v3_6.c
412 uint32_t eventsel, instance, unitmask; in df_v3_6_pmc_get_ctrl_settings() local
424 eventsel = DF_V3_6_GET_EVENT(config) & 0x3f; in df_v3_6_pmc_get_ctrl_settings()
432 *lo_val = (unitmask << 8) | (instance_10 << 6) | eventsel; in df_v3_6_pmc_get_ctrl_settings()
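df_v3_6_pmc_get_ctrl_settings() packs the unit mask, part of the instance number, and a 6-bit event select into the low control word. A small sketch of that packing follows; field widths beyond what the excerpt shows (the 6-bit event select and the shift positions 6 and 8) are assumptions for illustration.

/* Sketch of the control-word packing shown at line 432. */
#include <stdint.h>
#include <stdio.h>

static uint32_t df_pack_lo(uint32_t eventsel, uint32_t instance_lo2,
                           uint32_t unitmask)
{
        eventsel     &= 0x3f;   /* 6-bit event select, bits 5:0  */
        instance_lo2 &= 0x3;    /* assumed 2-bit field, bits 7:6 */

        return (unitmask << 8) | (instance_lo2 << 6) | eventsel;
}

int main(void)
{
        printf("lo_val = %#x\n", df_pack_lo(0x07, 0x2, 0xff));
        return 0;
}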
/Linux-v6.1/arch/x86/events/intel/
p6.c
210 .eventsel = MSR_P6_EVNTSEL0,
knc.c
299 .eventsel = MSR_KNC_EVNTSEL0,
p4.c
1345 .eventsel = MSR_P4_BPU_CCCR0,
core.c
4766 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
4819 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/Linux-v6.1/arch/x86/events/
perf_event.h
754 unsigned eventsel; member
756 int (*addr_offset)(int index, bool eventsel);
1093 return x86_pmu.eventsel + (x86_pmu.addr_offset ? in x86_pmu_config_addr()
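perf_event.h declares the eventsel MSR base and the optional addr_offset() hook, and x86_pmu_config_addr() combines them as shown at line 1093: the base plus either the vendor-specific offset or, by default, the raw counter index. A standalone sketch of that computation, with an Intel-style contiguous layout as the (illustrative) example wiring:

/*
 * Sketch of x86_pmu_config_addr(): event-select MSR address is the
 * PMU's eventsel base plus a per-index offset.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MSR_ARCH_PERFMON_EVENTSEL0  0x186U

struct x86_pmu_sketch {
        uint32_t eventsel;                        /* event-select MSR base */
        int (*addr_offset)(int index, bool eventsel);
};

static uint32_t pmu_config_addr(const struct x86_pmu_sketch *pmu, int index)
{
        return pmu->eventsel +
               (pmu->addr_offset ? pmu->addr_offset(index, true) : index);
}

int main(void)
{
        /* Architectural Intel-style PMU: contiguous EVENTSEL MSRs. */
        struct x86_pmu_sketch pmu = {
                .eventsel    = MSR_ARCH_PERFMON_EVENTSEL0,
                .addr_offset = NULL,
        };

        for (int i = 0; i < 4; i++)
                printf("eventsel%d at MSR %#x\n", i, pmu_config_addr(&pmu, i));
        return 0;
}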
/Linux-v6.1/arch/x86/events/zhaoxin/
core.c
468 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/Linux-v6.1/arch/x86/include/asm/
kvm_host.h
492 u64 eventsel; member
/Linux-v6.1/Documentation/virt/kvm/
api.rst
5047 The eventsel+umask of each event the guest attempts to program is compared
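The api.rst match belongs to the KVM_SET_PMU_EVENT_FILTER documentation: the eventsel+umask the guest programs is compared against the filter's event list, and the filter's action decides whether a match allows or denies the event (check_pmu_event_filter() above builds its lookup key from pmc->eventsel). A simplified sketch of that comparison; the struct layout and matching logic here are assumptions, not KVM's ABI.

/*
 * Sketch of the eventsel+umask filter comparison described in the
 * KVM_SET_PMU_EVENT_FILTER documentation.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum filter_action { FILTER_ALLOW, FILTER_DENY };

struct pmu_event_filter_sketch {
        enum filter_action action;
        unsigned int nevents;
        const uint64_t *events;   /* eventsel+umask keys */
};

/*
 * Extract the eventsel+umask key from a raw event-select value
 * (bits 7:0 event code, bits 15:8 unit mask).
 */
static uint64_t filter_key(uint64_t eventsel)
{
        return eventsel & 0xffffULL;
}

static bool filter_allows_event(const struct pmu_event_filter_sketch *f,
                                uint64_t eventsel)
{
        uint64_t key = filter_key(eventsel);
        bool found = false;

        for (unsigned int i = 0; i < f->nevents; i++) {
                if (f->events[i] == key) {
                        found = true;
                        break;
                }
        }
        return f->action == FILTER_ALLOW ? found : !found;
}

int main(void)
{
        static const uint64_t allowed[] = { 0x00c0, 0x003c }; /* insns, cycles */
        struct pmu_event_filter_sketch f = {
                .action = FILTER_ALLOW, .nevents = 2, .events = allowed,
        };

        printf("0x4100c0 allowed: %d\n", filter_allows_event(&f, 0x4100c0));
        printf("0x4100c4 allowed: %d\n", filter_allows_event(&f, 0x4100c4));
        return 0;
}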