/Linux-v6.6/arch/x86/kvm/svm/

pmu.c
      28  static struct kvm_pmc *amd_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx)  in amd_pmc_idx_to_pmc()
      38  static inline struct kvm_pmc *get_gp_pmc_amd(struct kvm_pmu *pmu, u32 msr,  in get_gp_pmc_amd()
      83  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_is_valid_rdpmc_ecx()
      99  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_msr_idx_to_pmc()
     110  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_is_valid_msr()
     133  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_pmu_get_msr()
     155  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_pmu_set_msr()
     183  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_pmu_refresh()
     222  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_pmu_init()
     238  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in amd_pmu_reset()

/Linux-v6.6/arch/x86/kvm/

pmu.h
      23  struct kvm_pmc *(*pmc_idx_to_pmc)(struct kvm_pmu *pmu, int pmc_idx);
      44  static inline bool kvm_pmu_has_perf_global_ctrl(struct kvm_pmu *pmu)  in kvm_pmu_has_perf_global_ctrl()
      60  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in pmc_bitmask()
     111  static inline bool kvm_valid_perf_global_ctrl(struct kvm_pmu *pmu,  in kvm_valid_perf_global_ctrl()
     121  static inline struct kvm_pmc *get_gp_pmc(struct kvm_pmu *pmu, u32 msr,  in get_gp_pmc()
     135  static inline struct kvm_pmc *get_fixed_pmc(struct kvm_pmu *pmu, u32 msr)  in get_fixed_pmc()
     170  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in pmc_speculative_in_use()
     229  static inline void reprogram_counters(struct kvm_pmu *pmu, u64 diff)  in reprogram_counters()
     248  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in pmc_is_globally_enabled()

pmu.c
      98  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in __kvm_perf_overflow()
     168  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in pmc_reprogram_counter()
     383  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in reprogram_counter()
     438  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_handle_event()
     504  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_rdpmc()
     551  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_mark_pmc_in_use()
     560  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_get_msr()
     585  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_set_msr()
     662  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_init()
     674  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_pmu_cleanup()
     [all …]

x86.c
   12293  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in kvm_arch_sched_in()

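Nearly every hit above starts by recovering the per-vCPU PMU with vcpu_to_pmu() or pmc_to_pmu(). Since struct kvm_pmu is embedded in the vcpu (see the kvm_host.h entries further down), the helpers are just field lookups. A minimal, self-contained sketch of that pattern; the struct layouts are illustrative stand-ins modeled on arch/x86/kvm/pmu.h, not copied from it:

    #include <stdio.h>

    /* Illustrative stand-ins for the kernel types, trimmed to the minimum. */
    struct kvm_pmu  { unsigned long long global_ctrl; };
    struct kvm_vcpu { struct { struct kvm_pmu pmu; } arch; };
    struct kvm_pmc  { struct kvm_vcpu *vcpu; };

    /* The PMU lives inside the vcpu, so "conversion" is pointer arithmetic,
     * not a lookup: both helpers land on the same embedded object. */
    #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu)
    #define pmc_to_pmu(pmc)   (&(pmc)->vcpu->arch.pmu)

    int main(void)
    {
        struct kvm_vcpu vcpu = { .arch.pmu.global_ctrl = 0xff };
        struct kvm_pmc pmc = { .vcpu = &vcpu };

        printf("%d\n", vcpu_to_pmu(&vcpu) == pmc_to_pmu(&pmc)); /* prints 1 */
        return 0;
    }
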
/Linux-v6.6/arch/riscv/kvm/

vcpu_pmu.c
     157  static int kvm_pmu_get_programmable_pmc_index(struct kvm_pmu *kvpmu, unsigned long eidx,  in kvm_pmu_get_programmable_pmc_index()
     186  static int pmu_get_pmc_index(struct kvm_pmu *pmu, unsigned long eidx,  in pmu_get_pmc_index()
     202  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in pmu_ctr_read()
     222  static int kvm_pmu_validate_counter_mask(struct kvm_pmu *kvpmu, unsigned long ctr_base,  in kvm_pmu_validate_counter_mask()
     265  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_incr_fw()
     282  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_read_hpm()
     316  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_num_ctrs()
     326  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_ctr_info()
     342  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_ctr_start()
     398  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_riscv_vcpu_pmu_ctr_stop()
     [all …]

vcpu_sbi_pmu.c
      21  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_sbi_ext_pmu_handler()
      76  struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu);  in kvm_sbi_ext_pmu_probe()

/Linux-v6.6/arch/x86/kvm/vmx/

pmu_intel.c
      71  static void reprogram_fixed_counters(struct kvm_pmu *pmu, u64 data)  in reprogram_fixed_counters()
      92  static struct kvm_pmc *intel_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx)  in intel_pmc_idx_to_pmc()
     106  struct kvm_pmu *pmu = pmc_to_pmu(pmc);  in intel_hw_event_available()
     130  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_is_valid_rdpmc_ecx()
     142  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_rdpmc_ecx_to_pmc()
     174  static inline struct kvm_pmc *get_fw_gp_pmc(struct kvm_pmu *pmu, u32 msr)  in get_fw_gp_pmc()
     202  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_is_valid_msr()
     233  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_msr_idx_to_pmc()
     257  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_pmu_create_guest_lbr_event()
     350  struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);  in intel_pmu_get_msr()
     [all …]

vmx.h
     673  void intel_pmu_cross_mapped_check(struct kvm_pmu *pmu);

vmx.c
    7132  struct kvm_pmu *pmu = vcpu_to_pmu(&vmx->vcpu);  in atomic_switch_perf_msrs()

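The pmu.h hit at line 23 is a function pointer in the x86 vendor ops table, and the two blocks above supply its implementations: amd_pmc_idx_to_pmc() in svm/pmu.c and intel_pmc_idx_to_pmc() in pmu_intel.c. A hedged sketch of that dispatch, with deliberately fake minimal types (the real implementations also cover fixed counters, MSR aliases, and range checks not shown here):

    #include <stdio.h>

    struct kvm_pmu { int nr_gp_counters; };
    struct kvm_pmc { int idx; };

    /* Common PMU code calls through an ops table; the vendor module
     * (SVM or VMX) fills in the pointer at init time. */
    struct kvm_pmu_ops {
        struct kvm_pmc *(*pmc_idx_to_pmc)(struct kvm_pmu *pmu, int pmc_idx);
    };

    static struct kvm_pmc fake_counters[4];   /* stand-in counter array */

    static struct kvm_pmc *fake_amd_pmc_idx_to_pmc(struct kvm_pmu *pmu, int pmc_idx)
    {
        if (pmc_idx < 0 || pmc_idx >= pmu->nr_gp_counters)
            return NULL;                      /* no such counter */
        return &fake_counters[pmc_idx];
    }

    static const struct kvm_pmu_ops fake_amd_pmu_ops = {
        .pmc_idx_to_pmc = fake_amd_pmc_idx_to_pmc,
    };

    int main(void)
    {
        struct kvm_pmu pmu = { .nr_gp_counters = 4 };
        struct kvm_pmc *pmc = fake_amd_pmu_ops.pmc_idx_to_pmc(&pmu, 2);
        printf("%s\n", pmc ? "found pmc" : "no pmc");
        return 0;
    }
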
/Linux-v6.6/arch/riscv/include/asm/

kvm_vcpu_pmu.h
      42  struct kvm_pmu {  struct
      92  struct kvm_pmu {  struct

kvm_host.h
     246  struct kvm_pmu pmu_context;

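Note the embedding field: RISC-V stores its PMU state as pmu_context (kvm_host.h line 246 above), while the x86 and arm64 entries below use a field named pmu; each architecture's own vcpu_to_pmu() absorbs the difference, which is why the call sites earlier all look identical. A sketch of the RISC-V flavor, with the macro body modeled on kvm_vcpu_pmu.h and the fields trimmed to a stand-in:

    #include <stdbool.h>

    struct kvm_pmu { bool init_done; };       /* illustrative single field */

    struct kvm_vcpu {
        struct {
            struct kvm_pmu pmu_context;       /* RISC-V's name for the embedded PMU */
        } arch;
    };

    /* Same helper name as on x86/arm64, different field underneath. */
    #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu_context)

    static inline bool pmu_ready(struct kvm_vcpu *vcpu)
    {
        return vcpu_to_pmu(vcpu)->init_done;
    }
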
/Linux-v6.6/include/kvm/

arm_pmu.h
      27  struct kvm_pmu {  struct
     106  struct kvm_pmu {  struct

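arm_pmu.h above and the RISC-V kvm_vcpu_pmu.h each list struct kvm_pmu twice (lines 27/106 and 42/92): a full definition when PMU support is configured in, and an empty stub otherwise, so the vcpu embedding compiles either way. A hedged sketch of that pattern; CONFIG_EXAMPLE_PMU and the fields are illustrative stand-ins, not the real Kconfig symbols:

    /* CONFIG_EXAMPLE_PMU stands in for whichever Kconfig symbol actually
     * guards the architecture's PMU support. */
    #ifdef CONFIG_EXAMPLE_PMU
    struct kvm_pmu {
        int irq_num;                  /* real per-vCPU PMU state lives here */
        int created;
    };
    #else
    struct kvm_pmu {
        /* empty stub: the embedding in struct kvm_vcpu costs nothing */
    };
    #endif
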
/Linux-v6.6/arch/arm64/kvm/

pmu-emul.c
     217  struct kvm_pmu *pmu = &vcpu->arch.pmu;  in kvm_pmu_vcpu_init()
     338  struct kvm_pmu *pmu = &vcpu->arch.pmu;  in kvm_pmu_update_state()
     359  struct kvm_pmu *pmu = &vcpu->arch.pmu;  in kvm_pmu_should_notify_user()

/Linux-v6.6/arch/x86/events/intel/

core.c
    4060  struct kvm_pmu *kvm_pmu = (struct kvm_pmu *)data;  in intel_guest_get_msrs()  local
    4093  if (!kvm_pmu || !x86_pmu.pebs_ept)  in intel_guest_get_msrs()
    4099  .guest = kvm_pmu->ds_area,  in intel_guest_get_msrs()
    4106  .guest = kvm_pmu->pebs_data_cfg,  in intel_guest_get_msrs()
    4122  arr[pebs_enable].guest &= ~kvm_pmu->host_cross_mapped_mask;  in intel_guest_get_msrs()
    4123  arr[global_ctrl].guest &= ~kvm_pmu->host_cross_mapped_mask;  in intel_guest_get_msrs()

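These core.c hits are the host perf side of the handoff: intel_guest_get_msrs() fills an array of (msr, host value, guest value) triples from the guest's kvm_pmu, then strips host_cross_mapped_mask bits so the guest view cannot enable counters the host has cross-mapped (lines 4122-4123 above). A minimal sketch of that masking step; the struct is modeled on the kernel's perf_guest_switch_msr and every value below is made up:

    #include <stdio.h>
    #include <stdint.h>

    /* Modeled on struct perf_guest_switch_msr: one MSR and the value it
     * should hold while host or guest code is running. */
    struct perf_guest_switch_msr {
        uint32_t msr;
        uint64_t host, guest;
    };

    int main(void)
    {
        /* Hypothetical: the guest asks for counters 0-3, but counters 1-2
         * are cross-mapped onto host-owned hardware slots. */
        uint64_t guest_global_ctrl      = 0x0f;
        uint64_t host_cross_mapped_mask = 0x06;

        struct perf_guest_switch_msr arr = {
            .msr   = 0x38f,    /* MSR_CORE_PERF_GLOBAL_CTRL */
            .host  = 0,
            .guest = guest_global_ctrl & ~host_cross_mapped_mask,
        };

        /* Only counters 0 and 3 survive in the guest view: prints 0x9. */
        printf("guest view: %#llx\n", (unsigned long long)arr.guest);
        return 0;
    }
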
/Linux-v6.6/arch/x86/include/asm/

kvm_host.h
     515  struct kvm_pmu {  struct
     925  struct kvm_pmu pmu;

/Linux-v6.6/arch/arm64/include/asm/

kvm_host.h
     556  struct kvm_pmu pmu;