Searched refs:MSR_HV (Results 1 – 16 of 16) sorted by relevance
 74  #define MSR_HV   __MASK(MSR_HV_LG)  /* Hypervisor state */   macro
 80  #define MSR_HV   0                                            macro
138  #define __MSR    (MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_ISF | MSR_HV)
141  #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV)
144  #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV | MSR_LE)
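Note: the two MSR_HV definitions above are the build-time split: on 64-bit Book3S the bit is real; on other configurations MSR_HV is 0, so every "msr & MSR_HV" test compiles away. A minimal self-contained sketch of the __MASK pattern (bit position 60 per reg.h; assuming a 64-bit long, as the kernel does):

#include <stdio.h>

/* Sketch of the reg.h pattern: each MSR bit has a *_LG (log2 bit
 * position) definition and a mask derived from it via __MASK(). */
#define MSR_HV_LG 60
#define __MASK(X) (1UL << (X))
#define MSR_HV    __MASK(MSR_HV_LG)

int main(void)
{
        printf("MSR_HV = 0x%016lx\n", MSR_HV);  /* 0x1000000000000000 */
        return 0;
}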
582 msr &= ~MSR_HV; in sanitize_msr()
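Note: this hit strips hypervisor state from a guest-supplied MSR image before it is used. A hedged, self-contained sketch of the idea (function name illustrative; the real sanitize_msr() enforces more bits, and the kvmppc_set_msr_pr() hit further down applies the same ~MSR_HV | MSR_ME pattern):

#define MSR_HV (1UL << 60)      /* hypervisor state */
#define MSR_ME (1UL << 12)      /* machine check enable */

/* Sketch: a guest must never run with HV set, and machine checks
 * must stay enabled, whatever MSR value the guest asked for. */
static unsigned long sanitize_guest_msr(unsigned long msr)
{
        msr &= ~MSR_HV;
        msr |= MSR_ME;
        return msr;
}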
442  if (mfmsr() & MSR_HV)   in udbg_init_debug_lpar()
459  if (mfmsr() & MSR_HV)   in udbg_init_debug_lpar_hvsi()
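Note: both hits use the canonical runtime test: read the current MSR and check the HV bit to learn whether this kernel is the hypervisor or a guest beneath one (and therefore which console path to register). Self-contained sketch with mfmsr() stubbed; in the kernel it reads the real Machine State Register:

#include <stdbool.h>
#include <stdio.h>

#define MSR_HV (1UL << 60)

/* Stub standing in for the kernel's mfmsr(); value hardcoded here. */
static unsigned long mfmsr(void) { return MSR_HV; }

int main(void)
{
        bool hv_mode = !!(mfmsr() & MSR_HV);    /* same idiom as the
                                                   cpufeatures hits below */
        printf("%s\n", hv_mode ? "running with hypervisor privilege"
                               : "running as a guest");
        return 0;
}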
152  if (kvmppc_get_msr(vcpu) & MSR_HV)   in kvmppc_emulate_treclaim()
227  if (guest_msr & MSR_HV)              in kvmppc_emulate_tabort()
513  if (kvmppc_get_msr(vcpu) & MSR_HV)   in kvmppc_core_emulate_op_pr()
756  (mfmsr() & MSR_HV))                  in kvmppc_core_emulate_mtspr_pr()
206  smsr |= MSR_ISF | MSR_HV;         in kvmppc_recalc_shadow_msr()
457  msr = (msr & ~MSR_HV) | MSR_ME;   in kvmppc_set_msr_pr()
564  if (vcpu->arch.mmu.is_dcbz32(vcpu) && (mfmsr() & MSR_HV) &&   in kvmppc_set_pvr_pr()
1265  if (vcpu->arch.shregs.msr & MSR_HV) {   in kvmppc_handle_exit_hv()
1454  if (vcpu->arch.shregs.msr & MSR_HV) {   in kvmppc_handle_nested_exit()
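Note: across the KVM hits above, MSR_HV in a guest MSR image serves either as a privilege test during instruction emulation or as a should-never-happen condition at exit handling, since Book3S guests never legitimately run in hypervisor state. Kernel-context sketch of such a guard (name and return convention illustrative):

/* Sketch: refuse to emulate a privileged operation, or flag a
 * corrupt exit, when a guest MSR image claims HV state. */
static int check_guest_msr_hv(unsigned long guest_msr)
{
        if (guest_msr & MSR_HV)
                return -EINVAL;         /* guests never run with HV set */
        return 0;
}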
415 if (!(mfmsr() & MSR_HV)) in mmu_early_init_devtree()
140  hv_mode = !!(mfmsr() & MSR_HV);   in cpufeatures_setup_cpu()
890  if (!(mfmsr() & MSR_HV)) {        in process_cpufeatures_node()
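Note: the dt_cpu_ftrs hits record the result once (hv_mode) and then use it to skip features that require hypervisor privilege; mmu_early_init_devtree makes the same guest/host distinction for MMU setup. Kernel-context sketch of that gating (the hv_only field and helper are hypothetical):

/* Sketch: as a guest (no MSR_HV), HV-only CPU features found in
 * the device tree must be ignored rather than enabled. */
static void maybe_enable_feature(const struct dt_cpu_feature *f)
{
        if (f->hv_only && !(mfmsr() & MSR_HV))
                return;                 /* guest: skip HV-only feature */
        enable_feature(f);              /* hypothetical helper */
}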
1305 {MSR_HV, "HV"},
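Note: this entry belongs to the bit/name table used to pretty-print MSR contents in register dumps. Self-contained sketch of the pattern (bit positions per the 64-bit Book3S layout):

#include <stddef.h>
#include <stdio.h>

#define MSR_HV (1UL << 60)
#define MSR_EE (1UL << 15)
#define MSR_PR (1UL << 14)

static const struct { unsigned long bit; const char *name; } msr_bits[] = {
        { MSR_HV, "HV" },
        { MSR_EE, "EE" },
        { MSR_PR, "PR" },
};

static void print_msr_bits(unsigned long msr)
{
        for (size_t i = 0; i < sizeof(msr_bits) / sizeof(msr_bits[0]); i++)
                if (msr & msr_bits[i].bit)
                        printf("%s ", msr_bits[i].name);
        printf("\n");
}

int main(void)
{
        print_msr_bits(MSR_HV | MSR_EE);        /* prints "HV EE" */
        return 0;
}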
398 if (!(regs->msr & MSR_HV)) in hv_nmi_check_nonrecoverable()
477 MSR_ILE|MSR_HV|MSR_SF)) == (MSR_DR|MSR_SF)) { in pSeries_system_reset_exception()
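Note: unlike the mfmsr() hits, these two test regs->msr, the MSR of the interrupted context saved at exception entry, to decide whether an NMI or system reset is recoverable; the pSeries hit decodes a reset that arrived in an unexpected translation state. Kernel-context sketch of the distinction:

/* Sketch: mfmsr() describes the CPU *now*; regs->msr describes the
 * context that was running when the exception was taken. */
static bool interrupted_in_hv_state(const struct pt_regs *regs)
{
        return !!(regs->msr & MSR_HV);
}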
 242  if ((regs->msr & MSR_HV) && freeze_events_kernel != MMCR0_FCHV)   in perf_flags_from_msr()
2286  #ifdef MSR_HV            in register_power_pmu()
2290  if (mfmsr() & MSR_HV)    in register_power_pmu()
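Note: perf derives each sample's privilege level from the saved MSR, and HV only counts as hypervisor context when the PMU is not already freezing counters in HV mode (MMCR0_FCHV); the #ifdef MSR_HV hit is the build-time guard for configurations where the bit is 0. A hedged, kernel-context reconstruction of that classification:

/* Sketch of the classification around the first hit above;
 * freeze_events_kernel mirrors the core-book3s.c variable. */
static inline u32 perf_flags_from_msr(struct pt_regs *regs)
{
        if (regs->msr & MSR_PR)
                return PERF_RECORD_MISC_USER;
        if ((regs->msr & MSR_HV) && freeze_events_kernel != MMCR0_FCHV)
                return PERF_RECORD_MISC_HYPERVISOR;
        return PERF_RECORD_MISC_KERNEL;
}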
239 pnv_npu2_map_lpar_dev(gpdev, 0, MSR_DR | MSR_PR | MSR_HV); in pnv_npu_release_ownership()
1296 pnv_npu2_map_lpar(pe, MSR_DR | MSR_PR | MSR_HV); in pnv_pci_ioda_setup_PEs()
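Note: in both NPU hits MSR_HV is not tested but composed: the MSR bit masks double as a context descriptor telling the NPU2 mapping code which translation regime (data relocation, problem state, hypervisor) the LPAR mapping is for. Kernel-context sketch (wrapper name illustrative; call shape as in the hit above):

/* Sketch: MSR bits reused as a descriptor, not a live register. */
static void map_default_context(struct pci_dev *gpdev)
{
        pnv_npu2_map_lpar_dev(gpdev, 0, MSR_DR | MSR_PR | MSR_HV);
}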
1826  if (!(mfmsr() & MSR_HV))      in dump_206_sprs()
1877  if (!(msr & MSR_HV))          in dump_207_sprs()
1890  bool hv = mfmsr() & MSR_HV;   in dump_300_sprs()
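Note: xmon dumps hypervisor-only SPRs (HSRR0/HSRR1, HDEC and friends) only when running with MSR_HV, since reading them as a guest would trap. Kernel-context sketch of the guard:

/* Sketch: skip HV-only special-purpose registers as a guest. */
static void dump_hv_sprs(void)
{
        if (!(mfmsr() & MSR_HV))
                return;         /* guest: HV SPRs are inaccessible */
        /* ... read and print HSRR0, HSRR1, HDEC, ... */
}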
198 #define CXL_PSL_SR_An_HV MSR_HV /* Hypervisor, GA1: 0 */
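Note: cxl defines the HV bit of the PSL's per-context State Register as MSR_HV itself, so the SR layout tracks the MSR layout and the bit can be copied straight across. A hedged, kernel-context sketch of deriving it from the current MSR (function name illustrative):

/* Sketch: the PSL SR's HV bit mirrors the CPU's current hypervisor
 * state; CXL_PSL_SR_An_HV == MSR_HV by definition above. */
static u64 build_psl_sr_hv(void)
{
        u64 sr = 0;

        if (mfmsr() & MSR_HV)
                sr |= CXL_PSL_SR_An_HV;
        return sr;
}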