Searched refs:MSR_TM (Results 1 – 14 of 14) sorted by relevance
76    (newmsr & MSR_TM)));  in kvmhv_p9_tm_emulation()
124   (newmsr & MSR_TM)));  in kvmhv_p9_tm_emulation()
147   if (!(msr & MSR_TM)) {  in kvmhv_p9_tm_emulation()
179   if (!(msr & MSR_TM)) {  in kvmhv_p9_tm_emulation()
219   if (!(msr & MSR_TM)) {  in kvmhv_p9_tm_emulation()
42    if (!(MSR_TM_TRANSACTIONAL(newmsr) && (newmsr & MSR_TM)))  in kvmhv_p9_tm_emulation_early()
79    if (!(MSR_TM_TRANSACTIONAL(newmsr) && (newmsr & MSR_TM)))  in kvmhv_p9_tm_emulation_early()
95    if (!(vcpu->arch.hfscr & HFSCR_TM) || !(msr & MSR_TM))  in kvmhv_p9_tm_emulation_early()
275   if (((cur_msr & MSR_TM) == 0) &&  in kvmppc_core_emulate_op_pr()
276   ((srr1 & MSR_TM) == 0) &&  in kvmppc_core_emulate_op_pr()
491   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_op_pr()
529   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_op_pr()
555   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_op_pr()
589   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_op_pr()
785   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_mtspr_pr()
966   if (!(kvmppc_get_msr(vcpu) & MSR_TM)) {  in kvmppc_core_emulate_mfspr_pr()
239   MSR_TM | MSR_TS_MASK;  in kvmppc_recalc_shadow_msr()
258   smsr &= ~MSR_TM;  in kvmppc_recalc_shadow_msr()
402   if (kvmppc_get_msr(vcpu) & MSR_TM) {  in kvmppc_restore_tm_pr()
414   if (kvmppc_get_msr(vcpu) & MSR_TM) {  in kvmppc_restore_tm_pr()
551   if (kvmppc_get_msr(vcpu) & MSR_TM)  in kvmppc_set_msr_pr()
1003  guest_fac_enabled = kvmppc_get_msr(vcpu) & MSR_TM;  in kvmppc_handle_fac()
235 li r6, MSR_TM >> 32
510 msr_needed |= MSR_TM; in kvmppc_msr_hard_disable_set_facilities()
4813  (current->thread.regs->msr & MSR_TM)) {  in kvmppc_vcpu_run_hv()
4848  msr |= MSR_TM;  in kvmppc_vcpu_run_hv()
101 mtmsr(mfmsr() | MSR_TM); in system_call_exception()
930   return tsk && tsk->thread.regs && (tsk->thread.regs->msr & MSR_TM);  in tm_enabled()
1031  if (!(thread->regs->msr & MSR_TM))  in tm_recheckpoint()
1100  prev->thread.regs->msr &= ~MSR_TM;  in __switch_to_tm()
1208  if (usermsr & MSR_TM) {  in kvmppc_save_user_regs()
1212  current->thread.regs->msr &= ~MSR_TM;  in kvmppc_save_user_regs()
1513  {MSR_TM, "E"},
1526  if (val & (MSR_TM | MSR_TS_S | MSR_TS_T)) {  in print_tm_bits()
53    li r3, MSR_TM >> 32
64    li r3, MSR_TM >> 32
594 regs_set_return_msr(regs, regs->msr | MSR_TM); in restore_tm_sigcontexts()
1726 regs_set_return_msr(regs, regs->msr | MSR_TM); in tm_unavailable()
115 #define MSR_TM __MASK(MSR_TM_LG) /* Transactional Mem Available */ macro
2062 if (msr & MSR_TM) { in dump_207_sprs()
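The hits above all revolve around one pattern: MSR_TM is a single-bit mask built from its bit position (MSR_TM_LG), and callers test it with "msr & MSR_TM", set it with "|=", and clear it with "&= ~". Below is a minimal standalone sketch of that pattern. The bit position 32 is inferred from the "li rN, MSR_TM >> 32" immediates in the assembly hits; check_tm_available() and the use of 1ULL are illustrative choices for this sketch, not kernel code.

/*
 * Minimal standalone sketch of the MSR_TM pattern in the results above.
 * Assumptions: bit position 32 is inferred from the "li rN, MSR_TM >> 32"
 * hits; 1ULL (rather than the kernel's unsigned-long __MASK) keeps the
 * sketch portable to 32-bit hosts; check_tm_available() is an illustrative
 * helper, not a kernel function.
 */
#include <stdint.h>
#include <stdio.h>

#define __MASK(X)   (1ULL << (X))
#define MSR_TM_LG   32                    /* assumed bit position of MSR_TM */
#define MSR_TM      __MASK(MSR_TM_LG)     /* Transactional Mem Available */

/* The guard used throughout the emulation paths above: bail out when the
 * facility bit is clear in the (guest) MSR. */
static int check_tm_available(uint64_t msr)
{
        if (!(msr & MSR_TM))
                return -1;                /* TM facility unavailable */
        return 0;
}

int main(void)
{
        uint64_t msr = 0;

        msr |= MSR_TM;                    /* enable, as in "msr |= MSR_TM" */
        printf("TM available: %s\n", check_tm_available(msr) ? "no" : "yes");

        msr &= ~MSR_TM;                   /* disable, as in "smsr &= ~MSR_TM" */
        printf("TM available: %s\n", check_tm_available(msr) ? "no" : "yes");

        return 0;
}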