Searched refs:MSR_LE (Results 1 – 22 of 22) sorted by relevance
71 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation_early()
103 newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE); in kvmhv_p9_tm_emulation()
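The two TM-emulation hits above use the kernel's usual mask-and-merge idiom: every field of the new MSR comes from newmsr except MSR_LE, which is carried over from the previous value so the guest keeps its endianness across the transition. A minimal standalone sketch of the idiom, assuming the 64-bit MSR_LE value defined further down in this listing:

    #include <stdio.h>

    #define MSR_LE 0x0000000000000001UL   /* bit 0: little-endian mode */

    /* Everything comes from new_msr except the endianness bit,
     * which is preserved from the old msr. */
    static unsigned long preserve_le(unsigned long new_msr, unsigned long msr)
    {
        return (new_msr & ~MSR_LE) | (msr & MSR_LE);
    }

    int main(void)
    {
        unsigned long msr    = 0x8000000000000001UL;  /* hypothetical: LE set */
        unsigned long newmsr = 0x8000000000000000UL;  /* hypothetical: LE clear */

        printf("%#lx\n", preserve_le(newmsr, msr));   /* LE bit carried over */
        return 0;
    }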
195 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE | in kvmppc_recalc_shadow_msr()
198 smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE; in kvmppc_recalc_shadow_msr()
1532 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_get_one_reg_pr()
1615 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr_pr()
1617 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr_pr()
1768 vcpu->arch.shadow_msr = MSR_USER64 & ~MSR_LE; in kvmppc_core_vcpu_create_pr()
251 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
184 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_kvm_pv()
1594 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr()
1596 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr()
48 #define MSR_LE 1UL macro
67 thread_endianness = MSR_LE & ucp->uc_mcontext.gp_regs[PT_MSR]; in trap_signal_handler()
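The selftest hit above reads the interrupted thread's endianness straight out of the signal frame. A sketch of the same trick, hedged in that it only compiles on powerpc, where the machine context exposes gp_regs and <asm/ptrace.h> defines the PT_MSR index:

    #include <signal.h>
    #include <stdio.h>
    #include <ucontext.h>
    #include <asm/ptrace.h>      /* PT_MSR: index of the MSR in gp_regs */

    #define MSR_LE 1UL           /* same definition the selftest uses */

    static void trap_signal_handler(int sig, siginfo_t *info, void *ctx)
    {
        ucontext_t *ucp = ctx;
        unsigned long le = MSR_LE & ucp->uc_mcontext.gp_regs[PT_MSR];

        /* printf is not async-signal-safe; acceptable in a demo only */
        printf("interrupted thread was %s-endian\n", le ? "little" : "big");
    }

    int main(void)
    {
        struct sigaction sa = {
            .sa_sigaction = trap_signal_handler,
            .sa_flags     = SA_SIGINFO,
        };

        sigaction(SIGUSR1, &sa, NULL);
        raise(SIGUSR1);
        return 0;
    }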
354 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_sigcontext()
467 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_sigcontexts()
900 regs->msr &= ~MSR_LE; in handle_rt_signal64()
901 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal64()
660 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_user_regs()
767 regs->msr = (regs->msr & ~MSR_LE) | (msr & MSR_LE); in restore_tm_user_regs()
982 regs->msr &= ~MSR_LE; in handle_rt_signal32()
983 regs->msr |= (MSR_KERNEL & MSR_LE); in handle_rt_signal32()
1431 regs->msr &= ~MSR_LE; in handle_signal32()
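Read together, the 64-bit and 32-bit signal hits implement one policy in two halves: handle_rt_signal64()/handle_rt_signal32() force the handler to start in the kernel's endianness, and the restore_* functions put the task's saved MSR_LE back on sigreturn. A sketch of both halves, with MSR_KERNEL stubbed out under the assumption of a little-endian kernel build:

    #include <stdio.h>

    #define MSR_LE     0x1UL
    #define MSR_KERNEL MSR_LE    /* assumption: LE kernel, so MSR_LE is set */

    struct demo_regs { unsigned long msr; };   /* hypothetical pt_regs stand-in */

    /* signal delivery: the handler always runs in the kernel's endianness */
    static void signal_enter(struct demo_regs *regs)
    {
        regs->msr &= ~MSR_LE;
        regs->msr |= (MSR_KERNEL & MSR_LE);
    }

    /* sigreturn: restore whatever endianness the saved context carried */
    static void signal_return(struct demo_regs *regs, unsigned long saved_msr)
    {
        regs->msr = (regs->msr & ~MSR_LE) | (saved_msr & MSR_LE);
    }

    int main(void)
    {
        struct demo_regs regs = { .msr = 0 };  /* big-endian user task */

        signal_enter(&regs);                   /* handler sees MSR_LE set */
        signal_return(&regs, 0);               /* back to big-endian */
        printf("msr=%#lx\n", regs.msr);
        return 0;
    }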
308 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) { in fix_alignment()
125 current->thread.regs->msr ^= MSR_LE; in SYSCALL_DEFINE0()
1327 {MSR_LE, "LE"},
1913 regs->msr &= ~MSR_LE; in set_endian()
1915 regs->msr |= MSR_LE; in set_endian()
1934 if (regs->msr & MSR_LE) { in get_endian()
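set_endian() and get_endian() above are the kernel side of the PR_SET_ENDIAN/PR_GET_ENDIAN prctl pair: they do nothing beyond flipping or reporting MSR_LE in the task's saved registers. A userspace sketch that queries the current mode (meaningful only on a powerpc kernel; the constants come from <linux/prctl.h> via <sys/prctl.h>):

    #include <stdio.h>
    #include <sys/prctl.h>

    int main(void)
    {
        int mode;

        /* get_endian() maps regs->msr & MSR_LE onto PR_ENDIAN_* values */
        if (prctl(PR_GET_ENDIAN, &mode) != 0) {
            perror("PR_GET_ENDIAN");     /* e.g. not running on powerpc */
            return 1;
        }

        if (mode == PR_ENDIAN_LITTLE)
            printf("little-endian (MSR_LE set)\n");
        else if (mode == PR_ENDIAN_BIG)
            printf("big-endian (MSR_LE clear)\n");
        else
            printf("PPC pseudo little-endian\n");
        return 0;
    }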
1205 ori r9,r9,MSR_IR|MSR_DR|MSR_FE0|MSR_FE1|MSR_FP|MSR_RI|MSR_LE
1318 LOAD_REG_IMMEDIATE(r12, MSR_SF | MSR_ISF | MSR_LE)
903 swap = (msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in p9_hmi_special_emu()
1490 xori r12,r12,MSR_LE
63 #define MSR_LE 0x0000000000000001 macro
43 li r11,MSR_LE
27 li r0,MSR_IR|MSR_DR|MSR_LE
391 return (kvmppc_get_msr(vcpu) & MSR_LE) != (MSR_KERNEL & MSR_LE); in kvmppc_need_byteswap()
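kvmppc_need_byteswap() above and the cross_endian assignment in emulate_loadstore() below compute the same predicate: an emulated access must byteswap exactly when the target context's MSR_LE disagrees with the kernel's own endianness. A standalone sketch, with MSR_KERNEL again stubbed for a little-endian build:

    #include <stdio.h>

    #define MSR_LE     0x1UL
    #define MSR_KERNEL MSR_LE    /* assumption: little-endian kernel */

    /* true when loads/stores emulated for this context must swap bytes */
    static int need_byteswap(unsigned long msr)
    {
        return (msr & MSR_LE) != (MSR_KERNEL & MSR_LE);
    }

    int main(void)
    {
        printf("%d %d\n", need_byteswap(0), need_byteswap(MSR_LE));  /* 1 0 */
        return 0;
    }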
117 #define MSR_LE __MASK(MSR_LE_LG) /* Little Endian */ macro
143 #define MSR_ (__MSR | MSR_LE)
144 #define MSR_IDLE (MSR_ME | MSR_SF | MSR_HV | MSR_LE)
476 (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR| in pSeries_system_reset_exception()
1529 op->val = 0xffffffff & ~(MSR_ME | MSR_LE); in analyse_instr()
2859 cross_endian = (regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE); in emulate_loadstore()
3184 regs->msr ^= MSR_LE; in emulate_step()
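The last hit, regs->msr ^= MSR_LE in emulate_step(), uses the same one-instruction trick as the SYSCALL_DEFINE0() hit above (the powerpc switch_endian syscall): since MSR_LE is a single bit, switching endianness is a single exclusive-or. A final sketch:

    #include <stdio.h>

    #define MSR_LE 0x1UL

    int main(void)
    {
        unsigned long msr = 0;   /* hypothetical big-endian context */

        msr ^= MSR_LE;           /* flip to little-endian */
        msr ^= MSR_LE;           /* and back to big-endian */
        printf("msr=%#lx\n", msr);
        return 0;
    }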