/Linux-v5.4/arch/powerpc/kvm/

book3s_hv_tm_builtin.c
    42  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))  in kvmhv_p9_tm_emulation_early()
    46  ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB)))  in kvmhv_p9_tm_emulation_early()
    80  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))  in kvmhv_p9_tm_emulation_early()
|
book3s_hv_tm.c
    23  if (msr & MSR_PR) {  in emulate_tx_failure()
    62  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {  in kvmhv_p9_tm_emulation()
    73  if ((msr & MSR_PR) && !(vcpu->arch.fscr & FSCR_EBB)) {  in kvmhv_p9_tm_emulation()
    110  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {  in kvmhv_p9_tm_emulation()
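Both TM-emulation files repeat one gate: a softpatch-emulated TM instruction is refused, and an interrupt is delivered to the guest instead, when the guest is in problem state (MSR_PR set) and the vcore or FSCR says the facility is unavailable to user mode. A minimal standalone sketch of that decision; the PCR_ARCH_206 and FSCR_EBB values here are placeholders, not the kernel's encodings:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR        (1UL << 14)   /* problem (user) state */
    #define PCR_ARCH_206  (1UL << 2)    /* placeholder encoding */
    #define FSCR_EBB      (1UL << 56)   /* placeholder encoding */

    /* Hypothetical helper mirroring the shape of kvmhv_p9_tm_emulation():
     * refuse to emulate for a problem-state guest on a pre-ISA-2.07 compat
     * vcore, or when the EBB facility is turned off for user mode. */
    static bool tm_emulation_allowed(uint64_t msr, uint64_t pcr, uint64_t fscr)
    {
        if ((msr & MSR_PR) && (pcr & PCR_ARCH_206))
            return false;
        if ((msr & MSR_PR) && !(fscr & FSCR_EBB))
            return false;
        return true;
    }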
|
book3s_emulate.c
    81  if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)  in spr_allowed()
    149  if (kvmppc_get_msr(vcpu) & MSR_PR)  in kvmppc_emulate_treclaim()
    224  if (guest_msr & MSR_PR)  in kvmppc_emulate_tabort()
    365  if ((kvmppc_get_msr(vcpu) & MSR_PR) ||  in kvmppc_core_emulate_op_pr()
    501  if (!(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_core_emulate_op_pr()
    543  WARN_ON(guest_msr & MSR_PR);  in kvmppc_core_emulate_op_pr()
    566  if (guest_msr & MSR_PR) {  in kvmppc_core_emulate_op_pr()
    600  if (guest_msr & MSR_PR) {  in kvmppc_core_emulate_op_pr()
    849  if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_core_emulate_mtspr_pr()
    854  if ((kvmppc_get_msr(vcpu) & MSR_PR) || sprn == 0) {  in kvmppc_core_emulate_mtspr_pr()
    [all …]
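spr_allowed() at line 81 is the classic privilege gate for SPR emulation: any SPR whose required level is above "problem state" is denied while MSR_PR is set. A sketch, with a hypothetical enum standing in for the kernel's privilege constants:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR (1UL << 14)

    /* Hypothetical stand-ins for the kernel's PRIV_* levels. */
    enum priv_level { PRIV_PROBLEM, PRIV_SUPER, PRIV_HYPER };

    /* Deny privileged SPRs to a guest running in user mode. */
    static bool spr_allowed(uint64_t guest_msr, enum priv_level level)
    {
        return !((guest_msr & MSR_PR) && level > PRIV_PROBLEM);
    }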
|
book3s_32_mmu.c
    146  if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_mmu_book3s_32_xlate_bat()
    229  if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) ||  in kvmppc_mmu_book3s_32_xlate_pte()
    230  (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR)))  in kvmppc_mmu_book3s_32_xlate_pte()
    308  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_32_xlate()
    390  if (msr & MSR_PR)  in kvmppc_mmu_book3s_32_esid_to_vsid()
|
book3s_64_mmu.c
    226  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_xlate()
    271  if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp)  in kvmppc_mmu_book3s_64_xlate()
    273  else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks)  in kvmppc_mmu_book3s_64_xlate()
    316  !(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_mmu_book3s_64_xlate()
    582  return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) &&  in segment_contains_magic_page()
    643  if (kvmppc_get_msr(vcpu) & MSR_PR)  in kvmppc_mmu_book3s_64_esid_to_vsid()
    653  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_esid_to_vsid()
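The pair of hits at 271/273 selects which storage-key bit applies during translation: Kp when the guest runs in problem state, Ks in supervisor state; the chosen key then combines with the PTE's PP bits to decide access. Sketched with a simplified, hypothetical SLB-entry struct:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR (1UL << 14)

    /* Simplified SLB-entry view, for this sketch only. */
    struct slbe { bool Ks, Kp; };

    /* Pick the protection key bit the hardware would apply. */
    static bool effective_key(uint64_t msr, const struct slbe *slbe)
    {
        return (msr & MSR_PR) ? slbe->Kp : slbe->Ks;
    }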
|
book3s_pr.c
    201  smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;  in kvmppc_recalc_shadow_msr()
    214  if (!(guest_msr & MSR_PR))  in kvmppc_recalc_shadow_msr()
    283  if (unlikely((old_msr & MSR_PR) &&  in kvmppc_copy_from_svcpu()
    468  if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))  in kvmppc_set_msr_pr()
    495  if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) !=  in kvmppc_set_msr_pr()
    496  (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {  in kvmppc_set_msr_pr()
    501  if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {  in kvmppc_set_msr_pr()
    520  !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {  in kvmppc_set_msr_pr()
    958  if (!(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_emulate_fac()
    1016  if ((fac == FSCR_TM_LG) && !(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_handle_fac()
    [all …]
|
book3s_32_mmu_host.c
    84  if (kvmppc_get_msr(vcpu) & MSR_PR)  in find_sid_vsid()
    270  if (kvmppc_get_msr(vcpu) & MSR_PR)  in create_sid_map()
|
book3s_64_mmu_host.c
    50  if (kvmppc_get_msr(vcpu) & MSR_PR)  in find_sid_vsid()
    231  if (kvmppc_get_msr(vcpu) & MSR_PR)  in create_sid_map()
|
e500.h
    220  return !!(vcpu->arch.shared->msr & MSR_PR);  in get_cur_pr()
|
booke.c
    409  crit = crit && !(vcpu->arch.shared->msr & MSR_PR);  in kvmppc_booke_irqprio_deliver()
    1103  if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) {  in kvmppc_handle_exit()
    1207  if (!(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_handle_exit()
    1221  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
    1242  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
    1970  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_xlate()
|
e500_mmu_host.c
    311  u32 pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_e500_setup_stlbe()
    672  pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_load_last_inst()
|
/Linux-v5.4/arch/powerpc/include/asm/

reg_booke.h
    43  #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
    47  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
    50  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
|
ptrace.h
    163  #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)
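user_mode() is the canonical "did this trap come from userspace?" test on powerpc: it simply checks MSR_PR in the saved registers. A typical use, sketched against a minimal pt_regs (the handler body is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define MSR_PR (1UL << 14)

    struct pt_regs { uint64_t msr; };   /* minimal stand-in */
    #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)

    /* Fault-handler skeleton: signal userspace, oops on a kernel fault. */
    static void handle_fault(struct pt_regs *regs)
    {
        if (user_mode(regs))
            puts("deliver SIGSEGV to the current task");
        else
            puts("unhandled kernel fault: die()");
    }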
|
reg.h
    100  #define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */  (macro definition)
    147  #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
    152  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
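reg.h builds the mask from a bit number: MSR_PR_LG is 14 and __MASK(X) expands to (1UL << (X)), so MSR_PR is 0x4000. Because that value fits in a 16-bit immediate, the assembly sections below can test it with a single andi. instruction. A self-checking sketch of the arithmetic:

    #include <assert.h>

    #define MSR_PR_LG 14                /* bit number, as in reg.h */
    #define __MASK(X) (1UL << (X))
    #define MSR_PR    __MASK(MSR_PR_LG)

    int main(void)
    {
        assert(MSR_PR == 0x4000);       /* fits an andi. immediate */
        return 0;
    }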
|
/Linux-v5.4/arch/powerpc/kernel/

exceptions-64e.S
    73  andi. r3,r3,MSR_PR
    139  andi. r3,r3,MSR_PR
    281  andi. r10,r11,MSR_PR; /* save stack pointer */ \
    637  andi. r0,r12,MSR_PR;
    656  andi. r0,r12,MSR_PR;
    783  1: andi. r14,r11,MSR_PR; /* check for userspace again */
    855  1: andi. r14,r11,MSR_PR; /* check for userspace again */
    1092  andi. r6,r10,MSR_PR
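All of the assembly hits in this directory are one idiom: andi. masks the saved MSR with MSR_PR and sets CR0, and the following conditional branch picks the user or kernel path, most commonly to decide whether r1 can keep being used as a kernel stack pointer. A hedged C rendering of that decision (names and the stack detail are illustrative):

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR (1UL << 14)

    /* C equivalent of "andi. r10,r11,MSR_PR; beq 1f": a trap taken from
     * user mode must switch to the task's kernel stack; a trap from
     * kernel mode keeps the interrupted stack pointer. */
    static uintptr_t pick_stack(uint64_t saved_msr, uintptr_t current_sp,
                                uintptr_t kernel_stack_top)
    {
        bool from_user = (saved_msr & MSR_PR) != 0;
        return from_user ? kernel_stack_top : current_sp;
    }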
|
head_32.h
    24  andi. r11,r11,MSR_PR
|
head_booke.h
    55  andi. r11, r11, MSR_PR; /* check whether user or kernel */\
    240  andi. r11,r11,MSR_PR; \
|
entry_64.S
    266  andi. r6,r8,MSR_PR
    755  andi. r3,r3,MSR_PR
    969  andi. r0,r3,MSR_PR
|
head_40x.S
    110  andi. r11,r11,MSR_PR; \
    615  andi. r10,r9,MSR_IR|MSR_PR /* check supervisor + MMU off */
|
entry_32.S
    137  andi. r2,r9,MSR_PR
    837  andi. r0,r3,MSR_PR
    1073  andi. r3,r3,MSR_PR; \
|
head_fsl_booke.S
    450  andi. r10,r11,MSR_PR
    551  andi. r10,r11,MSR_PR
|
/Linux-v5.4/arch/powerpc/lib/

sstep.c
    1232  if (regs->msr & MSR_PR)  in analyse_instr()
    1519  if (regs->msr & MSR_PR)  in analyse_instr()
    1525  if (regs->msr & MSR_PR)  in analyse_instr()
    1533  if (regs->msr & MSR_PR)  in analyse_instr()
    2963  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))  in emulate_loadstore()
    2970  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    2985  if (!(regs->msr & MSR_PR) && !(regs->msr & msrbit))  in emulate_loadstore()
    3022  !(regs->msr & MSR_PR) &&  in emulate_loadstore()
    3034  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))  in emulate_loadstore()
    3041  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    [all …]
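sstep.c applies MSR_PR in two directions: analyse_instr() refuses to emulate privileged instructions when the trap came from user mode, while emulate_loadstore() requires the FP/VEC facility bit to already be on before touching FP or vector state from kernel mode. A condensed sketch of both guards (MSR_FP is 0x2000 in reg.h; the helper names are hypothetical):

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR (1UL << 14)
    #define MSR_FP (1UL << 13)          /* floating-point available */

    /* Privileged-instruction guard, as in analyse_instr(). */
    static bool may_emulate_privileged(uint64_t msr)
    {
        return !(msr & MSR_PR);
    }

    /* FP access guard, as in emulate_loadstore(): in kernel mode the
     * facility must already be enabled in the MSR. */
    static bool may_emulate_fp_access(uint64_t msr)
    {
        return (msr & MSR_PR) || (msr & MSR_FP);
    }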
|
/Linux-v5.4/arch/powerpc/xmon/

xmon.c
    514  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT))  in xmon_core()
    659  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_core()
    709  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_bpt()
    740  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_break_match()
    750  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_iabr_match()
    775  if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_fault_handler()
    1126  if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) {  in do_step()
    1614  if (regs->msr & MSR_PR)  in print_bug_trap()
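xmon repeats a single predicate: mask the MSR with MSR_IR|MSR_PR|MSR_64BIT and compare against MSR_IR|MSR_64BIT, i.e. "a 64-bit, MMU-on kernel context, not userspace", which is where the debugger can safely take control. Factored into a helper as a sketch (MSR_64BIT is MSR_SF, bit 63, on 64-bit builds; MSR_IR is bit 5):

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_PR     (1UL << 14)
    #define MSR_IR     (1UL << 5)       /* instruction relocate */
    #define MSR_64BIT  (1UL << 63)      /* MSR_SF on 64-bit builds */

    /* True only for traps taken from 64-bit, MMU-on, kernel context. */
    static bool xmon_safe_context(uint64_t msr)
    {
        return (msr & (MSR_IR | MSR_PR | MSR_64BIT))
            == (MSR_IR | MSR_64BIT);
    }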
|
/Linux-v5.4/drivers/vfio/pci/

vfio_pci_nvlink2.c
    202  kvm->arch.lpid, MSR_DR | MSR_PR))  in vfio_pci_nvgpu_group_notifier()
|
/Linux-v5.4/arch/powerpc/platforms/pseries/

ras.c
    476  (MSR_LE|MSR_RI|MSR_DR|MSR_IR|MSR_ME|MSR_PR|  in pSeries_system_reset_exception()
|