
Searched refs:MSR_VEC (Results 1 – 20 of 20) sorted by relevance

/Linux-v5.4/arch/powerpc/kernel/
process.c  262 msr &= ~MSR_VEC; in __giveup_altivec()
274 msr_check_and_set(MSR_VEC); in giveup_altivec()
276 msr_check_and_clear(MSR_VEC); in giveup_altivec()
286 cpumsr = msr_check_and_set(MSR_VEC); in enable_kernel_altivec()
288 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { in enable_kernel_altivec()
313 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
347 WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))); in __giveup_vsx()
352 if (msr & MSR_VEC) in __giveup_vsx()
360 msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
362 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
[all …]
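
Note: the process.c hits above form the lazy AltiVec give-up path: MSR_VEC is temporarily set in the hardware MSR so the vector registers can be saved, the task's own regs->msr then loses MSR_VEC so the state is faulted back in on next use, and the temporary enable is dropped. A minimal sketch of that flow, using the real msr_check_and_set()/msr_check_and_clear() and save_altivec() helpers but simplifying the surrounding structure (not the exact v5.4 code):

    /* Kernel-internal sketch; the headers below are the usual homes of
     * these declarations, not a verified minimal include set. */
    #include <linux/sched.h>
    #include <asm/reg.h>
    #include <asm/switch_to.h>

    static void giveup_altivec_sketch(struct task_struct *tsk)
    {
            msr_check_and_set(MSR_VEC);             /* allow access to the VRs */

            if (tsk->thread.regs && (tsk->thread.regs->msr & MSR_VEC)) {
                    save_altivec(tsk);                   /* spill VR0-VR31/VSCR to thread state */
                    tsk->thread.regs->msr &= ~MSR_VEC;   /* task must fault VMX back in */
            }

            msr_check_and_clear(MSR_VEC);           /* drop the temporary enable */
    }
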
signal_64.c  127 msr |= MSR_VEC; in setup_sigcontext()
223 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
244 if (msr & MSR_VEC) in setup_tm_sigcontexts()
256 msr |= MSR_VEC; in setup_tm_sigcontexts()
264 if (msr & MSR_VEC) in setup_tm_sigcontexts()
376 regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX); in restore_sigcontext()
385 if (v_regs != NULL && (msr & MSR_VEC) != 0) { in restore_sigcontext()
494 regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX); in restore_tm_sigcontexts()
506 if (v_regs != NULL && tm_v_regs != NULL && (msr & MSR_VEC) != 0) { in restore_tm_sigcontexts()
587 msr_check_and_set(msr & (MSR_FP | MSR_VEC)); in restore_tm_sigcontexts()
[all …]
vector.S  50 oris r5,r5,MSR_VEC@h
69 oris r9,r9,MSR_VEC@h
73 oris r12,r12,MSR_VEC@h
124 andis. r5,r12,MSR_VEC@h
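
Note: vector.S handles the same bit from assembly. Because MSR_VEC is a single bit in the upper half of the low 32 MSR bits (1UL << 25 in v5.4, stated here as an assumption), oris rX,rX,MSR_VEC@h sets it in one instruction and andis. tests it. The C equivalent is plain mask arithmetic:

    #include <linux/types.h>
    #include <asm/reg.h>    /* MSR_VEC */

    /* C view of the oris/andis. sequences above. */
    static inline unsigned long msr_enable_vec(unsigned long msr)
    {
            return msr | MSR_VEC;           /* oris  rX,rX,MSR_VEC@h */
    }

    static inline bool msr_vec_enabled(unsigned long msr)
    {
            return (msr & MSR_VEC) != 0;    /* andis. rX,rY,MSR_VEC@h */
    }
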
signal_32.c  405 msr |= MSR_VEC; in save_user_regs()
525 if (msr & MSR_VEC) { in save_tm_user_regs()
540 msr |= MSR_VEC; in save_tm_user_regs()
553 if (msr & MSR_VEC) { in save_tm_user_regs()
667 regs->msr &= ~MSR_VEC; in restore_user_regs()
668 if (msr & MSR_VEC) { in restore_user_regs()
770 regs->msr &= ~MSR_VEC; in restore_tm_user_regs()
771 if (msr & MSR_VEC) { in restore_tm_user_regs()
876 msr_check_and_set(msr & (MSR_FP | MSR_VEC)); in restore_tm_user_regs()
882 if (msr & MSR_VEC) { in restore_tm_user_regs()
[all …]
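
Note: signal_64.c and signal_32.c above share one restore-side pattern: MSR_VEC is cleared in the regs being rebuilt, and the vector registers are copied back from the signal frame only when the saved MSR says AltiVec state was live. A sketch of that check, where copy_vrs_from_frame() is a hypothetical stand-in for the real __copy_from_user() of the frame's v_regs area:

    /* Sketch only; error handling and the 32/64-bit frame layouts are omitted. */
    static int restore_altivec_sketch(struct pt_regs *regs, unsigned long saved_msr,
                                      void __user *v_regs)
    {
            regs->msr &= ~MSR_VEC;          /* force a reload from thread state */

            if (v_regs != NULL && (saved_msr & MSR_VEC) != 0)
                    return copy_vrs_from_frame(&current->thread.vr_state, v_regs); /* hypothetical */

            return 0;
    }
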
tm.S  132 oris r15, r15, MSR_VEC@h
373 oris r5, r5, MSR_VEC@h
entry_64.S  249 andis. r0,r8,MSR_VEC@h
entry_32.S  685 oris r0,r0,MSR_VEC@h /* Disable altivec */
traps.c  961 msr_mask = MSR_VEC; in p9_hmi_special_emu()
/Linux-v5.4/arch/powerpc/include/asm/
switch_to.h  49 msr_check_and_clear(MSR_VEC); in disable_kernel_altivec()
61 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in disable_kernel_vsx()
reg.h  92 #define MSR_VEC __MASK(MSR_VEC_LG) /* Enable AltiVec */ macro
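
Note: disable_kernel_altivec() in switch_to.h is the counterpart of enable_kernel_altivec() from process.c, and reg.h shows MSR_VEC is simply __MASK(MSR_VEC_LG). Kernel code that wants to use VMX typically brackets the work with the pair while preemption is off; a minimal sketch (the surrounding function and the preempt_disable() placement follow common usage such as the VMX crypto drivers, not a mandated recipe):

    #include <linux/preempt.h>
    #include <asm/switch_to.h>  /* enable_kernel_altivec(), disable_kernel_altivec() */

    static void do_vmx_work_sketch(void)
    {
            preempt_disable();
            enable_kernel_altivec();   /* sets MSR_VEC via msr_check_and_set() */
            /* ... AltiVec/VMX instructions may be executed here ... */
            disable_kernel_altivec();  /* msr_check_and_clear(MSR_VEC), per the hit above */
            preempt_enable();
    }
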
/Linux-v5.4/arch/powerpc/lib/
ldstfp.S  72 oris r7, r6, MSR_VEC@h
96 oris r7, r6, MSR_VEC@h
sstep.c  576 if (regs->msr & MSR_VEC) in do_vec_load()
599 if (regs->msr & MSR_VEC) in do_vec_store()
839 if (regs->msr & MSR_VEC) in do_vsx_load()
870 if (regs->msr & MSR_VEC) in do_vsx_store()
2970 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
2984 msrbit = MSR_VEC; in emulate_loadstore()
3041 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
3055 msrbit = MSR_VEC; in emulate_loadstore()
test_emulate_step.c  78 regs->msr |= MSR_VEC; in init_pt_regs()
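
Note: the sstep.c hits all make the decision the instruction emulator needs before touching vector state: if MSR_VEC is set in the interrupted context's MSR, the hardware VRs are live and can be used directly, otherwise the emulation must work on the copy in current->thread.vr_state. test_emulate_step.c sets the bit up front for exactly that reason. The check itself is one line:

    #include <linux/types.h>
    #include <asm/ptrace.h>  /* struct pt_regs */
    #include <asm/reg.h>     /* MSR_VEC */

    /* True when the interrupted context's vector registers are live in
     * hardware rather than saved in thread.vr_state. */
    static inline bool vec_regs_live(const struct pt_regs *regs)
    {
            return (regs->msr & MSR_VEC) != 0;
    }
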
/Linux-v5.4/arch/powerpc/kvm/
book3s_pr.c  136 kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_core_vcpu_put_pr()
325 (MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_handle_lost_math_exts()
332 else if (ext_diff == MSR_VEC) in kvmppc_handle_lost_math_exts()
793 msr |= MSR_FP | MSR_VEC; in kvmppc_giveup_ext()
815 if (msr & MSR_VEC) { in kvmppc_giveup_ext()
816 if (current->thread.regs->msr & MSR_VEC) in kvmppc_giveup_ext()
874 msr = MSR_FP | MSR_VEC | MSR_VSX; in kvmppc_handle_ext()
895 if (msr & MSR_VEC) { in kvmppc_handle_ext()
933 if (lost_ext & MSR_VEC) { in kvmppc_handle_lost_ext()
1351 ext_msr = MSR_VEC; in kvmppc_handle_exit_pr()
[all …]
emulate_loadstore.c  54 if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) { in kvmppc_check_altivec_disabled()
277 MSR_VEC); in kvmppc_emulate_loadstore()
tm.S  45 oris r8, r8, (MSR_VEC | MSR_VSX)@h
239 oris r5, r5, (MSR_VEC | MSR_VSX)@h
booke.c  177 if (!(current->thread.regs->msr & MSR_VEC)) { in kvmppc_load_guest_altivec()
182 current->thread.regs->msr |= MSR_VEC; in kvmppc_load_guest_altivec()
196 if (current->thread.regs->msr & MSR_VEC) in kvmppc_save_guest_altivec()
book3s_hv_rmhandlers.S  3029 oris r8,r8,MSR_VEC@h
3064 oris r8,r8,MSR_VEC@h
powerpc.c  1195 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VEC); in kvmppc_complete_mmio_load()
book3s_hv.c  3559 msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX); in kvmhv_p9_guest_entry()
3664 msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX); in kvmhv_p9_guest_entry()
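
Note: on the KVM side the same bit decides when guest vector state must be flushed back to memory: booke.c and book3s_pr.c only call giveup_altivec()/kvmppc_giveup_ext() when the host thread's regs still have MSR_VEC set, i.e. when the guest's VRs are still live in hardware. A condensed sketch of that save-side check (real helpers, simplified surroundings):

    #include <linux/sched.h>
    #include <asm/reg.h>
    #include <asm/switch_to.h>  /* giveup_altivec() */

    /* Flush the guest's AltiVec state back to thread.vr_state if it is
     * still live in the host thread, mirroring kvmppc_save_guest_altivec(). */
    static void save_guest_altivec_sketch(void)
    {
            if (current->thread.regs->msr & MSR_VEC)
                    giveup_altivec(current);
    }
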