Searched refs: MSR_VEC (Results 1 – 20 of 20) sorted by relevance

/Linux-v5.4/arch/powerpc/kernel/

process.c
    262   msr &= ~MSR_VEC;  in __giveup_altivec()
    274   msr_check_and_set(MSR_VEC);  in giveup_altivec()
    276   msr_check_and_clear(MSR_VEC);  in giveup_altivec()
    286   cpumsr = msr_check_and_set(MSR_VEC);  in enable_kernel_altivec()
    288   if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) {  in enable_kernel_altivec()
    313   if (tsk->thread.regs->msr & MSR_VEC) {  in flush_altivec_to_thread()
    347   WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    352   if (msr & MSR_VEC)  in __giveup_vsx()
    360   msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    362   msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    [all …]
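
The process.c hits trace the core ownership pattern: giveup_altivec() briefly turns MSR_VEC on with msr_check_and_set(), hands the thread's vector state back with __giveup_altivec(), then turns the bit off again with msr_check_and_clear(). Kernel code that wants to run AltiVec instructions itself uses enable_kernel_altivec()/disable_kernel_altivec() (see switch_to.h below), which are built on the same helpers. A minimal sketch of that usage pattern follows; the function name and body are illustrative, not one of the hits above:

    #include <linux/preempt.h>
    #include <asm/switch_to.h>   /* enable_kernel_altivec(), disable_kernel_altivec() */

    /* Hypothetical in-kernel AltiVec user, sketched from the helpers above. */
    static void do_vector_work(void)
    {
            preempt_disable();          /* the vector unit is per-CPU while we own it */
            enable_kernel_altivec();    /* sets MSR_VEC, flushes any live user state  */

            /* ... AltiVec instructions may run here ... */

            disable_kernel_altivec();   /* msr_check_and_clear(MSR_VEC)               */
            preempt_enable();
    }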

signal_64.c
    127   msr |= MSR_VEC;  in setup_sigcontext()
    223   msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    244   if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    256   msr |= MSR_VEC;  in setup_tm_sigcontexts()
    264   if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    376   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
    385   if (v_regs != NULL && (msr & MSR_VEC) != 0) {  in restore_sigcontext()
    494   regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
    506   if (v_regs != NULL && tm_v_regs != NULL && (msr & MSR_VEC) != 0) {  in restore_tm_sigcontexts()
    587   msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_sigcontexts()
    [all …]

vector.S
     50   oris	r5,r5,MSR_VEC@h
     69   oris	r9,r9,MSR_VEC@h
     73   oris	r12,r12,MSR_VEC@h
    124   andis.	r5,r12,MSR_VEC@h

signal_32.c
    405   msr |= MSR_VEC;  in save_user_regs()
    525   if (msr & MSR_VEC) {  in save_tm_user_regs()
    540   msr |= MSR_VEC;  in save_tm_user_regs()
    553   if (msr & MSR_VEC) {  in save_tm_user_regs()
    667   regs->msr &= ~MSR_VEC;  in restore_user_regs()
    668   if (msr & MSR_VEC) {  in restore_user_regs()
    770   regs->msr &= ~MSR_VEC;  in restore_tm_user_regs()
    771   if (msr & MSR_VEC) {  in restore_tm_user_regs()
    876   msr_check_and_set(msr & (MSR_FP | MSR_VEC));  in restore_tm_user_regs()
    882   if (msr & MSR_VEC) {  in restore_tm_user_regs()
    [all …]
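
Both signal files follow one discipline: at delivery, MSR_VEC is OR'd into the MSR word saved in the frame only when the task actually has live vector state, and on return regs->msr is first stripped of MSR_VEC so the unit only comes back if the saved MSR says it should. A heavily condensed paraphrase of the restore path, with hypothetical parameter names (frame_msr, vregs) and helper name; this is a sketch, not the literal v5.4 code:

    #include <linux/types.h>     /* vector128                     */
    #include <linux/sched.h>     /* current, struct thread_struct */
    #include <linux/uaccess.h>   /* copy_from_user()              */
    #include <asm/reg.h>         /* MSR_VEC                       */

    /* Condensed paraphrase of the restore_user_regs()/restore_sigcontext()
     * hits above: assume no live vector state, then pull the VMX registers
     * back from the user frame only if the saved MSR carried MSR_VEC. */
    static int restore_altivec_state(struct pt_regs *regs,
                                     unsigned long frame_msr,
                                     void __user *vregs)
    {
            regs->msr &= ~MSR_VEC;               /* e.g. signal_32.c:667 */
            if (frame_msr & MSR_VEC) {           /* frame has VMX state  */
                    if (copy_from_user(&current->thread.vr_state, vregs,
                                       33 * sizeof(vector128)))
                            return -EFAULT;      /* 32 VRs + VSCR        */
            }
            return 0;
    }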

tm.S
    132   oris	r15, r15, MSR_VEC@h
    373   oris	r5, r5, MSR_VEC@h

entry_64.S
    249   andis.	r0,r8,MSR_VEC@h

entry_32.S
    685   oris	r0,r0,MSR_VEC@h	/* Disable altivec */

traps.c
    961   msr_mask = MSR_VEC;  in p9_hmi_special_emu()

/Linux-v5.4/arch/powerpc/include/asm/

switch_to.h
     49   msr_check_and_clear(MSR_VEC);  in disable_kernel_altivec()
     61   msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()

reg.h
     92   #define MSR_VEC		__MASK(MSR_VEC_LG)	/* Enable AltiVec */  macro
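
A note on the definition just above: __MASK(X) expands to (1UL << (X)) and MSR_VEC_LG is 25 in v5.4, so MSR_VEC is bit 25 of the MSR, i.e. 0x02000000. Because that bit sits in the upper 16-bit halfword, the assembly hits throughout this list set and test it with oris/andis. against MSR_VEC@h rather than plain ori/andi.. A standalone compile-time check of that arithmetic (not kernel code):

    /* Standalone check of the reg.h arithmetic; MSR_VEC_LG is 25 in v5.4. */
    #define MSR_VEC_LG	25
    #define __MASK(X)	(1UL << (X))
    #define MSR_VEC	__MASK(MSR_VEC_LG)	/* Enable AltiVec */

    _Static_assert(MSR_VEC == 0x02000000UL, "MSR_VEC is bit 25");
    /* oris/andis. operate on the high halfword, so MSR_VEC@h is 0x0200: */
    _Static_assert((MSR_VEC >> 16) == 0x0200, "halfword used by oris/andis.");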

/Linux-v5.4/arch/powerpc/lib/

ldstfp.S
     72   oris	r7, r6, MSR_VEC@h
     96   oris	r7, r6, MSR_VEC@h

sstep.c
    576   if (regs->msr & MSR_VEC)  in do_vec_load()
    599   if (regs->msr & MSR_VEC)  in do_vec_store()
    839   if (regs->msr & MSR_VEC)  in do_vsx_load()
    870   if (regs->msr & MSR_VEC)  in do_vsx_store()
   2970   if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
   2984   msrbit = MSR_VEC;  in emulate_loadstore()
   3041   if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
   3055   msrbit = MSR_VEC;  in emulate_loadstore()
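
In sstep.c the same bit gates two decisions: do_vec_load()/do_vec_store() (and their VSX twins) check regs->msr & MSR_VEC to choose between the live register file and the saved image in the thread struct, while emulate_loadstore() refuses to emulate a vector access in kernel mode when the unit is off. A self-contained paraphrase of that second gate, with a hypothetical helper name; not the literal code:

    #include <linux/ptrace.h>	/* struct pt_regs  */
    #include <asm/reg.h>	/* MSR_PR, MSR_VEC */

    /* Paraphrase of the emulate_loadstore() hits at 2970/2984: in kernel
     * mode (MSR_PR clear) a vector access is only emulated when AltiVec
     * is actually enabled; otherwise the caller gets to raise the
     * vector-unavailable interrupt instead. */
    static bool may_emulate_vec(const struct pt_regs *regs)
    {
            return (regs->msr & MSR_PR) || (regs->msr & MSR_VEC);
    }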

test_emulate_step.c
     78   regs->msr |= MSR_VEC;  in init_pt_regs()

/Linux-v5.4/arch/powerpc/kvm/

book3s_pr.c
    136   kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    325   (MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_handle_lost_math_exts()
    332   else if (ext_diff == MSR_VEC)  in kvmppc_handle_lost_math_exts()
    793   msr |= MSR_FP | MSR_VEC;  in kvmppc_giveup_ext()
    815   if (msr & MSR_VEC) {  in kvmppc_giveup_ext()
    816   if (current->thread.regs->msr & MSR_VEC)  in kvmppc_giveup_ext()
    874   msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    895   if (msr & MSR_VEC) {  in kvmppc_handle_ext()
    933   if (lost_ext & MSR_VEC) {  in kvmppc_handle_lost_ext()
   1351   ext_msr = MSR_VEC;  in kvmppc_handle_exit_pr()
    [all …]

emulate_loadstore.c
     54   if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {  in kvmppc_check_altivec_disabled()
    277   MSR_VEC);  in kvmppc_emulate_loadstore()

tm.S
     45   oris	r8, r8, (MSR_VEC | MSR_VSX)@h
    239   oris	r5, r5, (MSR_VEC | MSR_VSX)@h

booke.c
    177   if (!(current->thread.regs->msr & MSR_VEC)) {  in kvmppc_load_guest_altivec()
    182   current->thread.regs->msr |= MSR_VEC;  in kvmppc_load_guest_altivec()
    196   if (current->thread.regs->msr & MSR_VEC)  in kvmppc_save_guest_altivec()
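
The booke.c trio is the lazy-switching idea applied to guests: vcpu vector state is loaded only if the host thread does not already own the unit (MSR_VEC clear), and flushed back out only if it does. A condensed paraphrase of the load side, assuming the guest's VMX image lives at vcpu->arch.vr; treat the body as a sketch, not the literal v5.4 code:

    #include <linux/kvm_host.h>	/* struct kvm_vcpu                          */
    #include <linux/sched.h>	/* current                                  */
    #include <asm/switch_to.h>	/* enable_kernel_altivec(), load_vr_state() */

    /* Condensed paraphrase of kvmppc_load_guest_altivec(): only take over
     * the vector unit when the host thread does not already own it. */
    static void load_guest_altivec(struct kvm_vcpu *vcpu)
    {
            if (!(current->thread.regs->msr & MSR_VEC)) {   /* booke.c:177 */
                    enable_kernel_altivec();
                    load_vr_state(&vcpu->arch.vr);          /* guest VMX image */
                    current->thread.regs->msr |= MSR_VEC;   /* booke.c:182 */
            }
    }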

book3s_hv_rmhandlers.S
   3029   oris	r8,r8,MSR_VEC@h
   3064   oris	r8,r8,MSR_VEC@h

powerpc.c
   1195   vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VEC);  in kvmppc_complete_mmio_load()

book3s_hv.c
   3559   msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);  in kvmhv_p9_guest_entry()
   3664   msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);  in kvmhv_p9_guest_entry()