Lines matching refs: tifn
(All hits below are from arch/x86/kernel/process.c in an SSBD-era x86 tree, roughly v4.17-v5.4: tifp/tifn hold the TIF flag words of the outgoing and incoming task at context switch.)
258 unsigned long tifp, unsigned long tifn) in switch_to_bitmap() argument
260 if (tifn & _TIF_IO_BITMAP) { in switch_to_bitmap()
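The hits at 258/260 are the head of switch_to_bitmap(). For context, a minimal sketch of the whole helper as it looked in this era (before the v5.5 I/O bitmap rework); refresh_tss_limit() and the io_bitmap_* field names are recalled from that era's mainline and should be checked against the exact tree:

    static inline void switch_to_bitmap(struct tss_struct *tss,
                                        struct thread_struct *prev,
                                        struct thread_struct *next,
                                        unsigned long tifp, unsigned long tifn)
    {
            if (tifn & _TIF_IO_BITMAP) {
                    /* Incoming task uses ioperm(): copy its bitmap into the TSS. */
                    memcpy(tss->io_bitmap, next->io_bitmap_ptr,
                           max(prev->io_bitmap_max, next->io_bitmap_max));
                    /* Make sure the TSS limit covers the bitmap. */
                    refresh_tss_limit();
            } else if (tifp & _TIF_IO_BITMAP) {
                    /* Outgoing task used it: re-deny everything it enabled. */
                    memset(tss->io_bitmap, 0xff, prev->io_bitmap_max);
            }
    }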
342 static __always_inline void amd_set_core_ssb_state(unsigned long tifn) in amd_set_core_ssb_state() argument
348 msr |= ssbd_tif_to_amd_ls_cfg(tifn); in amd_set_core_ssb_state()
353 if (tifn & _TIF_SSBD) { in amd_set_core_ssb_state()
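Lines 342-353 belong to the SMP flavor of amd_set_core_ssb_state(). On pre-Zen parts MSR_AMD64_LS_CFG is per-thread and can be written directly; on Zen it is shared between SMT siblings, so the kernel refcounts how many siblings want SSBD and only writes the MSR on the 0-to-1 and 1-to-0 transitions. A condensed sketch of that logic; ssb_state, LSTATE_SSB and x86_amd_ls_cfg_ssbd_mask are the era's identifiers, quoted from memory:

    static __always_inline void amd_set_core_ssb_state(unsigned long tifn)
    {
            struct ssb_state *st = this_cpu_ptr(&ssb_state);
            u64 msr = x86_amd_ls_cfg_base;

            if (!static_cpu_has(X86_FEATURE_ZEN)) {
                    /* LS_CFG is per-thread here: just write the computed value. */
                    msr |= ssbd_tif_to_amd_ls_cfg(tifn);
                    wrmsrl(MSR_AMD64_LS_CFG, msr);
                    return;
            }

            if (tifn & _TIF_SSBD) {
                    /* Block reentry on this CPU (can race with prctl()). */
                    if (__test_and_set_bit(LSTATE_SSB, &st->local_state))
                            return;
                    msr |= x86_amd_ls_cfg_ssbd_mask;
                    raw_spin_lock(&st->shared_state->lock);
                    /* First sibling to want SSBD writes the shared MSR. */
                    if (!st->shared_state->disable_state)
                            wrmsrl(MSR_AMD64_LS_CFG, msr);
                    st->shared_state->disable_state++;
                    raw_spin_unlock(&st->shared_state->lock);
            } else {
                    if (!__test_and_clear_bit(LSTATE_SSB, &st->local_state))
                            return;
                    raw_spin_lock(&st->shared_state->lock);
                    /* Last sibling to drop SSBD clears the shared MSR. */
                    if (!--st->shared_state->disable_state)
                            wrmsrl(MSR_AMD64_LS_CFG, msr);
                    raw_spin_unlock(&st->shared_state->lock);
            }
    }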
381 static __always_inline void amd_set_core_ssb_state(unsigned long tifn) in amd_set_core_ssb_state() argument
383 u64 msr = x86_amd_ls_cfg_base | ssbd_tif_to_amd_ls_cfg(tifn); in amd_set_core_ssb_state()
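Line 383 is effectively the whole body of the !CONFIG_SMP fallback: with no SMT sibling sharing the MSR, the value is computed and written unconditionally. Completed under that assumption, following the same wrmsrl pattern as the setters at 395/400:

    static __always_inline void amd_set_core_ssb_state(unsigned long tifn)
    {
            u64 msr = x86_amd_ls_cfg_base | ssbd_tif_to_amd_ls_cfg(tifn);

            wrmsrl(MSR_AMD64_LS_CFG, msr);
    }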
389 static __always_inline void amd_set_ssb_virt_state(unsigned long tifn) in amd_set_ssb_virt_state() argument
395 wrmsrl(MSR_AMD64_VIRT_SPEC_CTRL, ssbd_tif_to_spec_ctrl(tifn)); in amd_set_ssb_virt_state()
398 static __always_inline void intel_set_ssb_state(unsigned long tifn) in intel_set_ssb_state() argument
400 u64 msr = x86_spec_ctrl_base | ssbd_tif_to_spec_ctrl(tifn); in intel_set_ssb_state()
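The virt setter at 395 is already shown in full: on VIRT_SSBD hosts the guest simply mirrors the TIF bit into MSR_AMD64_VIRT_SPEC_CTRL (SSBD occupies the same bit position in SPEC_CTRL and VIRT_SPEC_CTRL, which is why ssbd_tif_to_spec_ctrl() serves both). The Intel path at 398/400 is one wrmsrl away from complete; filled in on the assumption that the target is MSR_IA32_SPEC_CTRL, as in mainline:

    static __always_inline void intel_set_ssb_state(unsigned long tifn)
    {
            u64 msr = x86_spec_ctrl_base | ssbd_tif_to_spec_ctrl(tifn);

            wrmsrl(MSR_IA32_SPEC_CTRL, msr);
    }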
405 static __always_inline void __speculative_store_bypass_update(unsigned long tifn) in __speculative_store_bypass_update() argument
408 amd_set_ssb_virt_state(tifn); in __speculative_store_bypass_update()
410 amd_set_core_ssb_state(tifn); in __speculative_store_bypass_update()
412 intel_set_ssb_state(tifn); in __speculative_store_bypass_update()
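Lines 405-412 pick one of the three setters per CPU capability; the conditions themselves are elided in the hits above. In this era the dispatch keyed off X86_FEATURE_VIRT_SSBD and X86_FEATURE_LS_CFG_SSBD, with the SPEC_CTRL path as the fallback:

    static __always_inline void __speculative_store_bypass_update(unsigned long tifn)
    {
            if (static_cpu_has(X86_FEATURE_VIRT_SSBD))
                    amd_set_ssb_virt_state(tifn);
            else if (static_cpu_has(X86_FEATURE_LS_CFG_SSBD))
                    amd_set_core_ssb_state(tifn);
            else
                    intel_set_ssb_state(tifn);
    }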
426 unsigned long tifp, tifn; in __switch_to_xtra() local
431 tifn = READ_ONCE(task_thread_info(next_p)->flags); in __switch_to_xtra()
433 switch_to_bitmap(tss, prev, next, tifp, tifn); in __switch_to_xtra()
437 if ((tifp & _TIF_BLOCKSTEP || tifn & _TIF_BLOCKSTEP) && in __switch_to_xtra()
443 msk = tifn & _TIF_BLOCKSTEP; in __switch_to_xtra()
448 if ((tifp ^ tifn) & _TIF_NOTSC) in __switch_to_xtra()
451 if ((tifp ^ tifn) & _TIF_NOCPUID) in __switch_to_xtra()
452 set_cpuid_faulting(!!(tifn & _TIF_NOCPUID)); in __switch_to_xtra()
454 if ((tifp ^ tifn) & _TIF_SSBD) in __switch_to_xtra()
455 __speculative_store_bypass_update(tifn); in __switch_to_xtra()
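Lines 426-455 are the consumer. The recurring (tifp ^ tifn) & _TIF_FOO test is the key idiom: XOR leaves only the bits that differ between the outgoing and incoming task, so the comparatively expensive CR4 and MSR writes happen only when a flag actually changes hands. The hits stitch together roughly as follows; the BLOCKSTEP/DEBUGCTL details and cr4_toggle_bits_irqsoff() are recalled from the mainline code of the time:

    void __switch_to_xtra(struct task_struct *prev_p, struct task_struct *next_p,
                          struct tss_struct *tss)
    {
            struct thread_struct *prev = &prev_p->thread;
            struct thread_struct *next = &next_p->thread;
            unsigned long tifp, tifn;

            tifn = READ_ONCE(task_thread_info(next_p)->flags);
            tifp = READ_ONCE(task_thread_info(prev_p)->flags);
            switch_to_bitmap(tss, prev, next, tifp, tifn);

            propagate_user_return_notify(prev_p, next_p);

            /* BTF (trap on branches) follows the incoming task's flag. */
            if ((tifp & _TIF_BLOCKSTEP || tifn & _TIF_BLOCKSTEP) &&
                arch_has_block_step()) {
                    unsigned long debugctl, msk;

                    rdmsrl(MSR_IA32_DEBUGCTLMSR, debugctl);
                    debugctl &= ~DEBUGCTLMSR_BTF;
                    msk = tifn & _TIF_BLOCKSTEP;
                    debugctl |= (msk >> TIF_BLOCKSTEP) << DEBUGCTLMSR_BTF_SHIFT;
                    wrmsrl(MSR_IA32_DEBUGCTLMSR, debugctl);
            }

            /* Each write below fires only when the bit differs between tasks. */
            if ((tifp ^ tifn) & _TIF_NOTSC)
                    cr4_toggle_bits_irqsoff(X86_CR4_TSD);

            if ((tifp ^ tifn) & _TIF_NOCPUID)
                    set_cpuid_faulting(!!(tifn & _TIF_NOCPUID));

            if ((tifp ^ tifn) & _TIF_SSBD)
                    __speculative_store_bypass_update(tifn);
    }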