Searched refs:X86_EFLAGS_IF (Results 1 – 23 of 23) sorted by relevance
41 return (-flags) & X86_EFLAGS_IF; in xen_save_fl()
50 flags = !(flags & X86_EFLAGS_IF); in xen_restore_fl()
80 testw $X86_EFLAGS_IF, %di
82 testb $X86_EFLAGS_IF>>8, %ah
79 testb $X86_EFLAGS_IF>>8, 8+1+ESP_OFFSET(%esp)
301 v86.regs.eflags &= ~X86_EFLAGS_IF; in main()
311 v86.regs.eax = X86_EFLAGS_IF; in main()
316 v86.regs.eax = X86_EFLAGS_IF; in main()
540 = (regs->flags & (X86_EFLAGS_TF | X86_EFLAGS_IF)); in set_current_kprobe()
542 kcb->kprobe_saved_flags &= ~X86_EFLAGS_IF; in set_current_kprobe()
606 regs->flags &= ~X86_EFLAGS_IF; in setup_singlestep()
921 *tos &= ~(X86_EFLAGS_TF | X86_EFLAGS_IF); in resume_execution()
164 return !(flags & X86_EFLAGS_IF); in arch_irqs_disabled_flags()
35 #define ARCH_EFI_IRQ_FLAGS_MASK X86_EFLAGS_IF
424 if (flags & X86_EFLAGS_IF) in set_vflags_long()
434 if (flags & X86_EFLAGS_IF) in set_vflags_short()
445 flags |= X86_EFLAGS_IF; in get_vflags()
737 if (vmpi->force_return_for_pic && (VEFLAGS & (X86_EFLAGS_IF | X86_EFLAGS_VIF))) { in handle_vm86_fault()
194 regs->flags = X86_EFLAGS_IF; in start_thread()
79 if (regs->flags & X86_EFLAGS_IF) in cond_local_irq_enable()
85 if (regs->flags & X86_EFLAGS_IF) in cond_local_irq_disable()
471 regs->flags = X86_EFLAGS_IF; in start_thread_common()
294 ctx->saved_flags = (regs->flags & (X86_EFLAGS_TF | X86_EFLAGS_IF)); in kmmio_handler()
305 regs->flags &= ~X86_EFLAGS_IF; in kmmio_handler()
1341 if (regs->flags & X86_EFLAGS_IF) in do_user_addr_fault()
26 #define X86_EFLAGS_IF _BITUL(X86_EFLAGS_IF_BIT) macro
78 orl $X86_EFLAGS_IF, (%rsp) /* Fix saved flags */
74 testl $X86_EFLAGS_IF, PT_EFLAGS(%esp) # interrupts off?
908 orl $X86_EFLAGS_IF, (%esp) /* Fix IF */
1091 testl $X86_EFLAGS_IF, PT_EFLAGS(%esp) # interrupts off (exception path) ?
394 testl $X86_EFLAGS_IF, %eax
1054 flags = (flags & EFLAGS_MASK) | X86_EFLAGS_IF; in test_cc()
1913 change_mask |= X86_EFLAGS_IF; in emulate_popf()
1918 change_mask |= X86_EFLAGS_IF; in emulate_popf()
1921 change_mask |= (X86_EFLAGS_IOPL | X86_EFLAGS_IF); in emulate_popf()
2062 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC); in __emulate_int_real()
2130 X86_EFLAGS_IF | X86_EFLAGS_DF | X86_EFLAGS_OF | in emulate_iret_real()
2809 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_syscall()
2847 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_sysenter()
3948 ctxt->eflags &= ~X86_EFLAGS_IF; in em_cli()
3958 ctxt->eflags |= X86_EFLAGS_IF; in em_sti()
[all …]
3498 if (kvm_get_rflags(&svm->vcpu) & X86_EFLAGS_IF) in enter_svm_guest_mode()
5411 ret = !!(kvm_get_rflags(vcpu) & X86_EFLAGS_IF); in svm_interrupt_allowed()
6774 if (unlikely((ctxt->eflags & ~rflags) & X86_EFLAGS_IF)) in x86_emulate_instruction()
7464 kvm_run->if_flag = (kvm_get_rflags(vcpu) & X86_EFLAGS_IF) != 0; in post_kvm_run_save()
1694 X86_EFLAGS_TF|X86_EFLAGS_DF|X86_EFLAGS_IF| in syscall_init()
4495 vmcs_readl(GUEST_RFLAGS) & X86_EFLAGS_IF) && in vmx_interrupt_allowed()
4592 .flags = X86_EFLAGS_IF, in kvm_machine_check()
3278 (vmcs12->guest_rflags & X86_EFLAGS_IF))) { in nested_vmx_run()
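Note: nearly every hit above reduces to the same two patterns — defining X86_EFLAGS_IF from its bit index, and masking a saved flags image with it to decide whether interrupts were enabled at that point (as in arch_irqs_disabled_flags() and cond_local_irq_enable()). The snippet below is a minimal userspace sketch of that pattern, not kernel code; it only assumes the standard x86 EFLAGS layout, where IF is bit 9 (0x200), and the helper name irqs_disabled_flags() is illustrative.

    /* Illustrative sketch only -- mirrors the pattern seen in the hits above. */
    #include <stdbool.h>
    #include <stdio.h>

    #define X86_EFLAGS_IF_BIT 9                           /* Interrupt Flag bit index */
    #define X86_EFLAGS_IF     (1UL << X86_EFLAGS_IF_BIT)  /* 0x200 */

    /* Same shape as arch_irqs_disabled_flags(): IF clear means IRQs are off. */
    static bool irqs_disabled_flags(unsigned long flags)
    {
            return !(flags & X86_EFLAGS_IF);
    }

    int main(void)
    {
            unsigned long saved_flags = 0x202;  /* IF set; bit 1 is the always-set reserved bit */

            printf("IF is %s in 0x%lx\n",
                   irqs_disabled_flags(saved_flags) ? "clear" : "set", saved_flags);
            return 0;
    }

The call sites that set rather than test the flag (start_thread(), em_sti(), the entry-code orl fixups) follow the complementary pattern: OR X86_EFLAGS_IF into a flags image so the code returning to that context runs with interrupts enabled.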