Searched refs:vcpu_is_preempted (Results 1 – 16 of 16) sorted by relevance
40 #define vcpu_is_preempted vcpu_is_preempted macro
41 static inline bool vcpu_is_preempted(long cpu) in vcpu_is_preempted() function
317 struct paravirt_callee_save vcpu_is_preempted; member
699 return PVOP_CALLEE1(bool, pv_lock_ops.vcpu_is_preempted, cpu); in pv_vcpu_is_preempted()
32 return pv_lock_ops.vcpu_is_preempted.func == in pv_is_native_vcpu_is_preempted()
42 .vcpu_is_preempted = PV_CALLEE_SAVE(__native_vcpu_is_preempted),
15 DEF_NATIVE(pv_lock_ops, vcpu_is_preempted, "xor %eax, %eax");
62 case PARAVIRT_PATCH(pv_lock_ops.vcpu_is_preempted): in native_patch()
23 DEF_NATIVE(pv_lock_ops, vcpu_is_preempted, "xor %eax, %eax");
72 case PARAVIRT_PATCH(pv_lock_ops.vcpu_is_preempted): in native_patch()
859 pv_lock_ops.vcpu_is_preempted = in kvm_spinlock_init()
56 #define vcpu_is_preempted vcpu_is_preempted macro
57 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
148 pv_lock_ops.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen); in xen_init_spinlocks()
29 #define vcpu_is_preempted arch_vcpu_is_preempted macro
143 if (need_resched() || vcpu_is_preempted(node_cpu(node->prev))) in osq_lock()
274 return READ_ONCE(prev->state) != vcpu_running || vcpu_is_preempted(prev->cpu); in pv_wait_early()
542 vcpu_is_preempted(task_cpu(owner))) { in mutex_spin_on_owner()
578 retval = owner->on_cpu && !vcpu_is_preempted(task_cpu(owner)); in mutex_can_spin_on_owner()
356 return owner->on_cpu && !vcpu_is_preempted(task_cpu(owner)); in owner_on_cpu()
1759 #ifndef vcpu_is_preempted
1760 # define vcpu_is_preempted(cpu) false macro
4018 if (vcpu_is_preempted(cpu)) in available_idle_cpu()