Searched refs: _Q_LOCKED_VAL (Results 1 – 5 of 5) sorted by relevance
/Linux-v4.19/kernel/locking/qspinlock_paravirt.h
     93  (cmpxchg_acquire(&lock->locked, 0, _Q_LOCKED_VAL) == 0)) {  in pv_hybrid_queued_unfair_trylock()
    125  _Q_LOCKED_VAL) == _Q_PENDING_VAL);  in trylock_clear_pending()
    147  new = (val & ~_Q_PENDING_MASK) | _Q_LOCKED_VAL;  in trylock_clear_pending()
    463  WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);  in pv_wait_head_or_lock()
    486  return (u32)(atomic_read(&lock->val) | _Q_LOCKED_VAL);  in pv_wait_head_or_lock()
    557  locked = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0);  in __pv_queued_spin_unlock()
    558  if (likely(locked == _Q_LOCKED_VAL))  in __pv_queued_spin_unlock()

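The qspinlock_paravirt.h hits cover the paravirt slowpath: an unfair trylock on the locked byte (line 93), clearing the pending bit while taking the lock (125, 147), the queue head claiming the lock or reporting it still taken (463, 486), and __pv_queued_spin_unlock() releasing the locked byte with a release cmpxchg so a halted waiter can be detected and kicked (557, 558). Below is a minimal userspace model of that unlock-side pattern using C11 atomics; the struct, function names, and the slowpath stub are illustrative, not the kernel code.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define Q_LOCKED_VAL 1U   /* mirrors _Q_LOCKED_VAL: bit 0 of the lock word */

struct pv_lock_model { _Atomic uint8_t locked; };

/* Stand-in for the kernel's unlock slowpath: it clears the locked byte
 * with a release store and then kicks (wakes) the halted waiter vCPU. */
static void unlock_slowpath_stub(struct pv_lock_model *lock)
{
	atomic_store_explicit(&lock->locked, 0, memory_order_release);
	puts("slow path: kick the halted waiter");
}

/* Model of the __pv_queued_spin_unlock() pattern: release the locked
 * byte with a cmpxchg from Q_LOCKED_VAL to 0.  If the byte holds some
 * other value, a waiter has flagged the lock for the slow unlock path,
 * so the owner has to go wake it instead of just storing 0. */
static void pv_unlock_model(struct pv_lock_model *lock)
{
	uint8_t expected = Q_LOCKED_VAL;

	if (atomic_compare_exchange_strong_explicit(&lock->locked, &expected,
			0, memory_order_release, memory_order_relaxed))
		return;                 /* fast path: nobody is halted */

	unlock_slowpath_stub(lock);
}

int main(void)
{
	struct pv_lock_model lock = { Q_LOCKED_VAL };

	pv_unlock_model(&lock);         /* takes the fast path here */
	return 0;
}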
/Linux-v4.19/kernel/locking/qspinlock.c
    156  WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);  in clear_pending_set_locked()
    200  atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val);  in clear_pending_set_locked()
    242  WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);  in set_locked()
    485  atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL))  in queued_spin_lock_slowpath()

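The qspinlock.c hits are the core slowpath transitions: clear_pending_set_locked() moves the lock word from "pending set, lock free" to "pending clear, locked", either with a byte store (line 156) or, when the pending field is only a single bit, with one atomic add (line 200); set_locked() writes the locked byte once the MCS queue head owns the lock (242); and line 485 lets the queue head take the lock and clear its own tail in one cmpxchg when nobody queued behind it. A small userspace model of the atomic-add variant follows, with C11 atomics and illustrative names; only the constants mirror the kernel ones.

#include <stdatomic.h>
#include <stdint.h>
#include <assert.h>

#define Q_LOCKED_VAL  (1U << 0)   /* mirrors _Q_LOCKED_VAL  */
#define Q_PENDING_VAL (1U << 8)   /* mirrors _Q_PENDING_VAL */

/* Model of the atomic-add form of clear_pending_set_locked(): adding
 * (-PENDING + LOCKED) clears bit 8 and sets bit 0 in one operation,
 * leaving the tail bits (16 and up) untouched. */
static void clear_pending_set_locked_model(_Atomic uint32_t *val)
{
	atomic_fetch_add_explicit(val, -Q_PENDING_VAL + Q_LOCKED_VAL,
				  memory_order_relaxed);
}

int main(void)
{
	/* Some tail encoded above bit 16, pending set, lock free. */
	_Atomic uint32_t val = (4U << 16) | Q_PENDING_VAL;

	clear_pending_set_locked_model(&val);

	/* Tail preserved, pending cleared, locked byte now set. */
	assert(atomic_load(&val) == ((4U << 16) | Q_LOCKED_VAL));
	return 0;
}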
/Linux-v4.19/include/asm-generic/qspinlock.h
     70  (atomic_cmpxchg_acquire(&lock->val, 0, _Q_LOCKED_VAL) == 0))  in queued_spin_trylock()
     85  val = atomic_cmpxchg_acquire(&lock->val, 0, _Q_LOCKED_VAL);  in queued_spin_lock()

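These two hits are the generic fast path: queued_spin_trylock() and queued_spin_lock() both attempt a single acquire cmpxchg of the whole 32-bit word from 0 (no owner, no pending bit, empty queue tail) to _Q_LOCKED_VAL, and only a non-zero result sends queued_spin_lock_slowpath() after it. A compilable userspace model of that fast path with C11 atomics (names and the model struct are illustrative, not the kernel API):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <assert.h>

#define Q_LOCKED_VAL (1U << 0)   /* mirrors _Q_LOCKED_VAL */

struct qspinlock_model { _Atomic uint32_t val; };

/* Model of queued_spin_trylock(): only bother with the cmpxchg if the
 * word currently reads 0, then try to install the locked byte with
 * acquire ordering.  Returns true when the lock was taken. */
static bool trylock_model(struct qspinlock_model *lock)
{
	uint32_t expected = 0;

	if (atomic_load_explicit(&lock->val, memory_order_relaxed) != 0)
		return false;
	return atomic_compare_exchange_strong_explicit(&lock->val, &expected,
			Q_LOCKED_VAL, memory_order_acquire, memory_order_relaxed);
}

/* Model of the queued_spin_lock() fast path: a single acquire cmpxchg
 * from 0 to Q_LOCKED_VAL.  On failure the kernel would call
 * queued_spin_lock_slowpath() with the observed value; that part is
 * out of scope for this sketch. */
static bool lock_fastpath_model(struct qspinlock_model *lock)
{
	uint32_t expected = 0;

	return atomic_compare_exchange_strong_explicit(&lock->val, &expected,
			Q_LOCKED_VAL, memory_order_acquire, memory_order_relaxed);
}

int main(void)
{
	struct qspinlock_model lock = { 0 };
	bool taken = lock_fastpath_model(&lock);  /* uncontended: succeeds */
	bool again = trylock_model(&lock);        /* already held: fails   */

	assert(taken && !again);
	return 0;
}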
/Linux-v4.19/include/asm-generic/qspinlock_types.h
    109  #define _Q_LOCKED_VAL (1U << _Q_LOCKED_OFFSET)  (macro definition)

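The macro itself is defined in qspinlock_types.h as bit 0 of the lock word. For context, here is a sketch of the v4.19 field layout that the neighbouring *_OFFSET/*_BITS constants encode; only the _Q_LOCKED_VAL line is confirmed by the hit above, the rest is reconstructed from memory of that header and should be checked against the file.

/*
 * Sketch of the v4.19 qspinlock word layout (32 bits):
 *
 *   bits  0- 7: locked byte
 *   bits  8-15: pending    (8 bits when NR_CPUS < 16K, otherwise 1 bit)
 *   bits 16-17: tail index (which per-CPU MCS node is queued)
 *   bits 18-31: tail CPU   (CPU number of the queue tail, plus 1)
 */
#define _Q_LOCKED_OFFSET	0
#define _Q_LOCKED_BITS		8

#define _Q_PENDING_OFFSET	(_Q_LOCKED_OFFSET + _Q_LOCKED_BITS)
#define _Q_PENDING_BITS		8	/* assumes NR_CPUS < 16K */

#define _Q_TAIL_IDX_OFFSET	(_Q_PENDING_OFFSET + _Q_PENDING_BITS)
#define _Q_TAIL_IDX_BITS	2

#define _Q_TAIL_CPU_OFFSET	(_Q_TAIL_IDX_OFFSET + _Q_TAIL_IDX_BITS)

#define _Q_LOCKED_VAL		(1U << _Q_LOCKED_OFFSET)	/* the hit at line 109 */
#define _Q_PENDING_VAL		(1U << _Q_PENDING_OFFSET)

_Static_assert(_Q_LOCKED_VAL == 0x001, "locked is bit 0");
_Static_assert(_Q_PENDING_VAL == 0x100, "pending starts at bit 8");
_Static_assert(_Q_TAIL_CPU_OFFSET == 18, "tail CPU starts at bit 18");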
/Linux-v4.19/arch/x86/include/asm/qspinlock.h
     67  } while (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) != 0);  in virt_spin_lock()

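The single x86 hit is virt_spin_lock(), the fallback taken when running as a guest without paravirtualized queued spinlocks: rather than queueing (a preempted vCPU at the head of a fair queue would stall everyone behind it), it simply spins until a full-barrier cmpxchg from 0 to _Q_LOCKED_VAL succeeds. A userspace model of that loop with C11 atomics (illustrative names; the kernel version is additionally gated on a static key):

#include <stdatomic.h>
#include <stdint.h>

#define Q_LOCKED_VAL (1U << 0)   /* mirrors _Q_LOCKED_VAL */

/* Model of the virt_spin_lock() loop: spin read-only while the word is
 * non-zero, then try to claim the whole word with a cmpxchg from 0 to
 * Q_LOCKED_VAL; repeat until the cmpxchg wins.  (The kernel uses a
 * full-barrier atomic_cmpxchg() here; seq_cst plays that role below.) */
static void virt_spin_lock_model(_Atomic uint32_t *val)
{
	uint32_t expected;

	do {
		while (atomic_load_explicit(val, memory_order_relaxed))
			;	/* the kernel would cpu_relax() here */
		expected = 0;
	} while (!atomic_compare_exchange_strong(val, &expected, Q_LOCKED_VAL));
}

int main(void)
{
	_Atomic uint32_t lock = 0;

	virt_spin_lock_model(&lock);	/* uncontended: succeeds immediately */
	return 0;
}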