/Linux-v4.19/include/linux/
    spinlock_up.h:27   #define arch_spin_is_locked(x) ((x)->slock == 0)  [macro]
    spinlock_up.h:62   #define arch_spin_is_locked(lock) ((void)(lock), 0)  [macro]
    spinlock.h:108     #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
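
The include/linux/ hits above are the architecture-independent layer of this identifier search for arch_spin_is_locked in Linux v4.19: raw_spin_is_locked() simply forwards to whichever arch_spin_is_locked() the architecture provides, and the two spinlock_up.h variants cover uniprocessor builds with and without spinlock debugging. Below is a minimal userspace sketch of that layering; the types are simplified stand-ins, and the spin_is_locked() wrapper at the top is assumed from context rather than taken from the hits.

    /* Build with: cc -o layering layering.c
     * Simplified stand-ins for the kernel lock types; only the layering of
     * the "is it locked?" query is mirrored here. */
    #include <stdio.h>

    typedef struct { volatile unsigned int slock; } arch_spinlock_t;
    typedef struct { arch_spinlock_t raw_lock; } raw_spinlock_t;
    typedef struct { raw_spinlock_t rlock; } spinlock_t;

    /* UP debug flavour (spinlock_up.h:27): the word is 1 when free, 0 when held. */
    #define arch_spin_is_locked(x)   ((x)->slock == 0)

    /* Generic wrapper (spinlock.h:108): forward to the architecture hook. */
    #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)

    /* Top-level wrapper (assumed for illustration, not part of the hits). */
    #define spin_is_locked(lock)     raw_spin_is_locked(&(lock)->rlock)

    int main(void)
    {
        spinlock_t lock = { .rlock = { .raw_lock = { .slock = 1 } } }; /* free */

        printf("locked? %d\n", spin_is_locked(&lock));  /* prints 0 */
        lock.rlock.raw_lock.slock = 0;                  /* pretend it was taken */
        printf("locked? %d\n", spin_is_locked(&lock));  /* prints 1 */
        return 0;
    }

The per-architecture entries that follow all answer the same question and differ only in how a held lock is represented: a lock word compared against an unlocked value (alpha, hexagon, xtensa, arc, sh), a forward to the generic queued-spinlock helper (asm-generic), a byte read through a volatile pointer (sparc32), or an inline function whose body is not shown in the hits (arm, powerpc, s390, ia64, parisc, riscv's function-style neighbours aside).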

/Linux-v4.19/arch/riscv/include/asm/
    spinlock.h:28      #define arch_spin_is_locked(x) (READ_ONCE((x)->lock) != 0)  [macro]
    spinlock.h:52      if (arch_spin_is_locked(lock))  [in arch_spin_lock()]

/Linux-v4.19/arch/parisc/include/asm/
    spinlock.h:10      static inline int arch_spin_is_locked(arch_spinlock_t *x)  [function]
    spinlock.h:111     while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0)  [in arch_read_trylock()]

/Linux-v4.19/tools/include/linux/
    spinlock.h:29      static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)  [function]

/Linux-v4.19/arch/hexagon/include/asm/
    spinlock.h:170     #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]

/Linux-v4.19/arch/alpha/include/asm/
    spinlock.h:17      #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]

/Linux-v4.19/include/asm-generic/
    qspinlock.h:116    #define arch_spin_is_locked(l) queued_spin_is_locked(l)  [macro]

/Linux-v4.19/arch/sh/include/asm/
    spinlock-cas.h:29   #define arch_spin_is_locked(x) ((x)->lock <= 0)  [macro]
    spinlock-llsc.h:21  #define arch_spin_is_locked(x) ((x)->lock <= 0)  [macro]

/Linux-v4.19/arch/xtensa/include/asm/
    spinlock.h:34      #define arch_spin_is_locked(x) ((x)->slock != 0)  [macro]

/Linux-v4.19/arch/sparc/include/asm/
    spinlock_32.h:16   #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)  [macro]

/Linux-v4.19/arch/s390/include/asm/
    spinlock.h:57      static inline int arch_spin_is_locked(arch_spinlock_t *lp)  [function]

/Linux-v4.19/arch/arm/include/asm/
    spinlock.h:119     static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]

/Linux-v4.19/arch/powerpc/include/asm/
    spinlock.h:70      static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]

/Linux-v4.19/arch/arc/include/asm/
    spinlock.h:16      #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)  [macro]

/Linux-v4.19/arch/ia64/include/asm/
    spinlock.h:99      static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]

/Linux-v4.19/arch/x86/kernel/
    hpet.c:809         if (arch_spin_is_locked(&old.lock))  [in read_hpet()]
    hpet.c:841         } while ((new.value == old.value) && arch_spin_is_locked(&new.lock));  [in read_hpet()]
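
The hpet.c hits are a consumer rather than an implementation: the two lines come from a path in read_hpet() that checks arch_spin_is_locked() to detect that another CPU currently holds the lock and is refreshing a shared counter snapshot, and in that case spins on the cached value instead of touching the hardware again. Below is a simplified, single-file sketch of that retry pattern; the union layout, field names and helpers are illustrative stand-ins, not the real read_hpet().

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in lock word: non-zero means held (alpha/hexagon style above). */
    typedef struct { volatile unsigned int lock; } arch_spinlock_t;
    #define arch_spin_is_locked(x)  ((x)->lock != 0)

    /* Shared snapshot: the lock word and the last value published by the
     * lock holder, overlaid on one 64-bit word so both are read together. */
    union counter_snapshot {
        struct {
            arch_spinlock_t lock;
            uint32_t value;
        };
        uint64_t lockval;
    };

    static union counter_snapshot cache;      /* written by the lock holder */

    static uint32_t read_counter_hw(void)     /* placeholder hardware read */
    {
        return 42;
    }

    static uint32_t read_counter(void)
    {
        union counter_snapshot old, new;

        old.lockval = cache.lockval;          /* snapshot lock + value together */
        if (!arch_spin_is_locked(&old.lock))
            return read_counter_hw();         /* nobody updating: read hardware */

        /* An updater holds the lock: wait until it publishes a new value or
         * releases the lock, then reuse the cached value it wrote. */
        do {
            new.lockval = cache.lockval;
        } while (new.value == old.value && arch_spin_is_locked(&new.lock));

        return new.value;
    }

    int main(void)
    {
        printf("%u\n", read_counter());       /* lock free here, so 42 */
        return 0;
    }

The point of the pattern is that readers never block on the lock: they either see it free and read the hardware themselves, or reuse the value the current lock holder is about to publish.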

/Linux-v4.19/kernel/locking/
    lockdep.c:105      if (debug_locks && !arch_spin_is_locked(&lockdep_lock)) {  [in graph_unlock()]
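
The lockdep.c hit shows the other common consumer: a debug assertion that the raw spinlock protecting shared state is actually held before the code releases it. A minimal sketch of that check follows; the lock helpers and names are simplified stand-ins, not lockdep's real graph_unlock() or lockdep_lock.

    #include <stdio.h>

    typedef struct { volatile unsigned int lock; } arch_spinlock_t;
    #define arch_spin_is_locked(x)  ((x)->lock != 0)
    #define arch_spin_lock(x)       ((x)->lock = 1)   /* no contention in this sketch */
    #define arch_spin_unlock(x)     ((x)->lock = 0)

    static arch_spinlock_t graph_lock;   /* stand-in for lockdep's lockdep_lock */
    static int debug_checks = 1;         /* stand-in for the debug_locks flag */

    /* Mirrors the graph_unlock() idea: warn and bail out rather than
     * releasing a lock the caller does not actually hold. */
    static int graph_unlock_checked(void)
    {
        if (debug_checks && !arch_spin_is_locked(&graph_lock)) {
            fprintf(stderr, "graph_unlock_checked: lock not held\n");
            return 0;
        }
        arch_spin_unlock(&graph_lock);
        return 1;
    }

    int main(void)
    {
        arch_spin_lock(&graph_lock);
        printf("first unlock:  %d\n", graph_unlock_checked());   /* 1 */
        printf("second unlock: %d\n", graph_unlock_checked());   /* 0, with warning */
        return 0;
    }

Note that one of the spinlock_up.h variants at the top of the listing hard-codes the answer to 0, so arch_spin_is_locked() cannot be relied on as a correctness check in every configuration; checks like the lockdep one are debug aids guarded by a debug flag.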