/* SPDX-License-Identifier: GPL-2.0 */
/*
 * include/linux/spinlock.h - generic spinlock/rwlock declarations
 *
 * The full header comment here walks the include hierarchy: which headers
 * provide the spinlock types, the arch_spin_*() low-level implementations
 * and the _spin_*() API prototypes on SMP, UP-debug and UP-non-debug
 * builds. On UP builds the generic spinlock type is an empty structure on
 * non-debug builds, and the arch_spin_*() operations are NOPs on
 * non-debug, non-preempt builds.
 */
#define LOCK_SECTION_NAME ".text..lock."KBUILD_BASENAME

/*
 * Pull the arch_spin*() functions/declarations (UP-nondebug doesn't need them):
 */
#ifdef CONFIG_SMP
# include <asm/spinlock.h>
#else
# include <linux/spinlock_up.h>
#endif

#ifdef CONFIG_DEBUG_SPINLOCK
extern void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name,
				 struct lock_class_key *key, short inner);

# define raw_spin_lock_init(lock)					\
do {									\
	static struct lock_class_key __key;				\
									\
	__raw_spin_lock_init((lock), #lock, &__key, LD_WAIT_SPIN);	\
} while (0)
#else
# define raw_spin_lock_init(lock)				\
	do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
#endif
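
/*
 * Usage sketch (illustrative, not part of this header): a raw_spinlock_t
 * can be initialized statically with DEFINE_RAW_SPINLOCK() or at runtime
 * with raw_spin_lock_init(). The structure and function names below are
 * assumptions made for the example.
 */
#if 0	/* example only */
static DEFINE_RAW_SPINLOCK(example_static_lock);

struct example_dev {
	raw_spinlock_t lock;
};

static void example_dev_setup(struct example_dev *dev)
{
	raw_spin_lock_init(&dev->lock);	/* names the lock class for lockdep */
}
#endif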
#define raw_spin_is_locked(lock)	arch_spin_is_locked(&(lock)->raw_lock)

#ifdef arch_spin_is_contended
#define raw_spin_is_contended(lock)	arch_spin_is_contended(&(lock)->raw_lock)
#else
#define raw_spin_is_contended(lock)	(((void)(lock), 0))
#endif
/*
 * smp_mb__after_spinlock() provides the equivalent of a full memory barrier
 * between program-order earlier lock acquisitions and program-order later
 * memory accesses; this upgrades the lock to an RCsc lock.
 *
 * Since most load-store architectures implement ACQUIRE with an smp_mb() after
 * the LL/SC loop, they need no further barriers.
 */
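
/*
 * Usage sketch (illustrative, not part of this header): pairing a lock
 * acquisition with smp_mb__after_spinlock() to get full-barrier ordering
 * against a later plain load. All names below are assumptions made for
 * the example.
 */
#if 0	/* example only */
struct example_waitq {
	raw_spinlock_t lock;
	int flag;
};

static int example_check(struct example_waitq *wq)
{
	int seen;

	raw_spin_lock(&wq->lock);
	/* Upgrade the ACQUIRE of the lock above to a full barrier so the
	 * load below is ordered against accesses before the lock. */
	smp_mb__after_spinlock();
	seen = READ_ONCE(wq->flag);
	raw_spin_unlock(&wq->lock);
	return seen;
}
#endif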
#ifdef CONFIG_DEBUG_SPINLOCK
extern void do_raw_spin_lock(raw_spinlock_t *lock) __acquires(lock);
extern int do_raw_spin_trylock(raw_spinlock_t *lock);
extern void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock);
#else
static inline void do_raw_spin_lock(raw_spinlock_t *lock) __acquires(lock)
{
	__acquire(lock);
	arch_spin_lock(&lock->raw_lock);
}

static inline int do_raw_spin_trylock(raw_spinlock_t *lock)
{
	int ret = arch_spin_trylock(&(lock)->raw_lock);

	return ret;
}

static inline void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock)
{
	arch_spin_unlock(&lock->raw_lock);
	__release(lock);
}
#endif
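
/*
 * Layering sketch (an assumption about the non-debug SMP expansion, for
 * orientation only):
 *
 *   spin_lock(&s)
 *     -> raw_spin_lock(&s->rlock)          (this header)
 *       -> _raw_spin_lock(lock)            (spinlock_api_smp.h)
 *         -> preempt_disable();
 *            do_raw_spin_lock(lock)        (above)
 *              -> arch_spin_lock(&lock->raw_lock)
 */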
#define raw_spin_trylock(lock)	__cond_lock(lock, _raw_spin_trylock(lock))

#define raw_spin_lock(lock)	_raw_spin_lock(lock)

#ifdef CONFIG_DEBUG_LOCK_ALLOC
# define raw_spin_lock_nested(lock, subclass) \
	_raw_spin_lock_nested(lock, subclass)

# define raw_spin_lock_nest_lock(lock, nest_lock)			\
	 do {								\
		 typecheck(struct lockdep_map *, &(nest_lock)->dep_map);\
		 _raw_spin_lock_nest_lock(lock, &(nest_lock)->dep_map);	\
	 } while (0)
#else
/*
 * Always evaluate the 'subclass' argument to avoid that the compiler
 * warns about set-but-not-used variables when building with
 * CONFIG_DEBUG_SPINLOCK=n and with W=1.
 */
# define raw_spin_lock_nested(lock, subclass)		\
	_raw_spin_lock(((void)(subclass), (lock)))
# define raw_spin_lock_nest_lock(lock, nest_lock)	_raw_spin_lock(lock)
#endif
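
/*
 * Usage sketch (illustrative): raw_spin_lock_nested() tells lockdep that
 * taking two locks of the same class in a fixed order is intentional.
 * The double-lock helper below is an assumption made for the example.
 */
#if 0	/* example only */
static void example_double_lock(raw_spinlock_t *a, raw_spinlock_t *b)
{
	/* Impose a stable order (here: by address) to avoid ABBA deadlock. */
	if (a > b)
		swap(a, b);
	raw_spin_lock(a);
	raw_spin_lock_nested(b, SINGLE_DEPTH_NESTING);
	/* ... critical section covering both objects ... */
	raw_spin_unlock(b);
	raw_spin_unlock(a);
}
#endif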
#if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)

#define raw_spin_lock_irqsave(lock, flags)			\
	do {							\
		typecheck(unsigned long, flags);		\
		flags = _raw_spin_lock_irqsave(lock);		\
	} while (0)

#ifdef CONFIG_DEBUG_LOCK_ALLOC
#define raw_spin_lock_irqsave_nested(lock, flags, subclass)		\
	do { flags = _raw_spin_lock_irqsave_nested(lock, subclass); } while (0)
#else
#define raw_spin_lock_irqsave_nested(lock, flags, subclass)		\
	do { flags = _raw_spin_lock_irqsave(lock); } while (0)
#endif

#else /* !CONFIG_SMP && !CONFIG_DEBUG_SPINLOCK */

#define raw_spin_lock_irqsave(lock, flags)		\
	do { _raw_spin_lock_irqsave(lock, flags); } while (0)

#define raw_spin_lock_irqsave_nested(lock, flags, subclass)	\
	raw_spin_lock_irqsave(lock, flags)

#endif
#define raw_spin_lock_irq(lock)		_raw_spin_lock_irq(lock)
#define raw_spin_lock_bh(lock)		_raw_spin_lock_bh(lock)
#define raw_spin_unlock(lock)		_raw_spin_unlock(lock)
#define raw_spin_unlock_irq(lock)	_raw_spin_unlock_irq(lock)

#define raw_spin_unlock_irqrestore(lock, flags)		\
	do {						\
		typecheck(unsigned long, flags);	\
		_raw_spin_unlock_irqrestore(lock, flags); \
	} while (0)
#define raw_spin_unlock_bh(lock)	_raw_spin_unlock_bh(lock)

#define raw_spin_trylock_bh(lock) \
	__cond_lock(lock, _raw_spin_trylock_bh(lock))

#define raw_spin_trylock_irq(lock) \
({ \
	local_irq_disable(); \
	raw_spin_trylock(lock) ? \
	1 : ({ local_irq_enable(); 0; }); \
})

#define raw_spin_trylock_irqsave(lock, flags) \
({ \
	local_irq_save(flags); \
	raw_spin_trylock(lock) ? \
	1 : ({ local_irq_restore(flags); 0; }); \
})
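
/*
 * Usage sketch (illustrative): the trylock variants return 1 on success
 * and 0 on failure, restoring the IRQ state themselves when they fail.
 * The helper below is an assumption made for the example.
 */
#if 0	/* example only */
static bool example_try_update(raw_spinlock_t *lock, int *counter)
{
	unsigned long flags;

	if (!raw_spin_trylock_irqsave(lock, flags))
		return false;	/* contended; IRQ state already restored */
	(*counter)++;
	raw_spin_unlock_irqrestore(lock, flags);
	return true;
}
#endif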
static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
{
	return &lock->rlock;
}

#ifdef CONFIG_DEBUG_SPINLOCK
# define spin_lock_init(lock)					\
do {								\
	static struct lock_class_key __key;			\
								\
	__raw_spin_lock_init(spinlock_check(lock),		\
			     #lock, &__key, LD_WAIT_CONFIG);	\
} while (0)
#endif
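
/*
 * Usage sketch (illustrative): runtime initialization of an embedded
 * spinlock_t; spinlock_check() above lets the spin_*() wrappers accept
 * only spinlock_t while reusing the raw_spin_*() implementations. The
 * structure below is an assumption made for the example.
 */
#if 0	/* example only */
struct example_counter {
	spinlock_t lock;
	u64 value;
};

static void example_counter_init(struct example_counter *c)
{
	spin_lock_init(&c->lock);
	c->value = 0;
}
#endif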
static __always_inline void spin_lock(spinlock_t *lock)
{
	raw_spin_lock(&lock->rlock);
}

static __always_inline void spin_lock_bh(spinlock_t *lock)
{
	raw_spin_lock_bh(&lock->rlock);
}

static __always_inline int spin_trylock(spinlock_t *lock)
{
	return raw_spin_trylock(&lock->rlock);
}
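
/*
 * Usage sketch (illustrative): spin_trylock() as an opportunistic fast
 * path that backs off instead of spinning. The helper below is an
 * assumption made for the example.
 */
#if 0	/* example only */
static bool example_try_reset(spinlock_t *lock, u64 *value)
{
	if (!spin_trylock(lock))
		return false;	/* contended: caller retries later */
	*value = 0;
	spin_unlock(lock);
	return true;
}
#endif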
#define spin_lock_nested(lock, subclass)			\
do {								\
	raw_spin_lock_nested(spinlock_check(lock), subclass);	\
} while (0)

#define spin_lock_nest_lock(lock, nest_lock)				\
do {									\
	raw_spin_lock_nest_lock(spinlock_check(lock), nest_lock);	\
} while (0)

static __always_inline void spin_lock_irq(spinlock_t *lock)
{
	raw_spin_lock_irq(&lock->rlock);
}

#define spin_lock_irqsave(lock, flags)				\
do {								\
	raw_spin_lock_irqsave(spinlock_check(lock), flags);	\
} while (0)

#define spin_lock_irqsave_nested(lock, flags, subclass)			\
do {									\
	raw_spin_lock_irqsave_nested(spinlock_check(lock), flags, subclass); \
} while (0)
static __always_inline void spin_unlock(spinlock_t *lock)
{
	raw_spin_unlock(&lock->rlock);
}

static __always_inline void spin_unlock_bh(spinlock_t *lock)
{
	raw_spin_unlock_bh(&lock->rlock);
}

static __always_inline void spin_unlock_irq(spinlock_t *lock)
{
	raw_spin_unlock_irq(&lock->rlock);
}

static __always_inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
{
	raw_spin_unlock_irqrestore(&lock->rlock, flags);
}
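
/*
 * Usage sketch (illustrative): the canonical pairing of
 * spin_lock_irqsave()/spin_unlock_irqrestore() when the lock is also
 * taken from hard-IRQ context. Note that flags is a plain unsigned long,
 * not a pointer; the macro assigns to it. The enqueue helper is an
 * assumption made for the example.
 */
#if 0	/* example only */
static void example_enqueue(spinlock_t *lock, struct list_head *q,
			    struct list_head *item)
{
	unsigned long flags;

	spin_lock_irqsave(lock, flags);
	list_add_tail(item, q);
	spin_unlock_irqrestore(lock, flags);
}
#endif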
static __always_inline int spin_trylock_bh(spinlock_t *lock)
{
	return raw_spin_trylock_bh(&lock->rlock);
}

static __always_inline int spin_trylock_irq(spinlock_t *lock)
{
	return raw_spin_trylock_irq(&lock->rlock);
}

#define spin_trylock_irqsave(lock, flags)			\
({								\
	raw_spin_trylock_irqsave(spinlock_check(lock), flags); \
})
/**
 * spin_is_locked() - Check whether a spinlock is locked.
 * @lock: Pointer to the spinlock.
 *
 * This function is NOT required to provide any memory ordering
 * guarantees; it could be used for debugging purposes or, when
 * additional synchronization is needed, accompanied with other
 * constructs (memory barriers) enforcing the synchronization.
 *
 * Returns: 1 if @lock is locked, 0 otherwise.
 */
static __always_inline int spin_is_locked(spinlock_t *lock)
{
	return raw_spin_is_locked(&lock->rlock);
}

static __always_inline int spin_is_contended(spinlock_t *lock)
{
	return raw_spin_is_contended(&lock->rlock);
}

#define assert_spin_locked(lock)	assert_raw_spin_locked(&(lock)->rlock)
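
/*
 * Usage sketch (illustrative): spin_is_locked() provides no ordering and
 * is unsuitable for synchronization decisions; assert_spin_locked() is
 * the usual way to document a locking precondition. The helper below is
 * an assumption made for the example.
 */
#if 0	/* example only */
static void example_update_locked(spinlock_t *lock, u64 *value)
{
	assert_spin_locked(lock);	/* caller must hold the lock */
	(*value)++;
}
#endif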
/*
 * Pull the atomic_t declaration:
 * (asm-mips/atomic.h needs above definitions)
 */
#include <linux/atomic.h>
/**
 * atomic_dec_and_lock - lock on reaching reference count zero
 * @atomic: the atomic counter
 * @lock: the spinlock in question
 *
 * Decrements @atomic by 1.  If the result is 0, returns true and locks
 * @lock.  Returns false for all other cases.
 */
extern int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock);
#define atomic_dec_and_lock(atomic, lock) \
		__cond_lock(lock, _atomic_dec_and_lock(atomic, lock))

extern int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock,
					unsigned long *flags);
#define atomic_dec_and_lock_irqsave(atomic, lock, flags) \
		__cond_lock(lock, _atomic_dec_and_lock_irqsave(atomic, lock, &(flags)))
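
/*
 * Usage sketch (illustrative): the classic refcount-release pattern.
 * atomic_dec_and_lock() takes the lock only on the final put, so the
 * unlink and teardown run while holding the lock that protects lookups.
 * The structure and kfree() teardown below are assumptions made for the
 * example.
 */
#if 0	/* example only */
struct example_obj {
	atomic_t refcount;
	spinlock_t *list_lock;	/* protects the list the object lives on */
	struct list_head node;
};

static void example_put(struct example_obj *obj)
{
	if (!atomic_dec_and_lock(&obj->refcount, obj->list_lock))
		return;		/* not the last reference */
	list_del(&obj->node);	/* unlink under the lock */
	spin_unlock(obj->list_lock);
	kfree(obj);
}
#endif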