/Linux-v4.19/arch/parisc/include/asm/

  spinlock.h
       35  static inline void arch_spin_unlock(arch_spinlock_t *x)   [function definition]
       75  arch_spin_unlock(&rw->lock);   [in arch_read_lock()]
       87  arch_spin_unlock(&rw->lock);   [in arch_read_unlock()]
      100  arch_spin_unlock(&rw->lock);   [in arch_read_trylock()]
      127  arch_spin_unlock(&rw->lock);   [in arch_write_lock()]
      144  arch_spin_unlock(&rw->lock);   [in arch_write_unlock()]
      161  arch_spin_unlock(&rw->lock);   [in arch_write_trylock()]

  futex.h
       31  arch_spin_unlock(s);   [in _futex_spin_unlock_irqrestore()]
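The parisc hits are a read/write lock built on top of the per-lock arch spinlock: each primitive briefly takes the inner spinlock, adjusts a reader counter, and drops it again with arch_spin_unlock(). The following is a minimal user-space sketch of that pattern using C11 atomics, not the parisc code itself; every name below (spin_sketch_t, rwlock_sketch_t, the counter field, the helpers) is an illustrative stand-in, and the real code additionally handles IRQ flags.

/*
 * Sketch: an rwlock whose primitives are serialized by an inner spinlock
 * and a reader counter, mirroring the arch_read_lock()/arch_write_lock()
 * call sites listed above. All names are stand-ins, not parisc's.
 */
#include <stdatomic.h>

typedef struct { atomic_flag held; } spin_sketch_t;

static inline void spin_lock_sketch(spin_sketch_t *l)
{
        while (atomic_flag_test_and_set_explicit(&l->held, memory_order_acquire))
                ;                               /* spin until free */
}

static inline void spin_unlock_sketch(spin_sketch_t *l)
{
        atomic_flag_clear_explicit(&l->held, memory_order_release);
}

typedef struct {
        spin_sketch_t lock;                     /* plays the role of rw->lock */
        _Atomic int counter;                    /* number of active readers */
} rwlock_sketch_t;

static void read_lock_sketch(rwlock_sketch_t *rw)
{
        spin_lock_sketch(&rw->lock);            /* blocks while a writer holds it */
        rw->counter++;
        spin_unlock_sketch(&rw->lock);          /* cf. arch_spin_unlock(&rw->lock) */
}

static void read_unlock_sketch(rwlock_sketch_t *rw)
{
        spin_lock_sketch(&rw->lock);
        rw->counter--;
        spin_unlock_sketch(&rw->lock);
}

static void write_lock_sketch(rwlock_sketch_t *rw)
{
retry:
        spin_lock_sketch(&rw->lock);
        if (rw->counter != 0) {                 /* readers still active */
                spin_unlock_sketch(&rw->lock);  /* let them drain ... */
                while (rw->counter != 0)
                        ;
                goto retry;                     /* ... then try again */
        }
        /* Success: keep the inner lock held so new readers block. */
}

static void write_unlock_sketch(rwlock_sketch_t *rw)
{
        spin_unlock_sketch(&rw->lock);
}

Keeping the inner lock held for the whole write-side critical section is what lets write_unlock be a plain unlock; the ARC lock_mutex hits below follow the same general shape.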
/Linux-v4.19/arch/arc/include/asm/

  spinlock.h
       66  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]
      289  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]
      344  arch_spin_unlock(&(rw->lock_mutex));   [in arch_read_trylock()]
      370  arch_spin_unlock(&(rw->lock_mutex));   [in arch_write_trylock()]
      395  arch_spin_unlock(&(rw->lock_mutex));   [in arch_read_unlock()]
      406  arch_spin_unlock(&(rw->lock_mutex));   [in arch_write_unlock()]

  smp.h
      119  arch_spin_unlock(&smp_atomic_ops_lock); \
      129  arch_spin_unlock(&smp_bitops_lock); \
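The smp.h hits come from ARC configurations without LL/SC, where the atomic and bitops helpers are emulated by taking a global arch spinlock around an ordinary read-modify-write (the trailing backslashes show the calls sit inside macro bodies). Below is a small self-contained sketch of that fallback idea; the lock object and helper names are stand-ins, and the kernel versions also disable interrupts around the critical section.

/*
 * Sketch: "atomic" ops emulated with one global spinlock. Each op takes
 * the lock, performs a plain read-modify-write, and releases the lock
 * (cf. arch_spin_unlock(&smp_atomic_ops_lock) above). Names are stand-ins.
 */
#include <stdatomic.h>

static atomic_flag atomic_ops_lock_sketch = ATOMIC_FLAG_INIT;

static inline void atomic_ops_lock(void)
{
        while (atomic_flag_test_and_set_explicit(&atomic_ops_lock_sketch,
                                                 memory_order_acquire))
                ;                               /* spin until the global lock is free */
}

static inline void atomic_ops_unlock(void)
{
        atomic_flag_clear_explicit(&atomic_ops_lock_sketch, memory_order_release);
}

/* The RMW itself is ordinary C; holding the one global lock is what
 * makes it atomic with respect to every other emulated op. */
static inline void emulated_atomic_add(int i, volatile int *v)
{
        atomic_ops_lock();
        *v += i;
        atomic_ops_unlock();
}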
/Linux-v4.19/kernel/locking/

  qrwlock.c
       63  arch_spin_unlock(&lock->wait_lock);   [in queued_read_lock_slowpath()]
       90  arch_spin_unlock(&lock->wait_lock);   [in queued_write_lock_slowpath()]
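In the queued rwlock slowpaths, contended readers and writers first serialize on an internal arch-level spinlock (lock->wait_lock) and release it with arch_spin_unlock() once they have acquired the rwlock proper. The sketch below shows only the rough shape of the read slowpath under that scheme; the struct layout, bit values, and helpers are illustrative stand-ins, not the kernel's qrwlock internals.

/*
 * Rough shape of a queued-rwlock read slowpath: queue on wait_lock, grab
 * a reader reference at the head of the queue, wait for any writer to
 * finish, then hand wait_lock to the next contender. Stand-in types only.
 */
#include <stdatomic.h>

#define WRITER_MASK     0x0ffu                  /* low byte: writer holds the lock */
#define READER_BIAS     0x100u                  /* readers counted in the upper bits */

struct qrwlock_sketch {
        _Atomic unsigned int cnts;              /* reader count + writer byte */
        atomic_flag wait_lock;                  /* queue of contenders */
};

static void wait_lock_acquire(struct qrwlock_sketch *l)
{
        while (atomic_flag_test_and_set_explicit(&l->wait_lock,
                                                 memory_order_acquire))
                ;                               /* wait for our turn */
}

static void wait_lock_release(struct qrwlock_sketch *l)
{
        atomic_flag_clear_explicit(&l->wait_lock, memory_order_release);
}

static void read_lock_slowpath_sketch(struct qrwlock_sketch *l)
{
        wait_lock_acquire(l);

        /* Head of the queue: register as a reader, then wait out any
         * writer that still owns the lock word. */
        atomic_fetch_add_explicit(&l->cnts, READER_BIAS, memory_order_acquire);
        while (atomic_load_explicit(&l->cnts, memory_order_acquire) & WRITER_MASK)
                ;

        wait_lock_release(l);                   /* cf. arch_spin_unlock(&lock->wait_lock) */
}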
/Linux-v4.19/include/linux/

  spinlock_up.h
       45  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]
       66  # define arch_spin_unlock(lock) do { barrier(); (void)(lock); } while (0)   [macro definition]
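On uniprocessor builds (CONFIG_SMP=n) there is no other CPU to hand the lock to, so spinlock_up.h reduces arch_spin_unlock() to, at most, a compiler barrier; the non-debug macro form is quoted verbatim in the hit above. The snippet below just makes that shape compilable in user space; barrier() and the lock type are stand-ins for the kernel's.

/*
 * The uniprocessor "unlock" made compilable: nothing to release, so it
 * is only a compiler barrier plus a no-op use of the argument.
 * barrier() and arch_spinlock_t below are illustrative stand-ins.
 */
#include <stdio.h>

#define barrier()       __asm__ __volatile__("" : : : "memory")

typedef struct { volatile unsigned int slock; } arch_spinlock_t;

/* Same shape as the non-debug macro quoted in the spinlock_up.h hit. */
#define arch_spin_unlock(lock) do { barrier(); (void)(lock); } while (0)

int main(void)
{
        arch_spinlock_t lock = { .slock = 0 };

        arch_spin_unlock(&lock);                /* expands to just a barrier */
        printf("slock is still %u\n", lock.slock);
        return 0;
}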
/Linux-v4.19/arch/arm/common/

  mcpm_entry.c
      235  arch_spin_unlock(&mcpm_lock);   [in mcpm_cpu_power_up()]
      271  arch_spin_unlock(&mcpm_lock);   [in mcpm_cpu_power_down()]
      277  arch_spin_unlock(&mcpm_lock);   [in mcpm_cpu_power_down()]
      338  arch_spin_unlock(&mcpm_lock);   [in mcpm_cpu_suspend()]
      368  arch_spin_unlock(&mcpm_lock);   [in mcpm_cpu_powered_up()]

/Linux-v4.19/arch/powerpc/kvm/

  book3s_xive.c
      346  arch_spin_unlock(&sb->lock);   [in xive_lock_and_mask()]
      401  arch_spin_unlock(&sb->lock);   [in xive_lock_for_unmask()]
      641  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_set_xive()]
      663  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_get_xive()]
      702  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_int_on()]
      728  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_int_off()]
      910  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_set_mapped()]
      983  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_clr_mapped()]
     1020  arch_spin_unlock(&sb->lock);   [in kvmppc_xive_disable_vcpu_interrupts()]
     1244  arch_spin_unlock(&sb->lock);   [in xive_pre_save_mask_irq()]
     [all …]

  book3s_xics.c
      163  arch_spin_unlock(&ics->lock);   [in write_xive()]
      219  arch_spin_unlock(&ics->lock);   [in kvmppc_xics_get_xive()]
      476  arch_spin_unlock(&ics->lock);   [in icp_deliver_irq()]
      505  arch_spin_unlock(&ics->lock);   [in icp_deliver_irq()]
      512  arch_spin_unlock(&ics->lock);   [in icp_deliver_irq()]
     1014  arch_spin_unlock(&ics->lock);   [in xics_debug_show()]
     1242  arch_spin_unlock(&ics->lock);   [in xics_get_source()]
     1300  arch_spin_unlock(&ics->lock);   [in xics_set_source()]

/Linux-v4.19/kernel/trace/

  trace_stack.c
      195  arch_spin_unlock(&stack_trace_max_lock);   [in check_stack()]
      269  arch_spin_unlock(&stack_trace_max_lock);   [in stack_max_size_write()]
      319  arch_spin_unlock(&stack_trace_max_lock);   [in t_stop()]

  trace_clock.c
      123  arch_spin_unlock(&trace_clock_struct.lock);   [in trace_clock_global()]
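The ftrace internals in trace_stack.c and trace_clock.c take the arch-level lock directly rather than going through spin_lock_irqsave(), roughly because the higher-level spinlock wrappers are themselves instrumented (lockdep, preemption accounting, tracepoints) and would recurse back into the tracer. The sketch below shows the kind of thing trace_clock_global() protects with that raw lock; the lock type, names, and the monotonic-counter detail are simplified stand-ins, and the real code also runs with interrupts disabled.

/*
 * Sketch: serialize a global, never-going-backwards timestamp with a raw
 * spinlock, the rough job of the trace_clock_struct.lock hit above.
 * Everything here is an illustrative stand-in for the kernel code.
 */
#include <stdatomic.h>

typedef struct { atomic_flag held; } raw_lock_sketch_t;

static raw_lock_sketch_t clock_lock_sketch = { ATOMIC_FLAG_INIT };
static unsigned long long prev_time_sketch;     /* protected by clock_lock_sketch */

static void raw_lock(raw_lock_sketch_t *l)
{
        while (atomic_flag_test_and_set_explicit(&l->held, memory_order_acquire))
                ;                               /* spin */
}

static void raw_unlock(raw_lock_sketch_t *l)
{
        atomic_flag_clear_explicit(&l->held, memory_order_release);
}

/* Return a globally monotonic timestamp derived from a per-CPU clock. */
unsigned long long global_clock_sketch(unsigned long long local_ts)
{
        raw_lock(&clock_lock_sketch);           /* in the kernel: IRQs off, then arch_spin_lock() */
        if (local_ts < prev_time_sketch)
                local_ts = prev_time_sketch;    /* never let the clock run backwards */
        prev_time_sketch = local_ts;
        raw_unlock(&clock_lock_sketch);         /* cf. arch_spin_unlock(&trace_clock_struct.lock) */
        return local_ts;
}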
/Linux-v4.19/tools/include/linux/

  spinlock.h
       24  static inline void arch_spin_unlock(arch_spinlock_t *mutex)   [function definition]

/Linux-v4.19/arch/hexagon/include/asm/

  spinlock.h
      144  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

/Linux-v4.19/arch/x86/kernel/

  tsc_sync.c
      248  arch_spin_unlock(&sync_lock);   [in check_tsc_warp()]
      278  arch_spin_unlock(&sync_lock);   [in check_tsc_warp()]

/Linux-v4.19/arch/alpha/include/asm/

  spinlock.h
       24  static inline void arch_spin_unlock(arch_spinlock_t * lock)   [function definition]

/Linux-v4.19/arch/riscv/include/asm/

  spinlock.h
       30  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

/Linux-v4.19/include/asm-generic/

  qspinlock.h
      121  #define arch_spin_unlock(l) queued_spin_unlock(l)   [macro definition]
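For architectures that pull in the generic queued spinlock, the hit above is the whole story: arch_spin_unlock() is defined straight to queued_spin_unlock(), and in v4.19 the uncontended release is a single store-release of the lock's locked byte. The snippet mirrors that shape with C11 atomics; the struct below is an illustrative stand-in, not the kernel's struct qspinlock.

/*
 * Queued-spinlock release mirrored with C11 atomics: unlocking is one
 * store-release of the "locked" byte. Stand-in type, not the kernel's.
 */
#include <stdatomic.h>

typedef struct {
        _Atomic unsigned char locked;           /* 0 = free, non-zero = held */
} qspinlock_sketch_t;

static inline void queued_spin_unlock_sketch(qspinlock_sketch_t *lock)
{
        /* Release ordering: all stores made inside the critical section
         * become visible before the lock can be observed as free. */
        atomic_store_explicit(&lock->locked, 0, memory_order_release);
}

/* Mirrors the mapping quoted above: arch_spin_unlock(l) -> queued_spin_unlock(l). */
#define arch_spin_unlock_sketch(l)      queued_spin_unlock_sketch(l)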
/Linux-v4.19/arch/sh/include/asm/

  spinlock-cas.h
       36  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

  spinlock-llsc.h
       49  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

/Linux-v4.19/arch/xtensa/include/asm/

  spinlock.h
       69  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

/Linux-v4.19/arch/s390/lib/

  spinlock.c
      287  arch_spin_unlock(&rw->wait);   [in arch_read_lock_wait()]
      310  arch_spin_unlock(&rw->wait);   [in arch_write_lock_wait()]

/Linux-v4.19/arch/sparc/include/asm/

  spinlock_32.h
       48  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]

/Linux-v4.19/arch/powerpc/platforms/pasemi/

  setup.c
       89  arch_spin_unlock(&timebase_lock);   [in pas_give_timebase()]
      105  arch_spin_unlock(&timebase_lock);   [in pas_take_timebase()]

/Linux-v4.19/arch/s390/include/asm/

  spinlock.h
       89  static inline void arch_spin_unlock(arch_spinlock_t *lp)   [function definition]

/Linux-v4.19/arch/arm/include/asm/

  spinlock.h
      107  static inline void arch_spin_unlock(arch_spinlock_t *lock)   [function definition]
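The ARM definition referenced last is a ticket-lock release: after a full barrier the owner bumps the "owner" half of the ticket pair, then issues DSB+SEV so that cores parked in WFE re-check the lock. The portable sketch below reproduces the ticket idea with C11 atomics standing in for the ARM barriers and event instructions; the details are modelled from memory of the v4.19 code and should be checked against the file itself.

/*
 * Ticket-lock release in portable form: the holder bumps the "owner"
 * ticket with release semantics, and the waiter whose ticket matches the
 * new owner value proceeds. Types and helpers below are stand-ins.
 */
#include <stdatomic.h>

typedef struct {
        _Atomic unsigned short owner;           /* ticket currently being served */
        _Atomic unsigned short next;            /* next ticket to hand out */
} ticket_lock_sketch_t;

static inline void ticket_lock_sketch(ticket_lock_sketch_t *lock)
{
        /* Take a ticket, then wait until it is our turn. */
        unsigned short ticket =
                atomic_fetch_add_explicit(&lock->next, 1, memory_order_relaxed);

        while (atomic_load_explicit(&lock->owner, memory_order_acquire) != ticket)
                ;                               /* ARM waiters park in WFE instead of spinning hot */
}

static inline void ticket_unlock_sketch(ticket_lock_sketch_t *lock)
{
        /* cf. the ARM release: barrier, owner++, then wake the waiters. */
        atomic_fetch_add_explicit(&lock->owner, 1, memory_order_release);
}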