Searched refs:debug_locks (Results 1 – 23 of 23) sorted by relevance
24 int debug_locks = 1; variable
25 EXPORT_SYMBOL_GPL(debug_locks);
1148 if (expected == FAILURE && debug_locks) { in dotest()
1154 if (debug_locks != expected) { in dotest()
1165 lockclass_mask, debug_locks, expected); in dotest()
1974 if (!debug_locks) { in locking_selftest()
2077 debug_locks = 0; in locking_selftest()
2086 debug_locks = 1; in locking_selftest()
2092 debug_locks = 1; in locking_selftest()
2098 debug_locks = 1; in locking_selftest()
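The dotest()/locking_selftest() hits show the harness convention: debug_locks is re-armed to 1 before each case, the case runs, and the result is judged by whether lockdep tripped, i.e. whether debug_locks still matches the expected SUCCESS/FAILURE outcome. A reduced sketch of that shape (run_case() is a stand-in for illustration, not the real dotest(); it assumes SUCCESS is 1 and FAILURE is 0):

    #include <linux/debug_locks.h>
    #include <linux/printk.h>

    /* Stand-in for dotest(): expected is assumed to be SUCCESS (1) or FAILURE (0). */
    static void run_case(void (*testcase)(void), int expected)
    {
            debug_locks = 1;                /* re-arm lock debugging */
            testcase();

            /*
             * A case expected to FAIL should have triggered lockdep and
             * cleared debug_locks; a passing case should have left it at 1.
             */
            if (debug_locks != expected)
                    pr_warn("locking selftest: unexpected result\n");
    }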
36 obj-y += bcd.o div64.o sort.o parser.o debug_locks.o random32.o \
53 return (debug_locks) ? lockdep_is_held(&ht->mutex) : 1; in lockdep_rht_mutex_is_held()
61 return (debug_locks) ? lockdep_is_held(lock) : 1; in lockdep_rht_bucket_is_held()
11 extern int debug_locks;
17 return xchg(&debug_locks, 0); in __debug_locks_off()
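The xchg() in __debug_locks_off() is what makes lock-debugging reports one-shot: the first caller to race in reads back 1 and gets to print, every later caller reads 0 and stays quiet. A minimal userspace sketch of the same pattern, using C11 atomics in place of the kernel's xchg() (all names here are illustrative, not kernel API):

    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int checks_enabled = 1;        /* stand-in for debug_locks */

    /* Returns non-zero only for the single caller that flips 1 -> 0,
     * mirroring what __debug_locks_off() does with xchg(). */
    static int checks_off(void)
    {
            return atomic_exchange(&checks_enabled, 0);
    }

    static void report_problem(const char *msg)
    {
            /* Only the first detected problem is reported; later callers
             * see the flag already cleared and skip the report. */
            if (checks_off())
                    fprintf(stderr, "lock checking disabled: %s\n", msg);
    }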
370 #define lockdep_depth(tsk) (debug_locks ? (tsk)->lockdep_depth : 0)
373 WARN_ON(debug_locks && !lockdep_is_held(l)); \
377 WARN_ON(debug_locks && !lockdep_is_held_type(l, 0)); \
381 WARN_ON(debug_locks && !lockdep_is_held_type(l, 1)); \
385 WARN_ON_ONCE(debug_locks && !lockdep_is_held(l)); \
427 # define lockdep_reset() do { debug_locks = 1; } while (0)
600 WARN_ONCE(debug_locks && !current->lockdep_recursion && \
606 WARN_ONCE(debug_locks && !current->lockdep_recursion && \
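These hits are the lockdep_assert_held()-style macros most callers actually use: each check is guarded by debug_locks, so once lock debugging has shut itself off after a first report the assertions go silent instead of flooding the log. A hedged usage sketch (struct my_dev and my_dev_update_stats() are invented for illustration):

    #include <linux/lockdep.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    /* Hypothetical device structure, for illustration only. */
    struct my_dev {
            spinlock_t lock;
            u64 stats;
    };

    static void my_dev_update_stats(struct my_dev *dev, u64 delta)
    {
            /*
             * Expands to WARN_ON(debug_locks && !lockdep_is_held(&dev->lock)):
             * it compiles away without CONFIG_LOCKDEP and stays quiet once
             * debug_locks has been cleared by an earlier warning.
             */
            lockdep_assert_held(&dev->lock);

            dev->stats += delta;
    }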
339 WARN_ON_ONCE(debug_locks && in inode_to_wb()
77 if (!debug_locks || chwalk == RT_MUTEX_FULL_CHAINWALK || !act_waiter) in debug_rt_mutex_deadlock()
91 if (!waiter->deadlock_lock || !debug_locks) in debug_rt_mutex_print_deadlock()
94 if (!debug_locks) { in graph_lock()
105 if (debug_locks && !arch_spin_is_locked(&lockdep_lock)) { in graph_unlock()
2422 if (unlikely(!debug_locks)) in validate_chain()
2844 if (unlikely(!debug_locks || current->lockdep_recursion)) in lockdep_hardirqs_on()
2890 if (unlikely(!debug_locks || current->lockdep_recursion)) in lockdep_hardirqs_off()
2919 if (unlikely(!debug_locks || current->lockdep_recursion)) in trace_softirqs_on()
2959 if (unlikely(!debug_locks || current->lockdep_recursion)) in trace_softirqs_off()
3199 if (unlikely(!debug_locks)) in __lockdep_init_map()
3279 if (unlikely(!debug_locks)) in __lock_acquire()
3418 if (unlikely(!debug_locks)) in __lock_acquire()
[all …]
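The graph_lock()/__lock_acquire() hits all share one shape: each entry point into the validator bails out early once debug_locks has been cleared (and when lockdep is re-entered via current->lockdep_recursion), so after the first report the whole machinery degrades to cheap no-ops. A condensed sketch of that guard, not the actual kernel code (the helper name is invented):

    #include <linux/compiler.h>
    #include <linux/debug_locks.h>
    #include <linux/sched.h>
    #include <linux/types.h>

    /* Invented helper that condenses the guard repeated across the
     * validator entry points listed above. */
    static bool lockdep_entry_allowed(void)
    {
            if (unlikely(!debug_locks))
                    return false;   /* an earlier report disabled checking */
            if (unlikely(current->lockdep_recursion))
                    return false;   /* already inside the validator */
            return true;
    }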
74 if (likely(debug_locks)) { in debug_mutex_unlock()
329 debug_locks); in lockdep_stats_show()
514 if (unlikely(!debug_locks)) in seq_header()
11 extern bool debug_locks;
12 bool debug_locks = true; variable
116 if (debug_locks) in rcu_read_lock_sched_held()
252 return rcu_scheduler_active != RCU_SCHEDULER_INACTIVE && debug_locks && in debug_lockdep_rcu_enabled()
311 WARN_ON(debug_locks && !lock_is_held(&rcu_lock_map)); in i915_gem_object_lookup_rcu()
1293 GEM_BUG_ON(debug_locks && in i915_request_wait()
659 GEM_BUG_ON(debug_locks && in i915_gem_object_wait()
25 rcu_scheduler_active = 1, debug_locks = 0
1537 WARN_ON_ONCE(!lockdep_sock_is_held(sk) && debug_locks); in sock_owned_by_me()
129 if (debug_locks) in lockdep_ovsl_is_held()
367 return !debug_locks || in xfs_isilocked()
268 if (unlikely(!debug_locks)) in module_assert_mutex_or_preempt()
1157 WARN_ON_ONCE(debug_locks && !(lockdep_is_held(&p->pi_lock) || in set_task_cpu()