Lines matching refs:this_rq (each hit shows its source line number, the matching line, and the enclosing definition)
94 extern void calc_global_load_tick(struct rq *this_rq);
95 extern long calc_load_fold_active(struct rq *this_rq, long adjust);
98 extern void cpu_load_update_active(struct rq *this_rq);
100 static inline void cpu_load_update_active(struct rq *this_rq) { } in cpu_load_update_active() argument
951 #define this_rq() this_cpu_ptr(&runqueues) macro
1531 void (*task_woken)(struct rq *this_rq, struct task_struct *task);
1550 void (*switched_from)(struct rq *this_rq, struct task_struct *task);
1551 void (*switched_to) (struct rq *this_rq, struct task_struct *task);
1552 void (*prio_changed) (struct rq *this_rq, struct task_struct *task,
1873 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest) in _double_lock_balance() argument
1874 __releases(this_rq->lock) in _double_lock_balance()
1876 __acquires(this_rq->lock) in _double_lock_balance()
1878 raw_spin_unlock(&this_rq->lock); in _double_lock_balance()
1879 double_rq_lock(this_rq, busiest); in _double_lock_balance()
1892 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest) in _double_lock_balance() argument
1893 __releases(this_rq->lock) in _double_lock_balance()
1895 __acquires(this_rq->lock) in _double_lock_balance()
1900 if (busiest < this_rq) { in _double_lock_balance()
1901 raw_spin_unlock(&this_rq->lock); in _double_lock_balance()
1903 raw_spin_lock_nested(&this_rq->lock, in _double_lock_balance()
1918 static inline int double_lock_balance(struct rq *this_rq, struct rq *busiest) in double_lock_balance() argument
1922 raw_spin_unlock(&this_rq->lock); in double_lock_balance()
1926 return _double_lock_balance(this_rq, busiest); in double_lock_balance()
1929 static inline void double_unlock_balance(struct rq *this_rq, struct rq *busiest) in double_unlock_balance() argument
1933 lock_set_subclass(&this_rq->lock.dep_map, 0, _RET_IP_); in double_unlock_balance()
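The this_rq() definition at source line 951 above is the per-CPU runqueue accessor: runqueues is a per-CPU variable, and this_cpu_ptr() resolves the copy belonging to the CPU currently executing. The following is a minimal user-space sketch of that lookup, not the kernel code: struct rq is reduced to a single field, a plain array stands in for the per-CPU machinery, and sched_getcpu() stands in for this_cpu_ptr().

/*
 * Sketch of what this_rq() (source line 951 above) boils down to:
 * look up the current CPU's entry in the per-CPU 'runqueues' variable.
 * User-space stand-in only; not the kernel's definitions.
 */
#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>

#define NR_CPUS 256

struct rq {                             /* simplified stand-in for the scheduler runqueue */
	int nr_running;
};

static struct rq runqueues[NR_CPUS];    /* models the per-CPU 'runqueues' variable */

static struct rq *this_rq(void)         /* kernel: this_cpu_ptr(&runqueues) */
{
	int cpu = sched_getcpu();

	if (cpu < 0 || cpu >= NR_CPUS)
		cpu = 0;                /* keep the toy example in bounds */
	return &runqueues[cpu];
}

int main(void)
{
	struct rq *rq = this_rq();

	rq->nr_running++;
	printf("this CPU's runqueue now reports %d runnable task(s)\n",
	       rq->nr_running);
	return 0;
}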
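The _double_lock_balance()/double_lock_balance()/double_unlock_balance() excerpts (source lines 1873-1933 above) implement the usual deadlock-avoidance rule for taking two runqueue locks: one variant simply drops this_rq->lock and lets double_rq_lock() take both in order, while the other reorders only when the trylock on busiest fails and busiest has the lower address, reporting via the return value that the caller's lock was dropped. Below is a minimal user-space sketch of that address-ordering rule, using pthread mutexes in place of raw spinlocks; the names mirror the kernel helpers but the code is a simplified stand-in without the lockdep annotations (__releases/__acquires, raw_spin_lock_nested, lock_set_subclass) seen in the listing.

/*
 * User-space sketch of the lock ordering used by _double_lock_balance():
 * if we already hold this_rq->lock and cannot trylock busiest->lock, and
 * busiest sits at the lower address, drop our lock and take both in
 * address order.  Simplified stand-in, not the kernel code.
 */
#include <pthread.h>
#include <stdio.h>

struct rq {                             /* stand-in for the scheduler runqueue */
	pthread_mutex_t lock;
	int nr_running;
};

/* Returns 1 if this_rq->lock was dropped and retaken, 0 otherwise. */
static int double_lock_balance(struct rq *this_rq, struct rq *busiest)
{
	int unlocked = 0;

	if (pthread_mutex_trylock(&busiest->lock) != 0) {
		if (busiest < this_rq) {
			/* Wrong order: release ours, acquire both by address. */
			pthread_mutex_unlock(&this_rq->lock);
			pthread_mutex_lock(&busiest->lock);
			pthread_mutex_lock(&this_rq->lock);
			unlocked = 1;
		} else {
			/* Already in address order: just wait for busiest. */
			pthread_mutex_lock(&busiest->lock);
		}
	}
	return unlocked;
}

static void double_unlock_balance(struct rq *this_rq, struct rq *busiest)
{
	pthread_mutex_unlock(&busiest->lock);
	/* this_rq->lock stays held, as with the kernel helper. */
	(void)this_rq;
}

int main(void)
{
	struct rq a = { PTHREAD_MUTEX_INITIALIZER, 3 };
	struct rq b = { PTHREAD_MUTEX_INITIALIZER, 7 };

	pthread_mutex_lock(&a.lock);            /* caller holds this_rq->lock */
	int dropped = double_lock_balance(&a, &b);
	printf("locked both runqueues, this_rq dropped/retaken: %d\n", dropped);
	double_unlock_balance(&a, &b);
	pthread_mutex_unlock(&a.lock);
	return 0;
}

Always acquiring the two locks in a fixed (address) order is what prevents two CPUs balancing toward each other from deadlocking; the return value tells the caller that this_rq->lock was momentarily released, so anything previously read under it must be revalidated.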