
Searched refs: task_rq (Results 1 – 5 of 5) sorted by relevance

/Linux-v5.4/kernel/sched/
core.c
86 rq = task_rq(p); in __task_rq_lock()
88 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in __task_rq_lock()
110 rq = task_rq(p); in task_rq_lock()
129 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in task_rq_lock()
1567 if (task_rq(p) == rq) { in migration_cpu_stop()
1592 struct rq *rq = task_rq(p); in do_set_cpus_allowed()
1737 lockdep_is_held(&task_rq(p)->lock))); in set_task_cpu()
1765 src_rq = task_rq(p); in __migrate_swap_task()
1905 rq = task_rq(p); in wait_task_inactive()
2624 atomic_dec(&task_rq(p)->nr_iowait); in try_to_wake_up()
[all …]
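
The core.c hits at 86–88 and 110–129 illustrate the acquire-and-recheck pattern used by __task_rq_lock() and task_rq_lock(): task_rq(p) can change under the caller until the runqueue lock is held, so the lock is taken and the mapping re-verified in a loop. A simplified sketch of that loop, reconstructed from the pattern visible above rather than copied verbatim from the v5.4 source:

	struct rq *__task_rq_lock(struct task_struct *p, struct rq_flags *rf)
	{
		struct rq *rq;

		lockdep_assert_held(&p->pi_lock);

		for (;;) {
			rq = task_rq(p);		/* rq the task is currently assigned to */
			raw_spin_lock(&rq->lock);
			if (likely(rq == task_rq(p) && !task_on_rq_migrating(p)))
				return rq;		/* still the right rq, not mid-migration */
			raw_spin_unlock(&rq->lock);	/* task moved under us; retry */

			while (unlikely(task_on_rq_migrating(p)))
				cpu_relax();		/* wait out an in-flight migration */
		}
	}
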
deadline.c
36 struct rq *rq = task_rq(p); in dl_rq_of_se()
165 rq = task_rq(p); in dl_change_utilization()
924 struct rq *rq = task_rq(p); in start_dl_timer()
1649 rq = task_rq(p); in migrate_task_rq_dl()
1891 if (!cpudl_find(&task_rq(task)->rd->cpudl, task, later_mask)) in find_later_rq()
1989 if (unlikely(task_rq(task) != rq || in find_lock_later_rq()
2236 rq = task_rq(p); in set_cpus_allowed_dl()
rt.c
241 return task_rq(p); in rq_of_rt_se()
1646 if (!cpupri_find(&task_rq(task)->rd->cpupri, task, lowest_mask)) in find_lowest_rq()
1740 if (unlikely(task_rq(task) != rq || in find_lock_lowest_rq()
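
The find_lock_later_rq() and find_lock_lowest_rq() hits (deadline.c 1989, rt.c 1740) apply the same idea during push migration: taking the destination runqueue lock via double_lock_balance() may drop the source lock, so the chosen candidate is only usable if the task is still on the source rq afterwards. A hedged sketch of that recheck, with the condition list abbreviated relative to the real code:

	/* Source rq lock held; lowest_rq is the candidate destination. */
	if (double_lock_balance(rq, lowest_rq)) {
		/*
		 * The source lock was dropped while acquiring both locks,
		 * so the task may have migrated or been dequeued meanwhile.
		 */
		if (unlikely(task_rq(task) != rq || !task_on_rq_queued(task))) {
			double_unlock_balance(rq, lowest_rq);
			lowest_rq = NULL;	/* give up on this candidate */
		}
	}
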
fair.c
448 return &task_rq(p)->cfs; in task_cfs_rq()
454 struct rq *rq = task_rq(p); in cfs_rq_of()
1095 (lockdep_is_held(&task_rq(p)->lock) && !READ_ONCE(p->on_cpu))); in deref_task_numa_group()
5121 SCHED_WARN_ON(task_rq(p) != rq); in hrtick_start_fair()
6545 lockdep_assert_held(&task_rq(p)->lock); in migrate_task_rq_fair()
7437 BUG_ON(task_rq(p) != rq); in attach_task()
9955 update_overutilized_status(task_rq(curr)); in task_tick_fair()
sched.h
1047 #define task_rq(p) cpu_rq(task_cpu(p)) (macro definition)
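
The sched.h hit shows that task_rq() is a thin macro: task_cpu(p) yields the CPU the task is currently assigned to, and cpu_rq() indexes the per-CPU runqueues. The neighbouring definitions, reproduced here from memory of v5.4 and possibly differing in detail, make the composition explicit:

	DECLARE_PER_CPU_SHARED_ALIGNED(struct rq, runqueues);

	#define cpu_rq(cpu)	(&per_cpu(runqueues, (cpu)))	/* rq of a given CPU */
	#define this_rq()	this_cpu_ptr(&runqueues)	/* rq of the local CPU */
	#define task_rq(p)	cpu_rq(task_cpu(p))		/* rq of the task's CPU */
	#define cpu_curr(cpu)	(cpu_rq(cpu)->curr)		/* task running on that CPU */

Because none of these take a lock, the value of task_rq(p) is only stable while p->pi_lock or the runqueue lock pins the task to its CPU, which is exactly what the core.c retry loop above enforces.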