Home
last modified time | relevance | path

Searched refs: this_rq (Results 1 – 9 of 9) sorted by relevance

/Linux-v5.15/kernel/sched/
Dloadavg.c79 long calc_load_fold_active(struct rq *this_rq, long adjust) in calc_load_fold_active() argument
83 nr_active = this_rq->nr_running - adjust; in calc_load_fold_active()
84 nr_active += (int)this_rq->nr_uninterruptible; in calc_load_fold_active()
86 if (nr_active != this_rq->calc_load_active) { in calc_load_fold_active()
87 delta = nr_active - this_rq->calc_load_active; in calc_load_fold_active()
88 this_rq->calc_load_active = nr_active; in calc_load_fold_active()
252 calc_load_nohz_fold(this_rq()); in calc_load_nohz_start()
266 struct rq *this_rq = this_rq(); in calc_load_nohz_stop() local
271 this_rq->calc_load_update = READ_ONCE(calc_load_update); in calc_load_nohz_stop()
272 if (time_before(jiffies, this_rq->calc_load_update)) in calc_load_nohz_stop()
[all …]
Dsched.h100 extern void calc_global_load_tick(struct rq *this_rq);
101 extern long calc_load_fold_active(struct rq *this_rq, long adjust);
1367 #define this_rq() this_cpu_ptr(&runqueues) macro
1663 rq = this_rq(); in this_rq_lock_irq()
2138 void (*task_woken)(struct rq *this_rq, struct task_struct *task);
2159 void (*switched_from)(struct rq *this_rq, struct task_struct *task);
2160 void (*switched_to) (struct rq *this_rq, struct task_struct *task);
2161 void (*prio_changed) (struct rq *this_rq, struct task_struct *task,
2532 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest) in _double_lock_balance() argument
2533 __releases(this_rq->lock) in _double_lock_balance()
[all …]
Drt.c263 static void pull_rt_task(struct rq *this_rq);
426 static inline void pull_rt_task(struct rq *this_rq) in pull_rt_task() argument
585 return this_rq()->rd->span; in sched_rt_period_mask()
2128 rq = this_rq(); in rto_push_irq_work_func()
2158 static void pull_rt_task(struct rq *this_rq) in pull_rt_task() argument
2160 int this_cpu = this_rq->cpu, cpu; in pull_rt_task()
2164 int rt_overload_count = rt_overloaded(this_rq); in pull_rt_task()
2177 cpumask_test_cpu(this_rq->cpu, this_rq->rd->rto_mask)) in pull_rt_task()
2182 tell_cpu_to_push(this_rq); in pull_rt_task()
2187 for_each_cpu(cpu, this_rq->rd->rto_mask) { in pull_rt_task()
[all …]
Dfair.c3947 static int newidle_balance(struct rq *this_rq, struct rq_flags *rf);
6273 struct rq *this_rq = this_rq(); in select_idle_cpu() local
6293 if (unlikely(this_rq->wake_stamp < now)) { in select_idle_cpu()
6294 while (this_rq->wake_stamp < now && this_rq->wake_avg_idle) { in select_idle_cpu()
6295 this_rq->wake_stamp++; in select_idle_cpu()
6296 this_rq->wake_avg_idle >>= 1; in select_idle_cpu()
6300 avg_idle = this_rq->wake_avg_idle; in select_idle_cpu()
6337 this_rq->wake_avg_idle -= min(this_rq->wake_avg_idle, time); in select_idle_cpu()
6433 this_rq()->nr_running <= 1) { in select_idle_sibling()
9782 static int load_balance(int this_cpu, struct rq *this_rq, in load_balance() argument
[all …]
Ddeadline.c2225 static void pull_dl_task(struct rq *this_rq) in pull_dl_task() argument
2227 int this_cpu = this_rq->cpu, cpu; in pull_dl_task()
2233 if (likely(!dl_overloaded(this_rq))) in pull_dl_task()
2242 for_each_cpu(cpu, this_rq->rd->dlo_mask) { in pull_dl_task()
2252 if (this_rq->dl.dl_nr_running && in pull_dl_task()
2253 dl_time_before(this_rq->dl.earliest_dl.curr, in pull_dl_task()
2259 double_lock_balance(this_rq, src_rq); in pull_dl_task()
2276 (!this_rq->dl.dl_nr_running || in pull_dl_task()
2278 this_rq->dl.earliest_dl.curr))) { in pull_dl_task()
2295 activate_task(this_rq, p, 0); in pull_dl_task()
[all …]
Dcputime.c222 struct rq *rq = this_rq(); in account_idle_time()
242 steal -= this_rq()->prev_steal_time; in steal_account_process_time()
245 this_rq()->prev_steal_time += steal; in steal_account_process_time()
385 } else if (p == this_rq()->idle) { in irqtime_account_process_tick()
491 else if ((p != this_rq()->idle) || (irq_count() != HARDIRQ_OFFSET)) in account_process_tick()
Dmembarrier.c234 struct rq *rq = this_rq(); in membarrier_update_current_mm()
Didle.c22 idle_set_state(this_rq(), idle_state); in sched_idle_set_state()
Dcore.c756 if (rq == this_rq()) in hrtick_start()
2138 this_rq()->nr_pinned++; in migrate_disable()
2167 this_rq()->nr_pinned--; in migrate_enable()
2295 struct rq *rq = this_rq(); in migration_cpu_stop()
2394 struct rq *lowest_rq = NULL, *rq = this_rq(); in push_cpu_stop()
3487 rq = this_rq(); in ttwu_stat()
3628 struct rq *rq = this_rq(); in sched_ttwu_pending()
4774 struct rq *rq = this_rq(); in finish_task_switch()
5518 schedstat_inc(this_rq()->sched_count); in schedule_debug()
8301 rq = this_rq(); in yield_to()
[all …]