
Searched refs:this_rq (Results 1 – 8 of 8) sorted by relevance

/Linux-v4.19/kernel/sched/
loadavg.c
79 long calc_load_fold_active(struct rq *this_rq, long adjust) in calc_load_fold_active() argument
83 nr_active = this_rq->nr_running - adjust; in calc_load_fold_active()
84 nr_active += (long)this_rq->nr_uninterruptible; in calc_load_fold_active()
86 if (nr_active != this_rq->calc_load_active) { in calc_load_fold_active()
87 delta = nr_active - this_rq->calc_load_active; in calc_load_fold_active()
88 this_rq->calc_load_active = nr_active; in calc_load_fold_active()
182 struct rq *this_rq = this_rq(); in calc_load_nohz_start() local
189 delta = calc_load_fold_active(this_rq, 0); in calc_load_nohz_start()
199 struct rq *this_rq = this_rq(); in calc_load_nohz_stop() local
204 this_rq->calc_load_update = READ_ONCE(calc_load_update); in calc_load_nohz_stop()
[all …]
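
Note: the loadavg.c hits are the per-CPU fold that feeds the global load average: calc_load_fold_active() adds nr_running and nr_uninterruptible, subtracts the caller's adjustment, and returns only the change since the last fold. Below is a minimal user-space sketch of that folding logic (a simplified model holding only the fields the fold touches, not the kernel code):

#include <stdio.h>

/* Simplified stand-in holding only the fields the fold touches. */
struct rq {
    unsigned int nr_running;
    unsigned long nr_uninterruptible;
    long calc_load_active;
};

/* Return how much this CPU's active count (running + uninterruptible,
 * minus the caller's adjustment) changed since the last fold, and
 * remember the new value so the next fold reports only the change. */
static long calc_load_fold_active(struct rq *this_rq, long adjust)
{
    long nr_active, delta = 0;

    nr_active  = this_rq->nr_running - adjust;
    nr_active += (long)this_rq->nr_uninterruptible;

    if (nr_active != this_rq->calc_load_active) {
        delta = nr_active - this_rq->calc_load_active;
        this_rq->calc_load_active = nr_active;
    }
    return delta;
}

int main(void)
{
    struct rq rq = { .nr_running = 3, .nr_uninterruptible = 1, .calc_load_active = 2 };

    printf("delta = %ld\n", calc_load_fold_active(&rq, 0)); /* 2: 4 active vs 2 remembered */
    printf("delta = %ld\n", calc_load_fold_active(&rq, 0)); /* 0: nothing changed since */
    return 0;
}
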
sched.h
94 extern void calc_global_load_tick(struct rq *this_rq);
95 extern long calc_load_fold_active(struct rq *this_rq, long adjust);
98 extern void cpu_load_update_active(struct rq *this_rq);
100 static inline void cpu_load_update_active(struct rq *this_rq) { } in cpu_load_update_active() argument
951 #define this_rq() this_cpu_ptr(&runqueues) macro
1531 void (*task_woken)(struct rq *this_rq, struct task_struct *task);
1550 void (*switched_from)(struct rq *this_rq, struct task_struct *task);
1551 void (*switched_to) (struct rq *this_rq, struct task_struct *task);
1552 void (*prio_changed) (struct rq *this_rq, struct task_struct *task,
1873 static inline int _double_lock_balance(struct rq *this_rq, struct rq *busiest) in _double_lock_balance() argument
[all …]
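
Note: sched.h both declares the this_rq-taking hooks and, at line 951, defines the this_rq() macro itself as this_cpu_ptr(&runqueues), i.e. a pointer to the runqueue owned by the CPU currently executing. Below is a rough user-space model of that per-CPU lookup; an array indexed by a fake "current CPU" id stands in for real per-CPU storage, so this is an illustrative sketch, not the kernel mechanism:

#include <stdio.h>

#define NR_CPUS 4

struct rq {
    int cpu;
    unsigned int nr_running;
};

/* The kernel keeps one struct rq per CPU and this_rq() expands to
 * this_cpu_ptr(&runqueues).  This model approximates per-CPU storage
 * with an array indexed by a fake current-CPU id. */
static struct rq runqueues[NR_CPUS];
static int current_cpu;                 /* stand-in for smp_processor_id() */

static struct rq *this_rq(void)
{
    return &runqueues[current_cpu];     /* models this_cpu_ptr(&runqueues) */
}

int main(void)
{
    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        runqueues[cpu].cpu = cpu;

    current_cpu = 2;                    /* pretend we are running on CPU 2 */
    printf("this_rq()->cpu = %d\n", this_rq()->cpu);   /* prints 2 */
    return 0;
}
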
fair.c
3618 static int idle_balance(struct rq *this_rq, struct rq_flags *rf);
5314 static void cpu_load_update(struct rq *this_rq, unsigned long this_load, in cpu_load_update() argument
5317 unsigned long __maybe_unused tickless_load = this_rq->cpu_load[0]; in cpu_load_update()
5320 this_rq->nr_load_updates++; in cpu_load_update()
5323 this_rq->cpu_load[0] = this_load; /* Fasttrack for idx 0 */ in cpu_load_update()
5329 old_load = this_rq->cpu_load[i]; in cpu_load_update()
5351 this_rq->cpu_load[i] = (old_load * (scale - 1) + new_load) >> i; in cpu_load_update()
5376 static void cpu_load_update_nohz(struct rq *this_rq, in cpu_load_update_nohz() argument
5382 pending_updates = curr_jiffies - this_rq->last_load_update_tick; in cpu_load_update_nohz()
5384 this_rq->last_load_update_tick = curr_jiffies; in cpu_load_update_nohz()
[all …]
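
Note: the fair.c matches are the legacy rq->cpu_load[] tracking. The update at line 5351 is a per-index exponential moving average, cpu_load[i] = (old_load * (scale - 1) + new_load) >> i with scale doubling at each index, so index 0 follows the instantaneous load and higher indexes decay more slowly. A small user-space sketch of just that decay step (CPU_LOAD_IDX_MAX mirrors the kernel's array size; the surrounding nohz handling is left out):

#include <stdio.h>

#define CPU_LOAD_IDX_MAX 5   /* size of the kernel's cpu_load[] array */

/* Fold one sample into the per-index averages: index 0 tracks the
 * instantaneous load, index i keeps (2^i - 1)/2^i of the old value. */
static void cpu_load_update(unsigned long cpu_load[CPU_LOAD_IDX_MAX],
                            unsigned long this_load)
{
    int i;
    unsigned long scale;

    cpu_load[0] = this_load;                          /* fasttrack for idx 0 */
    for (i = 1, scale = 2; i < CPU_LOAD_IDX_MAX; i++, scale += scale) {
        unsigned long old_load = cpu_load[i];
        cpu_load[i] = (old_load * (scale - 1) + this_load) >> i;
    }
}

int main(void)
{
    unsigned long load[CPU_LOAD_IDX_MAX] = { 0 };

    cpu_load_update(load, 1024);
    /* idx 0 jumps to 1024 at once; idx 4 only moves by 1024/16 = 64 */
    for (int i = 0; i < CPU_LOAD_IDX_MAX; i++)
        printf("cpu_load[%d] = %lu\n", i, load[i]);
    return 0;
}
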
rt.c
260 static void pull_rt_task(struct rq *this_rq);
422 static inline void pull_rt_task(struct rq *this_rq) in pull_rt_task() argument
542 return this_rq()->rd->span; in sched_rt_period_mask()
2029 rq = this_rq(); in rto_push_irq_work_func()
2058 static void pull_rt_task(struct rq *this_rq) in pull_rt_task() argument
2060 int this_cpu = this_rq->cpu, cpu; in pull_rt_task()
2064 int rt_overload_count = rt_overloaded(this_rq); in pull_rt_task()
2077 cpumask_test_cpu(this_rq->cpu, this_rq->rd->rto_mask)) in pull_rt_task()
2082 tell_cpu_to_push(this_rq); in pull_rt_task()
2087 for_each_cpu(cpu, this_rq->rd->rto_mask) { in pull_rt_task()
[all …]
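
Note: in rt.c, this_rq is the runqueue doing the pulling. pull_rt_task() returns immediately unless some runqueue is RT-overloaded, and otherwise either kicks the push-IPI machinery or walks rd->rto_mask looking for a CPU to pull a waiting task from, skipping itself. Below is a compressed user-space model of that gatekeeping; the IPI branch and locking are omitted, and try_pull_from() is a hypothetical stand-in, not a kernel API:

#include <stdbool.h>
#include <stdio.h>

#define NR_CPUS 4

struct rq {
    int cpu;
    int rt_overload_count;           /* stand-in for rt_overloaded(this_rq) */
    unsigned int rto_mask;           /* stand-in for this_rq->rd->rto_mask  */
};

/* Hypothetical stand-in: would try to steal one queued RT task from src_cpu. */
static bool try_pull_from(struct rq *this_rq, int src_cpu)
{
    printf("cpu%d: trying to pull an RT task from cpu%d\n", this_rq->cpu, src_cpu);
    return false;
}

/* Mirrors the control flow: bail out when nothing is overloaded, never pull
 * from our own runqueue, otherwise scan the overload mask. */
static void pull_rt_task(struct rq *this_rq)
{
    if (this_rq->rt_overload_count == 0)
        return;                              /* no rq has spare runnable RT tasks */

    for (int cpu = 0; cpu < NR_CPUS; cpu++) {
        if (cpu == this_rq->cpu)
            continue;                        /* skip ourselves */
        if (!(this_rq->rto_mask & (1u << cpu)))
            continue;                        /* that CPU is not RT-overloaded */
        try_pull_from(this_rq, cpu);
    }
}

int main(void)
{
    struct rq rq = { .cpu = 0, .rt_overload_count = 1, .rto_mask = 0x6 /* cpus 1,2 */ };
    pull_rt_task(&rq);
    return 0;
}
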
cputime.c
220 struct rq *rq = this_rq(); in account_idle_time()
240 steal -= this_rq()->prev_steal_time; in steal_account_process_time()
243 this_rq()->prev_steal_time += steal; in steal_account_process_time()
394 struct rq *rq = this_rq(); in irqtime_account_idle_ticks()
477 struct rq *rq = this_rq(); in account_process_tick()
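
Note: the cputime.c uses of this_rq() are per-CPU bookkeeping; the steal_account_process_time() lines show the delta pattern: the hypervisor reports cumulative steal time, so the code subtracts this_rq()->prev_steal_time, charges only the difference, and advances prev_steal_time. A tiny user-space model of that delta accounting (a sketch, not the kernel function):

#include <stdio.h>

/* Models this_rq()->prev_steal_time: the last cumulative value we charged. */
static unsigned long long prev_steal_time;

/* The hypervisor counter only grows, so each tick we account the delta. */
static unsigned long long account_steal(unsigned long long steal_cumulative)
{
    unsigned long long steal = steal_cumulative - prev_steal_time;

    prev_steal_time += steal;
    return steal;   /* amount to charge as steal time this tick */
}

int main(void)
{
    printf("%llu\n", account_steal(100));  /* 100 */
    printf("%llu\n", account_steal(160));  /* 60  */
    return 0;
}
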
deadline.c
2124 static void pull_dl_task(struct rq *this_rq) in pull_dl_task() argument
2126 int this_cpu = this_rq->cpu, cpu; in pull_dl_task()
2132 if (likely(!dl_overloaded(this_rq))) in pull_dl_task()
2141 for_each_cpu(cpu, this_rq->rd->dlo_mask) { in pull_dl_task()
2151 if (this_rq->dl.dl_nr_running && in pull_dl_task()
2152 dl_time_before(this_rq->dl.earliest_dl.curr, in pull_dl_task()
2157 double_lock_balance(this_rq, src_rq); in pull_dl_task()
2174 (!this_rq->dl.dl_nr_running || in pull_dl_task()
2176 this_rq->dl.earliest_dl.curr))) { in pull_dl_task()
2194 add_rq_bw(&p->dl, &this_rq->dl); in pull_dl_task()
[all …]
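
Note: pull_dl_task() in deadline.c mirrors the RT pull path but decides by deadline: it scans rd->dlo_mask and only pulls when the candidate's earliest deadline is earlier than what this_rq already has queued, using dl_time_before(). A minimal sketch of that comparison; worth_pulling() is a hypothetical helper distilled from the checks around lines 2151 and 2174, not a kernel function:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Wraparound-safe "deadline a is earlier than b", same idea as the kernel helper. */
static inline bool dl_time_before(uint64_t a, uint64_t b)
{
    return (int64_t)(a - b) < 0;
}

/* Core of the pull decision: only pull from a source runqueue if doing so
 * could give us an earlier deadline than what we already have queued. */
static bool worth_pulling(bool this_has_dl_tasks, uint64_t this_earliest,
                          uint64_t src_pullable_deadline)
{
    if (!this_has_dl_tasks)
        return true;   /* anything beats having no deadline task queued */
    return dl_time_before(src_pullable_deadline, this_earliest);
}

int main(void)
{
    printf("%d\n", worth_pulling(true, 1000, 900));   /* 1: source task is more urgent */
    printf("%d\n", worth_pulling(true, 1000, 1200));  /* 0: keep what we have */
    return 0;
}
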
idle.c
21 idle_set_state(this_rq(), idle_state); in sched_idle_set_state()
core.c
282 if (rq == this_rq()) { in hrtick_start()
965 struct rq *rq = this_rq(); in migration_cpu_stop()
1605 rq = this_rq(); in ttwu_stat()
1728 struct rq *rq = this_rq(); in sched_ttwu_pending()
1754 if (llist_empty(&this_rq()->wake_list) && !got_nohz_idle_kick()) in scheduler_ipi()
1777 this_rq()->idle_balance = 1; in scheduler_ipi()
2073 if (WARN_ON_ONCE(rq != this_rq()) || in try_to_wake_up_local()
2638 struct rq *rq = this_rq(); in finish_task_switch()
2932 struct rq *rq = this_rq(); in get_iowait_load()
3297 schedstat_inc(this_rq()->sched_count); in schedule_debug()
[all …]
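
Note: the core.c hits are mostly the local-runqueue idiom: take a pointer with struct rq *rq = this_rq() on paths that only touch the current CPU, or compare a given rq against this_rq() to decide between doing the work directly and kicking the owning CPU, as hrtick_start() does at line 282. A tiny user-space model of that comparison pattern; do_it_locally() and kick_remote_cpu() are hypothetical stand-ins, not kernel functions:

#include <stdio.h>

#define NR_CPUS 4

struct rq { int cpu; };

static struct rq runqueues[NR_CPUS];
static int current_cpu;                          /* stand-in for smp_processor_id() */
static struct rq *this_rq(void) { return &runqueues[current_cpu]; }

static void do_it_locally(struct rq *rq)  { printf("cpu%d: doing it here\n", rq->cpu); }
static void kick_remote_cpu(struct rq *rq) { printf("kicking cpu%d to do it\n", rq->cpu); }

/* The hrtick_start() shape: act directly when the target runqueue is our own,
 * otherwise hand the work to the CPU that owns it. */
static void start_something(struct rq *rq)
{
    if (rq == this_rq())
        do_it_locally(rq);
    else
        kick_remote_cpu(rq);
}

int main(void)
{
    for (int i = 0; i < NR_CPUS; i++)
        runqueues[i].cpu = i;

    current_cpu = 0;
    start_something(&runqueues[0]);   /* local  */
    start_something(&runqueues[3]);   /* remote */
    return 0;
}
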