Lines matching refs:load_avg (all hits below are in kernel/sched/fair.c; the leading number on each entry is the line number within fair.c, and the trailing "in foo()" names the enclosing function). The sketches interleaved below are reconstructions from kernel sources of roughly the v5.3/v5.4 era, which the identifiers runnable_load_avg and cpu_runnable_load() suggest; they are illustrative context for each hit, not authoritative source.
745 sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
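Line 745 seeds a new entity's PELT state in init_entity_runnable_average(): tasks start at full weight so they look heavy until the average converges on their real load, while group entities start at zero. A sketch of the enclosing function, under the version assumption above:

    void init_entity_runnable_average(struct sched_entity *se)
    {
            struct sched_avg *sa = &se->avg;

            memset(sa, 0, sizeof(*sa));

            /*
             * Tasks begin with their full weight as load so they are treated
             * as heavy until PELT converges; group entities begin at zero
             * because nothing is attached to the task group yet.
             */
            if (entity_is_task(se))
                    sa->runnable_load_avg = sa->load_avg =
                            scale_load_down(se->load.weight);

            /* util_avg is seeded later, in post_init_entity_util_avg(). */
    }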
791 sa->util_avg /= (cfs_rq->avg.load_avg + 1); in post_init_entity_util_avg()
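Line 791 is in post_init_entity_util_avg(): a new task's initial util_avg is carved out of its cfs_rq's current utilization in proportion to the task's weight, and the "+ 1" keeps the division safe when load_avg is zero. A trimmed sketch:

    void post_init_entity_util_avg(struct task_struct *p)
    {
            struct sched_entity *se = &p->se;
            struct cfs_rq *cfs_rq = cfs_rq_of(se);
            struct sched_avg *sa = &se->avg;
            long cpu_scale = arch_scale_cpu_capacity(cpu_of(rq_of(cfs_rq)));
            long cap = (long)(cpu_scale - cfs_rq->avg.util_avg) / 2;

            if (cap > 0) {
                    if (cfs_rq->avg.util_avg != 0) {
                            /* Weight-proportional share of the queue's util. */
                            sa->util_avg  = cfs_rq->avg.util_avg * se->load.weight;
                            sa->util_avg /= (cfs_rq->avg.load_avg + 1);

                            if (sa->util_avg > cap)
                                    sa->util_avg = cap;
                    } else {
                            sa->util_avg = cap;
                    }
            }
            /* (attaching the entity's PELT state follows) */
    }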
2855 cfs_rq->avg.load_avg += se->avg.load_avg; in enqueue_load_avg()
2862 sub_positive(&cfs_rq->avg.load_avg, se->avg.load_avg); in dequeue_load_avg()
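Lines 2855 and 2862 are a symmetric pair: enqueue_load_avg() folds an entity's contribution into its cfs_rq, and dequeue_load_avg() removes it again, with sub_positive() clamping at zero so slightly stale values can never underflow the queue's average. Sketch:

    static inline void
    enqueue_load_avg(struct cfs_rq *cfs_rq, struct sched_entity *se)
    {
            cfs_rq->avg.load_avg += se->avg.load_avg;
            cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum;
    }

    static inline void
    dequeue_load_avg(struct cfs_rq *cfs_rq, struct sched_entity *se)
    {
            /* sub_positive() clamps at zero instead of wrapping. */
            sub_positive(&cfs_rq->avg.load_avg, se->avg.load_avg);
            sub_positive(&cfs_rq->avg.load_sum,
                         se_weight(se) * se->avg.load_sum);
    }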
2895 se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider); in reweight_entity()
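Line 2895 is where reweight_entity() rebuilds load_avg after an entity's weight changes: the old contribution is dequeued, the average is rederived from load_sum under the new weight, and the entity is re-enqueued. The divider is the largest load_sum attainable in the current PELT window. A condensed sketch (the runnable_load_avg half is elided):

    static void reweight_entity(struct cfs_rq *cfs_rq, struct sched_entity *se,
                                unsigned long weight, unsigned long runnable)
    {
            if (se->on_rq) {
                    /* commit outstanding execution time */
                    if (cfs_rq->curr == se)
                            update_curr(cfs_rq);
                    account_entity_dequeue(cfs_rq, se);
            }
            dequeue_load_avg(cfs_rq, se);

            se->runnable_weight = runnable;
            update_load_set(&se->load, weight);

            /* Rederive the average from the sum under the new weight. */
            do {
                    u32 divider = LOAD_AVG_MAX - 1024 + se->avg.period_contrib;

                    se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum,
                                               divider);
            } while (0);

            enqueue_load_avg(cfs_rq, se);
            if (se->on_rq)
                    account_entity_enqueue(cfs_rq, se);
    }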
3001 load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg); in calc_group_shares()
3003 tg_weight = atomic_long_read(&tg->load_avg); in calc_group_shares()
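Lines 3001 and 3003 are the core of calc_group_shares(), which splits a task group's shares across CPUs in proportion to each per-CPU runqueue's load. The long comment above the real function derives the approximation

    shares ~= tg->shares * load / (tg->load_avg - tg_load_avg_contrib + load),
    where load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg)

so a freshly woken queue (large instantaneous weight, small average) is not starved. Sketch:

    static long calc_group_shares(struct cfs_rq *cfs_rq)
    {
            long tg_weight, tg_shares, load, shares;
            struct task_group *tg = cfs_rq->tg;

            tg_shares = READ_ONCE(tg->shares);

            /* Prefer the larger of instantaneous and average load. */
            load = max(scale_load_down(cfs_rq->load.weight),
                       cfs_rq->avg.load_avg);

            tg_weight = atomic_long_read(&tg->load_avg);

            /* Swap our stale published contribution for the fresh load. */
            tg_weight -= cfs_rq->tg_load_avg_contrib;
            tg_weight += load;

            shares = (tg_shares * load);
            if (tg_weight)
                    shares /= tg_weight;

            return clamp_t(long, shares, MIN_SHARES, tg_shares);
    }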
3057 long runnable, load_avg; in calc_group_runnable() local
3059 load_avg = max(cfs_rq->avg.load_avg, in calc_group_runnable()
3066 if (load_avg) in calc_group_runnable()
3067 runnable /= load_avg; in calc_group_runnable()
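Lines 3057-3067 are calc_group_runnable(), which scales the result of calc_group_shares() by the runnable fraction of the group's load; the division is guarded because an otherwise idle cfs_rq has load_avg == 0. Sketch:

    static long calc_group_runnable(struct cfs_rq *cfs_rq, long shares)
    {
            long runnable, load_avg;

            load_avg = max(cfs_rq->avg.load_avg,
                           scale_load_down(cfs_rq->load.weight));

            runnable = max(cfs_rq->avg.runnable_load_avg,
                           scale_load_down(cfs_rq->runnable_weight));

            /* runnable_shares = shares * runnable / load */
            runnable *= shares;
            if (load_avg)
                    runnable /= load_avg;

            return clamp_t(long, runnable, MIN_SHARES, shares);
    }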
3151 long delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib; in update_tg_load_avg()
3160 atomic_long_add(delta, &cfs_rq->tg->load_avg); in update_tg_load_avg()
3161 cfs_rq->tg_load_avg_contrib = cfs_rq->avg.load_avg; in update_tg_load_avg()
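Lines 3151-3161 are update_tg_load_avg(). tg->load_avg is a single atomic shared by every CPU, so each cfs_rq publishes its load_avg into it only when the value has drifted by more than about 1/64 of its last published contribution, keeping cacheline traffic down. Sketch:

    static inline void update_tg_load_avg(struct cfs_rq *cfs_rq, int force)
    {
            long delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib;

            /* The root task group's load_avg is never read; skip it. */
            if (cfs_rq->tg == &root_task_group)
                    return;

            if (force || abs(delta) > cfs_rq->tg_load_avg_contrib / 64) {
                    atomic_long_add(delta, &cfs_rq->tg->load_avg);
                    cfs_rq->tg_load_avg_contrib = cfs_rq->avg.load_avg;
            }
    }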
3313 unsigned long runnable_load_avg, load_avg; in update_tg_cfs_runnable() local
3353 load_avg = div_s64(load_sum, LOAD_AVG_MAX); in update_tg_cfs_runnable()
3356 delta_avg = load_avg - se->avg.load_avg; in update_tg_cfs_runnable()
3359 se->avg.load_avg = load_avg; in update_tg_cfs_runnable()
3360 add_positive(&cfs_rq->avg.load_avg, delta_avg); in update_tg_cfs_runnable()
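Lines 3313-3360 come from update_tg_cfs_runnable(), which propagates a child cfs_rq's load change upward: the group entity's load_sum is adjusted by the accumulated prop_runnable_sum, the average is rederived as load_avg = load_sum / LOAD_AVG_MAX, and only the delta is applied to the parent with add_positive(). A heavily condensed sketch of the load half (the runnable_load half and the clamping of runnable_sum are elided):

    static inline void
    update_tg_cfs_runnable(struct cfs_rq *cfs_rq, struct sched_entity *se,
                           struct cfs_rq *gcfs_rq)
    {
            long delta_avg, runnable_sum = gcfs_rq->prop_runnable_sum;
            unsigned long load_avg;
            u64 load_sum;
            s64 delta_sum;

            if (!runnable_sum)
                    return;
            gcfs_rq->prop_runnable_sum = 0;

            /* Rederive the group entity's average from the new sum. */
            load_sum = (s64)se_weight(se) * runnable_sum;
            load_avg = div_s64(load_sum, LOAD_AVG_MAX);

            delta_sum = load_sum - (s64)se_weight(se) * se->avg.load_sum;
            delta_avg = load_avg - se->avg.load_avg;

            se->avg.load_sum = runnable_sum;
            se->avg.load_avg = load_avg;

            /* Apply only the delta to the parent, clamped at zero. */
            add_positive(&cfs_rq->avg.load_avg, delta_avg);
            add_positive(&cfs_rq->avg.load_sum, delta_sum);
    }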
3422 if (se->avg.load_avg || se->avg.util_avg) in skip_blocked_update()
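Line 3422 is the first test in skip_blocked_update(): a blocked group entity that still carries load_avg or util_avg must keep being decayed, so its update cannot be skipped. Sketch:

    static inline bool skip_blocked_update(struct sched_entity *se)
    {
            struct cfs_rq *gcfs_rq = group_cfs_rq(se);

            /* Still carrying load or utilization: keep decaying it. */
            if (se->avg.load_avg || se->avg.util_avg)
                    return false;

            /* A pending propagation from below also forces an update. */
            if (gcfs_rq->propagate)
                    return false;

            /* Fully decayed and nothing pending: safe to skip. */
            return true;
    }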
3482 swap(cfs_rq->removed.load_avg, removed_load); in update_cfs_rq_load_avg()
3488 sub_positive(&sa->load_avg, r); in update_cfs_rq_load_avg()
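Lines 3482 and 3488 sit in update_cfs_rq_load_avg(). Load from entities removed without the rq lock held (see remove_entity_load_avg() at line 3677) accumulates in cfs_rq->removed; here it is swapped out in one shot under removed.lock and subtracted from the queue's averages. Sketch, with the runnable_sum propagation elided:

    static inline int
    update_cfs_rq_load_avg(u64 now, struct cfs_rq *cfs_rq)
    {
            unsigned long removed_load = 0, removed_util = 0;
            struct sched_avg *sa = &cfs_rq->avg;
            int decayed = 0;

            if (cfs_rq->removed.nr) {
                    unsigned long r;
                    u32 divider = LOAD_AVG_MAX - 1024 + sa->period_contrib;

                    raw_spin_lock(&cfs_rq->removed.lock);
                    swap(cfs_rq->removed.util_avg, removed_util);
                    swap(cfs_rq->removed.load_avg, removed_load);
                    cfs_rq->removed.nr = 0;
                    raw_spin_unlock(&cfs_rq->removed.lock);

                    r = removed_load;
                    sub_positive(&sa->load_avg, r);
                    sub_positive(&sa->load_sum, r * divider);

                    r = removed_util;
                    sub_positive(&sa->util_avg, r);
                    sub_positive(&sa->util_sum, r * divider);

                    decayed = 1;
            }

            decayed |= __update_load_avg_cfs_rq(now, cfs_rq);

            return decayed;
    }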
3547 div_u64(se->avg.load_avg * se->avg.load_sum, se_weight(se)); in attach_entity_load_avg()
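Line 3547 is in attach_entity_load_avg(): when an entity joins a cfs_rq its decay window is aligned with the queue's, and load_sum is recomputed back from load_avg so the two stay consistent under the new period_contrib. Sketch:

    static void attach_entity_load_avg(struct cfs_rq *cfs_rq,
                                       struct sched_entity *se, int flags)
    {
            u32 divider = LOAD_AVG_MAX - 1024 + cfs_rq->avg.period_contrib;

            /* Align the entity's decay window with the cfs_rq's. */
            se->avg.last_update_time = cfs_rq->avg.last_update_time;
            se->avg.period_contrib = cfs_rq->avg.period_contrib;

            /* Recompute the sums to match the new window. */
            se->avg.util_sum = se->avg.util_avg * divider;
            se->avg.load_sum = divider;
            if (se_weight(se))
                    se->avg.load_sum = div_u64(se->avg.load_avg *
                                               se->avg.load_sum,
                                               se_weight(se));

            enqueue_load_avg(cfs_rq, se);
            /* (util attach and propagation flags elided) */
    }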
3677 cfs_rq->removed.load_avg += se->avg.load_avg; in remove_entity_load_avg()
3689 return cfs_rq->avg.load_avg; in cfs_rq_load_avg()
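Lines 3677 and 3689 round out the entity lifecycle. remove_entity_load_avg() runs without the rq lock (typically on migration or exit), so instead of touching cfs_rq->avg directly it queues the entity's contribution in cfs_rq->removed for update_cfs_rq_load_avg() to subtract later; cfs_rq_load_avg() is just the accessor. Sketch:

    static void remove_entity_load_avg(struct sched_entity *se)
    {
            struct cfs_rq *cfs_rq = cfs_rq_of(se);
            unsigned long flags;

            sync_entity_load_avg(se);

            raw_spin_lock_irqsave(&cfs_rq->removed.lock, flags);
            ++cfs_rq->removed.nr;
            cfs_rq->removed.util_avg += se->avg.util_avg;
            cfs_rq->removed.load_avg += se->avg.load_avg;
            raw_spin_unlock_irqrestore(&cfs_rq->removed.lock, flags);
    }

    static inline unsigned long cfs_rq_load_avg(struct cfs_rq *cfs_rq)
    {
            return cfs_rq->avg.load_avg;
    }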
5387 unsigned long load_avg = cpu_runnable_load(rq); in cpu_avg_load_per_task() local
5390 return load_avg / nr_running; in cpu_avg_load_per_task()
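Lines 5387 and 5390 form cpu_avg_load_per_task(), a load-balancing heuristic: the CPU's runnable load divided by its number of runnable CFS tasks, returning 0 rather than dividing when the queue is empty. Sketch:

    static unsigned long cpu_avg_load_per_task(int cpu)
    {
            struct rq *rq = cpu_rq(cpu);
            unsigned long nr_running = READ_ONCE(rq->cfs.h_nr_running);
            unsigned long load_avg = cpu_runnable_load(rq);

            if (nr_running)
                    return load_avg / nr_running;

            return 0;
    }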
7482 if (cfs_rq->avg.load_avg) in cfs_rq_has_blocked()
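Line 7482 is in cfs_rq_has_blocked(), which tells the NOHZ idle-balance path whether a cfs_rq still has blocked load or utilization left to decay, and hence whether it must stay on the decay list. Sketch:

    static inline bool cfs_rq_has_blocked(struct cfs_rq *cfs_rq)
    {
            if (cfs_rq->avg.load_avg)
                    return true;

            if (cfs_rq->avg.util_avg)
                    return true;

            return false;
    }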
7624 load = div64_ul(load * se->avg.load_avg, in update_cfs_rq_h_load()
7637 return div64_ul(p->se.avg.load_avg * cfs_rq->h_load, in task_h_load()
7668 return p->se.avg.load_avg; in task_h_load()
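Lines 7624-7668 close the listing with hierarchical load. update_cfs_rq_h_load() walks down the group hierarchy, scaling h_load at each level by the child entity's share of its cfs_rq's load_avg (the "+ 1" again avoids a zero divisor); task_h_load() then scales the task's own load_avg the same way. The second task_h_load() at line 7668 is the flat !CONFIG_FAIR_GROUP_SCHED variant. Sketch, with the upward pass that records h_load_next elided:

    static void update_cfs_rq_h_load(struct cfs_rq *cfs_rq)
    {
            struct sched_entity *se;
            unsigned long load;

            /* (upward walk filling in cfs_rq->h_load_next elided) */

            while ((se = cfs_rq->h_load_next) != NULL) {
                    load = cfs_rq->h_load;
                    load = div64_ul(load * se->avg.load_avg,
                                    cfs_rq_load_avg(cfs_rq) + 1);
                    cfs_rq = group_cfs_rq(se);
                    cfs_rq->h_load = load;
            }
    }

    static unsigned long task_h_load(struct task_struct *p)
    {
            struct cfs_rq *cfs_rq = task_cfs_rq(p);

            update_cfs_rq_h_load(cfs_rq);
            return div64_ul(p->se.avg.load_avg * cfs_rq->h_load,
                            cfs_rq_load_avg(cfs_rq) + 1);
    }

Without group scheduling there is no hierarchy to fold, so the task's own average is its hierarchical load:

    static unsigned long task_h_load(struct task_struct *p)
    {
            return p->se.avg.load_avg;
    }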