Searched refs:scale_load_down (Results 1 – 4 of 4) sorted by relevance
kernel/sched/pelt.c
  302  scale_load_down(cfs_rq->load.weight), in __update_load_avg_cfs_rq()
  303  scale_load_down(cfs_rq->runnable_weight), in __update_load_avg_cfs_rq()
kernel/sched/sched.h
  125  # define scale_load_down(w) ((w) >> SCHED_FIXEDPOINT_SHIFT)  macro
  129  # define scale_load_down(w) (w)  macro
  692  return scale_load_down(se->load.weight); in se_weight()
  697  return scale_load_down(se->runnable_weight); in se_runnable()
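The two definitions at lines 125 and 129 are the whole mechanism: on 64-bit builds the scheduler stores weights shifted up by SCHED_FIXEDPOINT_SHIFT (10) for extra resolution and scale_load_down() undoes that shift wherever the user-visible value is needed, while on 32-bit builds both scale_load() and scale_load_down() are no-ops. A minimal standalone userspace sketch of that behaviour follows; the USE_HIGH_RES_LOAD switch stands in for the kernel's CONFIG_64BIT test and the main() harness is illustrative only, not kernel code.

/* Sketch of the scale_load()/scale_load_down() round trip (illustrative). */
#include <assert.h>
#include <stdio.h>

#define SCHED_FIXEDPOINT_SHIFT	10

#ifdef USE_HIGH_RES_LOAD			/* stands in for CONFIG_64BIT */
# define scale_load(w)		((w) << SCHED_FIXEDPOINT_SHIFT)
# define scale_load_down(w)	((w) >> SCHED_FIXEDPOINT_SHIFT)
#else						/* 32-bit: both are no-ops */
# define scale_load(w)		(w)
# define scale_load_down(w)	(w)
#endif

int main(void)
{
	unsigned long nice_0_weight = 1024;	/* nice-0 entry of sched_prio_to_weight */
	unsigned long stored = scale_load(nice_0_weight);

	/* The round trip recovers the user-visible weight on either build. */
	assert(scale_load_down(stored) == nice_0_weight);
	printf("stored=%lu user-visible=%lu\n", stored, scale_load_down(stored));
	return 0;
}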
kernel/sched/fair.c
  196  w = scale_load_down(lw->weight); in __update_inv_weight()
  220  u64 fact = scale_load_down(weight); in __calc_delta()
  711  sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
 2928  load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg); in calc_group_shares()
 2987  scale_load_down(cfs_rq->load.weight)); in calc_group_runnable()
 2990  scale_load_down(cfs_rq->runnable_weight)); in calc_group_runnable()
 3261  if (scale_load_down(gcfs_rq->load.weight)) { in update_tg_cfs_runnable()
 3263  scale_load_down(gcfs_rq->load.weight)); in update_tg_cfs_runnable()
 5700  unsigned long imbalance = scale_load_down(NICE_0_LOAD) * in find_idlest_group()
 8248  load_above_capacity *= scale_load_down(NICE_0_LOAD); in calculate_imbalance()
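The fair.c hits are the places where a high-resolution quantity (se->load.weight, cfs_rq->load.weight, NICE_0_LOAD) has to meet something kept at user-visible resolution: the PELT averages (line 711 seeds load_avg with the scaled-down weight) and the load-balancer's per-task baseline (lines 5700 and 8248 scale NICE_0_LOAD down before comparing it with load averages). The max() at line 2928 is a good example: the two operands live in different domains until scale_load_down() is applied. A sketch of that one comparison, with a made-up struct and made-up values, not kernel code:

/* Why calc_group_shares() scales down before taking max() (illustrative). */
#include <stdio.h>

#define SCHED_FIXEDPOINT_SHIFT	10
#define scale_load(w)		((unsigned long)(w) << SCHED_FIXEDPOINT_SHIFT)
#define scale_load_down(w)	((unsigned long)(w) >> SCHED_FIXEDPOINT_SHIFT)

struct cfs_rq_sketch {
	unsigned long weight;	/* high resolution, like cfs_rq->load.weight */
	unsigned long load_avg;	/* low resolution, like cfs_rq->avg.load_avg */
};

static unsigned long group_load(const struct cfs_rq_sketch *cfs_rq)
{
	unsigned long w = scale_load_down(cfs_rq->weight);

	/* Mirrors: load = max(scale_load_down(load.weight), avg.load_avg) */
	return w > cfs_rq->load_avg ? w : cfs_rq->load_avg;
}

int main(void)
{
	struct cfs_rq_sketch rq = {
		.weight   = scale_load(2048),	/* e.g. two nice-0 tasks enqueued */
		.load_avg = 700,		/* decayed PELT average */
	};

	printf("group load = %lu\n", group_load(&rq));	/* 2048, not 2097152 */
	return 0;
}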
kernel/sched/core.c
 6495  return (u64) scale_load_down(tg->shares); in cpu_shares_read_u64()
 6851  u64 weight = scale_load_down(tg->shares); in cpu_weight_read_u64()
 6877  unsigned long weight = scale_load_down(css_tg(css)->shares); in cpu_weight_nice_read_s64()
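The core.c hits are the cgroup interface boundary: tg->shares is stored at high resolution, so the cpu.shares / cpu.weight read handlers scale it back down before reporting it, and the write side stores the user value via scale_load() (cpu_shares_write_u64() passes scale_load(shareval) to sched_group_set_shares()). A sketch of that round trip only; the struct and helper names below are illustrative, not the kernel's:

/* cpu.shares write/read round trip (illustrative). */
#include <stdio.h>

#define SCHED_FIXEDPOINT_SHIFT	10
#define scale_load(w)		((unsigned long)(w) << SCHED_FIXEDPOINT_SHIFT)
#define scale_load_down(w)	((unsigned long)(w) >> SCHED_FIXEDPOINT_SHIFT)

struct task_group_sketch {
	unsigned long shares;			/* stored at high resolution */
};

static void cpu_shares_write(struct task_group_sketch *tg, unsigned long val)
{
	tg->shares = scale_load(val);		/* as the write handler stores it */
}

static unsigned long cpu_shares_read(const struct task_group_sketch *tg)
{
	return scale_load_down(tg->shares);	/* as at line 6495 above */
}

int main(void)
{
	struct task_group_sketch tg;

	cpu_shares_write(&tg, 2048);		/* echo 2048 > cpu.shares */
	printf("cpu.shares reads back as %lu\n", cpu_shares_read(&tg));
	return 0;
}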