Searched refs:load_avg (Results 1 – 5 of 5) sorted by relevance
| /linux/kernel/sched/ |
| H A D | pelt.h | 20 return READ_ONCE(rq->avg_hw.load_avg); in hw_load_avg()
|
| H A D | fair.c | 1161 sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
|       |        | 1219 sa->util_avg /= (cfs_rq->avg.load_avg + 1); in post_init_entity_util_avg()
|       |        | 3832 __update_sa(&cfs_rq->avg, load, se->avg.load_avg, in enqueue_load_avg()
|       |        | 3839 __update_sa(&cfs_rq->avg, load, -se->avg.load_avg, in dequeue_load_avg()
|       |        | 3886 se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider); in reweight_entity()
|       |        | 3995 load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg); in calc_group_shares()
|       |        | 3997 tg_weight = atomic_long_read(&tg->load_avg); in calc_group_shares()
|       |        | 4088 WARN_ON_ONCE(sa->load_avg || in load_avg_is_decayed()
|       |        | 4183 delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib; in update_tg_load_avg()
|       |        | 4185 atomic_long_add(delta, &cfs_rq->tg->load_avg); in update_tg_load_avg()
|       |        | [all …]
|
| H A D | pelt.c | 265 sa->load_avg = div_u64(load * sa->load_sum, divider); in ___update_load_avg()
|
| H A D | sched.h | 493 atomic_long_t load_avg ____cacheline_aligned;
|       |         | 713 unsigned long load_avg; member
|
| /linux/include/linux/ |
| H A D | sched.h | 516 unsigned long load_avg; member
|