Lines Matching full:sa

102 accumulate_sum(u64 delta, struct sched_avg *sa, in accumulate_sum() (argument)
108 delta += sa->period_contrib; in accumulate_sum()
115 sa->load_sum = decay_load(sa->load_sum, periods); in accumulate_sum()
116 sa->runnable_sum = in accumulate_sum()
117 decay_load(sa->runnable_sum, periods); in accumulate_sum()
118 sa->util_sum = decay_load((u64)(sa->util_sum), periods); in accumulate_sum()
136 1024 - sa->period_contrib, delta); in accumulate_sum()
139 sa->period_contrib = delta; in accumulate_sum()
142 sa->load_sum += load * contrib; in accumulate_sum()
144 sa->runnable_sum += runnable * contrib << SCHED_CAPACITY_SHIFT; in accumulate_sum()
146 sa->util_sum += contrib << SCHED_CAPACITY_SHIFT; in accumulate_sum()
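The accumulate_sum() lines above are PELT's accrual step: the incoming delta (in ~1us units) is folded into the partially filled segment, any fully elapsed 1024us periods first decay the existing load/runnable/util sums geometrically (decay_load()), and the newly elapsed time is then added back, weighted by load for load_sum and shifted by SCHED_CAPACITY_SHIFT for runnable_sum/util_sum. The sketch below mirrors that decay-then-accumulate flow for a single sum; the toy_* names, single-field state, and shift-based decay are simplifications for illustration, not the kernel's implementation (which uses a lookup-table decay_load() and __accumulate_pelt_segments() for the crossed segments).

#include <stdint.h>

#define PELT_PERIOD	1024u	/* one PELT segment, ~1ms expressed in ~1us units */
#define PELT_HALF_LIFE	32u	/* the kernel's y is chosen so that y^32 == 1/2    */

struct toy_avg {
	uint64_t sum;		/* geometrically decayed running sum         */
	uint32_t period_contrib;/* time already accrued in the open segment  */
};

/* Coarse stand-in for decay_load(): halve the sum once per 32 elapsed periods. */
static uint64_t toy_decay(uint64_t sum, uint64_t periods)
{
	uint64_t halvings = periods / PELT_HALF_LIFE;

	return halvings >= 64 ? 0 : sum >> halvings;
}

/* Mirror of the accumulate_sum() flow above, reduced to a single sum. */
static void toy_accumulate(struct toy_avg *a, uint64_t delta, int running)
{
	uint64_t periods;

	delta += a->period_contrib;
	periods = delta / PELT_PERIOD;

	if (periods) {
		/*
		 * Step 1: decay the old sum across the crossed period
		 * boundaries. (The kernel also adds the decayed contribution
		 * of those crossed segments via __accumulate_pelt_segments();
		 * that term is dropped here to keep the structure visible.)
		 */
		a->sum = toy_decay(a->sum, periods);
		delta %= PELT_PERIOD;
	}
	a->period_contrib = delta;

	/* Step 2: accrue the time spent in the still-open segment. */
	if (running)
		a->sum += delta;
}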
180 ___update_load_sum(u64 now, struct sched_avg *sa, in ___update_load_sum() (argument)
185 delta = now - sa->last_update_time; in ___update_load_sum()
191 sa->last_update_time = now; in ___update_load_sum()
203 sa->last_update_time += delta << 10; in ___update_load_sum()
226 if (!accumulate_sum(delta, sa, load, runnable, running)) in ___update_load_sum()
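___update_load_sum() is the entry point for these updates: it measures the nanoseconds elapsed since sa->last_update_time, resynchronizes and bails out if the clock appears to have gone backwards, converts the delta to 1024ns units (a cheap approximation of microseconds) with a right shift, and advances last_update_time only by the whole units it actually consumed before handing the delta to accumulate_sum(). A minimal sketch of just that time bookkeeping, assuming a simplified state struct (the toy_* names are illustrative):

#include <stdint.h>

struct toy_clock {
	uint64_t last_update_time;	/* ns timestamp of the previous update */
};

/*
 * Mirror of the delta handling in ___update_load_sum(): return the elapsed
 * time in 1024ns units (~1us) and advance last_update_time only by the whole
 * units consumed, so the sub-unit remainder is carried into the next call.
 */
static uint64_t toy_update_delta(struct toy_clock *c, uint64_t now)
{
	uint64_t delta = now - c->last_update_time;

	if ((int64_t)delta < 0) {	/* clock went backwards: just resync */
		c->last_update_time = now;
		return 0;
	}

	delta >>= 10;			/* ns -> ~us (1024ns units) */
	if (!delta)
		return 0;

	c->last_update_time += delta << 10;
	return delta;
}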
241 * LOAD_AVG_MAX*y + sa->period_contrib
245 * LOAD_AVG_MAX - 1024 + sa->period_contrib
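The two comment fragments above describe the divider used when the sums are turned into averages. Because LOAD_AVG_MAX is the limit of the series 1024 + 1024*y + 1024*y^2 + ..., it satisfies LOAD_AVG_MAX = 1024 + y*LOAD_AVG_MAX, so LOAD_AVG_MAX*y equals LOAD_AVG_MAX - 1024; adding sa->period_contrib accounts for the partially elapsed current segment. A hedged one-liner of that identity (47742 is the LOAD_AVG_MAX value used by PELT; the toy_* name is illustrative, the kernel computes this in get_pelt_divider()):

#include <stdint.h>

/*
 * Divider identity from the comment above: LOAD_AVG_MAX*y == LOAD_AVG_MAX - 1024,
 * because LOAD_AVG_MAX = 1024 + y*LOAD_AVG_MAX.
 */
static inline uint32_t toy_pelt_divider(uint32_t period_contrib)
{
	const uint32_t load_avg_max = 47742;	/* PELT's LOAD_AVG_MAX */

	return (load_avg_max - 1024) + period_contrib;
}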
257 ___update_load_avg(struct sched_avg *sa, unsigned long load) in ___update_load_avg() (argument)
259 u32 divider = get_pelt_divider(sa); in ___update_load_avg()
264 sa->load_avg = div_u64(load * sa->load_sum, divider); in ___update_load_avg()
265 sa->runnable_avg = div_u64(sa->runnable_sum, divider); in ___update_load_avg()
266 WRITE_ONCE(sa->util_avg, sa->util_sum / divider); in ___update_load_avg()
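___update_load_avg() is the second step: each decayed sum is divided by that divider so the averages land in their natural units (load weight for load_avg, capacity for runnable_avg and util_avg), and util_avg is stored with WRITE_ONCE() because other CPUs read it locklessly. A self-contained sketch of the same division, assuming toy_* types and an inlined divider in place of get_pelt_divider():

#include <stdint.h>

struct toy_sums {
	uint64_t load_sum;
	uint64_t runnable_sum;
	uint32_t util_sum;
	uint32_t period_contrib;
};

struct toy_avgs {
	uint64_t load_avg;
	uint64_t runnable_avg;
	uint64_t util_avg;
};

/*
 * Mirror of ___update_load_avg(): divide the decayed sums by the current
 * divider. The inlined (47742 - 1024 + period_contrib) stands in for
 * get_pelt_divider(); ordering concerns (WRITE_ONCE) are omitted.
 */
static void toy_update_avgs(const struct toy_sums *s, struct toy_avgs *a,
			    unsigned long load)
{
	uint32_t divider = (47742u - 1024u) + s->period_contrib;

	a->load_avg     = (load * s->load_sum) / divider;
	a->runnable_avg = s->runnable_sum / divider;
	a->util_avg     = s->util_sum / divider;
}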