Lines Matching full:count

70 fbc->count = amount; in percpu_counter_set()
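
Matched line 70 is the tail of percpu_counter_set(), which resets every per-CPU delta and installs the new value under the counter's lock. A minimal sketch of the likely surrounding code; the zeroing loop and the locking are reconstructed from context, not part of the match:

void percpu_counter_set(struct percpu_counter *fbc, s64 amount)
{
        unsigned long flags;
        int cpu;

        raw_spin_lock_irqsave(&fbc->lock, flags);
        for_each_possible_cpu(cpu) {
                s32 *pcount = per_cpu_ptr(fbc->counters, cpu);
                *pcount = 0;                    /* drop stale per-CPU deltas */
        }
        fbc->count = amount;                    /* matched line 70 */
        raw_spin_unlock_irqrestore(&fbc->lock, flags);
}
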
95 s64 count; in percpu_counter_add_batch() local
98 count = this_cpu_read(*fbc->counters); in percpu_counter_add_batch()
100 if (unlikely(abs(count + amount) >= batch)) { in percpu_counter_add_batch()
106 count = __this_cpu_read(*fbc->counters); in percpu_counter_add_batch()
107 fbc->count += count + amount; in percpu_counter_add_batch()
108 __this_cpu_sub(*fbc->counters, count); in percpu_counter_add_batch()
112 } while (!this_cpu_try_cmpxchg(*fbc->counters, &count, count + amount)); in percpu_counter_add_batch()
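
Lines 95-112 come from the lockless build of percpu_counter_add_batch(): a this_cpu_try_cmpxchg() loop adds the amount to the per-CPU delta and falls back to the spinlocked slow path once the combined delta would reach the batch. A hedged reconstruction; the loop skeleton and the lock calls around the matched lines are assumptions:

void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch)
{
        s64 count;
        unsigned long flags;

        count = this_cpu_read(*fbc->counters);
        do {
                if (unlikely(abs(count + amount) >= batch)) {
                        /* Slow path: fold delta plus amount into fbc->count. */
                        raw_spin_lock_irqsave(&fbc->lock, flags);
                        count = __this_cpu_read(*fbc->counters);
                        fbc->count += count + amount;
                        __this_cpu_sub(*fbc->counters, count);
                        raw_spin_unlock_irqrestore(&fbc->lock, flags);
                        return;
                }
                /* Fast path: retry until the per-CPU slot updates atomically. */
        } while (!this_cpu_try_cmpxchg(*fbc->counters, &count, count + amount));
}
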
122 s64 count; in percpu_counter_add_batch() local
126 count = __this_cpu_read(*fbc->counters) + amount; in percpu_counter_add_batch()
127 if (abs(count) >= batch) { in percpu_counter_add_batch()
129 fbc->count += count; in percpu_counter_add_batch()
130 __this_cpu_sub(*fbc->counters, count - amount); in percpu_counter_add_batch()
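
Lines 122-130 are the other build of the same function, which masks interrupts instead of using cmpxchg. Sketch, with the else branch and the irq bracketing assumed:

void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch)
{
        s64 count;
        unsigned long flags;

        local_irq_save(flags);
        count = __this_cpu_read(*fbc->counters) + amount;
        if (abs(count) >= batch) {
                raw_spin_lock(&fbc->lock);
                fbc->count += count;            /* flush into the global count */
                __this_cpu_sub(*fbc->counters, count - amount);
                raw_spin_unlock(&fbc->lock);
        } else {
                this_cpu_add(*fbc->counters, amount);   /* stay on the fast path */
        }
        local_irq_restore(flags);
}
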
141 * For percpu_counter with a big batch, the deviation of its count could
149 s64 count; in percpu_counter_sync() local
152 count = __this_cpu_read(*fbc->counters); in percpu_counter_sync()
153 fbc->count += count; in percpu_counter_sync()
154 __this_cpu_sub(*fbc->counters, count); in percpu_counter_sync()
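
Lines 149-154 flush the calling CPU's delta into the shared count; per the comment at line 141, percpu_counter_sync() exists so a counter whose batch is decreased at runtime can be tightened by running this on each CPU. Sketch with the locking reconstructed:

void percpu_counter_sync(struct percpu_counter *fbc)
{
        unsigned long flags;
        s64 count;

        raw_spin_lock_irqsave(&fbc->lock, flags);
        count = __this_cpu_read(*fbc->counters);
        fbc->count += count;                    /* fold this CPU's delta in */
        __this_cpu_sub(*fbc->counters, count);  /* and zero the local slot */
        raw_spin_unlock_irqrestore(&fbc->lock, flags);
}
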
166 * notifier called to fold the percpu count back into the global counter sum.
178 ret = fbc->count; in __percpu_counter_sum()
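
Line 178 seeds the precise sum with the shared count, and the comment at line 166 explains why CPUs on their way offline must still be walked: their deltas may not have been folded back yet. A sketch of the summation; the exact cpumask iteration is an assumption and varies across kernel versions:

s64 __percpu_counter_sum(struct percpu_counter *fbc)
{
        unsigned long flags;
        s64 ret;
        int cpu;

        raw_spin_lock_irqsave(&fbc->lock, flags);
        ret = fbc->count;                       /* matched line 178 */
        /* Include dying CPUs whose deltas were not folded back yet. */
        for_each_cpu_or(cpu, cpu_online_mask, cpu_dying_mask)
                ret += *per_cpu_ptr(fbc->counters, cpu);
        raw_spin_unlock_irqrestore(&fbc->lock, flags);
        return ret;
}
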
211 fbc[i].count = amount; in __percpu_counter_init_many()
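
Line 211 initializes each counter's shared count inside the setup loop of __percpu_counter_init_many(). A reduced sketch of that loop; the shared percpu allocation, the counter_size stride, and all error handling are assumptions omitted here:

/* inside __percpu_counter_init_many(fbc, amount, gfp, nr_counters, key) */
for (i = 0; i < nr_counters; i++) {
        raw_spin_lock_init(&fbc[i].lock);
        fbc[i].count = amount;                  /* matched line 211 */
        /* Each counter gets its slice of one percpu allocation. */
        fbc[i].counters = (void __percpu *)counters + i * counter_size;
}
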
279 fbc->count += *pcount; in percpu_counter_cpu_dead()
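
Line 279 is the hotplug fold-back: when a CPU dies, percpu_counter_cpu_dead() merges that CPU's delta for every registered counter into the shared count. Sketch; the global list walk and the lock names are reconstructed:

static int percpu_counter_cpu_dead(unsigned int cpu)
{
        struct percpu_counter *fbc;

        spin_lock_irq(&percpu_counters_lock);
        list_for_each_entry(fbc, &percpu_counters, list) {
                s32 *pcount;

                raw_spin_lock(&fbc->lock);
                pcount = per_cpu_ptr(fbc->counters, cpu);
                fbc->count += *pcount;          /* matched line 279 */
                *pcount = 0;
                raw_spin_unlock(&fbc->lock);
        }
        spin_unlock_irq(&percpu_counters_lock);
        return 0;
}
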
294 s64 count; in __percpu_counter_compare() local
296 count = percpu_counter_read(fbc); in __percpu_counter_compare()
297 /* Check to see if rough count will be sufficient for comparison */ in __percpu_counter_compare()
298 if (abs(count - rhs) > (batch * num_online_cpus())) { in __percpu_counter_compare()
299 if (count > rhs) in __percpu_counter_compare()
304 /* Need to use precise count */ in __percpu_counter_compare()
305 count = percpu_counter_sum(fbc); in __percpu_counter_compare()
306 if (count > rhs) in __percpu_counter_compare()
308 else if (count < rhs) in __percpu_counter_compare()
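
Lines 294-308 implement the two-stage compare: the cheap percpu_counter_read() is trusted only when it differs from rhs by more than the worst-case error hidden in the per-CPU deltas (batch * num_online_cpus()); otherwise the precise but slow sum decides. Reconstructed sketch:

int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch)
{
        s64 count;

        count = percpu_counter_read(fbc);
        /* Check to see if rough count will be sufficient for comparison */
        if (abs(count - rhs) > (batch * num_online_cpus()))
                return count > rhs ? 1 : -1;

        /* Need to use precise count */
        count = percpu_counter_sum(fbc);
        if (count > rhs)
                return 1;
        if (count < rhs)
                return -1;
        return 0;
}
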
330 s64 count; in __percpu_counter_limited_add() local
340 count = __this_cpu_read(*fbc->counters); in __percpu_counter_limited_add()
343 if (abs(count + amount) <= batch && in __percpu_counter_limited_add()
344 ((amount > 0 && fbc->count + unknown <= limit) || in __percpu_counter_limited_add()
345 (amount < 0 && fbc->count - unknown >= limit))) { in __percpu_counter_limited_add()
352 count = fbc->count + amount; in __percpu_counter_limited_add()
356 if (count - unknown > limit) in __percpu_counter_limited_add()
358 if (count + unknown <= limit) in __percpu_counter_limited_add()
361 if (count + unknown < limit) in __percpu_counter_limited_add()
363 if (count - unknown >= limit) in __percpu_counter_limited_add()
373 count += *pcount; in __percpu_counter_limited_add()
376 if (count > limit) in __percpu_counter_limited_add()
379 if (count < limit) in __percpu_counter_limited_add()
385 count = __this_cpu_read(*fbc->counters); in __percpu_counter_limited_add()
386 fbc->count += count + amount; in __percpu_counter_limited_add()
387 __this_cpu_sub(*fbc->counters, count); in __percpu_counter_limited_add()
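
Lines 330-387 belong to __percpu_counter_limited_add(), which applies the amount only when the counter provably stays on the right side of the limit (an upper bound for positive amounts, a lower bound for negative ones); "unknown" bounds the error hidden in the other CPUs' deltas. A hedged reconstruction of the whole function; names outside the matched lines (such as the "good" flag) are assumptions:

bool __percpu_counter_limited_add(struct percpu_counter *fbc,
                                  s64 limit, s64 amount, s32 batch)
{
        s64 count, unknown;
        unsigned long flags;
        bool good = false;

        if (amount == 0)
                return true;

        local_irq_save(flags);
        unknown = batch * num_online_cpus();    /* worst-case unseen delta */
        count = __this_cpu_read(*fbc->counters);

        /* Lockless fast path: the limit provably cannot be crossed. */
        if (abs(count + amount) <= batch &&
            ((amount > 0 && fbc->count + unknown <= limit) ||
             (amount < 0 && fbc->count - unknown >= limit))) {
                this_cpu_add(*fbc->counters, amount);
                local_irq_restore(flags);
                return true;
        }

        raw_spin_lock(&fbc->lock);
        count = fbc->count + amount;

        /* Decide from the shared count alone when the error bound allows. */
        if (amount > 0) {
                if (count - unknown > limit)
                        goto out;
                if (count + unknown <= limit)
                        good = true;
        } else {
                if (count + unknown < limit)
                        goto out;
                if (count - unknown >= limit)
                        good = true;
        }

        if (!good) {
                int cpu;

                /* Fall back to a precise sum over all relevant CPUs. */
                for_each_cpu_or(cpu, cpu_online_mask, cpu_dying_mask)
                        count += *per_cpu_ptr(fbc->counters, cpu);
                if (amount > 0 ? count > limit : count < limit)
                        goto out;
                good = true;
        }

        /* Commit: fold this CPU's delta plus the amount into fbc->count. */
        count = __this_cpu_read(*fbc->counters);
        fbc->count += count + amount;
        __this_cpu_sub(*fbc->counters, count);
out:
        raw_spin_unlock(&fbc->lock);
        local_irq_restore(flags);
        return good;
}
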