Lines matching full:decay (code-search results from jemalloc's src/background_thread.c; the number at the start of each match is its line number in that file)
20 /* Indicates global state. Atomic because decay reads this w/o locking. */
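
The comment at line 20 describes a flag that the decay path reads without taking the background-thread lock. Below is a minimal, self-contained C11 sketch of that pattern; the names (bg_enabled, bg_enabled_get/set) and the memory orders are illustrative assumptions, not jemalloc's identifiers.

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Hypothetical global: state shared between the slow path that toggles
     * background threads (under a lock, elided here) and hot decay paths
     * that only ever read it. */
    static atomic_bool bg_enabled = false;

    static bool
    bg_enabled_get(void) {
        /* Lock-free read, cheap enough for every decay epoch check. */
        return atomic_load_explicit(&bg_enabled, memory_order_relaxed);
    }

    static void
    bg_enabled_set(bool enabled) {
        /* Writers are assumed to serialize on a separate mutex (not shown). */
        atomic_store_explicit(&bg_enabled, enabled, memory_order_release);
    }

    int
    main(void) {
        bg_enabled_set(true);
        return bg_enabled_get() ? 0 : 1;
    }

Making the flag atomic keeps the frequent read on the decay path from contending with the rarely taken state-change lock.
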
58 arena_decay_t *decay, size_t npages_new) NOT_REACHED
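
The NOT_REACHED suffix on the match at line 58 suggests a stub prototype whose body is generated by a macro when the background-thread feature is compiled out. A rough, self-contained illustration of that pattern follows; every name here is made up for the example.

    #include <stddef.h>
    #include <stdlib.h>

    #ifndef EXAMPLE_HAVE_BACKGROUND_THREAD
    /* Feature compiled out: keep the entry points so callers still link,
     * but make any call abort immediately. */
    #define NOT_REACHED { abort(); }
    void example_background_thread_interval_check(void *tsdn, void *arena,
        void *decay, size_t npages_new) NOT_REACHED
    void example_background_thread_create(void *tsd, unsigned arena_ind)
        NOT_REACHED
    #undef NOT_REACHED
    #endif

    int
    main(void) {
        /* The stubs are never called here; they only need to compile. */
        return 0;
    }
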
102 decay_npurge_after_interval(arena_decay_t *decay, size_t interval) {
106 sum += decay->backlog[i] * h_steps[i];
109 sum += decay->backlog[i] * (h_steps[i] - h_steps[i - interval]);
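
Lines 102 through 109 weight each backlog slot by a fixed-point smoothstep table to estimate how many dirty pages would be purged if a given number of decay epochs elapsed. The sketch below reproduces that arithmetic in a self-contained program; NSTEPS, BFP, the smoothstep polynomial, and the slot convention (newest pages at the highest index) are assumptions chosen for illustration, standing in for jemalloc's SMOOTHSTEP_NSTEPS, SMOOTHSTEP_BFP, and h_steps table.

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed constants for the sketch: a 200-slot backlog and a 24-bit
     * fixed-point scale. */
    #define NSTEPS 200
    #define BFP    24

    /* h_steps[i]: fixed-point fraction of slot i's pages still allowed to
     * remain unpurged.  Slot 0 is the oldest epoch, slot NSTEPS-1 the newest,
     * so the table rises from ~0 to 1.0 (scaled by 2^BFP). */
    static uint64_t h_steps[NSTEPS];

    static void
    h_steps_init(void) {
        for (size_t i = 0; i < NSTEPS; i++) {
            double x = (double)(i + 1) / NSTEPS;
            double s = x * x * (3.0 - 2.0 * x);   /* smoothstep polynomial */
            h_steps[i] = (uint64_t)(s * (double)(1ULL << BFP));
        }
    }

    /* Estimate pages purged if `interval` more decay epochs elapse. */
    static size_t
    npurge_after_interval(const size_t *backlog, size_t interval) {
        uint64_t sum = 0;
        size_t i = 0;
        /* The oldest slots age out of the window entirely within `interval`:
         * their whole remaining allowance is purged. */
        for (; i < interval && i < NSTEPS; i++) {
            sum += (uint64_t)backlog[i] * h_steps[i];
        }
        /* Younger slots just shift down by `interval` positions: only the
         * drop in allowance, h[i] - h[i - interval], is purged. */
        for (; i < NSTEPS; i++) {
            sum += (uint64_t)backlog[i] * (h_steps[i] - h_steps[i - interval]);
        }
        return (size_t)(sum >> BFP);   /* fixed point back to whole pages */
    }

    int
    main(void) {
        static size_t backlog[NSTEPS];
        h_steps_init();
        backlog[NSTEPS - 1] = 4096;    /* dirtied during the newest epoch */
        printf("purge after   1 epoch : %zu pages\n",
            npurge_after_interval(backlog, 1));
        printf("purge after 100 epochs: %zu pages\n",
            npurge_after_interval(backlog, 100));
        printf("purge after 200 epochs: %zu pages\n",
            npurge_after_interval(backlog, 200));
        return 0;
    }

Slots that fall out of the window contribute their entire remaining allowance, the rest contribute only the difference between their current and shifted table positions, so the estimate grows monotonically with the interval.
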
116 arena_decay_compute_purge_interval_impl(tsdn_t *tsdn, arena_decay_t *decay,
118 if (malloc_mutex_trylock(tsdn, &decay->mtx)) {
119 /* Use minimal interval if decay is contended. */
124 ssize_t decay_time = atomic_load_zd(&decay->time_ms, ATOMIC_RELAXED);
131 uint64_t decay_interval_ns = nstime_ns(&decay->interval);
137 if (decay->backlog[i] > 0) {
165 npurge_lb = decay_npurge_after_interval(decay, lb);
170 npurge_ub = decay_npurge_after_interval(decay, ub);
181 npurge = decay_npurge_after_interval(decay, target);
195 malloc_mutex_unlock(tsdn, &decay->mtx);
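
Lines 116 through 195 compute how long the background thread can sleep: fall back to a minimal interval if the decay mutex is contended, handle the disabled and empty-backlog cases, and otherwise binary-search for an interval whose predicted purge count reaches a threshold. The sketch below isolates that bisection; the function names, the threshold, and the linear toy estimator are assumptions for the example, not jemalloc's code.

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed signature: a monotone estimator of pages purged after
     * `interval` epochs (for instance, the backlog sum sketched earlier). */
    typedef size_t (*npurge_fn_t)(size_t interval, void *ctx);

    /* Pick a sleep length: an interval (in whole epochs, between 2 and
     * nsteps) whose predicted purge count reaches `threshold`, converted to
     * nanoseconds.  Mirrors the lb/ub/target bisection in the matches. */
    static uint64_t
    compute_purge_interval(npurge_fn_t npurge, void *ctx, size_t nsteps,
        size_t threshold, uint64_t epoch_ns) {
        size_t lb = 2;        /* sleep at least a couple of epochs */
        size_t ub = nsteps;   /* never sleep past a full decay window */

        size_t npurge_lb = npurge(lb, ctx);
        if (npurge_lb > threshold) {
            return epoch_ns * lb;    /* even the shortest sleep is enough */
        }
        size_t npurge_ub = npurge(ub, ctx);
        if (npurge_ub < threshold) {
            return epoch_ns * ub;    /* nothing urgent: take the max sleep */
        }

        /* Bisect until the bracket is tight in pages or in epochs. */
        while (npurge_lb + threshold < npurge_ub && lb + 2 < ub) {
            size_t target = (lb + ub) / 2;
            size_t n = npurge(target, ctx);
            if (n > threshold) {
                ub = target;
                npurge_ub = n;
            } else {
                lb = target;
                npurge_lb = n;
            }
        }
        return epoch_ns * (lb + ub) / 2;
    }

    /* Toy estimator for the demo: a constant purge rate per epoch. */
    static size_t
    linear_npurge(size_t interval, void *ctx) {
        return *(size_t *)ctx * interval;
    }

    int
    main(void) {
        size_t rate = 16;                        /* pages per epoch */
        uint64_t epoch_ns = 10u * 1000 * 1000;   /* 10 ms per epoch */
        uint64_t ns = compute_purge_interval(linear_npurge, &rate, 200, 1024,
            epoch_ns);
        printf("background thread sleeps %.1f ms\n", (double)ns / 1e6);
        return 0;
    }

Because the estimate is monotone in the interval, the bracketed search needs only on the order of log2(nsteps) probes of the estimator.
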
658 arena_decay_t *decay, size_t npages_new) {
674 if (malloc_mutex_trylock(tsdn, &decay->mtx)) {
678 ssize_t decay_time = atomic_load_zd(&decay->time_ms, ATOMIC_RELAXED);
683 uint64_t decay_interval_ns = nstime_ns(&decay->interval);
688 if (nstime_compare(&diff, &decay->epoch) <= 0) {
691 nstime_subtract(&diff, &decay->epoch);
734 malloc_mutex_unlock(tsdn, &decay->mtx);
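
Lines 658 through 734 appear to be the check run on the allocation/decay path when new dirty pages accumulate: it uses trylock so it never blocks, gives up if decay is disabled or if too little time has passed since the decay epoch, and otherwise accumulates pending purge work and signals the background thread once a threshold is crossed. A self-contained sketch of that shape follows; the struct layout, constants, and plain pthread signaling are assumptions for illustration and simpler than the real code.

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Assumed constants; jemalloc's thresholds differ. */
    #define MIN_WAKE_INTERVAL_NS (1000ULL * 1000)   /* 1 ms */
    #define NPAGES_THRESHOLD     1024

    typedef struct {
        pthread_mutex_t mtx;
        pthread_cond_t  cond;
        uint64_t        wakeup_time_ns;   /* when the sleeper plans to wake */
        size_t          npages_to_purge;  /* purge work queued since then */
    } bg_info_t;

    typedef struct {
        pthread_mutex_t mtx;
        uint64_t        epoch_ns;         /* start of current decay epoch */
        int64_t         decay_time_ms;    /* <= 0: purging eager or disabled */
    } decay_state_t;

    static void
    interval_check(bg_info_t *info, decay_state_t *decay, size_t npages_new) {
        /* Never block the application thread; a later call will retry. */
        if (pthread_mutex_trylock(&info->mtx) != 0) {
            return;
        }
        if (pthread_mutex_trylock(&decay->mtx) != 0) {
            goto unlock_info;
        }
        if (decay->decay_time_ms <= 0) {
            goto unlock_decay;            /* purging is eager or disabled */
        }
        /* Only consider waking early if the planned sleep stretches well
         * beyond the current decay epoch. */
        if (info->wakeup_time_ns <= decay->epoch_ns ||
            info->wakeup_time_ns - decay->epoch_ns < MIN_WAKE_INTERVAL_NS) {
            goto unlock_decay;
        }
        info->npages_to_purge += npages_new;
        if (info->npages_to_purge > NPAGES_THRESHOLD) {
            info->npages_to_purge = 0;
            pthread_cond_signal(&info->cond);   /* wake the sleeper early */
        }
    unlock_decay:
        pthread_mutex_unlock(&decay->mtx);
    unlock_info:
        pthread_mutex_unlock(&info->mtx);
    }

    int
    main(void) {
        static bg_info_t info = {
            .mtx = PTHREAD_MUTEX_INITIALIZER,
            .cond = PTHREAD_COND_INITIALIZER,
            .wakeup_time_ns = 50ULL * 1000 * 1000,   /* plans to sleep 50 ms */
            .npages_to_purge = 0,
        };
        static decay_state_t decay = {
            .mtx = PTHREAD_MUTEX_INITIALIZER,
            .epoch_ns = 0,
            .decay_time_ms = 10 * 1000,              /* 10 s decay time */
        };
        interval_check(&info, &decay, 2048);   /* 2048 newly dirty pages */
        printf("pending pages after check: %zu\n", info.npages_to_purge);
        return 0;
    }

Taking both mutexes with trylock keeps the cost bounded on the application thread; a skipped check is harmless because a later allocation tries again.
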