1a4bd5210SJason Evans #define JEMALLOC_EXTENT_C_ 2*b7eaed25SJason Evans #include "jemalloc/internal/jemalloc_preamble.h" 3*b7eaed25SJason Evans #include "jemalloc/internal/jemalloc_internal_includes.h" 4*b7eaed25SJason Evans 5*b7eaed25SJason Evans #include "jemalloc/internal/assert.h" 6*b7eaed25SJason Evans #include "jemalloc/internal/extent_dss.h" 7*b7eaed25SJason Evans #include "jemalloc/internal/extent_mmap.h" 8*b7eaed25SJason Evans #include "jemalloc/internal/ph.h" 9*b7eaed25SJason Evans #include "jemalloc/internal/rtree.h" 10*b7eaed25SJason Evans #include "jemalloc/internal/mutex.h" 11*b7eaed25SJason Evans #include "jemalloc/internal/mutex_pool.h" 12a4bd5210SJason Evans 13a4bd5210SJason Evans /******************************************************************************/ 14*b7eaed25SJason Evans /* Data. */ 15*b7eaed25SJason Evans 16*b7eaed25SJason Evans rtree_t extents_rtree; 17*b7eaed25SJason Evans /* Keyed by the address of the extent_t being protected. */ 18*b7eaed25SJason Evans mutex_pool_t extent_mutex_pool; 19*b7eaed25SJason Evans 20*b7eaed25SJason Evans static const bitmap_info_t extents_bitmap_info = 21*b7eaed25SJason Evans BITMAP_INFO_INITIALIZER(NPSIZES+1); 22*b7eaed25SJason Evans 23*b7eaed25SJason Evans static void *extent_alloc_default(extent_hooks_t *extent_hooks, void *new_addr, 24*b7eaed25SJason Evans size_t size, size_t alignment, bool *zero, bool *commit, 25*b7eaed25SJason Evans unsigned arena_ind); 26*b7eaed25SJason Evans static bool extent_dalloc_default(extent_hooks_t *extent_hooks, void *addr, 27*b7eaed25SJason Evans size_t size, bool committed, unsigned arena_ind); 28*b7eaed25SJason Evans static void extent_destroy_default(extent_hooks_t *extent_hooks, void *addr, 29*b7eaed25SJason Evans size_t size, bool committed, unsigned arena_ind); 30*b7eaed25SJason Evans static bool extent_commit_default(extent_hooks_t *extent_hooks, void *addr, 31*b7eaed25SJason Evans size_t size, size_t offset, size_t length, unsigned arena_ind); 
32*b7eaed25SJason Evans static bool extent_commit_impl(tsdn_t *tsdn, arena_t *arena, 33*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset, 34*b7eaed25SJason Evans size_t length, bool growing_retained); 35*b7eaed25SJason Evans static bool extent_decommit_default(extent_hooks_t *extent_hooks, 36*b7eaed25SJason Evans void *addr, size_t size, size_t offset, size_t length, unsigned arena_ind); 37*b7eaed25SJason Evans #ifdef PAGES_CAN_PURGE_LAZY 38*b7eaed25SJason Evans static bool extent_purge_lazy_default(extent_hooks_t *extent_hooks, void *addr, 39*b7eaed25SJason Evans size_t size, size_t offset, size_t length, unsigned arena_ind); 40*b7eaed25SJason Evans #endif 41*b7eaed25SJason Evans static bool extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena, 42*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset, 43*b7eaed25SJason Evans size_t length, bool growing_retained); 44*b7eaed25SJason Evans #ifdef PAGES_CAN_PURGE_FORCED 45*b7eaed25SJason Evans static bool extent_purge_forced_default(extent_hooks_t *extent_hooks, 46*b7eaed25SJason Evans void *addr, size_t size, size_t offset, size_t length, unsigned arena_ind); 47*b7eaed25SJason Evans #endif 48*b7eaed25SJason Evans static bool extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena, 49*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset, 50*b7eaed25SJason Evans size_t length, bool growing_retained); 51*b7eaed25SJason Evans #ifdef JEMALLOC_MAPS_COALESCE 52*b7eaed25SJason Evans static bool extent_split_default(extent_hooks_t *extent_hooks, void *addr, 53*b7eaed25SJason Evans size_t size, size_t size_a, size_t size_b, bool committed, 54*b7eaed25SJason Evans unsigned arena_ind); 55*b7eaed25SJason Evans #endif 56*b7eaed25SJason Evans static extent_t *extent_split_impl(tsdn_t *tsdn, arena_t *arena, 57*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extent_t *extent, size_t size_a, 58*b7eaed25SJason Evans szind_t szind_a, 
    bool slab_a, size_t size_b, szind_t szind_b, bool slab_b,
    bool growing_retained);
#ifdef JEMALLOC_MAPS_COALESCE
static bool extent_merge_default(extent_hooks_t *extent_hooks, void *addr_a,
    size_t size_a, void *addr_b, size_t size_b, bool committed,
    unsigned arena_ind);
#endif
static bool extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b,
    bool growing_retained);

/*
 * Default extent hooks.  The purge entries are NULL when the platform lacks
 * the corresponding purge capability; the split/merge entries are omitted
 * entirely (left out of the initializer) unless JEMALLOC_MAPS_COALESCE.
 */
const extent_hooks_t extent_hooks_default = {
	extent_alloc_default,
	extent_dalloc_default,
	extent_destroy_default,
	extent_commit_default,
	extent_decommit_default
#ifdef PAGES_CAN_PURGE_LAZY
	,
	extent_purge_lazy_default
#else
	,
	NULL
#endif
#ifdef PAGES_CAN_PURGE_FORCED
	,
	extent_purge_forced_default
#else
	,
	NULL
#endif
#ifdef JEMALLOC_MAPS_COALESCE
	,
	extent_split_default,
	extent_merge_default
#endif
};

/* Used exclusively for gdump triggering.
*/ 97*b7eaed25SJason Evans static atomic_zu_t curpages; 98*b7eaed25SJason Evans static atomic_zu_t highpages; 99*b7eaed25SJason Evans 100*b7eaed25SJason Evans /******************************************************************************/ 101*b7eaed25SJason Evans /* 102*b7eaed25SJason Evans * Function prototypes for static functions that are referenced prior to 103*b7eaed25SJason Evans * definition. 104*b7eaed25SJason Evans */ 105*b7eaed25SJason Evans 106*b7eaed25SJason Evans static void extent_deregister(tsdn_t *tsdn, extent_t *extent); 107*b7eaed25SJason Evans static extent_t *extent_recycle(tsdn_t *tsdn, arena_t *arena, 108*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extents_t *extents, void *new_addr, 109*b7eaed25SJason Evans size_t usize, size_t pad, size_t alignment, bool slab, szind_t szind, 110*b7eaed25SJason Evans bool *zero, bool *commit, bool growing_retained); 111*b7eaed25SJason Evans static extent_t *extent_try_coalesce(tsdn_t *tsdn, arena_t *arena, 112*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, rtree_ctx_t *rtree_ctx, extents_t *extents, 113*b7eaed25SJason Evans extent_t *extent, bool *coalesced, bool growing_retained); 114*b7eaed25SJason Evans static void extent_record(tsdn_t *tsdn, arena_t *arena, 115*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extents_t *extents, extent_t *extent, 116*b7eaed25SJason Evans bool growing_retained); 117*b7eaed25SJason Evans 118*b7eaed25SJason Evans /******************************************************************************/ 119*b7eaed25SJason Evans 120*b7eaed25SJason Evans rb_gen(UNUSED, extent_avail_, extent_tree_t, extent_t, rb_link, 121*b7eaed25SJason Evans extent_esnead_comp) 122*b7eaed25SJason Evans 123*b7eaed25SJason Evans typedef enum { 124*b7eaed25SJason Evans lock_result_success, 125*b7eaed25SJason Evans lock_result_failure, 126*b7eaed25SJason Evans lock_result_no_extent 127*b7eaed25SJason Evans } lock_result_t; 128*b7eaed25SJason Evans 129*b7eaed25SJason Evans static 
lock_result_t 130*b7eaed25SJason Evans extent_rtree_leaf_elm_try_lock(tsdn_t *tsdn, rtree_leaf_elm_t *elm, 131*b7eaed25SJason Evans extent_t **result) { 132*b7eaed25SJason Evans extent_t *extent1 = rtree_leaf_elm_extent_read(tsdn, &extents_rtree, 133*b7eaed25SJason Evans elm, true); 134*b7eaed25SJason Evans 135*b7eaed25SJason Evans if (extent1 == NULL) { 136*b7eaed25SJason Evans return lock_result_no_extent; 137*b7eaed25SJason Evans } 138*b7eaed25SJason Evans /* 139*b7eaed25SJason Evans * It's possible that the extent changed out from under us, and with it 140*b7eaed25SJason Evans * the leaf->extent mapping. We have to recheck while holding the lock. 141*b7eaed25SJason Evans */ 142*b7eaed25SJason Evans extent_lock(tsdn, extent1); 143*b7eaed25SJason Evans extent_t *extent2 = rtree_leaf_elm_extent_read(tsdn, 144*b7eaed25SJason Evans &extents_rtree, elm, true); 145*b7eaed25SJason Evans 146*b7eaed25SJason Evans if (extent1 == extent2) { 147*b7eaed25SJason Evans *result = extent1; 148*b7eaed25SJason Evans return lock_result_success; 149*b7eaed25SJason Evans } else { 150*b7eaed25SJason Evans extent_unlock(tsdn, extent1); 151*b7eaed25SJason Evans return lock_result_failure; 152*b7eaed25SJason Evans } 153*b7eaed25SJason Evans } 154*b7eaed25SJason Evans 155*b7eaed25SJason Evans /* 156*b7eaed25SJason Evans * Returns a pool-locked extent_t * if there's one associated with the given 157*b7eaed25SJason Evans * address, and NULL otherwise. 
158*b7eaed25SJason Evans */ 159*b7eaed25SJason Evans static extent_t * 160*b7eaed25SJason Evans extent_lock_from_addr(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx, void *addr) { 161*b7eaed25SJason Evans extent_t *ret = NULL; 162*b7eaed25SJason Evans rtree_leaf_elm_t *elm = rtree_leaf_elm_lookup(tsdn, &extents_rtree, 163*b7eaed25SJason Evans rtree_ctx, (uintptr_t)addr, false, false); 164*b7eaed25SJason Evans if (elm == NULL) { 165*b7eaed25SJason Evans return NULL; 166*b7eaed25SJason Evans } 167*b7eaed25SJason Evans lock_result_t lock_result; 168*b7eaed25SJason Evans do { 169*b7eaed25SJason Evans lock_result = extent_rtree_leaf_elm_try_lock(tsdn, elm, &ret); 170*b7eaed25SJason Evans } while (lock_result == lock_result_failure); 171*b7eaed25SJason Evans return ret; 172*b7eaed25SJason Evans } 173*b7eaed25SJason Evans 174*b7eaed25SJason Evans extent_t * 175*b7eaed25SJason Evans extent_alloc(tsdn_t *tsdn, arena_t *arena) { 176*b7eaed25SJason Evans malloc_mutex_lock(tsdn, &arena->extent_avail_mtx); 177*b7eaed25SJason Evans extent_t *extent = extent_avail_first(&arena->extent_avail); 178*b7eaed25SJason Evans if (extent == NULL) { 179*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); 180*b7eaed25SJason Evans return base_alloc_extent(tsdn, arena->base); 181*b7eaed25SJason Evans } 182*b7eaed25SJason Evans extent_avail_remove(&arena->extent_avail, extent); 183*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); 184*b7eaed25SJason Evans return extent; 185*b7eaed25SJason Evans } 186*b7eaed25SJason Evans 187*b7eaed25SJason Evans void 188*b7eaed25SJason Evans extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent) { 189*b7eaed25SJason Evans malloc_mutex_lock(tsdn, &arena->extent_avail_mtx); 190*b7eaed25SJason Evans extent_avail_insert(&arena->extent_avail, extent); 191*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); 192*b7eaed25SJason Evans } 193*b7eaed25SJason Evans 194*b7eaed25SJason Evans extent_hooks_t * 
195*b7eaed25SJason Evans extent_hooks_get(arena_t *arena) { 196*b7eaed25SJason Evans return base_extent_hooks_get(arena->base); 197*b7eaed25SJason Evans } 198*b7eaed25SJason Evans 199*b7eaed25SJason Evans extent_hooks_t * 200*b7eaed25SJason Evans extent_hooks_set(tsd_t *tsd, arena_t *arena, extent_hooks_t *extent_hooks) { 201*b7eaed25SJason Evans background_thread_info_t *info; 202*b7eaed25SJason Evans if (have_background_thread) { 203*b7eaed25SJason Evans info = arena_background_thread_info_get(arena); 204*b7eaed25SJason Evans malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx); 205*b7eaed25SJason Evans } 206*b7eaed25SJason Evans extent_hooks_t *ret = base_extent_hooks_set(arena->base, extent_hooks); 207*b7eaed25SJason Evans if (have_background_thread) { 208*b7eaed25SJason Evans malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); 209*b7eaed25SJason Evans } 210*b7eaed25SJason Evans 211*b7eaed25SJason Evans return ret; 212*b7eaed25SJason Evans } 213*b7eaed25SJason Evans 214*b7eaed25SJason Evans static void 215*b7eaed25SJason Evans extent_hooks_assure_initialized(arena_t *arena, 216*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks) { 217*b7eaed25SJason Evans if (*r_extent_hooks == EXTENT_HOOKS_INITIALIZER) { 218*b7eaed25SJason Evans *r_extent_hooks = extent_hooks_get(arena); 219*b7eaed25SJason Evans } 220*b7eaed25SJason Evans } 221a4bd5210SJason Evans 2228244f2aaSJason Evans #ifndef JEMALLOC_JET 2238244f2aaSJason Evans static 2248244f2aaSJason Evans #endif 2258244f2aaSJason Evans size_t 2268244f2aaSJason Evans extent_size_quantize_floor(size_t size) { 2277fa7f12fSJason Evans size_t ret; 228*b7eaed25SJason Evans pszind_t pind; 229d0e79aa3SJason Evans 2307fa7f12fSJason Evans assert(size > 0); 231*b7eaed25SJason Evans assert((size & PAGE_MASK) == 0); 2327fa7f12fSJason Evans 233*b7eaed25SJason Evans pind = sz_psz2ind(size - sz_large_pad + 1); 234*b7eaed25SJason Evans if (pind == 0) { 235*b7eaed25SJason Evans /* 236*b7eaed25SJason Evans * Avoid underflow. 
This short-circuit would also do the right 237*b7eaed25SJason Evans * thing for all sizes in the range for which there are 238*b7eaed25SJason Evans * PAGE-spaced size classes, but it's simplest to just handle 239*b7eaed25SJason Evans * the one case that would cause erroneous results. 240*b7eaed25SJason Evans */ 241*b7eaed25SJason Evans return size; 242d0e79aa3SJason Evans } 243*b7eaed25SJason Evans ret = sz_pind2sz(pind - 1) + sz_large_pad; 2447fa7f12fSJason Evans assert(ret <= size); 245*b7eaed25SJason Evans return ret; 246a4bd5210SJason Evans } 247a4bd5210SJason Evans 248*b7eaed25SJason Evans #ifndef JEMALLOC_JET 249*b7eaed25SJason Evans static 250*b7eaed25SJason Evans #endif 2518244f2aaSJason Evans size_t 2528244f2aaSJason Evans extent_size_quantize_ceil(size_t size) { 2538244f2aaSJason Evans size_t ret; 2548244f2aaSJason Evans 2558244f2aaSJason Evans assert(size > 0); 256*b7eaed25SJason Evans assert(size - sz_large_pad <= LARGE_MAXCLASS); 257*b7eaed25SJason Evans assert((size & PAGE_MASK) == 0); 2588244f2aaSJason Evans 2598244f2aaSJason Evans ret = extent_size_quantize_floor(size); 2608244f2aaSJason Evans if (ret < size) { 2618244f2aaSJason Evans /* 2628244f2aaSJason Evans * Skip a quantization that may have an adequately large extent, 2638244f2aaSJason Evans * because under-sized extents may be mixed in. This only 2648244f2aaSJason Evans * happens when an unusual size is requested, i.e. for aligned 2658244f2aaSJason Evans * allocation, and is just one of several places where linear 2668244f2aaSJason Evans * search would potentially find sufficiently aligned available 2678244f2aaSJason Evans * memory somewhere lower. 2688244f2aaSJason Evans */ 269*b7eaed25SJason Evans ret = sz_pind2sz(sz_psz2ind(ret - sz_large_pad + 1)) + 270*b7eaed25SJason Evans sz_large_pad; 2718244f2aaSJason Evans } 2728244f2aaSJason Evans return ret; 2738244f2aaSJason Evans } 2748244f2aaSJason Evans 275*b7eaed25SJason Evans /* Generate pairing heap functions. 
*/ 276*b7eaed25SJason Evans ph_gen(, extent_heap_, extent_heap_t, extent_t, ph_link, extent_snad_comp) 2777fa7f12fSJason Evans 278*b7eaed25SJason Evans bool 279*b7eaed25SJason Evans extents_init(tsdn_t *tsdn, extents_t *extents, extent_state_t state, 280*b7eaed25SJason Evans bool delay_coalesce) { 281*b7eaed25SJason Evans if (malloc_mutex_init(&extents->mtx, "extents", WITNESS_RANK_EXTENTS, 282*b7eaed25SJason Evans malloc_mutex_rank_exclusive)) { 283*b7eaed25SJason Evans return true; 284*b7eaed25SJason Evans } 285*b7eaed25SJason Evans for (unsigned i = 0; i < NPSIZES+1; i++) { 286*b7eaed25SJason Evans extent_heap_new(&extents->heaps[i]); 287*b7eaed25SJason Evans } 288*b7eaed25SJason Evans bitmap_init(extents->bitmap, &extents_bitmap_info, true); 289*b7eaed25SJason Evans extent_list_init(&extents->lru); 290*b7eaed25SJason Evans atomic_store_zu(&extents->npages, 0, ATOMIC_RELAXED); 291*b7eaed25SJason Evans extents->state = state; 292*b7eaed25SJason Evans extents->delay_coalesce = delay_coalesce; 293*b7eaed25SJason Evans return false; 2947fa7f12fSJason Evans } 2957fa7f12fSJason Evans 296*b7eaed25SJason Evans extent_state_t 297*b7eaed25SJason Evans extents_state_get(const extents_t *extents) { 298*b7eaed25SJason Evans return extents->state; 2997fa7f12fSJason Evans } 300a4bd5210SJason Evans 301*b7eaed25SJason Evans size_t 302*b7eaed25SJason Evans extents_npages_get(extents_t *extents) { 303*b7eaed25SJason Evans return atomic_load_zu(&extents->npages, ATOMIC_RELAXED); 304a4bd5210SJason Evans } 305a4bd5210SJason Evans 306*b7eaed25SJason Evans static void 307*b7eaed25SJason Evans extents_insert_locked(tsdn_t *tsdn, extents_t *extents, extent_t *extent, 308*b7eaed25SJason Evans bool preserve_lru) { 309*b7eaed25SJason Evans malloc_mutex_assert_owner(tsdn, &extents->mtx); 310*b7eaed25SJason Evans assert(extent_state_get(extent) == extents->state); 3117fa7f12fSJason Evans 312*b7eaed25SJason Evans size_t size = extent_size_get(extent); 313*b7eaed25SJason Evans size_t psz = 
extent_size_quantize_floor(size); 314*b7eaed25SJason Evans pszind_t pind = sz_psz2ind(psz); 315*b7eaed25SJason Evans if (extent_heap_empty(&extents->heaps[pind])) { 316*b7eaed25SJason Evans bitmap_unset(extents->bitmap, &extents_bitmap_info, 317*b7eaed25SJason Evans (size_t)pind); 318*b7eaed25SJason Evans } 319*b7eaed25SJason Evans extent_heap_insert(&extents->heaps[pind], extent); 320*b7eaed25SJason Evans if (!preserve_lru) { 321*b7eaed25SJason Evans extent_list_append(&extents->lru, extent); 322*b7eaed25SJason Evans } 323*b7eaed25SJason Evans size_t npages = size >> LG_PAGE; 324*b7eaed25SJason Evans /* 325*b7eaed25SJason Evans * All modifications to npages hold the mutex (as asserted above), so we 326*b7eaed25SJason Evans * don't need an atomic fetch-add; we can get by with a load followed by 327*b7eaed25SJason Evans * a store. 328*b7eaed25SJason Evans */ 329*b7eaed25SJason Evans size_t cur_extents_npages = 330*b7eaed25SJason Evans atomic_load_zu(&extents->npages, ATOMIC_RELAXED); 331*b7eaed25SJason Evans atomic_store_zu(&extents->npages, cur_extents_npages + npages, 332*b7eaed25SJason Evans ATOMIC_RELAXED); 3337fa7f12fSJason Evans } 3347fa7f12fSJason Evans 335*b7eaed25SJason Evans static void 336*b7eaed25SJason Evans extents_remove_locked(tsdn_t *tsdn, extents_t *extents, extent_t *extent, 337*b7eaed25SJason Evans bool preserve_lru) { 338*b7eaed25SJason Evans malloc_mutex_assert_owner(tsdn, &extents->mtx); 339*b7eaed25SJason Evans assert(extent_state_get(extent) == extents->state); 3407fa7f12fSJason Evans 341*b7eaed25SJason Evans size_t size = extent_size_get(extent); 342*b7eaed25SJason Evans size_t psz = extent_size_quantize_floor(size); 343*b7eaed25SJason Evans pszind_t pind = sz_psz2ind(psz); 344*b7eaed25SJason Evans extent_heap_remove(&extents->heaps[pind], extent); 345*b7eaed25SJason Evans if (extent_heap_empty(&extents->heaps[pind])) { 346*b7eaed25SJason Evans bitmap_set(extents->bitmap, &extents_bitmap_info, 347*b7eaed25SJason Evans (size_t)pind); 
348*b7eaed25SJason Evans } 349*b7eaed25SJason Evans if (!preserve_lru) { 350*b7eaed25SJason Evans extent_list_remove(&extents->lru, extent); 351*b7eaed25SJason Evans } 352*b7eaed25SJason Evans size_t npages = size >> LG_PAGE; 353*b7eaed25SJason Evans /* 354*b7eaed25SJason Evans * As in extents_insert_locked, we hold extents->mtx and so don't need 355*b7eaed25SJason Evans * atomic operations for updating extents->npages. 356*b7eaed25SJason Evans */ 357*b7eaed25SJason Evans size_t cur_extents_npages = 358*b7eaed25SJason Evans atomic_load_zu(&extents->npages, ATOMIC_RELAXED); 359*b7eaed25SJason Evans assert(cur_extents_npages >= npages); 360*b7eaed25SJason Evans atomic_store_zu(&extents->npages, 361*b7eaed25SJason Evans cur_extents_npages - (size >> LG_PAGE), ATOMIC_RELAXED); 362*b7eaed25SJason Evans } 363*b7eaed25SJason Evans 364*b7eaed25SJason Evans /* Do any-best-fit extent selection, i.e. select any extent that best fits. */ 365*b7eaed25SJason Evans static extent_t * 366*b7eaed25SJason Evans extents_best_fit_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents, 367*b7eaed25SJason Evans size_t size) { 368*b7eaed25SJason Evans pszind_t pind = sz_psz2ind(extent_size_quantize_ceil(size)); 369*b7eaed25SJason Evans pszind_t i = (pszind_t)bitmap_ffu(extents->bitmap, &extents_bitmap_info, 370*b7eaed25SJason Evans (size_t)pind); 371*b7eaed25SJason Evans if (i < NPSIZES+1) { 372*b7eaed25SJason Evans assert(!extent_heap_empty(&extents->heaps[i])); 373*b7eaed25SJason Evans extent_t *extent = extent_heap_any(&extents->heaps[i]); 374*b7eaed25SJason Evans assert(extent_size_get(extent) >= size); 375*b7eaed25SJason Evans return extent; 376*b7eaed25SJason Evans } 377*b7eaed25SJason Evans 378*b7eaed25SJason Evans return NULL; 379*b7eaed25SJason Evans } 380*b7eaed25SJason Evans 381*b7eaed25SJason Evans /* 382*b7eaed25SJason Evans * Do first-fit extent selection, i.e. select the oldest/lowest extent that is 383*b7eaed25SJason Evans * large enough. 
384*b7eaed25SJason Evans */ 385*b7eaed25SJason Evans static extent_t * 386*b7eaed25SJason Evans extents_first_fit_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents, 387*b7eaed25SJason Evans size_t size) { 388*b7eaed25SJason Evans extent_t *ret = NULL; 389*b7eaed25SJason Evans 390*b7eaed25SJason Evans pszind_t pind = sz_psz2ind(extent_size_quantize_ceil(size)); 391*b7eaed25SJason Evans for (pszind_t i = (pszind_t)bitmap_ffu(extents->bitmap, 392*b7eaed25SJason Evans &extents_bitmap_info, (size_t)pind); i < NPSIZES+1; i = 393*b7eaed25SJason Evans (pszind_t)bitmap_ffu(extents->bitmap, &extents_bitmap_info, 394*b7eaed25SJason Evans (size_t)i+1)) { 395*b7eaed25SJason Evans assert(!extent_heap_empty(&extents->heaps[i])); 396*b7eaed25SJason Evans extent_t *extent = extent_heap_first(&extents->heaps[i]); 397*b7eaed25SJason Evans assert(extent_size_get(extent) >= size); 398*b7eaed25SJason Evans if (ret == NULL || extent_snad_comp(extent, ret) < 0) { 399*b7eaed25SJason Evans ret = extent; 400*b7eaed25SJason Evans } 401*b7eaed25SJason Evans if (i == NPSIZES) { 402*b7eaed25SJason Evans break; 403*b7eaed25SJason Evans } 404*b7eaed25SJason Evans assert(i < NPSIZES); 405*b7eaed25SJason Evans } 406*b7eaed25SJason Evans 407*b7eaed25SJason Evans return ret; 408*b7eaed25SJason Evans } 409*b7eaed25SJason Evans 410*b7eaed25SJason Evans /* 411*b7eaed25SJason Evans * Do {best,first}-fit extent selection, where the selection policy choice is 412*b7eaed25SJason Evans * based on extents->delay_coalesce. Best-fit selection requires less 413*b7eaed25SJason Evans * searching, but its layout policy is less stable and may cause higher virtual 414*b7eaed25SJason Evans * memory fragmentation as a side effect. 
415*b7eaed25SJason Evans */ 416*b7eaed25SJason Evans static extent_t * 417*b7eaed25SJason Evans extents_fit_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents, 418*b7eaed25SJason Evans size_t size) { 419*b7eaed25SJason Evans malloc_mutex_assert_owner(tsdn, &extents->mtx); 420*b7eaed25SJason Evans 421*b7eaed25SJason Evans return extents->delay_coalesce ? extents_best_fit_locked(tsdn, arena, 422*b7eaed25SJason Evans extents, size) : extents_first_fit_locked(tsdn, arena, extents, 423*b7eaed25SJason Evans size); 424*b7eaed25SJason Evans } 425*b7eaed25SJason Evans 426*b7eaed25SJason Evans static bool 427*b7eaed25SJason Evans extent_try_delayed_coalesce(tsdn_t *tsdn, arena_t *arena, 428*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, rtree_ctx_t *rtree_ctx, extents_t *extents, 429*b7eaed25SJason Evans extent_t *extent) { 430*b7eaed25SJason Evans extent_state_set(extent, extent_state_active); 431*b7eaed25SJason Evans bool coalesced; 432*b7eaed25SJason Evans extent = extent_try_coalesce(tsdn, arena, r_extent_hooks, rtree_ctx, 433*b7eaed25SJason Evans extents, extent, &coalesced, false); 434*b7eaed25SJason Evans extent_state_set(extent, extents_state_get(extents)); 435*b7eaed25SJason Evans 436*b7eaed25SJason Evans if (!coalesced) { 437*b7eaed25SJason Evans return true; 438*b7eaed25SJason Evans } 439*b7eaed25SJason Evans extents_insert_locked(tsdn, extents, extent, true); 440*b7eaed25SJason Evans return false; 441*b7eaed25SJason Evans } 442*b7eaed25SJason Evans 443*b7eaed25SJason Evans extent_t * 444*b7eaed25SJason Evans extents_alloc(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, 445*b7eaed25SJason Evans extents_t *extents, void *new_addr, size_t size, size_t pad, 446*b7eaed25SJason Evans size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) { 447*b7eaed25SJason Evans assert(size + pad != 0); 448*b7eaed25SJason Evans assert(alignment != 0); 449*b7eaed25SJason Evans witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn), 
450*b7eaed25SJason Evans WITNESS_RANK_CORE, 0); 451*b7eaed25SJason Evans 452*b7eaed25SJason Evans return extent_recycle(tsdn, arena, r_extent_hooks, extents, new_addr, 453*b7eaed25SJason Evans size, pad, alignment, slab, szind, zero, commit, false); 454*b7eaed25SJason Evans } 455*b7eaed25SJason Evans 456*b7eaed25SJason Evans void 457*b7eaed25SJason Evans extents_dalloc(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, 458*b7eaed25SJason Evans extents_t *extents, extent_t *extent) { 459*b7eaed25SJason Evans assert(extent_base_get(extent) != NULL); 460*b7eaed25SJason Evans assert(extent_size_get(extent) != 0); 461*b7eaed25SJason Evans witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn), 462*b7eaed25SJason Evans WITNESS_RANK_CORE, 0); 463*b7eaed25SJason Evans 464*b7eaed25SJason Evans extent_addr_set(extent, extent_base_get(extent)); 465*b7eaed25SJason Evans extent_zeroed_set(extent, false); 466*b7eaed25SJason Evans 467*b7eaed25SJason Evans extent_record(tsdn, arena, r_extent_hooks, extents, extent, false); 468*b7eaed25SJason Evans } 469*b7eaed25SJason Evans 470*b7eaed25SJason Evans extent_t * 471*b7eaed25SJason Evans extents_evict(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, 472*b7eaed25SJason Evans extents_t *extents, size_t npages_min) { 473*b7eaed25SJason Evans rtree_ctx_t rtree_ctx_fallback; 474*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback); 475*b7eaed25SJason Evans 476*b7eaed25SJason Evans malloc_mutex_lock(tsdn, &extents->mtx); 477*b7eaed25SJason Evans 478*b7eaed25SJason Evans /* 479*b7eaed25SJason Evans * Get the LRU coalesced extent, if any. If coalescing was delayed, 480*b7eaed25SJason Evans * the loop will iterate until the LRU extent is fully coalesced. 481*b7eaed25SJason Evans */ 482*b7eaed25SJason Evans extent_t *extent; 483*b7eaed25SJason Evans while (true) { 484*b7eaed25SJason Evans /* Get the LRU extent, if any. 
*/ 485*b7eaed25SJason Evans extent = extent_list_first(&extents->lru); 486*b7eaed25SJason Evans if (extent == NULL) { 487*b7eaed25SJason Evans goto label_return; 488*b7eaed25SJason Evans } 489*b7eaed25SJason Evans /* Check the eviction limit. */ 490*b7eaed25SJason Evans size_t npages = extent_size_get(extent) >> LG_PAGE; 491*b7eaed25SJason Evans size_t extents_npages = atomic_load_zu(&extents->npages, 492*b7eaed25SJason Evans ATOMIC_RELAXED); 493*b7eaed25SJason Evans if (extents_npages - npages < npages_min) { 494*b7eaed25SJason Evans extent = NULL; 495*b7eaed25SJason Evans goto label_return; 496*b7eaed25SJason Evans } 497*b7eaed25SJason Evans extents_remove_locked(tsdn, extents, extent, false); 498*b7eaed25SJason Evans if (!extents->delay_coalesce) { 499*b7eaed25SJason Evans break; 500*b7eaed25SJason Evans } 501*b7eaed25SJason Evans /* Try to coalesce. */ 502*b7eaed25SJason Evans if (extent_try_delayed_coalesce(tsdn, arena, r_extent_hooks, 503*b7eaed25SJason Evans rtree_ctx, extents, extent)) { 504*b7eaed25SJason Evans break; 505*b7eaed25SJason Evans } 506*b7eaed25SJason Evans /* 507*b7eaed25SJason Evans * The LRU extent was just coalesced and the result placed in 508*b7eaed25SJason Evans * the LRU at its neighbor's position. Start over. 509*b7eaed25SJason Evans */ 510*b7eaed25SJason Evans } 511*b7eaed25SJason Evans 512*b7eaed25SJason Evans /* 513*b7eaed25SJason Evans * Either mark the extent active or deregister it to protect against 514*b7eaed25SJason Evans * concurrent operations. 
515*b7eaed25SJason Evans */ 516*b7eaed25SJason Evans switch (extents_state_get(extents)) { 517*b7eaed25SJason Evans case extent_state_active: 518*b7eaed25SJason Evans not_reached(); 519*b7eaed25SJason Evans case extent_state_dirty: 520*b7eaed25SJason Evans case extent_state_muzzy: 521*b7eaed25SJason Evans extent_state_set(extent, extent_state_active); 522*b7eaed25SJason Evans break; 523*b7eaed25SJason Evans case extent_state_retained: 524*b7eaed25SJason Evans extent_deregister(tsdn, extent); 525*b7eaed25SJason Evans break; 526*b7eaed25SJason Evans default: 527*b7eaed25SJason Evans not_reached(); 528*b7eaed25SJason Evans } 529*b7eaed25SJason Evans 530*b7eaed25SJason Evans label_return: 531*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &extents->mtx); 532*b7eaed25SJason Evans return extent; 533*b7eaed25SJason Evans } 534*b7eaed25SJason Evans 535*b7eaed25SJason Evans static void 536*b7eaed25SJason Evans extents_leak(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, 537*b7eaed25SJason Evans extents_t *extents, extent_t *extent, bool growing_retained) { 538*b7eaed25SJason Evans /* 539*b7eaed25SJason Evans * Leak extent after making sure its pages have already been purged, so 540*b7eaed25SJason Evans * that this is only a virtual memory leak. 
	 */
	if (extents_state_get(extents) == extent_state_dirty) {
		/* Lazy purge first; fall back to forced purge on failure. */
		if (extent_purge_lazy_impl(tsdn, arena, r_extent_hooks,
		    extent, 0, extent_size_get(extent), growing_retained)) {
			extent_purge_forced_impl(tsdn, arena, r_extent_hooks,
			    extent, 0, extent_size_get(extent),
			    growing_retained);
		}
	}
	/* The extent_t structure itself is recycled; only the range leaks. */
	extent_dalloc(tsdn, arena, extent);
}

void
extents_prefork(tsdn_t *tsdn, extents_t *extents) {
	malloc_mutex_prefork(tsdn, &extents->mtx);
}

void
extents_postfork_parent(tsdn_t *tsdn, extents_t *extents) {
	malloc_mutex_postfork_parent(tsdn, &extents->mtx);
}

void
extents_postfork_child(tsdn_t *tsdn, extents_t *extents) {
	malloc_mutex_postfork_child(tsdn, &extents->mtx);
}

/*
 * Move an active extent into the container, setting its state to the
 * container's state.  Caller must hold extents->mtx.
 */
static void
extent_deactivate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
    extent_t *extent, bool preserve_lru) {
	assert(extent_arena_get(extent) == arena);
	assert(extent_state_get(extent) == extent_state_active);

	extent_state_set(extent, extents_state_get(extents));
	extents_insert_locked(tsdn, extents, extent, preserve_lru);
}

/* Locking wrapper around extent_deactivate_locked(). */
static void
extent_deactivate(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
    extent_t *extent, bool preserve_lru) {
	malloc_mutex_lock(tsdn, &extents->mtx);
	extent_deactivate_locked(tsdn, arena, extents, extent, preserve_lru);
	malloc_mutex_unlock(tsdn, &extents->mtx);
}

/*
 * Remove an extent from the container and mark it active.  Caller must hold
 * extents->mtx.
 */
static void
extent_activate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
    extent_t *extent, bool preserve_lru) {
	assert(extent_arena_get(extent) == arena);
	assert(extent_state_get(extent) == extents_state_get(extents));

	extents_remove_locked(tsdn, extents, extent, preserve_lru);
	extent_state_set(extent, extent_state_active);
}

/*
 * Look up the rtree leaf elements for the extent's base and last pages.
 * Returns true on lookup failure (only possible when !dependent).
 */
static bool
extent_rtree_leaf_elms_lookup(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
    const extent_t *extent, bool dependent, bool init_missing,
    rtree_leaf_elm_t **r_elm_a, rtree_leaf_elm_t **r_elm_b) {
	*r_elm_a = rtree_leaf_elm_lookup(tsdn, &extents_rtree, rtree_ctx,
	    (uintptr_t)extent_base_get(extent), dependent, init_missing);
	if (!dependent && *r_elm_a == NULL) {
		return true;
	}
	assert(*r_elm_a != NULL);

	*r_elm_b = rtree_leaf_elm_lookup(tsdn, &extents_rtree, rtree_ctx,
	    (uintptr_t)extent_last_get(extent), dependent, init_missing);
	if (!dependent && *r_elm_b == NULL) {
		return true;
	}
	assert(*r_elm_b != NULL);

	return false;
}

/*
 * Write the extent's mapping into its first leaf element, and the last one
 * too when it is distinct (elm_b != NULL).
 */
static void
extent_rtree_write_acquired(tsdn_t *tsdn, rtree_leaf_elm_t *elm_a,
    rtree_leaf_elm_t *elm_b, extent_t *extent, szind_t szind, bool slab) {
	rtree_leaf_elm_write(tsdn, &extents_rtree, elm_a, extent, szind, slab);
	if (elm_b != NULL) {
		rtree_leaf_elm_write(tsdn, &extents_rtree, elm_b, extent, szind,
		    slab);
	}
}

/*
 * Map every interior page of a slab extent (pages strictly between the first
 * and last) back to the extent in the rtree.
 */
static void
extent_interior_register(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx, extent_t *extent,
    szind_t szind) {
	assert(extent_slab_get(extent));

	/* Register interior. */
	for (size_t i = 1; i < (extent_size_get(extent) >> LG_PAGE) - 1; i++) {
		rtree_write(tsdn, &extents_rtree, rtree_ctx,
		    (uintptr_t)extent_base_get(extent) + (uintptr_t)(i <<
		    LG_PAGE), extent, szind, true);
	}
}

/*
 * Add the extent's pages to the gdump accounting (curpages/highpages),
 * triggering a profile dump when a new high-water mark is reached.
 */
static void
extent_gdump_add(tsdn_t *tsdn, const extent_t *extent) {
	cassert(config_prof);
	/* prof_gdump() requirement. */
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (opt_prof && extent_state_get(extent) == extent_state_active) {
		size_t nadd = extent_size_get(extent) >> LG_PAGE;
		size_t cur = atomic_fetch_add_zu(&curpages, nadd,
		    ATOMIC_RELAXED) + nadd;
		size_t high = atomic_load_zu(&highpages, ATOMIC_RELAXED);
		while (cur > high && !atomic_compare_exchange_weak_zu(
		    &highpages, &high, cur, ATOMIC_RELAXED, ATOMIC_RELAXED)) {
			/*
			 * Don't refresh cur, because it may have decreased
			 * since this thread lost the highpages update race.
			 * Note that high is updated in case of CAS failure.
			 */
		}
		if (cur > high && prof_gdump_get_unlocked()) {
			prof_gdump(tsdn);
		}
	}
}

/* Subtract the extent's pages from the gdump accounting. */
static void
extent_gdump_sub(tsdn_t *tsdn, const extent_t *extent) {
	cassert(config_prof);

	if (opt_prof && extent_state_get(extent) == extent_state_active) {
		size_t nsub = extent_size_get(extent) >> LG_PAGE;
		assert(atomic_load_zu(&curpages, ATOMIC_RELAXED) >= nsub);
		atomic_fetch_sub_zu(&curpages, nsub, ATOMIC_RELAXED);
	}
}

static bool
extent_register_impl(tsdn_t *tsdn, extent_t *extent, bool gdump_add) {
rtree_ctx_t rtree_ctx_fallback; 680*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback); 681*b7eaed25SJason Evans rtree_leaf_elm_t *elm_a, *elm_b; 682*b7eaed25SJason Evans 683*b7eaed25SJason Evans /* 684*b7eaed25SJason Evans * We need to hold the lock to protect against a concurrent coalesce 685*b7eaed25SJason Evans * operation that sees us in a partial state. 686*b7eaed25SJason Evans */ 687*b7eaed25SJason Evans extent_lock(tsdn, extent); 688*b7eaed25SJason Evans 689*b7eaed25SJason Evans if (extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, extent, false, true, 690*b7eaed25SJason Evans &elm_a, &elm_b)) { 691*b7eaed25SJason Evans return true; 692*b7eaed25SJason Evans } 693*b7eaed25SJason Evans 694*b7eaed25SJason Evans szind_t szind = extent_szind_get_maybe_invalid(extent); 695*b7eaed25SJason Evans bool slab = extent_slab_get(extent); 696*b7eaed25SJason Evans extent_rtree_write_acquired(tsdn, elm_a, elm_b, extent, szind, slab); 697*b7eaed25SJason Evans if (slab) { 698*b7eaed25SJason Evans extent_interior_register(tsdn, rtree_ctx, extent, szind); 699*b7eaed25SJason Evans } 700*b7eaed25SJason Evans 701*b7eaed25SJason Evans extent_unlock(tsdn, extent); 702*b7eaed25SJason Evans 703*b7eaed25SJason Evans if (config_prof && gdump_add) { 704*b7eaed25SJason Evans extent_gdump_add(tsdn, extent); 705*b7eaed25SJason Evans } 706*b7eaed25SJason Evans 707*b7eaed25SJason Evans return false; 708*b7eaed25SJason Evans } 709*b7eaed25SJason Evans 710*b7eaed25SJason Evans static bool 711*b7eaed25SJason Evans extent_register(tsdn_t *tsdn, extent_t *extent) { 712*b7eaed25SJason Evans return extent_register_impl(tsdn, extent, true); 713*b7eaed25SJason Evans } 714*b7eaed25SJason Evans 715*b7eaed25SJason Evans static bool 716*b7eaed25SJason Evans extent_register_no_gdump_add(tsdn_t *tsdn, extent_t *extent) { 717*b7eaed25SJason Evans return extent_register_impl(tsdn, extent, false); 718*b7eaed25SJason Evans } 719*b7eaed25SJason Evans 720*b7eaed25SJason Evans 
static void 721*b7eaed25SJason Evans extent_reregister(tsdn_t *tsdn, extent_t *extent) { 722*b7eaed25SJason Evans bool err = extent_register(tsdn, extent); 723*b7eaed25SJason Evans assert(!err); 724*b7eaed25SJason Evans } 725*b7eaed25SJason Evans 726*b7eaed25SJason Evans static void 727*b7eaed25SJason Evans extent_interior_deregister(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx, 728*b7eaed25SJason Evans extent_t *extent) { 729*b7eaed25SJason Evans size_t i; 730*b7eaed25SJason Evans 731*b7eaed25SJason Evans assert(extent_slab_get(extent)); 732*b7eaed25SJason Evans 733*b7eaed25SJason Evans for (i = 1; i < (extent_size_get(extent) >> LG_PAGE) - 1; i++) { 734*b7eaed25SJason Evans rtree_clear(tsdn, &extents_rtree, rtree_ctx, 735*b7eaed25SJason Evans (uintptr_t)extent_base_get(extent) + (uintptr_t)(i << 736*b7eaed25SJason Evans LG_PAGE)); 737*b7eaed25SJason Evans } 738*b7eaed25SJason Evans } 739*b7eaed25SJason Evans 740*b7eaed25SJason Evans static void 741*b7eaed25SJason Evans extent_deregister(tsdn_t *tsdn, extent_t *extent) { 742*b7eaed25SJason Evans rtree_ctx_t rtree_ctx_fallback; 743*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback); 744*b7eaed25SJason Evans rtree_leaf_elm_t *elm_a, *elm_b; 745*b7eaed25SJason Evans extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, extent, true, false, 746*b7eaed25SJason Evans &elm_a, &elm_b); 747*b7eaed25SJason Evans 748*b7eaed25SJason Evans extent_lock(tsdn, extent); 749*b7eaed25SJason Evans 750*b7eaed25SJason Evans extent_rtree_write_acquired(tsdn, elm_a, elm_b, NULL, NSIZES, false); 751*b7eaed25SJason Evans if (extent_slab_get(extent)) { 752*b7eaed25SJason Evans extent_interior_deregister(tsdn, rtree_ctx, extent); 753*b7eaed25SJason Evans extent_slab_set(extent, false); 754*b7eaed25SJason Evans } 755*b7eaed25SJason Evans 756*b7eaed25SJason Evans extent_unlock(tsdn, extent); 757*b7eaed25SJason Evans 758*b7eaed25SJason Evans if (config_prof) { 759*b7eaed25SJason Evans extent_gdump_sub(tsdn, extent); 
760*b7eaed25SJason Evans } 761*b7eaed25SJason Evans } 762*b7eaed25SJason Evans 763*b7eaed25SJason Evans static extent_t * 764*b7eaed25SJason Evans extent_recycle_extract(tsdn_t *tsdn, arena_t *arena, 765*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, rtree_ctx_t *rtree_ctx, extents_t *extents, 766*b7eaed25SJason Evans void *new_addr, size_t size, size_t pad, size_t alignment, bool slab, 767*b7eaed25SJason Evans bool *zero, bool *commit, bool growing_retained) { 768*b7eaed25SJason Evans witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn), 769*b7eaed25SJason Evans WITNESS_RANK_CORE, growing_retained ? 1 : 0); 770*b7eaed25SJason Evans assert(alignment > 0); 771*b7eaed25SJason Evans if (config_debug && new_addr != NULL) { 772*b7eaed25SJason Evans /* 773*b7eaed25SJason Evans * Non-NULL new_addr has two use cases: 774*b7eaed25SJason Evans * 775*b7eaed25SJason Evans * 1) Recycle a known-extant extent, e.g. during purging. 776*b7eaed25SJason Evans * 2) Perform in-place expanding reallocation. 777*b7eaed25SJason Evans * 778*b7eaed25SJason Evans * Regardless of use case, new_addr must either refer to a 779*b7eaed25SJason Evans * non-existing extent, or to the base of an extant extent, 780*b7eaed25SJason Evans * since only active slabs support interior lookups (which of 781*b7eaed25SJason Evans * course cannot be recycled). 782*b7eaed25SJason Evans */ 783*b7eaed25SJason Evans assert(PAGE_ADDR2BASE(new_addr) == new_addr); 784*b7eaed25SJason Evans assert(pad == 0); 785*b7eaed25SJason Evans assert(alignment <= PAGE); 786*b7eaed25SJason Evans } 787*b7eaed25SJason Evans 788*b7eaed25SJason Evans size_t esize = size + pad; 789*b7eaed25SJason Evans size_t alloc_size = esize + PAGE_CEILING(alignment) - PAGE; 790*b7eaed25SJason Evans /* Beware size_t wrap-around. 
*/ 791*b7eaed25SJason Evans if (alloc_size < esize) { 792*b7eaed25SJason Evans return NULL; 793*b7eaed25SJason Evans } 794*b7eaed25SJason Evans malloc_mutex_lock(tsdn, &extents->mtx); 795*b7eaed25SJason Evans extent_hooks_assure_initialized(arena, r_extent_hooks); 796*b7eaed25SJason Evans extent_t *extent; 797*b7eaed25SJason Evans if (new_addr != NULL) { 798*b7eaed25SJason Evans extent = extent_lock_from_addr(tsdn, rtree_ctx, new_addr); 799*b7eaed25SJason Evans if (extent != NULL) { 800*b7eaed25SJason Evans /* 801*b7eaed25SJason Evans * We might null-out extent to report an error, but we 802*b7eaed25SJason Evans * still need to unlock the associated mutex after. 803*b7eaed25SJason Evans */ 804*b7eaed25SJason Evans extent_t *unlock_extent = extent; 805*b7eaed25SJason Evans assert(extent_base_get(extent) == new_addr); 806*b7eaed25SJason Evans if (extent_arena_get(extent) != arena || 807*b7eaed25SJason Evans extent_size_get(extent) < esize || 808*b7eaed25SJason Evans extent_state_get(extent) != 809*b7eaed25SJason Evans extents_state_get(extents)) { 810*b7eaed25SJason Evans extent = NULL; 811*b7eaed25SJason Evans } 812*b7eaed25SJason Evans extent_unlock(tsdn, unlock_extent); 813*b7eaed25SJason Evans } 814*b7eaed25SJason Evans } else { 815*b7eaed25SJason Evans extent = extents_fit_locked(tsdn, arena, extents, alloc_size); 816*b7eaed25SJason Evans } 817*b7eaed25SJason Evans if (extent == NULL) { 818*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &extents->mtx); 819*b7eaed25SJason Evans return NULL; 820*b7eaed25SJason Evans } 821*b7eaed25SJason Evans 822*b7eaed25SJason Evans extent_activate_locked(tsdn, arena, extents, extent, false); 823*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &extents->mtx); 824*b7eaed25SJason Evans 825*b7eaed25SJason Evans if (extent_zeroed_get(extent)) { 826*b7eaed25SJason Evans *zero = true; 827*b7eaed25SJason Evans } 828*b7eaed25SJason Evans if (extent_committed_get(extent)) { 829*b7eaed25SJason Evans *commit = true; 830*b7eaed25SJason Evans 
} 831*b7eaed25SJason Evans 832*b7eaed25SJason Evans return extent; 833*b7eaed25SJason Evans } 834*b7eaed25SJason Evans 835*b7eaed25SJason Evans static extent_t * 836*b7eaed25SJason Evans extent_recycle_split(tsdn_t *tsdn, arena_t *arena, 837*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, rtree_ctx_t *rtree_ctx, extents_t *extents, 838*b7eaed25SJason Evans void *new_addr, size_t size, size_t pad, size_t alignment, bool slab, 839*b7eaed25SJason Evans szind_t szind, extent_t *extent, bool growing_retained) { 840*b7eaed25SJason Evans size_t esize = size + pad; 841*b7eaed25SJason Evans size_t leadsize = ALIGNMENT_CEILING((uintptr_t)extent_base_get(extent), 842*b7eaed25SJason Evans PAGE_CEILING(alignment)) - (uintptr_t)extent_base_get(extent); 843*b7eaed25SJason Evans assert(new_addr == NULL || leadsize == 0); 844*b7eaed25SJason Evans assert(extent_size_get(extent) >= leadsize + esize); 845*b7eaed25SJason Evans size_t trailsize = extent_size_get(extent) - leadsize - esize; 846*b7eaed25SJason Evans 847*b7eaed25SJason Evans /* Split the lead. */ 848*b7eaed25SJason Evans if (leadsize != 0) { 849*b7eaed25SJason Evans extent_t *lead = extent; 850*b7eaed25SJason Evans extent = extent_split_impl(tsdn, arena, r_extent_hooks, 851*b7eaed25SJason Evans lead, leadsize, NSIZES, false, esize + trailsize, szind, 852*b7eaed25SJason Evans slab, growing_retained); 853*b7eaed25SJason Evans if (extent == NULL) { 854*b7eaed25SJason Evans extent_deregister(tsdn, lead); 855*b7eaed25SJason Evans extents_leak(tsdn, arena, r_extent_hooks, extents, 856*b7eaed25SJason Evans lead, growing_retained); 857*b7eaed25SJason Evans return NULL; 858*b7eaed25SJason Evans } 859*b7eaed25SJason Evans extent_deactivate(tsdn, arena, extents, lead, false); 860*b7eaed25SJason Evans } 861*b7eaed25SJason Evans 862*b7eaed25SJason Evans /* Split the trail. 
*/ 863*b7eaed25SJason Evans if (trailsize != 0) { 864*b7eaed25SJason Evans extent_t *trail = extent_split_impl(tsdn, arena, 865*b7eaed25SJason Evans r_extent_hooks, extent, esize, szind, slab, trailsize, 866*b7eaed25SJason Evans NSIZES, false, growing_retained); 867*b7eaed25SJason Evans if (trail == NULL) { 868*b7eaed25SJason Evans extent_deregister(tsdn, extent); 869*b7eaed25SJason Evans extents_leak(tsdn, arena, r_extent_hooks, extents, 870*b7eaed25SJason Evans extent, growing_retained); 871*b7eaed25SJason Evans return NULL; 872*b7eaed25SJason Evans } 873*b7eaed25SJason Evans extent_deactivate(tsdn, arena, extents, trail, false); 874*b7eaed25SJason Evans } else if (leadsize == 0) { 875*b7eaed25SJason Evans /* 876*b7eaed25SJason Evans * Splitting causes szind to be set as a side effect, but no 877*b7eaed25SJason Evans * splitting occurred. 878*b7eaed25SJason Evans */ 879*b7eaed25SJason Evans extent_szind_set(extent, szind); 880*b7eaed25SJason Evans if (szind != NSIZES) { 881*b7eaed25SJason Evans rtree_szind_slab_update(tsdn, &extents_rtree, rtree_ctx, 882*b7eaed25SJason Evans (uintptr_t)extent_addr_get(extent), szind, slab); 883*b7eaed25SJason Evans if (slab && extent_size_get(extent) > PAGE) { 884*b7eaed25SJason Evans rtree_szind_slab_update(tsdn, &extents_rtree, 885*b7eaed25SJason Evans rtree_ctx, 886*b7eaed25SJason Evans (uintptr_t)extent_past_get(extent) - 887*b7eaed25SJason Evans (uintptr_t)PAGE, szind, slab); 888*b7eaed25SJason Evans } 889*b7eaed25SJason Evans } 890*b7eaed25SJason Evans } 891*b7eaed25SJason Evans 892*b7eaed25SJason Evans return extent; 893*b7eaed25SJason Evans } 894*b7eaed25SJason Evans 895*b7eaed25SJason Evans static extent_t * 896*b7eaed25SJason Evans extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, 897*b7eaed25SJason Evans extents_t *extents, void *new_addr, size_t size, size_t pad, 898*b7eaed25SJason Evans size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit, 899*b7eaed25SJason Evans bool 
growing_retained) { 900*b7eaed25SJason Evans witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn), 901*b7eaed25SJason Evans WITNESS_RANK_CORE, growing_retained ? 1 : 0); 902*b7eaed25SJason Evans assert(new_addr == NULL || !slab); 903*b7eaed25SJason Evans assert(pad == 0 || !slab); 904*b7eaed25SJason Evans assert(!*zero || !slab); 905*b7eaed25SJason Evans 906*b7eaed25SJason Evans rtree_ctx_t rtree_ctx_fallback; 907*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback); 908*b7eaed25SJason Evans 909*b7eaed25SJason Evans bool committed = false; 910*b7eaed25SJason Evans extent_t *extent = extent_recycle_extract(tsdn, arena, r_extent_hooks, 911*b7eaed25SJason Evans rtree_ctx, extents, new_addr, size, pad, alignment, slab, zero, 912*b7eaed25SJason Evans &committed, growing_retained); 913*b7eaed25SJason Evans if (extent == NULL) { 914*b7eaed25SJason Evans return NULL; 915*b7eaed25SJason Evans } 916*b7eaed25SJason Evans if (committed) { 917*b7eaed25SJason Evans *commit = true; 918*b7eaed25SJason Evans } 919*b7eaed25SJason Evans 920*b7eaed25SJason Evans extent = extent_recycle_split(tsdn, arena, r_extent_hooks, rtree_ctx, 921*b7eaed25SJason Evans extents, new_addr, size, pad, alignment, slab, szind, extent, 922*b7eaed25SJason Evans growing_retained); 923*b7eaed25SJason Evans if (extent == NULL) { 924*b7eaed25SJason Evans return NULL; 925*b7eaed25SJason Evans } 926*b7eaed25SJason Evans 927*b7eaed25SJason Evans if (*commit && !extent_committed_get(extent)) { 928*b7eaed25SJason Evans if (extent_commit_impl(tsdn, arena, r_extent_hooks, extent, 929*b7eaed25SJason Evans 0, extent_size_get(extent), growing_retained)) { 930*b7eaed25SJason Evans extent_record(tsdn, arena, r_extent_hooks, extents, 931*b7eaed25SJason Evans extent, growing_retained); 932*b7eaed25SJason Evans return NULL; 933*b7eaed25SJason Evans } 934*b7eaed25SJason Evans extent_zeroed_set(extent, true); 935*b7eaed25SJason Evans } 936*b7eaed25SJason Evans 937*b7eaed25SJason Evans if 
(pad != 0) { 938*b7eaed25SJason Evans extent_addr_randomize(tsdn, extent, alignment); 939*b7eaed25SJason Evans } 940*b7eaed25SJason Evans assert(extent_state_get(extent) == extent_state_active); 941*b7eaed25SJason Evans if (slab) { 942*b7eaed25SJason Evans extent_slab_set(extent, slab); 943*b7eaed25SJason Evans extent_interior_register(tsdn, rtree_ctx, extent, szind); 944*b7eaed25SJason Evans } 945*b7eaed25SJason Evans 946*b7eaed25SJason Evans if (*zero) { 947*b7eaed25SJason Evans void *addr = extent_base_get(extent); 948*b7eaed25SJason Evans size_t size = extent_size_get(extent); 949*b7eaed25SJason Evans if (!extent_zeroed_get(extent)) { 950*b7eaed25SJason Evans if (pages_purge_forced(addr, size)) { 951*b7eaed25SJason Evans memset(addr, 0, size); 952*b7eaed25SJason Evans } 953*b7eaed25SJason Evans } else if (config_debug) { 954*b7eaed25SJason Evans size_t *p = (size_t *)(uintptr_t)addr; 955*b7eaed25SJason Evans for (size_t i = 0; i < size / sizeof(size_t); i++) { 956*b7eaed25SJason Evans assert(p[i] == 0); 957*b7eaed25SJason Evans } 958*b7eaed25SJason Evans } 959*b7eaed25SJason Evans } 960*b7eaed25SJason Evans return extent; 961*b7eaed25SJason Evans } 962*b7eaed25SJason Evans 963*b7eaed25SJason Evans /* 964*b7eaed25SJason Evans * If the caller specifies (!*zero), it is still possible to receive zeroed 965*b7eaed25SJason Evans * memory, in which case *zero is toggled to true. arena_extent_alloc() takes 966*b7eaed25SJason Evans * advantage of this to avoid demanding zeroed extents, but taking advantage of 967*b7eaed25SJason Evans * them if they are returned. 
968*b7eaed25SJason Evans */ 969*b7eaed25SJason Evans static void * 970*b7eaed25SJason Evans extent_alloc_core(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size, 971*b7eaed25SJason Evans size_t alignment, bool *zero, bool *commit, dss_prec_t dss_prec) { 972*b7eaed25SJason Evans void *ret; 973*b7eaed25SJason Evans 974*b7eaed25SJason Evans assert(size != 0); 975*b7eaed25SJason Evans assert(alignment != 0); 976*b7eaed25SJason Evans 977*b7eaed25SJason Evans /* "primary" dss. */ 978*b7eaed25SJason Evans if (have_dss && dss_prec == dss_prec_primary && (ret = 979*b7eaed25SJason Evans extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero, 980*b7eaed25SJason Evans commit)) != NULL) { 981*b7eaed25SJason Evans return ret; 982*b7eaed25SJason Evans } 983*b7eaed25SJason Evans /* mmap. */ 984*b7eaed25SJason Evans if ((ret = extent_alloc_mmap(new_addr, size, alignment, zero, commit)) 985*b7eaed25SJason Evans != NULL) { 986*b7eaed25SJason Evans return ret; 987*b7eaed25SJason Evans } 988*b7eaed25SJason Evans /* "secondary" dss. */ 989*b7eaed25SJason Evans if (have_dss && dss_prec == dss_prec_secondary && (ret = 990*b7eaed25SJason Evans extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero, 991*b7eaed25SJason Evans commit)) != NULL) { 992*b7eaed25SJason Evans return ret; 993*b7eaed25SJason Evans } 994*b7eaed25SJason Evans 995*b7eaed25SJason Evans /* All strategies for allocation failed. 
*/ 996*b7eaed25SJason Evans return NULL; 997*b7eaed25SJason Evans } 998*b7eaed25SJason Evans 999*b7eaed25SJason Evans static void * 1000*b7eaed25SJason Evans extent_alloc_default_impl(tsdn_t *tsdn, arena_t *arena, void *new_addr, 1001*b7eaed25SJason Evans size_t size, size_t alignment, bool *zero, bool *commit) { 1002*b7eaed25SJason Evans void *ret; 1003*b7eaed25SJason Evans 1004*b7eaed25SJason Evans ret = extent_alloc_core(tsdn, arena, new_addr, size, alignment, zero, 1005*b7eaed25SJason Evans commit, (dss_prec_t)atomic_load_u(&arena->dss_prec, 1006*b7eaed25SJason Evans ATOMIC_RELAXED)); 1007*b7eaed25SJason Evans return ret; 1008*b7eaed25SJason Evans } 1009*b7eaed25SJason Evans 1010*b7eaed25SJason Evans static void * 1011*b7eaed25SJason Evans extent_alloc_default(extent_hooks_t *extent_hooks, void *new_addr, size_t size, 1012*b7eaed25SJason Evans size_t alignment, bool *zero, bool *commit, unsigned arena_ind) { 1013*b7eaed25SJason Evans tsdn_t *tsdn; 1014*b7eaed25SJason Evans arena_t *arena; 1015*b7eaed25SJason Evans 1016*b7eaed25SJason Evans tsdn = tsdn_fetch(); 1017*b7eaed25SJason Evans arena = arena_get(tsdn, arena_ind, false); 1018*b7eaed25SJason Evans /* 1019*b7eaed25SJason Evans * The arena we're allocating on behalf of must have been initialized 1020*b7eaed25SJason Evans * already. 1021*b7eaed25SJason Evans */ 1022*b7eaed25SJason Evans assert(arena != NULL); 1023*b7eaed25SJason Evans 1024*b7eaed25SJason Evans return extent_alloc_default_impl(tsdn, arena, new_addr, size, 1025*b7eaed25SJason Evans alignment, zero, commit); 1026*b7eaed25SJason Evans } 1027*b7eaed25SJason Evans 1028*b7eaed25SJason Evans /* 1029*b7eaed25SJason Evans * If virtual memory is retained, create increasingly larger extents from which 1030*b7eaed25SJason Evans * to split requested extents in order to limit the total number of disjoint 1031*b7eaed25SJason Evans * virtual memory ranges retained by each arena. 
1032*b7eaed25SJason Evans */ 1033*b7eaed25SJason Evans static extent_t * 1034*b7eaed25SJason Evans extent_grow_retained(tsdn_t *tsdn, arena_t *arena, 1035*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, size_t size, size_t pad, size_t alignment, 1036*b7eaed25SJason Evans bool slab, szind_t szind, bool *zero, bool *commit) { 1037*b7eaed25SJason Evans malloc_mutex_assert_owner(tsdn, &arena->extent_grow_mtx); 1038*b7eaed25SJason Evans assert(pad == 0 || !slab); 1039*b7eaed25SJason Evans assert(!*zero || !slab); 1040*b7eaed25SJason Evans 1041*b7eaed25SJason Evans size_t esize = size + pad; 1042*b7eaed25SJason Evans size_t alloc_size_min = esize + PAGE_CEILING(alignment) - PAGE; 1043*b7eaed25SJason Evans /* Beware size_t wrap-around. */ 1044*b7eaed25SJason Evans if (alloc_size_min < esize) { 1045*b7eaed25SJason Evans goto label_err; 1046*b7eaed25SJason Evans } 1047*b7eaed25SJason Evans /* 1048*b7eaed25SJason Evans * Find the next extent size in the series that would be large enough to 1049*b7eaed25SJason Evans * satisfy this request. 1050*b7eaed25SJason Evans */ 1051*b7eaed25SJason Evans pszind_t egn_skip = 0; 1052*b7eaed25SJason Evans size_t alloc_size = sz_pind2sz(arena->extent_grow_next + egn_skip); 1053*b7eaed25SJason Evans while (alloc_size < alloc_size_min) { 1054*b7eaed25SJason Evans egn_skip++; 1055*b7eaed25SJason Evans if (arena->extent_grow_next + egn_skip == NPSIZES) { 1056*b7eaed25SJason Evans /* Outside legal range. 
*/ 1057*b7eaed25SJason Evans goto label_err; 1058*b7eaed25SJason Evans } 1059*b7eaed25SJason Evans assert(arena->extent_grow_next + egn_skip < NPSIZES); 1060*b7eaed25SJason Evans alloc_size = sz_pind2sz(arena->extent_grow_next + egn_skip); 1061*b7eaed25SJason Evans } 1062*b7eaed25SJason Evans 1063*b7eaed25SJason Evans extent_t *extent = extent_alloc(tsdn, arena); 1064*b7eaed25SJason Evans if (extent == NULL) { 1065*b7eaed25SJason Evans goto label_err; 1066*b7eaed25SJason Evans } 1067*b7eaed25SJason Evans bool zeroed = false; 1068*b7eaed25SJason Evans bool committed = false; 1069*b7eaed25SJason Evans 1070*b7eaed25SJason Evans void *ptr; 1071*b7eaed25SJason Evans if (*r_extent_hooks == &extent_hooks_default) { 1072*b7eaed25SJason Evans ptr = extent_alloc_core(tsdn, arena, NULL, alloc_size, PAGE, 1073*b7eaed25SJason Evans &zeroed, &committed, (dss_prec_t)atomic_load_u( 1074*b7eaed25SJason Evans &arena->dss_prec, ATOMIC_RELAXED)); 1075*b7eaed25SJason Evans } else { 1076*b7eaed25SJason Evans ptr = (*r_extent_hooks)->alloc(*r_extent_hooks, NULL, 1077*b7eaed25SJason Evans alloc_size, PAGE, &zeroed, &committed, 1078*b7eaed25SJason Evans arena_ind_get(arena)); 1079*b7eaed25SJason Evans } 1080*b7eaed25SJason Evans 1081*b7eaed25SJason Evans extent_init(extent, arena, ptr, alloc_size, false, NSIZES, 1082*b7eaed25SJason Evans arena_extent_sn_next(arena), extent_state_active, zeroed, 1083*b7eaed25SJason Evans committed); 1084*b7eaed25SJason Evans if (ptr == NULL) { 1085*b7eaed25SJason Evans extent_dalloc(tsdn, arena, extent); 1086*b7eaed25SJason Evans goto label_err; 1087*b7eaed25SJason Evans } 1088*b7eaed25SJason Evans if (extent_register_no_gdump_add(tsdn, extent)) { 1089*b7eaed25SJason Evans extents_leak(tsdn, arena, r_extent_hooks, 1090*b7eaed25SJason Evans &arena->extents_retained, extent, true); 1091*b7eaed25SJason Evans goto label_err; 1092*b7eaed25SJason Evans } 1093*b7eaed25SJason Evans 1094*b7eaed25SJason Evans size_t leadsize = ALIGNMENT_CEILING((uintptr_t)ptr, 
1095*b7eaed25SJason Evans PAGE_CEILING(alignment)) - (uintptr_t)ptr; 1096*b7eaed25SJason Evans assert(alloc_size >= leadsize + esize); 1097*b7eaed25SJason Evans size_t trailsize = alloc_size - leadsize - esize; 1098*b7eaed25SJason Evans if (extent_zeroed_get(extent) && extent_committed_get(extent)) { 1099*b7eaed25SJason Evans *zero = true; 1100*b7eaed25SJason Evans } 1101*b7eaed25SJason Evans if (extent_committed_get(extent)) { 1102*b7eaed25SJason Evans *commit = true; 1103*b7eaed25SJason Evans } 1104*b7eaed25SJason Evans 1105*b7eaed25SJason Evans /* Split the lead. */ 1106*b7eaed25SJason Evans if (leadsize != 0) { 1107*b7eaed25SJason Evans extent_t *lead = extent; 1108*b7eaed25SJason Evans extent = extent_split_impl(tsdn, arena, r_extent_hooks, lead, 1109*b7eaed25SJason Evans leadsize, NSIZES, false, esize + trailsize, szind, slab, 1110*b7eaed25SJason Evans true); 1111*b7eaed25SJason Evans if (extent == NULL) { 1112*b7eaed25SJason Evans extent_deregister(tsdn, lead); 1113*b7eaed25SJason Evans extents_leak(tsdn, arena, r_extent_hooks, 1114*b7eaed25SJason Evans &arena->extents_retained, lead, true); 1115*b7eaed25SJason Evans goto label_err; 1116*b7eaed25SJason Evans } 1117*b7eaed25SJason Evans extent_record(tsdn, arena, r_extent_hooks, 1118*b7eaed25SJason Evans &arena->extents_retained, lead, true); 1119*b7eaed25SJason Evans } 1120*b7eaed25SJason Evans 1121*b7eaed25SJason Evans /* Split the trail. 
*/ 1122*b7eaed25SJason Evans if (trailsize != 0) { 1123*b7eaed25SJason Evans extent_t *trail = extent_split_impl(tsdn, arena, r_extent_hooks, 1124*b7eaed25SJason Evans extent, esize, szind, slab, trailsize, NSIZES, false, true); 1125*b7eaed25SJason Evans if (trail == NULL) { 1126*b7eaed25SJason Evans extent_deregister(tsdn, extent); 1127*b7eaed25SJason Evans extents_leak(tsdn, arena, r_extent_hooks, 1128*b7eaed25SJason Evans &arena->extents_retained, extent, true); 1129*b7eaed25SJason Evans goto label_err; 1130*b7eaed25SJason Evans } 1131*b7eaed25SJason Evans extent_record(tsdn, arena, r_extent_hooks, 1132*b7eaed25SJason Evans &arena->extents_retained, trail, true); 1133*b7eaed25SJason Evans } else if (leadsize == 0) { 1134*b7eaed25SJason Evans /* 1135*b7eaed25SJason Evans * Splitting causes szind to be set as a side effect, but no 1136*b7eaed25SJason Evans * splitting occurred. 1137*b7eaed25SJason Evans */ 1138*b7eaed25SJason Evans rtree_ctx_t rtree_ctx_fallback; 1139*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, 1140*b7eaed25SJason Evans &rtree_ctx_fallback); 1141*b7eaed25SJason Evans 1142*b7eaed25SJason Evans extent_szind_set(extent, szind); 1143*b7eaed25SJason Evans if (szind != NSIZES) { 1144*b7eaed25SJason Evans rtree_szind_slab_update(tsdn, &extents_rtree, rtree_ctx, 1145*b7eaed25SJason Evans (uintptr_t)extent_addr_get(extent), szind, slab); 1146*b7eaed25SJason Evans if (slab && extent_size_get(extent) > PAGE) { 1147*b7eaed25SJason Evans rtree_szind_slab_update(tsdn, &extents_rtree, 1148*b7eaed25SJason Evans rtree_ctx, 1149*b7eaed25SJason Evans (uintptr_t)extent_past_get(extent) - 1150*b7eaed25SJason Evans (uintptr_t)PAGE, szind, slab); 1151*b7eaed25SJason Evans } 1152*b7eaed25SJason Evans } 1153*b7eaed25SJason Evans } 1154*b7eaed25SJason Evans 1155*b7eaed25SJason Evans if (*commit && !extent_committed_get(extent)) { 1156*b7eaed25SJason Evans if (extent_commit_impl(tsdn, arena, r_extent_hooks, extent, 0, 1157*b7eaed25SJason Evans 
extent_size_get(extent), true)) { 1158*b7eaed25SJason Evans extent_record(tsdn, arena, r_extent_hooks, 1159*b7eaed25SJason Evans &arena->extents_retained, extent, true); 1160*b7eaed25SJason Evans goto label_err; 1161*b7eaed25SJason Evans } 1162*b7eaed25SJason Evans extent_zeroed_set(extent, true); 1163*b7eaed25SJason Evans } 1164*b7eaed25SJason Evans 1165*b7eaed25SJason Evans /* 1166*b7eaed25SJason Evans * Increment extent_grow_next if doing so wouldn't exceed the legal 1167*b7eaed25SJason Evans * range. 1168*b7eaed25SJason Evans */ 1169*b7eaed25SJason Evans if (arena->extent_grow_next + egn_skip + 1 < NPSIZES) { 1170*b7eaed25SJason Evans arena->extent_grow_next += egn_skip + 1; 1171*b7eaed25SJason Evans } else { 1172*b7eaed25SJason Evans arena->extent_grow_next = NPSIZES - 1; 1173*b7eaed25SJason Evans } 1174*b7eaed25SJason Evans /* All opportunities for failure are past. */ 1175*b7eaed25SJason Evans malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx); 1176*b7eaed25SJason Evans 1177*b7eaed25SJason Evans if (config_prof) { 1178*b7eaed25SJason Evans /* Adjust gdump stats now that extent is final size. 
 */
		extent_gdump_add(tsdn, extent);
	}
	if (pad != 0) {
		extent_addr_randomize(tsdn, extent, alignment);
	}
	if (slab) {
		rtree_ctx_t rtree_ctx_fallback;
		rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
		    &rtree_ctx_fallback);

		extent_slab_set(extent, true);
		extent_interior_register(tsdn, rtree_ctx, extent, szind);
	}
	if (*zero && !extent_zeroed_get(extent)) {
		void *addr = extent_base_get(extent);
		size_t size = extent_size_get(extent);
		/*
		 * Prefer purging over memset: if forced purging fails, fall
		 * back to touching the pages explicitly.
		 */
		if (pages_purge_forced(addr, size)) {
			memset(addr, 0, size);
		}
	}

	return extent;
label_err:
	malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx);
	return NULL;
}

/*
 * Allocate from arena->extents_retained if possible; otherwise, when
 * opt_retain is enabled and no fixed address was requested, grow the retained
 * reservation via extent_grow_retained().  extent_grow_mtx serializes the
 * recycle-then-grow sequence; it is always released before returning
 * (extent_grow_retained() releases it itself).
 */
static extent_t *
extent_alloc_retained(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, void *new_addr, size_t size, size_t pad,
    size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) {
	assert(size != 0);
	assert(alignment != 0);

	malloc_mutex_lock(tsdn, &arena->extent_grow_mtx);

	extent_t *extent = extent_recycle(tsdn, arena, r_extent_hooks,
	    &arena->extents_retained, new_addr, size, pad, alignment, slab,
	    szind, zero, commit, true);
	if (extent != NULL) {
		malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx);
		if (config_prof) {
			extent_gdump_add(tsdn, extent);
		}
	} else if (opt_retain && new_addr == NULL) {
		extent = extent_grow_retained(tsdn, arena, r_extent_hooks, size,
		    pad, alignment, slab, szind, zero, commit);
		/* extent_grow_retained() always releases extent_grow_mtx. */
	} else {
		malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx);
	}
	malloc_mutex_assert_not_owner(tsdn, &arena->extent_grow_mtx);

	return extent;
}

/*
 * Allocate a brand-new extent via the alloc hook, bypassing the
 * retained-extent cache.  Returns NULL on extent_t allocation failure, hook
 * failure, or rtree registration failure (in which case the mapping is
 * leaked into extents_retained bookkeeping via extents_leak()).
 */
static extent_t *
extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, void *new_addr, size_t size, size_t pad,
    size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) {
	size_t esize = size + pad;
	extent_t *extent = extent_alloc(tsdn, arena);
	if (extent == NULL) {
		return NULL;
	}
	void *addr;
	if (*r_extent_hooks == &extent_hooks_default) {
		/* Call directly to propagate tsdn.
		 */
		addr = extent_alloc_default_impl(tsdn, arena, new_addr, esize,
		    alignment, zero, commit);
	} else {
		addr = (*r_extent_hooks)->alloc(*r_extent_hooks, new_addr,
		    esize, alignment, zero, commit, arena_ind_get(arena));
	}
	if (addr == NULL) {
		extent_dalloc(tsdn, arena, extent);
		return NULL;
	}
	extent_init(extent, arena, addr, esize, slab, szind,
	    arena_extent_sn_next(arena), extent_state_active, zero, commit);
	if (pad != 0) {
		extent_addr_randomize(tsdn, extent, alignment);
	}
	if (extent_register(tsdn, extent)) {
		extents_leak(tsdn, arena, r_extent_hooks,
		    &arena->extents_retained, extent, false);
		return NULL;
	}

	return extent;
}

/*
 * Public extent allocation entry point: first try to reuse/grow retained
 * virtual memory, then fall back to fresh mappings from the hooks.
 */
extent_t *
extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, void *new_addr, size_t size, size_t pad,
    size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);

	extent_t *extent = extent_alloc_retained(tsdn, arena, r_extent_hooks,
	    new_addr, size, pad, alignment, slab, szind,
    zero, commit);
	if (extent == NULL) {
		extent = extent_alloc_wrapper_hard(tsdn, arena, r_extent_hooks,
		    new_addr, size, pad, alignment, slab, szind, zero, commit);
	}

	return extent;
}

/*
 * Return true iff inner (an active extent owned by arena) may be merged with
 * neighbor outer: same arena, outer in the container's state, and matching
 * commit state.
 */
static bool
extent_can_coalesce(arena_t *arena, extents_t *extents, const extent_t *inner,
    const extent_t *outer) {
	assert(extent_arena_get(inner) == arena);
	if (extent_arena_get(outer) != arena) {
		return false;
	}

	assert(extent_state_get(inner) == extent_state_active);
	if (extent_state_get(outer) != extents->state) {
		return false;
	}

	if (extent_committed_get(inner) != extent_committed_get(outer)) {
		return false;
	}

	return true;
}

/*
 * Merge inner with adjacent outer.  extents->mtx is dropped around the merge
 * hook call and reacquired afterward; on failure the LRU swap and activation
 * are rolled back.  Returns true on error.
 */
static bool
extent_coalesce(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
    extents_t *extents, extent_t *inner, extent_t *outer, bool forward,
    bool growing_retained) {
	assert(extent_can_coalesce(arena, extents, inner, outer));

	if (forward && extents->delay_coalesce) {
		/*
		 * The extent that remains after coalescing must occupy the
		 * outer extent's position in the LRU.  For forward coalescing,
		 * swap the inner extent into the LRU.
		 */
		extent_list_replace(&extents->lru, outer, inner);
	}
	extent_activate_locked(tsdn, arena, extents, outer,
	    extents->delay_coalesce);

	malloc_mutex_unlock(tsdn, &extents->mtx);
	bool err = extent_merge_impl(tsdn, arena, r_extent_hooks,
	    forward ? inner : outer, forward ? outer : inner, growing_retained);
	malloc_mutex_lock(tsdn, &extents->mtx);

	if (err) {
		/* Roll back the LRU swap and re-deactivate outer. */
		if (forward && extents->delay_coalesce) {
			extent_list_replace(&extents->lru, inner, outer);
		}
		extent_deactivate_locked(tsdn, arena, extents, outer,
		    extents->delay_coalesce);
	}

	return err;
}

/*
 * Repeatedly attempt to coalesce extent with its address-adjacent neighbors
 * in both directions.  Under delay_coalesce, stop after the first successful
 * merge ("minimal coalescing") and report via *coalesced.  Returns the
 * (possibly merged/replaced) extent.
 */
static extent_t *
extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, rtree_ctx_t *rtree_ctx, extents_t *extents,
    extent_t *extent, bool *coalesced, bool growing_retained) {
	/*
	 * Continue attempting to coalesce until failure, to protect against
	 * races with other threads that are thwarted by this one.
	 */
	bool again;
	do {
		again = false;

		/* Try to coalesce forward. */
		extent_t *next = extent_lock_from_addr(tsdn, rtree_ctx,
		    extent_past_get(extent));
		if (next != NULL) {
			/*
			 * extents->mtx only protects against races for
			 * like-state extents, so call extent_can_coalesce()
			 * before releasing next's pool lock.
			 */
			bool can_coalesce = extent_can_coalesce(arena, extents,
			    extent, next);

			extent_unlock(tsdn, next);

			if (can_coalesce && !extent_coalesce(tsdn, arena,
			    r_extent_hooks, extents, extent, next, true,
			    growing_retained)) {
				if (extents->delay_coalesce) {
					/* Do minimal coalescing. */
					*coalesced = true;
					return extent;
				}
				again = true;
			}
		}

		/* Try to coalesce backward. */
		extent_t *prev = extent_lock_from_addr(tsdn, rtree_ctx,
		    extent_before_get(extent));
		if (prev != NULL) {
			bool can_coalesce = extent_can_coalesce(arena, extents,
			    extent, prev);
			extent_unlock(tsdn, prev);

			if (can_coalesce && !extent_coalesce(tsdn, arena,
			    r_extent_hooks, extents, extent, prev, false,
			    growing_retained)) {
				/* Backward merge keeps prev's extent_t. */
				extent = prev;
				if (extents->delay_coalesce) {
					/* Do minimal coalescing. */
					*coalesced = true;
					return extent;
				}
				again = true;
			}
		}
	} while (again);

	if (extents->delay_coalesce) {
		*coalesced = false;
	}
	return extent;
}

/*
 * Deregister extent's interior slab mappings, optionally coalesce it with
 * its neighbors, and insert it into extents (dirty/muzzy/retained).
 */
static void
extent_record(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
    extents_t *extents, extent_t *extent, bool growing_retained) {
	rtree_ctx_t rtree_ctx_fallback;
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);

	/* Dirty/muzzy containers must never claim zeroed pages. */
	assert((extents_state_get(extents) != extent_state_dirty &&
	    extents_state_get(extents) != extent_state_muzzy) ||
	    !extent_zeroed_get(extent));

	malloc_mutex_lock(tsdn, &extents->mtx);

	extent_hooks_assure_initialized(arena, r_extent_hooks);

	extent_szind_set(extent, NSIZES);
	if (extent_slab_get(extent)) {
		extent_interior_deregister(tsdn, rtree_ctx, extent);
		extent_slab_set(extent, false);
	}

	assert(rtree_extent_read(tsdn, &extents_rtree, rtree_ctx,
	    (uintptr_t)extent_base_get(extent), true) == extent);

	if (!extents->delay_coalesce) {
		extent = extent_try_coalesce(tsdn, arena, r_extent_hooks,
		    rtree_ctx, extents, extent, NULL, growing_retained);
	}

	extent_deactivate_locked(tsdn, arena, extents, extent, false);

	malloc_mutex_unlock(tsdn, &extents->mtx);
}

/*
 * Deallocate a VM gap extent left over from alignment trimming.  The extent
 * must be registered before extent_dalloc_wrapper() (which deregisters it);
 * if registration fails the mapping is leaked via extents_leak().
 */
void
extent_dalloc_gap(tsdn_t *tsdn, arena_t *arena, extent_t *extent) {
	extent_hooks_t *extent_hooks = EXTENT_HOOKS_INITIALIZER;

	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (extent_register(tsdn, extent)) {
		extents_leak(tsdn, arena, &extent_hooks,
		    &arena->extents_retained, extent, false);
		return;
	}
	extent_dalloc_wrapper(tsdn, arena, &extent_hooks, extent);
}

/*
 * Default dalloc implementation: unmap unless the range lies in the DSS,
 * which can never be unmapped.  Returns true on failure to deallocate.
 */
static bool
extent_dalloc_default_impl(void *addr, size_t size)
{
	if (!have_dss || !extent_in_dss(addr)) {
		return extent_dalloc_mmap(addr, size);
	}
	return true;
}

/* Hook-table adapter for extent_dalloc_default_impl(). */
static bool
extent_dalloc_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    bool committed, unsigned arena_ind) {
	return extent_dalloc_default_impl(addr, size);
}

/*
 * Attempt to return extent's pages to the OS via the dalloc hook.  Returns
 * true if the hook refused or is absent; in that case the caller still owns
 * the extent.  On success the extent_t itself is also released.
 */
static bool
extent_dalloc_wrapper_try(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent) {
	bool err;

	assert(extent_base_get(extent) != NULL);
	assert(extent_size_get(extent) != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	/* Reset addr to base so the entire mapping is deallocated. */
	extent_addr_set(extent, extent_base_get(extent));

	extent_hooks_assure_initialized(arena, r_extent_hooks);
	/* Try to deallocate. */
	if (*r_extent_hooks == &extent_hooks_default) {
		/* Call directly to propagate tsdn.
		 */
		err = extent_dalloc_default_impl(extent_base_get(extent),
		    extent_size_get(extent));
	} else {
		err = ((*r_extent_hooks)->dalloc == NULL ||
		    (*r_extent_hooks)->dalloc(*r_extent_hooks,
		    extent_base_get(extent), extent_size_get(extent),
		    extent_committed_get(extent), arena_ind_get(arena)));
	}

	if (!err) {
		extent_dalloc(tsdn, arena, extent);
	}

	return err;
}

/*
 * Deallocate extent outright if possible; otherwise decommit or purge its
 * pages and retain it in arena->extents_retained.
 */
void
extent_dalloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	/*
	 * Deregister first to avoid a race with other allocating threads, and
	 * reregister if deallocation fails.
	 */
	extent_deregister(tsdn, extent);
	if (!extent_dalloc_wrapper_try(tsdn, arena, r_extent_hooks, extent)) {
		return;
	}

	extent_reregister(tsdn, extent);
	/* Try to decommit; purge if that fails.
	 */
	bool zeroed;
	if (!extent_committed_get(extent)) {
		zeroed = true;
	} else if (!extent_decommit_wrapper(tsdn, arena, r_extent_hooks, extent,
	    0, extent_size_get(extent))) {
		zeroed = true;
	} else if ((*r_extent_hooks)->purge_forced != NULL &&
	    !(*r_extent_hooks)->purge_forced(*r_extent_hooks,
	    extent_base_get(extent), extent_size_get(extent), 0,
	    extent_size_get(extent), arena_ind_get(arena))) {
		/* Forced purge guarantees the pages read back zeroed. */
		zeroed = true;
	} else if (extent_state_get(extent) == extent_state_muzzy ||
	    ((*r_extent_hooks)->purge_lazy != NULL &&
	    !(*r_extent_hooks)->purge_lazy(*r_extent_hooks,
	    extent_base_get(extent), extent_size_get(extent), 0,
	    extent_size_get(extent), arena_ind_get(arena)))) {
		/* Lazy purge does not guarantee zeroed contents. */
		zeroed = false;
	} else {
		zeroed = false;
	}
	extent_zeroed_set(extent, zeroed);

	if (config_prof) {
		extent_gdump_sub(tsdn, extent);
	}

	extent_record(tsdn, arena, r_extent_hooks, &arena->extents_retained,
	    extent, false);
}

/* Default destroy implementation: unmap unless the range is in the DSS. */
static void
extent_destroy_default_impl(void *addr, size_t size) {
	if (!have_dss || !extent_in_dss(addr)) {
		pages_unmap(addr, size);
	}
}

/* Hook-table adapter for extent_destroy_default_impl(). */
static void
extent_destroy_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    bool committed, unsigned arena_ind) {
	extent_destroy_default_impl(addr, size);
}

/*
 * Permanently return extent's pages to the system (arena destruction path).
 * Failures from custom destroy hooks are silently ignored; the extent_t is
 * released regardless.
 */
void
extent_destroy_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent) {
	assert(extent_base_get(extent) != NULL);
	assert(extent_size_get(extent) != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	/* Deregister first to avoid a race with other allocating threads. */
	extent_deregister(tsdn, extent);

	/* Reset addr to base so the entire mapping is destroyed. */
	extent_addr_set(extent, extent_base_get(extent));

	extent_hooks_assure_initialized(arena, r_extent_hooks);
	/* Try to destroy; silently fail otherwise. */
	if (*r_extent_hooks == &extent_hooks_default) {
		/* Call directly to propagate tsdn.
		 */
		extent_destroy_default_impl(extent_base_get(extent),
		    extent_size_get(extent));
	} else if ((*r_extent_hooks)->destroy != NULL) {
		(*r_extent_hooks)->destroy(*r_extent_hooks,
		    extent_base_get(extent), extent_size_get(extent),
		    extent_committed_get(extent), arena_ind_get(arena));
	}

	extent_dalloc(tsdn, arena, extent);
}

/* Default commit hook: commit pages at addr+offset for length bytes. */
static bool
extent_commit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    size_t offset, size_t length, unsigned arena_ind) {
	return pages_commit((void *)((uintptr_t)addr + (uintptr_t)offset),
	    length);
}

/*
 * Commit [offset, offset+length) of extent via the commit hook and update
 * the extent's committed flag accordingly.  Returns true on error.
 */
static bool
extent_commit_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length, bool growing_retained) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, growing_retained ? 1 : 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);
	bool err = ((*r_extent_hooks)->commit == NULL ||
	    (*r_extent_hooks)->commit(*r_extent_hooks, extent_base_get(extent),
	    extent_size_get(extent), offset, length, arena_ind_get(arena)));
	extent_committed_set(extent, extent_committed_get(extent) || !err);
	return err;
}

/* Public commit entry point (not on the growing_retained path). */
bool
extent_commit_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length) {
	return extent_commit_impl(tsdn, arena, r_extent_hooks, extent, offset,
	    length, false);
}

/* Default decommit hook: decommit pages at addr+offset for length bytes. */
static bool
extent_decommit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    size_t offset, size_t length, unsigned arena_ind) {
	return pages_decommit((void *)((uintptr_t)addr + (uintptr_t)offset),
	    length);
}

/*
 * Decommit [offset, offset+length) of extent via the decommit hook; clears
 * the committed flag on success.  Returns true on error.
 */
bool
extent_decommit_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);

	bool err = ((*r_extent_hooks)->decommit == NULL ||
	    (*r_extent_hooks)->decommit(*r_extent_hooks,
	    extent_base_get(extent), extent_size_get(extent), offset, length,
	    arena_ind_get(arena)));
	extent_committed_set(extent, extent_committed_get(extent) && err);
	return err;
}

#ifdef PAGES_CAN_PURGE_LAZY
/* Default lazy purge hook; offset/length must be page-aligned. */
static bool
extent_purge_lazy_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    size_t offset, size_t length, unsigned arena_ind) {
	assert(addr != NULL);
	assert((offset & PAGE_MASK) == 0);
	assert(length != 0);
	assert((length & PAGE_MASK) == 0);

	return pages_purge_lazy((void *)((uintptr_t)addr + (uintptr_t)offset),
	    length);
}
#endif

/*
 * Lazily purge [offset, offset+length) of extent; pages remain committed.
 * Returns true on error or if no purge_lazy hook is installed.
 */
static bool
extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length, bool growing_retained) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, growing_retained ? 1 : 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);
	return ((*r_extent_hooks)->purge_lazy == NULL ||
	    (*r_extent_hooks)->purge_lazy(*r_extent_hooks,
	    extent_base_get(extent), extent_size_get(extent), offset, length,
	    arena_ind_get(arena)));
}

/* Public lazy purge entry point (not on the growing_retained path). */
bool
extent_purge_lazy_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length) {
	return extent_purge_lazy_impl(tsdn, arena, r_extent_hooks, extent,
	    offset, length, false);
}

#ifdef PAGES_CAN_PURGE_FORCED
/* Default forced purge hook; offset/length must be page-aligned. */
static bool
extent_purge_forced_default(extent_hooks_t *extent_hooks, void *addr,
    size_t size, size_t offset, size_t length, unsigned arena_ind) {
	assert(addr != NULL);
	assert((offset & PAGE_MASK) == 0);
	assert(length != 0);
	assert((length & PAGE_MASK) == 0);

	return pages_purge_forced((void *)((uintptr_t)addr +
	    (uintptr_t)offset), length);
}
#endif

/*
 * Forcibly purge [offset, offset+length) of extent.  Returns true on error
 * or if no purge_forced hook is installed.
 */
static bool
extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length, bool growing_retained) {

	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, growing_retained ? 1 : 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);
	return ((*r_extent_hooks)->purge_forced == NULL ||
	    (*r_extent_hooks)->purge_forced(*r_extent_hooks,
	    extent_base_get(extent), extent_size_get(extent), offset, length,
	    arena_ind_get(arena)));
}

/* Public forced purge entry point (not on the growing_retained path). */
bool
extent_purge_forced_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
    size_t length) {
	return extent_purge_forced_impl(tsdn, arena, r_extent_hooks, extent,
	    offset, length, false);
}

#ifdef JEMALLOC_MAPS_COALESCE
/* Default split hook: splitting succeeds iff mappings can coalesce. */
static bool
extent_split_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
    size_t size_a, size_t size_b, bool committed, unsigned arena_ind) {
	return !maps_coalesce;
}
#endif

/*
 * Split extent into a lead extent of size_a (reusing the original extent_t)
 * and a trail extent of size_b (newly allocated and returned).  The rtree is
 * updated while both extents are locked.  Returns NULL on failure, leaving
 * the original extent unmodified.
 */
static extent_t *
extent_split_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t size_a,
    szind_t szind_a, bool slab_a, size_t size_b, szind_t szind_b, bool slab_b,
    bool growing_retained) {
	assert(extent_size_get(extent) == size_a + size_b);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, growing_retained ? 1 : 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);

	if ((*r_extent_hooks)->split == NULL) {
		return NULL;
	}

	extent_t *trail = extent_alloc(tsdn, arena);
	if (trail == NULL) {
		goto label_error_a;
	}

	extent_init(trail, arena, (void *)((uintptr_t)extent_base_get(extent) +
	    size_a), size_b, slab_b, szind_b, extent_sn_get(extent),
	    extent_state_get(extent), extent_zeroed_get(extent),
	    extent_committed_get(extent));

	rtree_ctx_t rtree_ctx_fallback;
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
	rtree_leaf_elm_t *lead_elm_a, *lead_elm_b;
	{
		/*
		 * Look up the lead's rtree leaf elements via a stack-local
		 * stand-in, since the real extent still covers the full range.
		 */
		extent_t lead;

		extent_init(&lead, arena, extent_addr_get(extent), size_a,
		    slab_a, szind_a, extent_sn_get(extent),
		    extent_state_get(extent), extent_zeroed_get(extent),
		    extent_committed_get(extent));

		extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, &lead, false,
		    true, &lead_elm_a, &lead_elm_b);
	}
	rtree_leaf_elm_t *trail_elm_a, *trail_elm_b;
	extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, trail, false, true,
	    &trail_elm_a, &trail_elm_b);

	if (lead_elm_a == NULL || lead_elm_b == NULL || trail_elm_a == NULL
	    || trail_elm_b == NULL) {
		goto label_error_b;
	}

	extent_lock2(tsdn, extent, trail);

	if ((*r_extent_hooks)->split(*r_extent_hooks, extent_base_get(extent),
	    size_a + size_b, size_a, size_b, extent_committed_get(extent),
	    arena_ind_get(arena))) {
		goto label_error_c;
	}

	extent_size_set(extent, size_a);
	extent_szind_set(extent, szind_a);

	extent_rtree_write_acquired(tsdn, lead_elm_a, lead_elm_b, extent,
	    szind_a, slab_a);
	extent_rtree_write_acquired(tsdn, trail_elm_a, trail_elm_b, trail,
	    szind_b, slab_b);

	extent_unlock2(tsdn, extent, trail);

	return trail;
label_error_c:
	extent_unlock2(tsdn, extent, trail);
label_error_b:
	extent_dalloc(tsdn, arena, trail);
label_error_a:
	return NULL;
}

/* Public split entry point (not on the growing_retained path). */
extent_t *
extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *extent, size_t size_a,
    szind_t szind_a, bool slab_a, size_t size_b, szind_t szind_b, bool slab_b) {
	return extent_split_impl(tsdn, arena, r_extent_hooks, extent, size_a,
	    szind_a, slab_a, size_b, szind_b, slab_b, false);
}

/*
 * Default merge feasibility check: merging fails if the platform cannot
 * coalesce mappings, or if the two extents straddle the DSS boundary.
 * Returns true if the extents must NOT be merged.
 */
static bool
extent_merge_default_impl(void *addr_a, void *addr_b) {
	if (!maps_coalesce) {
		return true;
	}
	if (have_dss && !extent_dss_mergeable(addr_a, addr_b)) {
		return true;
	}

	return false;
}

#ifdef JEMALLOC_MAPS_COALESCE
/* Hook-table adapter for extent_merge_default_impl(). */
static bool
extent_merge_default(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
    void *addr_b, size_t size_b, bool committed, unsigned arena_ind) {
	return extent_merge_default_impl(addr_a, addr_b);
}
#endif

/*
 * Merge physically contiguous extents a and b into a; b's extent_t is
 * released on success.  Returns true on error.
 */
static bool
extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
    extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b,
    bool growing_retained) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, growing_retained ? 1 : 0);

	extent_hooks_assure_initialized(arena, r_extent_hooks);

	if ((*r_extent_hooks)->merge == NULL) {
		return true;
	}

	bool err;
	if (*r_extent_hooks == &extent_hooks_default) {
		/* Call directly to propagate tsdn.
*/ 1843*b7eaed25SJason Evans err = extent_merge_default_impl(extent_base_get(a), 1844*b7eaed25SJason Evans extent_base_get(b)); 1845*b7eaed25SJason Evans } else { 1846*b7eaed25SJason Evans err = (*r_extent_hooks)->merge(*r_extent_hooks, 1847*b7eaed25SJason Evans extent_base_get(a), extent_size_get(a), extent_base_get(b), 1848*b7eaed25SJason Evans extent_size_get(b), extent_committed_get(a), 1849*b7eaed25SJason Evans arena_ind_get(arena)); 1850*b7eaed25SJason Evans } 1851*b7eaed25SJason Evans 1852*b7eaed25SJason Evans if (err) { 1853*b7eaed25SJason Evans return true; 1854*b7eaed25SJason Evans } 1855*b7eaed25SJason Evans 1856*b7eaed25SJason Evans /* 1857*b7eaed25SJason Evans * The rtree writes must happen while all the relevant elements are 1858*b7eaed25SJason Evans * owned, so the following code uses decomposed helper functions rather 1859*b7eaed25SJason Evans * than extent_{,de}register() to do things in the right order. 1860*b7eaed25SJason Evans */ 1861*b7eaed25SJason Evans rtree_ctx_t rtree_ctx_fallback; 1862*b7eaed25SJason Evans rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback); 1863*b7eaed25SJason Evans rtree_leaf_elm_t *a_elm_a, *a_elm_b, *b_elm_a, *b_elm_b; 1864*b7eaed25SJason Evans extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, a, true, false, &a_elm_a, 1865*b7eaed25SJason Evans &a_elm_b); 1866*b7eaed25SJason Evans extent_rtree_leaf_elms_lookup(tsdn, rtree_ctx, b, true, false, &b_elm_a, 1867*b7eaed25SJason Evans &b_elm_b); 1868*b7eaed25SJason Evans 1869*b7eaed25SJason Evans extent_lock2(tsdn, a, b); 1870*b7eaed25SJason Evans 1871*b7eaed25SJason Evans if (a_elm_b != NULL) { 1872*b7eaed25SJason Evans rtree_leaf_elm_write(tsdn, &extents_rtree, a_elm_b, NULL, 1873*b7eaed25SJason Evans NSIZES, false); 1874*b7eaed25SJason Evans } 1875*b7eaed25SJason Evans if (b_elm_b != NULL) { 1876*b7eaed25SJason Evans rtree_leaf_elm_write(tsdn, &extents_rtree, b_elm_a, NULL, 1877*b7eaed25SJason Evans NSIZES, false); 1878*b7eaed25SJason Evans } else { 
1879*b7eaed25SJason Evans b_elm_b = b_elm_a; 1880*b7eaed25SJason Evans } 1881*b7eaed25SJason Evans 1882*b7eaed25SJason Evans extent_size_set(a, extent_size_get(a) + extent_size_get(b)); 1883*b7eaed25SJason Evans extent_szind_set(a, NSIZES); 1884*b7eaed25SJason Evans extent_sn_set(a, (extent_sn_get(a) < extent_sn_get(b)) ? 1885*b7eaed25SJason Evans extent_sn_get(a) : extent_sn_get(b)); 1886*b7eaed25SJason Evans extent_zeroed_set(a, extent_zeroed_get(a) && extent_zeroed_get(b)); 1887*b7eaed25SJason Evans 1888*b7eaed25SJason Evans extent_rtree_write_acquired(tsdn, a_elm_a, b_elm_b, a, NSIZES, false); 1889*b7eaed25SJason Evans 1890*b7eaed25SJason Evans extent_unlock2(tsdn, a, b); 1891*b7eaed25SJason Evans 1892*b7eaed25SJason Evans extent_dalloc(tsdn, extent_arena_get(b), b); 1893*b7eaed25SJason Evans 1894*b7eaed25SJason Evans return false; 1895*b7eaed25SJason Evans } 1896*b7eaed25SJason Evans 1897*b7eaed25SJason Evans bool 1898*b7eaed25SJason Evans extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena, 1899*b7eaed25SJason Evans extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b) { 1900*b7eaed25SJason Evans return extent_merge_impl(tsdn, arena, r_extent_hooks, a, b, false); 1901*b7eaed25SJason Evans } 1902*b7eaed25SJason Evans 1903*b7eaed25SJason Evans bool 1904*b7eaed25SJason Evans extent_boot(void) { 1905*b7eaed25SJason Evans if (rtree_new(&extents_rtree, true)) { 1906*b7eaed25SJason Evans return true; 1907*b7eaed25SJason Evans } 1908*b7eaed25SJason Evans 1909*b7eaed25SJason Evans if (mutex_pool_init(&extent_mutex_pool, "extent_mutex_pool", 1910*b7eaed25SJason Evans WITNESS_RANK_EXTENT_POOL)) { 1911*b7eaed25SJason Evans return true; 1912*b7eaed25SJason Evans } 1913*b7eaed25SJason Evans 1914*b7eaed25SJason Evans if (have_dss) { 1915*b7eaed25SJason Evans extent_dss_boot(); 1916*b7eaed25SJason Evans } 1917*b7eaed25SJason Evans 1918*b7eaed25SJason Evans return false; 1919*b7eaed25SJason Evans } 1920