/freebsd/contrib/jemalloc/include/jemalloc/internal/

arena_inlines_b.h:
   38: arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
   44:     const extent_t *extent = iealloc(tsdn, ptr);
   46:     return large_prof_tctx_get(tsdn, extent);
   50:     return large_prof_tctx_get(tsdn, iealloc(tsdn, ptr));
   57: arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
   64:     extent_t *extent = iealloc(tsdn, ptr);
   66:     large_prof_tctx_set(tsdn, extent, tctx);
   70:     large_prof_tctx_set(tsdn, iealloc(tsdn, ptr), tctx);
   76: arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
   80:     extent_t *extent = iealloc(tsdn, ptr);
  [all …]

jemalloc_internal_inlines_c.h:
   28: iaalloc(tsdn_t *tsdn, const void *ptr) {
   31:     return arena_aalloc(tsdn, ptr);
   35: isalloc(tsdn_t *tsdn, const void *ptr) {
   38:     return arena_salloc(tsdn, ptr);
   42: iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
   48:     if (!tsdn_null(tsdn) && tsd_reentrancy_level_get(tsdn_tsd(tsdn)) == 0) {
   49:         witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
   53:     ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
   55:         arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
   67: ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
  [all …]

mutex_pool.h:
   27: mutex_pool_assert_not_held(tsdn_t *tsdn, mutex_pool_t *pool) {
   29:     malloc_mutex_assert_not_owner(tsdn, &pool->mutexes[i]);
   41: mutex_pool_lock(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key) {
   42:     mutex_pool_assert_not_held(tsdn, pool);
   45:     malloc_mutex_lock(tsdn, mutex);
   49: mutex_pool_unlock(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key) {
   51:     malloc_mutex_unlock(tsdn, mutex);
   53:     mutex_pool_assert_not_held(tsdn, pool);
   57: mutex_pool_lock2(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key1,
   59:     mutex_pool_assert_not_held(tsdn, pool);
  [all …]

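The mutex_pool.h functions above lock one of a fixed set of mutexes chosen by
hashing a caller-supplied key (in practice an extent address), so a bounded
number of locks can cover an unbounded number of objects; mutex_pool_lock2()
additionally has to order its two acquisitions to stay deadlock-free. A
minimal sketch of the idea, with a hypothetical pool size and hash function,
neither taken from jemalloc:

    #include <pthread.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Pool size is hypothetical; jemalloc chooses its own constant. */
    #define POOL_NMUTEXES 64

    typedef struct {
        pthread_mutex_t mutexes[POOL_NMUTEXES];
    } mutex_pool_sketch_t;

    /* Initialize every mutex in the pool once, before first use. */
    static void
    pool_init(mutex_pool_sketch_t *pool) {
        for (size_t i = 0; i < POOL_NMUTEXES; i++) {
            pthread_mutex_init(&pool->mutexes[i], NULL);
        }
    }

    /* Map a key to one mutex; cheap mixing, not jemalloc's hash. */
    static pthread_mutex_t *
    pool_mutex(mutex_pool_sketch_t *pool, uintptr_t key) {
        key ^= key >> 16;
        return &pool->mutexes[(key * 2654435761u) % POOL_NMUTEXES];
    }

    static void
    pool_lock(mutex_pool_sketch_t *pool, uintptr_t key) {
        pthread_mutex_lock(pool_mutex(pool, key));
    }

    static void
    pool_unlock(mutex_pool_sketch_t *pool, uintptr_t key) {
        pthread_mutex_unlock(pool_mutex(pool, key));
    }

    /*
     * Two-key lock (cf. mutex_pool_lock2): both keys may hash to the same
     * mutex, which must then be taken only once; otherwise acquire in
     * address order so concurrent callers cannot deadlock on the same pair.
     */
    static void
    pool_lock2(mutex_pool_sketch_t *pool, uintptr_t key1, uintptr_t key2) {
        pthread_mutex_t *m1 = pool_mutex(pool, key1);
        pthread_mutex_t *m2 = pool_mutex(pool, key2);
        if (m1 == m2) {
            pthread_mutex_lock(m1);
        } else if (m1 < m2) {
            pthread_mutex_lock(m1);
            pthread_mutex_lock(m2);
        } else {
            pthread_mutex_lock(m2);
            pthread_mutex_lock(m1);
        }
    }
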
arena_externs.h:
   22: void arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena,
   25: void arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
   30: void arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
   35: extent_t *arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena,
   37: void arena_extent_dalloc_large_prep(tsdn_t *tsdn, arena_t *arena,
   39: void arena_extent_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena,
   41: void arena_extent_ralloc_large_expand(tsdn_t *tsdn, arena_t *arena,
   44: bool arena_dirty_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
   46: bool arena_muzzy_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
   47: void arena_decay(tsdn_t *tsdn, arena_t *arena, bool is_background_thread,
  [all …]

rtree.h:
  126: void rtree_delete(tsdn_t *tsdn, rtree_t *rtree);
  128: rtree_leaf_elm_t *rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
  173: rtree_leaf_elm_bits_read(tsdn_t *tsdn, rtree_t *rtree,
  211: rtree_leaf_elm_extent_read(tsdn_t *tsdn, rtree_t *rtree,
  214:     uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
  224: rtree_leaf_elm_szind_read(tsdn_t *tsdn, rtree_t *rtree,
  227:     uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
  236: rtree_leaf_elm_slab_read(tsdn_t *tsdn, rtree_t *rtree,
  239:     uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
  248: rtree_leaf_elm_extent_write(tsdn_t *tsdn, rtree_t *rtree,
  [all …]

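Each rtree_leaf_elm_*_read() above decodes a single word fetched by
rtree_leaf_elm_bits_read(): the extent pointer, the size-class index (szind),
and the slab flag all travel in one uintptr_t, so a pointer lookup costs one
load. A self-contained sketch of such packing with an invented layout
(jemalloc's actual layout in rtree.h differs and depends on the address-space
configuration):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct extent_s extent_t; /* opaque for this sketch */

    /*
     * Invented layout: bit 0 = slab flag, bits 1..8 = szind, remaining
     * bits = extent address, assumed 512-byte aligned so its low 9 bits
     * are free for metadata.
     */
    static uintptr_t
    leaf_bits_encode(const extent_t *extent, unsigned szind, bool slab) {
        assert(((uintptr_t)extent & 0x1ffU) == 0 && szind < 256);
        return (uintptr_t)extent | ((uintptr_t)szind << 1) | (uintptr_t)slab;
    }

    static extent_t *
    leaf_extent_read(uintptr_t bits) {
        return (extent_t *)(bits & ~(uintptr_t)0x1ffU);
    }

    static unsigned
    leaf_szind_read(uintptr_t bits) {
        return (unsigned)((bits >> 1) & 0xffU);
    }

    static bool
    leaf_slab_read(uintptr_t bits) {
        return (bits & 1U) != 0;
    }
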
arena_stats.h:
  131: arena_stats_init(tsdn_t *tsdn, arena_stats_t *arena_stats) {
  148: arena_stats_lock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
  150:     malloc_mutex_lock(tsdn, &arena_stats->mtx);
  155: arena_stats_unlock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
  157:     malloc_mutex_unlock(tsdn, &arena_stats->mtx);
  162: arena_stats_read_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
  167:     malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
  173: arena_stats_add_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
  178:     malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
  184: arena_stats_sub_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
  [all …]

extent_externs.h:
   15: extent_t *extent_alloc(tsdn_t *tsdn, arena_t *arena);
   16: void extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
   30: bool extents_init(tsdn_t *tsdn, extents_t *extents, extent_state_t state,
   38: extent_t *extents_alloc(tsdn_t *tsdn, arena_t *arena,
   42: void extents_dalloc(tsdn_t *tsdn, arena_t *arena,
   44: extent_t *extents_evict(tsdn_t *tsdn, arena_t *arena,
   46: void extents_prefork(tsdn_t *tsdn, extents_t *extents);
   47: void extents_postfork_parent(tsdn_t *tsdn, extents_t *extents);
   48: void extents_postfork_child(tsdn_t *tsdn, extents_t *extents);
   49: extent_t *extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
  [all …]

prof_externs.h:
   47: void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
   67: bool prof_accum_init(tsdn_t *tsdn, prof_accum_t *prof_accum);
   68: void prof_idump(tsdn_t *tsdn);
   70: void prof_gdump(tsdn_t *tsdn);
   75: bool prof_active_get(tsdn_t *tsdn);
   76: bool prof_active_set(tsdn_t *tsdn, bool active);
   81: bool prof_thread_active_init_get(tsdn_t *tsdn);
   82: bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
   83: bool prof_gdump_get(tsdn_t *tsdn);
   84: bool prof_gdump_set(tsdn_t *tsdn, bool active);
  [all …]

mutex.h:
  142: void malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex);
  143: void malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex);
  144: void malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex);
  147: void malloc_mutex_prof_data_reset(tsdn_t *tsdn, malloc_mutex_t *mutex);
  163: mutex_owner_stats_update(tsdn_t *tsdn, malloc_mutex_t *mutex) {
  167:     if (data->prev_owner != tsdn) {
  168:         data->prev_owner = tsdn;
  176: malloc_mutex_trylock(tsdn_t *tsdn, malloc_mutex_t *mutex) {
  177:     witness_assert_not_owner(tsdn_witness_tsdp_get(tsdn), &mutex->witness);
  183:         mutex_owner_stats_update(tsdn, mutex);
  [all …]

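malloc_mutex_trylock() above couples acquisition with
mutex_owner_stats_update(), which detects lock hand-offs by comparing the
previous owner's tsdn against the current one. A rough pthreads equivalent
with illustrative field names, keeping jemalloc's convention that a true
return means failure:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
        pthread_mutex_t lock;
        uint64_t n_lock_ops;        /* total acquisitions */
        uint64_t n_owner_switches;  /* acquisitions by a new owner */
        const void *prev_owner;     /* opaque thread handle, like a tsdn */
    } mutex_sketch_t;

    /* Must be called with m->lock held. */
    static void
    owner_stats_update(mutex_sketch_t *m, const void *self) {
        m->n_lock_ops++;
        if (m->prev_owner != self) {
            m->prev_owner = self;
            m->n_owner_switches++;
        }
    }

    /* Returns true on failure, false once the lock is held. */
    static bool
    mutex_trylock_sketch(mutex_sketch_t *m, const void *self) {
        if (pthread_mutex_trylock(&m->lock) != 0) {
            return true;
        }
        owner_stats_update(m, self);
        return false;
    }
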
large_externs.h:
    6: void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
    7: void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
    9: bool large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
   11: void *large_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t usize,
   21: void large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent);
   22: void large_dalloc_finish(tsdn_t *tsdn, extent_t *extent);
   23: void large_dalloc(tsdn_t *tsdn, extent_t *extent);
   24: size_t large_salloc(tsdn_t *tsdn, const extent_t *extent);
   25: prof_tctx_t *large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent);
   26: void large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx);
  [all …]

base_externs.h:
    8: base_t *base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks);
    9: void base_delete(tsdn_t *tsdn, base_t *base);
   13: void *base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment);
   14: extent_t *base_alloc_extent(tsdn_t *tsdn, base_t *base);
   15: void base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated,
   17: void base_prefork(tsdn_t *tsdn, base_t *base);
   18: void base_postfork_parent(tsdn_t *tsdn, base_t *base);
   19: void base_postfork_child(tsdn_t *tsdn, base_t *base);
   20: bool base_boot(tsdn_t *tsdn);

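The prefork/postfork_parent/postfork_child triads recurring through these
headers keep the allocator fork-safe: before fork(2) every internal mutex is
acquired in one fixed order so no lock is held mid-update, and afterwards the
parent releases them while the child releases or reinitializes them (only the
forking thread survives there). A generic sketch of the pattern using
pthread_atfork, not jemalloc's actual registration code:

    #include <pthread.h>

    static pthread_mutex_t m0 = PTHREAD_MUTEX_INITIALIZER;
    static pthread_mutex_t m1 = PTHREAD_MUTEX_INITIALIZER;

    /* Take every lock in a fixed order; cf. the *_prefork functions. */
    static void
    prefork(void) {
        pthread_mutex_lock(&m0);
        pthread_mutex_lock(&m1);
    }

    /* Parent: release in reverse order; cf. *_postfork_parent. */
    static void
    postfork_parent(void) {
        pthread_mutex_unlock(&m1);
        pthread_mutex_unlock(&m0);
    }

    /*
     * Child: the forking thread is the only survivor and is the thread
     * that locked in prefork, so unlocking is legal here; jemalloc's
     * *_postfork_child variants typically reinitialize the mutex instead.
     */
    static void
    postfork_child(void) {
        pthread_mutex_unlock(&m1);
        pthread_mutex_unlock(&m0);
    }

    static void
    install_fork_handlers(void) {
        pthread_atfork(prefork, postfork_parent, postfork_child);
    }
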
background_thread_externs.h:
   15: void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
   17: void background_thread_prefork0(tsdn_t *tsdn);
   18: void background_thread_prefork1(tsdn_t *tsdn);
   19: void background_thread_postfork_parent(tsdn_t *tsdn);
   20: void background_thread_postfork_child(tsdn_t *tsdn);
   21: bool background_thread_stats_read(tsdn_t *tsdn,
   23: void background_thread_ctl_init(tsdn_t *tsdn);
   30: bool background_thread_boot1(tsdn_t *tsdn);

tcache_externs.h:
   28: size_t tcache_salloc(tsdn_t *tsdn, const void *ptr);
   30: void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
   36: void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
   40: void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
   44: bool tcache_boot(tsdn_t *tsdn);
   45: void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
   46: void tcache_prefork(tsdn_t *tsdn);
   47: void tcache_postfork_parent(tsdn_t *tsdn);
   48: void tcache_postfork_child(tsdn_t *tsdn);

prof_inlines_b.h:
   41: prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
   45:     return arena_prof_tctx_get(tsdn, ptr, alloc_ctx);
   49: prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
   54:     arena_prof_tctx_set(tsdn, ptr, usize, alloc_ctx, tctx);
   58: prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
   62:     arena_prof_tctx_reset(tsdn, ptr, tctx);
   66: prof_alloc_time_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
   70:     return arena_prof_alloc_time_get(tsdn, ptr, alloc_ctx);
   74: prof_alloc_time_set(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx,
   79:     arena_prof_alloc_time_set(tsdn, ptr, alloc_ctx, t);
  [all …]

tsd.h:
  121: void tsd_global_slow_inc(tsdn_t *tsdn);
  122: void tsd_global_slow_dec(tsdn_t *tsdn);
  230: tsdn_null(const tsdn_t *tsdn) {
  231:     return tsdn == NULL;
  235: tsdn_tsd(tsdn_t *tsdn) {
  236:     assert(!tsdn_null(tsdn));
  238:     return &tsdn->tsd;
  294: tsdn_##n##p_get(tsdn_t *tsdn) { \
  295:     if (tsdn_null(tsdn)) { \
  298:     tsd_t *tsd = tsdn_tsd(tsdn); \
  [all …]

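tsdn_t is the nullable flavor of the thread-specific-data handle tsd_t: NULL
is a legal value (e.g. during early bootstrap), so callers must test
tsdn_null() before converting with tsdn_tsd(), the same pattern as base.c's
"tsdn_null(tsdn) ? tsd_fetch() : tsdn_tsd(tsdn)" in the src/ results below. A
standalone sketch of the idiom with stand-in types:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    typedef struct { int depth; } tsd_sketch_t;         /* stands in for tsd_t */
    typedef struct { tsd_sketch_t tsd; } tsdn_sketch_t; /* nullable wrapper */

    static bool
    tsdn_is_null(const tsdn_sketch_t *tsdn) {
        return tsdn == NULL;
    }

    /* Only legal on a non-NULL handle, as tsdn_tsd() asserts. */
    static tsd_sketch_t *
    tsdn_to_tsd(tsdn_sketch_t *tsdn) {
        assert(!tsdn_is_null(tsdn));
        return &tsdn->tsd;
    }

    /* The recurring call-site shape: fall back when no tsd exists yet. */
    static tsd_sketch_t *
    tsd_or_fallback(tsdn_sketch_t *tsdn, tsd_sketch_t *fallback) {
        return tsdn_is_null(tsdn) ? fallback : tsdn_to_tsd(tsdn);
    }
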
bin.h:
  101: void bin_prefork(tsdn_t *tsdn, bin_t *bin);
  102: void bin_postfork_parent(tsdn_t *tsdn, bin_t *bin);
  103: void bin_postfork_child(tsdn_t *tsdn, bin_t *bin);
  107: bin_stats_merge(tsdn_t *tsdn, bin_stats_t *dst_bin_stats, bin_t *bin) {
  108:     malloc_mutex_lock(tsdn, &bin->lock);
  109:     malloc_mutex_prof_accum(tsdn, &dst_bin_stats->mutex_data, &bin->lock);
  120:     malloc_mutex_unlock(tsdn, &bin->lock);

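bin_stats_merge() above snapshots one bin's counters into caller-owned storage
while holding the bin lock, so stats readers never observe a half-updated bin.
The shape of that pattern with stand-in types:

    #include <pthread.h>
    #include <stdint.h>

    typedef struct {
        uint64_t nmalloc; /* illustrative counters, not bin_stats_t's */
        uint64_t ndalloc;
    } bin_stats_sketch_t;

    typedef struct {
        pthread_mutex_t lock;
        bin_stats_sketch_t stats;
    } bin_sketch_t;

    static void
    bin_stats_merge_sketch(bin_stats_sketch_t *dst, bin_sketch_t *bin) {
        pthread_mutex_lock(&bin->lock);
        dst->nmalloc += bin->stats.nmalloc;
        dst->ndalloc += bin->stats.ndalloc;
        pthread_mutex_unlock(&bin->lock);
    }
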
/freebsd/contrib/jemalloc/src/

extent.c:
   34: static bool extent_commit_impl(tsdn_t *tsdn, arena_t *arena,
   43: static bool extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena,
   50: static bool extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena,
   56: static extent_t *extent_split_impl(tsdn_t *tsdn, arena_t *arena,
   63: static bool extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
  102: static void extent_deregister(tsdn_t *tsdn, extent_t *extent);
  103: static extent_t *extent_recycle(tsdn_t *tsdn, arena_t *arena,
  107: static extent_t *extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
  110: static void extent_record(tsdn_t *tsdn, arena_t *arena,
  130: extent_rtree_leaf_elm_try_lock(tsdn_t *tsdn, rtree_leaf_elm_t *elm,
  [all …]

large.c:
   14: large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) {
   17:     return large_palloc(tsdn, arena, usize, CACHELINE, zero);
   21: large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
   28:     assert(!tsdn_null(tsdn) || arena != NULL);
   44:     if (likely(!tsdn_null(tsdn))) {
   45:         arena = arena_choose_maybe_huge(tsdn_tsd(tsdn), arena, usize);
   47:     if (unlikely(arena == NULL) || (extent = arena_extent_alloc_large(tsdn,
   55:         malloc_mutex_lock(tsdn, &arena->large_mtx);
   57:         malloc_mutex_unlock(tsdn, &arena->large_mtx);
   59:     if (config_prof && arena_prof_accum(tsdn, arena, usize)) {
  [all …]

arena.c:
   57: static void arena_decay_to_limit(tsdn_t *tsdn, arena_t *arena,
   60: static bool arena_decay_dirty(tsdn_t *tsdn, arena_t *arena,
   62: static void arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
   64: static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
   70: arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
   83: arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
   90:     arena_basic_stats_merge(tsdn, arena, nthreads, dss, dirty_decay_ms,
   94:     base_stats_get(tsdn, arena->base, &base_allocated, &base_resident,
   97:     arena_stats_lock(tsdn, &arena->stats);
  100:         + arena_stats_read_zu(tsdn, &arena->stats, &arena->stats.mapped));
  [all …]

background_thread.c:
   57: void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
   59: void background_thread_prefork0(tsdn_t *tsdn) NOT_REACHED
   60: void background_thread_prefork1(tsdn_t *tsdn) NOT_REACHED
   61: void background_thread_postfork_parent(tsdn_t *tsdn) NOT_REACHED
   62: void background_thread_postfork_child(tsdn_t *tsdn) NOT_REACHED
   63: bool background_thread_stats_read(tsdn_t *tsdn,
   65: void background_thread_ctl_init(tsdn_t *tsdn) NOT_REACHED
   72: background_thread_info_init(tsdn_t *tsdn, background_thread_info_t *info) {
   73:     background_thread_wakeup_time_set(tsdn, info, 0);
  116: arena_decay_compute_purge_interval_impl(tsdn_t *tsdn, arena_decay_t *decay,
  [all …]

rtree.c:
   31: rtree_node_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
   32:     return (rtree_node_elm_t *)base_alloc(tsdn, b0get(), nelms *
   38: rtree_node_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *node) {
   46: rtree_leaf_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
   47:     return (rtree_leaf_elm_t *)base_alloc(tsdn, b0get(), nelms *
   53: rtree_leaf_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *leaf) {
   63: rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,
   72:         rtree_delete_subtree(tsdn, rtree, node, level +
   82:         rtree_leaf_dalloc(tsdn, rtree, leaf);
   88:     rtree_node_dalloc(tsdn, rtree, subtree);
  [all …]

base.c:
   32: base_map(tsdn_t *tsdn, extent_hooks_t *extent_hooks, unsigned ind, size_t size) {
   44:     tsd_t *tsd = tsdn_null(tsdn) ? tsd_fetch() : tsdn_tsd(tsdn);
   55: base_unmap(tsdn_t *tsdn, extent_hooks_t *extent_hooks, unsigned ind, void *addr,
   83:     tsd_t *tsd = tsdn_null(tsdn) ? tsd_fetch() : tsdn_tsd(tsdn);
  144: base_auto_thp_switch(tsdn_t *tsdn, base_t *base) {
  146:     malloc_mutex_assert_owner(tsdn, &base->mtx);
  248: base_block_alloc(tsdn_t *tsdn, base_t *base, extent_hooks_t *extent_hooks,
  270:     base_block_t *block = (base_block_t *)base_map(tsdn, extent_hooks, ind,
  285:         malloc_mutex_lock(tsdn, &base->mtx);
  286:         base_auto_thp_switch(tsdn, base);
  [all …]

prof.c:
  236: static bool prof_tctx_should_destroy(tsdn_t *tsdn, prof_tctx_t *tctx);
  238: static bool prof_tdata_should_destroy(tsdn_t *tsdn, prof_tdata_t *tdata,
  242: static char *prof_thread_name_alloc(tsdn_t *tsdn, const char *thread_name);
  343: prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
  345:     prof_tctx_set(tsdn, ptr, usize, NULL, tctx);
  351:     prof_alloc_time_set(tsdn, ptr, NULL, t);
  353:     malloc_mutex_lock(tsdn, tctx->tdata->lock);
  361:     malloc_mutex_unlock(tsdn, tctx->tdata->lock);
  821: prof_gctx_create(tsdn_t *tsdn, prof_bt_t *bt) {
  826:     prof_gctx_t *gctx = (prof_gctx_t *)iallocztm(tsdn, size,
  [all …]

hook.c:
   48: hook_install(tsdn_t *tsdn, hooks_t *to_install) {
   49:     malloc_mutex_lock(tsdn, &hooks_mu);
   52:         tsd_global_slow_inc(tsdn);
   54:     malloc_mutex_unlock(tsdn, &hooks_mu);
   73: hook_remove(tsdn_t *tsdn, void *opaque) {
   81:     malloc_mutex_lock(tsdn, &hooks_mu);
   83:     tsd_global_slow_dec(tsdn);
   84:     malloc_mutex_unlock(tsdn, &hooks_mu);
  132:     tsdn_t *tsdn = tsdn_fetch();
  133:     tcache_t *tcache = tsdn_tcachep_get(tsdn);

tcache.c:
   36: tcache_salloc(tsdn_t *tsdn, const void *ptr) {
   37:     return arena_salloc(tsdn, ptr);
   89: tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
   94:     arena_tcache_fill_small(tsdn, arena, tcache, tbin, binind,
  106: tbin_extents_lookup_size_check(tsdn_t *tsdn, cache_bin_t *tbin, szind_t binind,
  109:     rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
  120:         rtree_extent_szind_read(tsdn, &extents_rtree,
  341: tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) {
  347:         malloc_mutex_lock(tsdn, &arena->tcache_ql_mtx);
  357:         malloc_mutex_unlock(tsdn, &arena->tcache_ql_mtx);
  [all …]