/freebsd/contrib/jemalloc/include/jemalloc/internal/
arena_inlines_b.h

    in arena_salloc():
        191  szind_t szind = rtree_szind_read(tsdn, &extents_rtree, rtree_ctx,  (local)
        193  assert(szind != SC_NSIZES);
        195  return sz_index2size(szind);

    in arena_vsalloc():
        213  szind_t szind;  (local)
        215  (uintptr_t)ptr, false, &extent, &szind)) {
        226  assert(szind != SC_NSIZES);
        228  return sz_index2size(szind);

    in arena_dalloc_large_no_tcache():
        232  arena_dalloc_large_no_tcache(tsdn_t *tsdn, void *ptr, szind_t szind) {  (argument)
        233  if (config_prof && unlikely(szind < SC_NBINS)) {

    in arena_dalloc_no_tcache():
        248  szind_t szind;  (local)

    [all …]
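The arena_salloc() hits above show the basic read path: the size index
("szind") stored for a pointer is asserted valid and then mapped back to a
byte count with sz_index2size(). A minimal standalone sketch of that
index-to-size step, using a made-up size-class table in place of the
tables jemalloc generates in sc.h, and NSIZES in place of SC_NSIZES:

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    typedef unsigned szind_t;

    /* Hypothetical size classes; jemalloc derives the real ones. */
    static const size_t index2size_tab[] = {8, 16, 32, 48, 64, 80, 96, 128};
    #define NSIZES (sizeof(index2size_tab) / sizeof(index2size_tab[0]))

    static size_t
    index2size(szind_t szind) {
        assert(szind < NSIZES); /* NSIZES plays the role of SC_NSIZES */
        return index2size_tab[szind];
    }

    int
    main(void) {
        szind_t szind = 3; /* pretend this came from the rtree lookup */
        printf("szind %u -> %zu bytes\n", szind, index2size(szind));
        return 0;
    }

Storing a small integer index rather than the size itself is what lets
szind fit into spare bits of other metadata words, as the rtree.h and
extent_inlines.h entries below show.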
rtree.h

    in rtree_leaf_elm_szind_write():
        263  rtree_leaf_elm_t *elm, szind_t szind) {  (argument)
        264  assert(szind <= SC_NSIZES);
        269  uintptr_t bits = ((uintptr_t)szind << LG_VADDR) |
        275  atomic_store_u(&elm->le_szind, szind, ATOMIC_RELEASE);

    in rtree_leaf_elm_write():
        296  rtree_leaf_elm_t *elm, extent_t *extent, szind_t szind, bool slab) {  (argument)
        298  uintptr_t bits = ((uintptr_t)szind << LG_VADDR) |
        304  rtree_leaf_elm_szind_write(tsdn, rtree, elm, szind);

    in rtree_leaf_elm_szind_slab_update():
        315  rtree_leaf_elm_t *elm, szind_t szind, bool slab) {  (argument)
        316  assert(!slab || szind < SC_NBINS);
        323  rtree_leaf_elm_szind_write(tsdn, rtree, elm, szind);

    [all …]
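rtree_leaf_elm_szind_write() packs the index into the bits above LG_VADDR
of a single word, next to the extent pointer and the slab flag. A sketch
of that tagged-word layout, assuming a 64-bit target where only the low
48 bits of a pointer are significant (LG_VADDR = 48 is an assumption;
jemalloc configures the real value per platform); pack()/unpack_szind()
are illustrative names, not the library's API:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define LG_VADDR 48
    #define PTR_MASK ((((uintptr_t)1) << LG_VADDR) - 1)

    static uintptr_t
    pack(const void *extent, unsigned szind, int slab) {
        /* Extents are pointer-aligned, so bit 0 is free for the slab flag. */
        assert(((uintptr_t)extent & 1) == 0);
        return ((uintptr_t)szind << LG_VADDR)
            | ((uintptr_t)extent & PTR_MASK)
            | (uintptr_t)(slab != 0);
    }

    static unsigned
    unpack_szind(uintptr_t bits) {
        return (unsigned)(bits >> LG_VADDR);
    }

    int
    main(void) {
        int dummy;
        uintptr_t bits = pack(&dummy, 7, 1);
        printf("szind=%u slab=%d\n", unpack_szind(bits), (int)(bits & 1));
        return 0;
    }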
extent_inlines.h

    in extent_szind_get_maybe_invalid():
        56   szind_t szind = (szind_t)((extent->e_bits & EXTENT_BITS_SZIND_MASK) >>  (local)
        58   assert(szind <= SC_NSIZES);
        59   return szind;

    in extent_szind_get():
        64   szind_t szind = extent_szind_get_maybe_invalid(extent);  (local)
        65   assert(szind < SC_NSIZES); /* Never call when "invalid". */
        66   return szind;

    in extent_szind_set():
        259  extent_szind_set(extent_t *extent, szind_t szind) {  (argument)
        260  assert(szind <= SC_NSIZES); /* SC_NSIZES means "invalid". */
        262  ((uint64_t)szind << EXTENT_BITS_SZIND_SHIFT);

    in extent_init():
        368  bool slab, szind_t szind, size_t sn, extent_state_t state, bool zeroed,  (argument)

    [all …]
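extent_szind_get_maybe_invalid() and extent_szind_set() treat szind as one
field of the packed e_bits word. A sketch of the shift-and-mask accessor
pattern; the shift, mask, and NSIZES values below are placeholders, not
the EXTENT_BITS_SZIND_* constants jemalloc generates:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define SZIND_SHIFT 8
    #define SZIND_MASK  (((uint64_t)0xff) << SZIND_SHIFT)
    #define NSIZES      235 /* stand-in for SC_NSIZES, the "invalid" marker */

    typedef struct { uint64_t e_bits; } extent_t;

    static unsigned
    szind_get_maybe_invalid(const extent_t *extent) {
        unsigned szind =
            (unsigned)((extent->e_bits & SZIND_MASK) >> SZIND_SHIFT);
        assert(szind <= NSIZES);
        return szind;
    }

    static void
    szind_set(extent_t *extent, unsigned szind) {
        assert(szind <= NSIZES); /* NSIZES means "invalid" */
        extent->e_bits = (extent->e_bits & ~SZIND_MASK)
            | ((uint64_t)szind << SZIND_SHIFT);
    }

    int
    main(void) {
        extent_t e = {0};
        szind_set(&e, 42);
        printf("szind=%u\n", szind_get_maybe_invalid(&e));
        return 0;
    }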
extent_externs.h

    40   size_t size, size_t pad, size_t alignment, bool slab, szind_t szind,
    51   size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit);
arena_stats.h

    in arena_stats_large_flush_nrequests_add():
        256  szind_t szind, uint64_t nrequests) {  (argument)
        258  arena_stats_large_t *lstats = &arena_stats->lstats[szind - SC_NBINS];
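The lstats indexing works because the first SC_NBINS indices name small
(bin-backed) classes, so per-class large statistics live at
szind - SC_NBINS. A toy version, with made-up NBINS/NSIZES values:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NBINS  36  /* stand-in for SC_NBINS  */
    #define NSIZES 100 /* stand-in for SC_NSIZES */

    static uint64_t lstats_nrequests[NSIZES - NBINS];

    static void
    large_nrequests_add(unsigned szind, uint64_t nrequests) {
        assert(szind >= NBINS && szind < NSIZES); /* large classes only */
        lstats_nrequests[szind - NBINS] += nrequests;
    }

    int
    main(void) {
        large_nrequests_add(40, 3);
        printf("%llu\n", (unsigned long long)lstats_nrequests[40 - NBINS]);
        return 0;
    }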
arena_structs_b.h

    228  szind_t szind;  (member)
/freebsd/contrib/jemalloc/src/
extent.c

    105  size_t usize, size_t pad, size_t alignment, bool slab, szind_t szind,

    in extents_alloc():
        539  size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) {  (argument)
        546  new_addr, size, pad, alignment, slab, szind, zero, commit, false);

    in extent_rtree_write_acquired():
        722  rtree_leaf_elm_t *elm_b, extent_t *extent, szind_t szind, bool slab) {  (argument)
        723  rtree_leaf_elm_write(tsdn, &extents_rtree, elm_a, extent, szind, slab);
        725  rtree_leaf_elm_write(tsdn, &extents_rtree, elm_b, extent, szind,

    in extent_interior_register():
        732  szind_t szind) {  (argument)
        739  LG_PAGE), extent, szind, true);

    in extent_register_impl():
        798  szind_t szind = extent_szind_get_maybe_invalid(extent);  (local)
        800  extent_rtree_write_acquired(tsdn, elm_a, elm_b, extent, szind, slab);

    [all …]
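extent_interior_register() maps every interior page of a slab to the same
(extent, szind, slab=true) record, so a pointer anywhere inside the slab
resolves with a single radix-tree lookup; the first and last pages are
written separately through elm_a/elm_b. A sketch of the loop shape, with
a print standing in for the rtree write:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define LG_PAGE 12 /* 4 KiB pages assumed */

    static void
    register_page(uintptr_t key, unsigned szind) {
        /* stands in for rtree_write(..., key, extent, szind, true) */
        printf("page %#" PRIxPTR " -> szind %u, slab=true\n", key, szind);
    }

    static void
    interior_register(uintptr_t base, size_t npages, unsigned szind) {
        /* skip page 0 and the last page: they are registered separately */
        for (size_t i = 1; i + 1 < npages; i++) {
            register_page(base + ((uintptr_t)i << LG_PAGE), szind);
        }
    }

    int
    main(void) {
        interior_register((uintptr_t)0x10000000, 4, 5);
        return 0;
    }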
jemalloc.c

    in ifree():
        2575  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
        2576  assert(alloc_ctx.szind != SC_NSIZES);
        2580  usize = sz_index2size(alloc_ctx.szind);
        2583  usize = sz_index2size(alloc_ctx.szind);

    in isfree():
        2618  alloc_ctx.szind = sz_size2index(usize);
        2625  rtree_ctx, (uintptr_t)ptr, true, &dbg_ctx.szind,
        2627  assert(dbg_ctx.szind == alloc_ctx.szind);
        2633  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
        2634  assert(alloc_ctx.szind == sz_size2index(usize));

    in je_realloc():
        2683  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);

    [all …]
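The isfree() hits show the sized-deallocation cross-check: the
caller-supplied size is converted to an index with sz_size2index(), and
debug builds compare it against the index recorded at allocation time to
catch a wrong size passed to sdallocx(). A minimal sketch; size2index()
below is a linear stand-in for jemalloc's sz_size2index():

    #include <assert.h>
    #include <stddef.h>

    static const size_t class_size[] = {8, 16, 32, 64, 128};
    #define NSIZES (sizeof(class_size) / sizeof(class_size[0]))

    static unsigned
    size2index(size_t size) {
        for (unsigned i = 0; i < NSIZES; i++) {
            if (size <= class_size[i]) {
                return i;
            }
        }
        return (unsigned)NSIZES; /* "invalid", like SC_NSIZES */
    }

    static void
    sized_free_check(size_t caller_size, unsigned recorded_szind) {
        unsigned szind = size2index(caller_size);
        /* jemalloc performs this comparison only in debug builds */
        assert(szind == recorded_szind && "wrong size passed to sized free");
        (void)szind; (void)recorded_szind;
    }

    int
    main(void) {
        sized_free_check(24, 2); /* 24 rounds up to class 32, index 2 */
        return 0;
    }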
large.c

    in large_ralloc_no_move_expand():
        181  szind_t szind = sz_size2index(usize);  (local)
        182  extent_szind_set(extent, szind);
        184  (uintptr_t)extent_addr_get(extent), szind, false);
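large_ralloc_no_move_expand() illustrates an invariant: when a large
allocation grows in place its size class changes, so the new index must
be written to both copies of the metadata, the extent's own field and the
address-to-szind mapping. A toy illustration, with a one-entry "rtree":

    #include <stdio.h>

    typedef struct {
        unsigned szind; /* size-class index recorded on the extent */
    } extent_t;

    static unsigned addr_to_szind; /* toy one-entry address mapping */

    static void
    grow_update(extent_t *extent, unsigned new_szind) {
        extent->szind = new_szind; /* cf. extent_szind_set() */
        addr_to_szind = new_szind; /* cf. rtree_szind_slab_update(), slab=false */
    }

    int
    main(void) {
        extent_t e = {5};
        grow_update(&e, 9);
        printf("extent szind=%u, mapping szind=%u\n", e.szind, addr_to_szind);
        return 0;
    }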
arena.c

    in arena_extent_alloc_large():
        435  szind_t szind = sz_size2index(usize);  (local)
        440  szind, zero, &commit);
        444  false, szind, zero, &commit);
        449  usize, sz_large_pad, alignment, false, szind, zero,

    in arena_reset():
        1123  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
        1124  assert(alloc_ctx.szind != SC_NSIZES);
        1127  usize = sz_index2size(alloc_ctx.szind);

    in arena_slab_alloc_hard():
        1208  szind_t szind) {  (argument)
        1218  bin_info->slab_size, 0, PAGE, true, szind, &zero, &commit);

    in arena_slab_alloc():
        1235  szind_t szind = sz_size2index(bin_info->reg_size);  (local)

    [all …]
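arena_slab_alloc() computes the index once, from the bin's region size,
and tags the whole slab extent with it (slab=true), so every region
carved from the slab inherits that class. A sketch with a simplified
bin_info and a toy power-of-two size2index():

    #include <stdio.h>

    typedef struct {
        size_t reg_size;  /* region size served by this bin */
        size_t slab_size; /* bytes requested for one slab   */
    } bin_info_t;

    static unsigned
    size2index(size_t size) {
        /* toy mapping: index = log2(size / 8), power-of-two classes only */
        unsigned i = 0;
        for (size_t s = 8; s < size; s <<= 1) {
            i++;
        }
        return i;
    }

    static void
    slab_alloc(const bin_info_t *bin_info) {
        unsigned szind = size2index(bin_info->reg_size);
        printf("alloc %zu-byte slab tagged szind=%u, slab=true\n",
            bin_info->slab_size, szind);
    }

    int
    main(void) {
        bin_info_t bi = {64, 16384};
        slab_alloc(&bi);
        return 0;
    }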
tcache.c

    in tbin_extents_lookup_size_check():
        117  szind_t szind;  (local)
        122  &extents[i], &szind);
        123  sz_sum -= szind;
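tbin_extents_lookup_size_check() validates a whole tcache flush batch
with one running sum: every pointer in the bin must carry the bin's
index, so the code starts from binind * nflush and subtracts each szind
read back from the rtree; a nonzero remainder signals a size mismatch. A
standalone sketch of the check:

    #include <stdio.h>

    static int
    check_flush(unsigned binind, const unsigned *szinds, unsigned nflush) {
        size_t sz_sum = (size_t)binind * nflush;
        for (unsigned i = 0; i < nflush; i++) {
            sz_sum -= szinds[i]; /* szind as read back for pointer i */
        }
        return sz_sum == 0; /* nonzero means some szind != binind */
    }

    int
    main(void) {
        unsigned ok[] = {4, 4, 4}, bad[] = {4, 4, 5};
        printf("ok=%d bad=%d\n", check_flush(4, ok, 3), check_flush(4, bad, 3));
        return 0;
    }

The sum costs one subtraction per pointer and a single branch, at the
price that offsetting errors (one index high, another equally low) could
cancel out.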