/freebsd/contrib/jemalloc/src/

arena.c
   57  static void arena_decay_to_limit(tsdn_t *tsdn, arena_t *arena,
   60  static bool arena_decay_dirty(tsdn_t *tsdn, arena_t *arena,
   62  static void arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
   64  static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
   70  arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
   73      *nthreads += arena_nthreads_get(arena, false);
   74      *dss = dss_prec_names[arena_dss_prec_get(arena)];
   75      *dirty_decay_ms = arena_dirty_decay_ms_get(arena);
   76      *muzzy_decay_ms = arena_muzzy_decay_ms_get(arena);
   77      *nactive += atomic_load_zu(&arena->nactive, ATOMIC_RELAXED);
  [all …]

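The merge pattern in arena_basic_stats_merge() above is worth noting: per-arena counters are read with relaxed atomics and accumulated into caller-supplied totals. Below is a minimal standalone sketch of that pattern; the toy_* names are hypothetical and the arena is reduced to two fields.

    #include <stdatomic.h>
    #include <stddef.h>

    /* Hypothetical, stripped-down arena: one concurrently updated counter. */
    typedef struct {
        atomic_size_t nactive;
        unsigned nthreads;
    } toy_arena_t;

    /*
     * Accumulate one arena's numbers into caller-provided totals, as the
     * excerpt does.  Relaxed loads suffice because the merged result is an
     * advisory snapshot, not a synchronization point.
     */
    static void
    toy_stats_merge(toy_arena_t *arena, unsigned *nthreads, size_t *nactive)
    {
        *nthreads += arena->nthreads;
        *nactive += atomic_load_explicit(&arena->nactive,
            memory_order_relaxed);
    }
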
large.c
   14  large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) {
   17      return large_palloc(tsdn, arena, usize, CACHELINE, zero);
   21  large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
   28      assert(!tsdn_null(tsdn) || arena != NULL);
   45      arena = arena_choose_maybe_huge(tsdn_tsd(tsdn), arena, usize);
   47      if (unlikely(arena == NULL) || (extent = arena_extent_alloc_large(tsdn,
   48          arena, usize, alignment, &is_zeroed)) == NULL) {
   53      if (!arena_is_auto(arena)) {
   55          malloc_mutex_lock(tsdn, &arena->large_mtx);
   56          extent_list_append(&arena->large, extent);
  [all …]

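large_malloc() above is a thin wrapper that pins the alignment to a cache line and defers to large_palloc(), so all size/alignment policy lives in one place. A self-contained sketch of the same split, using libc aligned_alloc in place of the arena path; the toy_* names and the 64-byte cache-line size are assumptions.

    #include <stdlib.h>
    #include <string.h>

    #define TOY_CACHELINE 64 /* assumed; jemalloc's CACHELINE is platform-derived */

    /* Hypothetical delegate standing in for large_palloc(). */
    static void *
    toy_palloc(size_t usize, size_t alignment, int zero)
    {
        /* aligned_alloc wants the size to be a multiple of the alignment
         * (alignment must be a power of two for this round-up). */
        size_t asize = (usize + alignment - 1) & ~(alignment - 1);
        void *p = aligned_alloc(alignment, asize);
        if (p != NULL && zero)
            memset(p, 0, asize);
        return p;
    }

    /* Mirrors the large_malloc() -> large_palloc() split: the plain entry
     * point only fixes the alignment and delegates. */
    void *
    toy_large_malloc(size_t usize, int zero)
    {
        return toy_palloc(usize, TOY_CACHELINE, zero);
    }
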
extent.c
   34  static bool extent_commit_impl(tsdn_t *tsdn, arena_t *arena,
   43  static bool extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena,
   50  static bool extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena,
   56  static extent_t *extent_split_impl(tsdn_t *tsdn, arena_t *arena,
   63  static bool extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
  103  static extent_t *extent_recycle(tsdn_t *tsdn, arena_t *arena,
  107  static extent_t *extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
  110  static void extent_record(tsdn_t *tsdn, arena_t *arena,
  180  extent_alloc(tsdn_t *tsdn, arena_t *arena) {
  181      malloc_mutex_lock(tsdn, &arena->extent_avail_mtx);
  [all …]

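extent_alloc() above takes the arena's extent_avail_mtx and pops a reusable extent descriptor from an availability list. A sketch of that shape with POSIX threads follows; the toy_* names are made up, and the real extent_t and its fallback path are far richer.

    #include <pthread.h>
    #include <stdlib.h>

    /* Hypothetical extent descriptor; the real extent_t carries far more. */
    typedef struct toy_extent {
        struct toy_extent *next;
    } toy_extent_t;

    static pthread_mutex_t avail_mtx = PTHREAD_MUTEX_INITIALIZER;
    static toy_extent_t *avail_head;

    /*
     * Same shape as extent_alloc(): take the availability lock, pop a
     * cached descriptor if one exists, otherwise fall back to the base
     * allocator (plain malloc here).
     */
    static toy_extent_t *
    toy_extent_alloc(void)
    {
        pthread_mutex_lock(&avail_mtx);
        toy_extent_t *e = avail_head;
        if (e != NULL)
            avail_head = e->next;
        pthread_mutex_unlock(&avail_mtx);
        return e != NULL ? e : malloc(sizeof(*e));
    }

    /* Return a descriptor to the availability list for reuse. */
    static void
    toy_extent_dalloc(toy_extent_t *e)
    {
        pthread_mutex_lock(&avail_mtx);
        e->next = avail_head;
        avail_head = e;
        pthread_mutex_unlock(&avail_mtx);
    }
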
tcache.c
   89  tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
   93      assert(tcache->arena != NULL);
   94      arena_tcache_fill_small(tsdn, arena, tcache, tbin, binind,
  141      arena_t *arena = tcache->arena;
  142      assert(arena != NULL);
  166      if (config_prof && bin_arena == arena) {
  167          if (arena_prof_accum(tsd_tsdn(tsd), arena,
  175      if (config_stats && bin_arena == arena && !merged_stats) {
  213      bin_t *bin = arena_bin_choose_lock(tsd_tsdn(tsd), arena, binind,
  237      arena_t *tcache_arena = tcache->arena;
  [all …]

extent_dss.c
  110  extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
  126      gap = extent_alloc(tsdn, arena);
  156      extent_init(gap, arena, gap_addr_page,
  158          arena_extent_sn_next(arena),
  189      extent_dalloc_gap(tsdn, arena, gap);
  191      extent_dalloc(tsdn, arena, gap);
  201      extent_init(&extent, arena, ret, size,
  206          arena, &extent_hooks, &extent, 0,
  227      extent_dalloc(tsdn, arena, gap);

jemalloc.c
  301  arena_set(unsigned ind, arena_t *arena) {
  302      atomic_store_p(&arenas[ind], arena, ATOMIC_RELEASE);
  323      arena_t *arena;
  337      arena = arena_get(tsdn, ind, false);
  338      if (arena != NULL) {
  339          assert(arena_is_auto(arena));
  340          return arena;
  344      arena = arena_new(tsdn, ind, extent_hooks);
  346      return arena;
  369      arena_t *arena;
  [all …]

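arena_set() above publishes a fully constructed arena with a release store; a reader that observes the pointer with a matching acquire load is guaranteed to see the arena's prior initialization, which is what lets lookup proceed without a lock. A minimal sketch with hypothetical toy_* names:

    #include <stdatomic.h>
    #include <stddef.h>

    #define TOY_MAX_ARENAS 32

    typedef struct { unsigned ind; /* ... */ } toy_arena_t;

    static _Atomic(toy_arena_t *) toy_arenas[TOY_MAX_ARENAS];

    /* Release store: every earlier write to *arena becomes visible to any
     * thread whose acquire load sees this pointer. */
    static void
    toy_arena_set(unsigned ind, toy_arena_t *arena)
    {
        atomic_store_explicit(&toy_arenas[ind], arena, memory_order_release);
    }

    /* Acquire load paired with the release store above. */
    static toy_arena_t *
    toy_arena_get(unsigned ind)
    {
        return atomic_load_explicit(&toy_arenas[ind], memory_order_acquire);
    }
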
ctl.c
   631      {NAME("arena"), CHILD(indexed, arena)},
   795  ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_t *ctl_arena, arena_t *arena) {
   799      arena_stats_merge(tsdn, arena, &ctl_arena->nthreads,
   822      arena_basic_stats_merge(tsdn, arena, &ctl_arena->nthreads,
   993  ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, ctl_arena_t *ctl_sdarena,
   998      ctl_arena_stats_amerge(tsdn, ctl_arena, arena);
  2137      arena_t **arena) {
  2144      *arena = arena_get(tsd_tsdn(tsd), *arena_ind, false);
  2145      if (*arena == NULL || arena_is_auto(*arena)) {
  2191      arena_t *arena;
  [all …]

background_thread.c
   57  void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
  202  arena_decay_compute_purge_interval(tsdn_t *tsdn, arena_t *arena) {
  204      i1 = arena_decay_compute_purge_interval_impl(tsdn, &arena->decay_dirty,
  205          &arena->extents_dirty);
  209      i2 = arena_decay_compute_purge_interval_impl(tsdn, &arena->decay_muzzy,
  210          &arena->extents_muzzy);
  291      arena_t *arena = arena_get(tsdn, i, false);
  292      if (!arena) {
  295      arena_decay(tsdn, arena, true, false);
  301          arena);
  [all …]

/freebsd/contrib/jemalloc/include/jemalloc/internal/

arena_externs.h
   22  void arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena,
   25  void arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
   30  void arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
   35  extent_t *arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena,
   37  void arena_extent_dalloc_large_prep(tsdn_t *tsdn, arena_t *arena,
   39  void arena_extent_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena,
   41  void arena_extent_ralloc_large_expand(tsdn_t *tsdn, arena_t *arena,
   43  ssize_t arena_dirty_decay_ms_get(arena_t *arena);
   44  bool arena_dirty_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
   45  ssize_t arena_muzzy_decay_ms_get(arena_t *arena);
  [all …]

extent_externs.h
   15  extent_t *extent_alloc(tsdn_t *tsdn, arena_t *arena);
   16  void extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
   18  extent_hooks_t *extent_hooks_get(arena_t *arena);
   19  extent_hooks_t *extent_hooks_set(tsd_t *tsd, arena_t *arena,
   38  extent_t *extents_alloc(tsdn_t *tsdn, arena_t *arena,
   42  void extents_dalloc(tsdn_t *tsdn, arena_t *arena,
   44  extent_t *extents_evict(tsdn_t *tsdn, arena_t *arena,
   49  extent_t *extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
   52  void extent_dalloc_gap(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
   53  void extent_dalloc_wrapper(tsdn_t *tsdn, arena_t *arena,
  [all …]

jemalloc_internal_inlines_b.h
    8  arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
   11      if (arena != NULL) {
   12          return arena;
   26      if (tcache->arena != NULL) {
   28          assert(tcache->arena ==
   30          if (tcache->arena != ret) {
   62  arena_choose(tsd_t *tsd, arena_t *arena) {
   63      return arena_choose_impl(tsd, arena, false);
   67  arena_ichoose(tsd_t *tsd, arena_t *arena) {
   68      return arena_choose_impl(tsd, arena, true);
  [all …]

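arena_choose_impl() above encodes a simple precedence: an explicitly requested arena wins, otherwise the thread's associated/default arena is used. A sketch of that precedence; the static fallback here is a placeholder, not jemalloc's real per-thread selection logic.

    #include <stddef.h>

    typedef struct { unsigned ind; } toy_arena_t;

    static toy_arena_t toy_arena0; /* placeholder for the per-thread default */

    /* Honor an explicit arena; fall back to the default otherwise, as
     * arena_choose_impl() does in the excerpt. */
    static toy_arena_t *
    toy_arena_choose(toy_arena_t *arena)
    {
        return arena != NULL ? arena : &toy_arena0;
    }
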
arena_inlines_a.h
    5  arena_ind_get(const arena_t *arena) {
    6      return base_ind_get(arena->base);
   10  arena_internal_add(arena_t *arena, size_t size) {
   11      atomic_fetch_add_zu(&arena->stats.internal, size, ATOMIC_RELAXED);
   15  arena_internal_sub(arena_t *arena, size_t size) {
   16      atomic_fetch_sub_zu(&arena->stats.internal, size, ATOMIC_RELAXED);
   20  arena_internal_get(arena_t *arena) {
   21      return atomic_load_zu(&arena->stats.internal, ATOMIC_RELAXED);
   25  arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes) {
   32      return prof_accum_add(tsdn, &arena->prof_accum, accumbytes);

arena_inlines_b.h
   12  arena_has_default_hooks(arena_t *arena) {
   13      return (extent_hooks_get(arena) == &extent_hooks_default);
   17  arena_choose_maybe_huge(tsd_t *tsd, arena_t *arena, size_t size) {
   18      if (arena != NULL) {
   19          return arena;
  113  arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks) {
  121      decay_ticker = decay_ticker_get(tsd, arena_ind_get(arena));
  126      arena_decay(tsdn, arena, false, false);
  131  arena_decay_tick(tsdn_t *tsdn, arena_t *arena) {
  132      malloc_mutex_assert_not_owner(tsdn, &arena->decay_dirty.mtx);
  [all …]

jemalloc_internal_inlines_c.h
   43      bool is_internal, arena_t *arena, bool slow_path) {
   47      assert(!is_internal || arena == NULL || arena_is_auto(arena));
   53      ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
   68      tcache_t *tcache, bool is_internal, arena_t *arena) {
   74      assert(!is_internal || arena == NULL || arena_is_auto(arena));
   78      ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
   88      tcache_t *tcache, arena_t *arena) {
   89      return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
  136      size_t alignment, bool zero, tcache_t *tcache, arena_t *arena,
  147      p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
  [all …]

background_thread_inlines.h
   16  arena_background_thread_info_get(arena_t *arena) {
   17      unsigned arena_ind = arena_ind_get(arena);
   49  arena_background_thread_inactivity_check(tsdn_t *tsdn, arena_t *arena,
   55          arena_background_thread_info_get(arena);
   57      background_thread_interval_check(tsdn, arena,
   58          &arena->decay_dirty, 0);

tcache_inlines.h
   42  tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
   55      arena = arena_choose(tsd, arena);
   56      if (unlikely(arena == NULL)) {
   60      ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,
  104  tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
  119      arena = arena_choose(tsd, arena);
  120      if (unlikely(arena == NULL)) {
  124      ret = large_malloc(tsd_tsdn(tsd), arena, sz_s2u(size), zero);

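tcache_alloc_small() above serves from the thread-local cache when it can and only drops into tcache_alloc_small_hard() — which refills from an arena — on a miss. A deliberately tiny one-slot, one-size-class sketch of that fast/slow split; everything here is hypothetical, and real bins hold many objects per size class.

    #include <stddef.h>
    #include <stdlib.h>

    #define TOY_BIN_SIZE 64 /* single size class; the real tcache has many bins */

    typedef struct {
        void *slot; /* one cached object of TOY_BIN_SIZE bytes, or NULL */
    } toy_tcache_t;

    static void *
    toy_tcache_alloc(toy_tcache_t *tc)
    {
        if (tc->slot != NULL) { /* fast path: no locks, no arena */
            void *ret = tc->slot;
            tc->slot = NULL;
            return ret;
        }
        /* "Hard" path: go to the backing allocator, as the excerpt goes
         * through arena_choose()/tcache_alloc_small_hard(). */
        return malloc(TOY_BIN_SIZE);
    }

    static void
    toy_tcache_dalloc(toy_tcache_t *tc, void *ptr)
    {
        if (tc->slot == NULL)
            tc->slot = ptr;   /* cache for the next allocation */
        else
            free(ptr);        /* cache full: flush to the allocator */
    }
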
tcache_externs.h
   30  void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
   37      arena_t *arena);
   40  void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
   45  void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);

large_externs.h
    6  void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
    7  void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
   11  void *large_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t usize,

jemalloc_internal_inlines_a.h
  132      assert(tsd_tcachep_get(tsd)->arena == NULL ||
  151  pre_reentrancy(tsd_t *tsd, arena_t *arena) {
  153      assert(arena != arena_get(tsd_tsdn(tsd), 0, false));

/freebsd/contrib/llvm-project/clang/lib/Analysis/FlowSensitive/

DataflowAnalysisContext.cpp
   82      return arena().create<ScalarStorageLocation>(Type);
  100      return arena().create<RecordStorageLocation>(Type, std::move(FieldLocs),
  131      Res.first->second = &arena().create<PointerValue>(PointeeLoc);
  140      Invariant = &arena().makeAnd(*Invariant, Constraint);
  148          &arena().makeAnd(*Res.first->second, Constraint);
  153      Atom ForkToken = arena().makeFlowConditionToken();
  155      addFlowConditionConstraint(ForkToken, arena().makeAtomRef(Token));
  162      Atom Token = arena().makeFlowConditionToken();
  166          arena().makeOr(arena().makeAtomRef(FirstToken),
  167              arena().makeAtomRef(SecondToken)));
  [all …]

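Every arena().create<...>() and arena().make*() call above allocates out of a per-context arena, so storage locations, values, and formulas share a single lifetime and are freed together with the context. The C sketch below shows the underlying bump-allocation idea; the names are hypothetical, and the real Arena is a typed C++ class, not this raw-byte version.

    #include <stdlib.h>

    /* Minimal bump arena: one buffer, one cursor, no per-object free. */
    typedef struct {
        char *buf;
        size_t cap;
        size_t off;
    } toy_arena_t;

    static int
    toy_arena_init(toy_arena_t *a, size_t cap)
    {
        a->buf = malloc(cap);
        a->cap = cap;
        a->off = 0;
        return a->buf != NULL ? 0 : -1;
    }

    /* Hand out the next aligned slice of the buffer (align must be a
     * power of two; offsets are aligned relative to the buffer base). */
    static void *
    toy_arena_create(toy_arena_t *a, size_t size, size_t align)
    {
        size_t off = (a->off + align - 1) & ~(align - 1);
        if (off + size > a->cap)
            return NULL;
        a->off = off + size;
        return a->buf + off;
    }

    /* Everything allocated from the arena dies together, mirroring how
     * the dataflow context owns all of its values and formulas. */
    static void
    toy_arena_destroy(toy_arena_t *a)
    {
        free(a->buf);
        a->buf = NULL;
        a->cap = a->off = 0;
    }
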
/freebsd/contrib/llvm-project/clang/include/clang/Analysis/FlowSensitive/

DataflowEnvironment.h
  163      FlowConditionToken(DACtx.arena().makeFlowConditionToken()) {}
  558      return arena().create<T>(std::forward<Args>(args)...);
  564      return arena().makeIntLiteral(Value);
  570      return arena().makeBoolValue(arena().makeLiteral(Value));
  575      return arena().makeAtomValue();
  580      return arena().makeTopValue();
  588      return arena().makeBoolValue(
  589          arena().makeAnd(LHS.formula(), RHS.formula()));
  597      return arena().makeBoolValue(
  598          arena().makeOr(LHS.formula(), RHS.formula()));
  [all …]

/freebsd/crypto/openssl/crypto/

mem_sec.c
  297      ((char*)(p) >= sh.arena && (char*)(p) < &sh.arena[sh.arena_size])
  312      char *arena;
  327      size_t bit = (sh.arena_size + ptr - sh.arena) / sh.minsize;
  344      OPENSSL_assert(((ptr - sh.arena) & ((sh.arena_size >> list) - 1)) == 0);
  345      bit = (ONE << list) + ((ptr - sh.arena) / (sh.arena_size >> list));
  355      OPENSSL_assert(((ptr - sh.arena) & ((sh.arena_size >> list) - 1)) == 0);
  356      bit = (ONE << list) + ((ptr - sh.arena) / (sh.arena_size >> list));
  367      OPENSSL_assert(((ptr - sh.arena) & ((sh.arena_size >> list) - 1)) == 0);
  368      bit = (ONE << list) + ((ptr - sh.arena) / (sh.arena_size >> list));
  524      sh.arena = (char *)(sh.map_result + pgsize);
  [all …]

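The expression repeated in sh_testbit()/sh_clearbit()/sh_setbit() above is a buddy-allocator bitmap index: level `list` splits the arena into 1 << list equal blocks of size arena_size >> list, and that level's bits begin at offset 1 << list in one flat, heap-like bitmap. A sketch of just that computation (simplified; the real code also asserts that the pointer is block-aligned, as the OPENSSL_assert lines show):

    #include <stddef.h>

    /* Bit position for the block containing ptr at buddy level `list`. */
    static size_t
    toy_buddy_bit(const char *arena, size_t arena_size, const char *ptr,
        int list)
    {
        size_t block = arena_size >> list;            /* block size at this level */
        size_t index = (size_t)(ptr - arena) / block; /* which block ptr is in */
        return ((size_t)1 << list) + index;           /* level base + in-level index */
    }

With this layout, the parent of bit b is b / 2 and its buddy is b ^ 1, which is what makes the flat bitmap convenient for split/coalesce checks.
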
/freebsd/sys/vm/

vm_glue.c
  310      vmem_t *arena;
  316      arena = vm_dom[domain].vmd_kernel_arena;
  318      arena = vmd_kstack_arena[domain];
  320      rv = vmem_alloc(arena, size, M_BESTFIT | M_NOWAIT, &addr);
  341      vmem_t *arena;
  345      arena = kernel_arena;
  347      arena = vmd_kstack_arena[domain];
  349      arena = vm_dom[domain].vmd_kernel_arena;
  352      vmem_free(arena, addr, size);
  382  vm_thread_kstack_arena_import(void *arena, vmem_size_t size, int flags,
  [all …]

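The point of the paired excerpts above is that both vm_thread_alloc_kstack_kva() and vm_thread_free_kstack_kva() derive the arena from the same inputs before calling vmem_alloc()/vmem_free(), so an address is always returned to the arena that issued it. A sketch of that invariant with hypothetical per-domain stand-ins for vmem(9):

    #define TOY_NDOMAINS 2

    /* Opaque stand-in for a vmem(9) arena handle. */
    typedef struct { int domain; } toy_vmem_t;

    static toy_vmem_t toy_dom_arena[TOY_NDOMAINS] = { { 0 }, { 1 } };

    /*
     * Selection is a pure function of its inputs; calling this same
     * helper on both the alloc and free paths is what keeps frees
     * matched to the right arena.
     */
    static toy_vmem_t *
    toy_kstack_arena(int domain)
    {
        return &toy_dom_arena[domain % TOY_NDOMAINS];
    }
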
vm_kern.c
  471      vmem_t *arena;
  477      arena = vm_dom[domain].vmd_kernel_arena;
  479      arena = vm_dom[domain].vmd_kernel_rwx_arena;
  481      arena = vm_dom[domain].vmd_kernel_nofree_arena;
  483      if (vmem_alloc(arena, asize, flags | M_BESTFIT, &addr))
  488      vmem_free(arena, addr, asize);
  638      struct vmem *arena;
  656      arena = vm_dom[domain].vmd_kernel_arena;
  658      arena = vm_dom[domain].vmd_kernel_rwx_arena;
  667      return (arena);
  [all …]

/freebsd/contrib/llvm-project/compiler-rt/lib/scudo/standalone/

wrappers_c.h
   23      __scudo_mallinfo_data_t arena;
   36      size_t arena;