Lines Matching full:tsd
381 arena_bind(tsd_t *tsd, unsigned ind, bool internal) { in arena_bind() argument
382 arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_bind()
386 tsd_iarena_set(tsd, arena); in arena_bind()
388 tsd_arena_set(tsd, arena); in arena_bind()
391 tsd_binshards_t *bins = tsd_binshardsp_get(tsd); in arena_bind()
401 arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind) { in arena_migrate() argument
404 oldarena = arena_get(tsd_tsdn(tsd), oldind, false); in arena_migrate()
405 newarena = arena_get(tsd_tsdn(tsd), newind, false); in arena_migrate()
408 tsd_arena_set(tsd, newarena); in arena_migrate()
412 arena_unbind(tsd_t *tsd, unsigned ind, bool internal) { in arena_unbind() argument
415 arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_unbind()
419 tsd_iarena_set(tsd, NULL); in arena_unbind()
421 tsd_arena_set(tsd, NULL); in arena_unbind()
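The arena_bind()/arena_migrate()/arena_unbind() hits above are the internals behind per-thread arena binding; applications reach the arena_migrate() path through the documented "thread.arena" mallctl. A minimal sketch, assuming the default unprefixed public API (a --with-jemalloc-prefix build would call je_mallctl instead); the surrounding code is illustration only:

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int main(void) {
        unsigned ind;
        size_t sz = sizeof(ind);

        /* Read the arena the calling thread is currently bound to. */
        if (mallctl("thread.arena", &ind, &sz, NULL, 0) == 0) {
            printf("bound to arena %u\n", ind);
        }

        /* Rebind the thread to arena 0; writing "thread.arena" is what
         * drives the arena_migrate(tsd, oldind, newind) path listed above. */
        unsigned newind = 0;
        mallctl("thread.arena", NULL, NULL, &newind, sizeof(newind));
        return 0;
    }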
426 arena_tdata_get_hard(tsd_t *tsd, unsigned ind) { in arena_tdata_get_hard() argument
428 arena_tdata_t *arenas_tdata = tsd_arenas_tdata_get(tsd); in arena_tdata_get_hard()
430 unsigned narenas_tdata = tsd_narenas_tdata_get(tsd); in arena_tdata_get_hard()
442 tsd_arenas_tdata_set(tsd, arenas_tdata); in arena_tdata_get_hard()
443 tsd_narenas_tdata_set(tsd, narenas_tdata); in arena_tdata_get_hard()
451 bool *arenas_tdata_bypassp = tsd_arenas_tdata_bypassp_get(tsd); in arena_tdata_get_hard()
454 if (tsd_nominal(tsd) && !*arenas_tdata_bypassp) { in arena_tdata_get_hard()
464 assert(tsd_nominal(tsd) && !*arenas_tdata_bypassp); in arena_tdata_get_hard()
465 tsd_arenas_tdata_set(tsd, arenas_tdata); in arena_tdata_get_hard()
466 tsd_narenas_tdata_set(tsd, narenas_tdata); in arena_tdata_get_hard()
503 arena_choose_hard(tsd_t *tsd, bool internal) { in arena_choose_hard() argument
508 ret = arena_get(tsd_tsdn(tsd), choose, true); in arena_choose_hard()
510 arena_bind(tsd, arena_ind_get(ret), false); in arena_choose_hard()
511 arena_bind(tsd, arena_ind_get(ret), true); in arena_choose_hard()
534 malloc_mutex_lock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
535 assert(arena_get(tsd_tsdn(tsd), 0, false) != NULL); in arena_choose_hard()
537 if (arena_get(tsd_tsdn(tsd), i, false) != NULL) { in arena_choose_hard()
544 tsd_tsdn(tsd), i, false), !!j) < in arena_choose_hard()
546 tsd_tsdn(tsd), choose[j], false), in arena_choose_hard()
566 if (arena_nthreads_get(arena_get(tsd_tsdn(tsd), in arena_choose_hard()
574 ret = arena_get(tsd_tsdn(tsd), in arena_choose_hard()
582 arena = arena_init_locked(tsd_tsdn(tsd), in arena_choose_hard()
586 malloc_mutex_unlock(tsd_tsdn(tsd), in arena_choose_hard()
595 arena_bind(tsd, choose[j], !!j); in arena_choose_hard()
597 malloc_mutex_unlock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
603 tsd_tsdn(tsd), choose[j]); in arena_choose_hard()
608 ret = arena_get(tsd_tsdn(tsd), 0, false); in arena_choose_hard()
609 arena_bind(tsd, 0, false); in arena_choose_hard()
610 arena_bind(tsd, 0, true); in arena_choose_hard()
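arena_choose_hard() is the slow path that picks (and, if necessary, initializes) an arena the first time a thread allocates. From the application side an arena can also be created and selected explicitly per allocation; a hedged sketch, assuming a jemalloc 5.x build with the documented "arenas.create" mallctl and MALLOCX_ARENA flag (the helper name is made up for illustration):

    #include <jemalloc/jemalloc.h>

    void *alloc_from_private_arena(size_t size) {
        unsigned arena_ind;
        size_t sz = sizeof(arena_ind);

        /* Create a fresh arena with default extent hooks. */
        if (mallctl("arenas.create", &arena_ind, &sz, NULL, 0) != 0) {
            return NULL;
        }
        /* Allocate from that arena, bypassing the thread cache so the
         * request goes straight to the chosen arena. */
        return mallocx(size, MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE);
    }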
617 iarena_cleanup(tsd_t *tsd) { in iarena_cleanup() argument
620 iarena = tsd_iarena_get(tsd); in iarena_cleanup()
622 arena_unbind(tsd, arena_ind_get(iarena), true); in iarena_cleanup()
627 arena_cleanup(tsd_t *tsd) { in arena_cleanup() argument
630 arena = tsd_arena_get(tsd); in arena_cleanup()
632 arena_unbind(tsd, arena_ind_get(arena), false); in arena_cleanup()
637 arenas_tdata_cleanup(tsd_t *tsd) { in arenas_tdata_cleanup() argument
640 /* Prevent tsd->arenas_tdata from being (re)created. */ in arenas_tdata_cleanup()
641 *tsd_arenas_tdata_bypassp_get(tsd) = true; in arenas_tdata_cleanup()
643 arenas_tdata = tsd_arenas_tdata_get(tsd); in arenas_tdata_cleanup()
645 tsd_arenas_tdata_set(tsd, NULL); in arenas_tdata_cleanup()
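iarena_cleanup(), arena_cleanup(), and arenas_tdata_cleanup() run as TSD destructors when a thread exits. A simplified, standalone analogue of that destructor pattern using POSIX thread-specific data; this is not jemalloc code, just the same shape (names and sizes are hypothetical):

    #include <pthread.h>
    #include <stdlib.h>

    static pthread_key_t state_key;

    /* Runs automatically at thread exit for every thread that set the key,
     * mirroring how the *_cleanup() hooks above tear down tsd fields. */
    static void state_destructor(void *p) {
        free(p);
    }

    static void state_init(void) {
        pthread_key_create(&state_key, state_destructor);
    }

    void *get_thread_state(void) {
        static pthread_once_t once = PTHREAD_ONCE_INIT;
        pthread_once(&once, state_init);
        void *p = pthread_getspecific(state_key);
        if (p == NULL) {
            p = calloc(1, 64);   /* hypothetical per-thread blob */
            pthread_setspecific(state_key, p);
        }
        return p;
    }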
695 tsd_t *tsd = tsdn_tsd(tsdn); in check_entry_exit_locking() local
700 int8_t reentrancy_level = tsd_reentrancy_level_get(tsd); in check_entry_exit_locking()
1730 tsd_t *tsd = tsdn_tsd(tsdn); in malloc_init_hard_cleanup() local
1731 assert(tsd_reentrancy_level_get(tsd) > 0); in malloc_init_hard_cleanup()
1732 post_reentrancy(tsd); in malloc_init_hard_cleanup()
1738 tsd_t *tsd; in malloc_init_hard() local
1759 /* Recursive allocation relies on functional tsd. */ in malloc_init_hard()
1760 tsd = malloc_tsd_boot0(); in malloc_init_hard()
1761 if (tsd == NULL) { in malloc_init_hard()
1768 malloc_mutex_lock(tsd_tsdn(tsd), &init_lock); in malloc_init_hard()
1770 pre_reentrancy(tsd, NULL); in malloc_init_hard()
1772 if (malloc_init_narenas() || background_thread_boot1(tsd_tsdn(tsd))) { in malloc_init_hard()
1773 UNLOCK_RETURN(tsd_tsdn(tsd), true, true) in malloc_init_hard()
1775 if (config_prof && prof_boot2(tsd)) { in malloc_init_hard()
1776 UNLOCK_RETURN(tsd_tsdn(tsd), true, true) in malloc_init_hard()
1782 UNLOCK_RETURN(tsd_tsdn(tsd), true, true) in malloc_init_hard()
1784 post_reentrancy(tsd); in malloc_init_hard()
1785 malloc_mutex_unlock(tsd_tsdn(tsd), &init_lock); in malloc_init_hard()
1788 tsd_witness_tsdp_get_unsafe(tsd))); in malloc_init_hard()
1790 /* Update TSD after tsd_boot1. */ in malloc_init_hard()
1791 tsd = tsd_fetch(); in malloc_init_hard()
1799 background_thread_ctl_init(tsd_tsdn(tsd)); in malloc_init_hard()
1800 if (background_thread_create(tsd, 0)) { in malloc_init_hard()
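malloc_init_hard() takes init_lock, raises the reentrancy level so recursive allocation during bootstrap stays safe, and may spawn background thread 0. Background threads can also be toggled at runtime through the documented "background_thread" mallctl (jemalloc >= 5.0); a small sketch, with the helper name invented for illustration:

    #include <stdbool.h>
    #include <jemalloc/jemalloc.h>

    /* Enable the background purging threads that malloc_init_hard() can also
     * spawn at startup when opt.background_thread is set. */
    int enable_background_threads(void) {
        bool enable = true;
        return mallctl("background_thread", NULL, NULL, &enable,
            sizeof(enable));
    }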
1916 imalloc_no_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, in imalloc_no_sample() argument
1925 tcache = tsd_tcachep_get(tsd); in imalloc_no_sample()
1926 assert(tcache == tcache_get(tsd)); in imalloc_no_sample()
1928 tcache = tcache_get(tsd); in imalloc_no_sample()
1933 tcache = tcaches_get(tsd, dopts->tcache_ind); in imalloc_no_sample()
1945 arena = arena_get(tsd_tsdn(tsd), dopts->arena_ind, true); in imalloc_no_sample()
1949 return ipalloct(tsd_tsdn(tsd), usize, dopts->alignment, in imalloc_no_sample()
1953 return iallocztm(tsd_tsdn(tsd), size, ind, dopts->zero, tcache, false, in imalloc_no_sample()
1958 imalloc_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, in imalloc_sample() argument
1976 ret = imalloc_no_sample(sopts, dopts, tsd, bumped_usize, in imalloc_sample()
1981 arena_prof_promote(tsd_tsdn(tsd), ret, usize); in imalloc_sample()
1983 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind); in imalloc_sample()
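imalloc_no_sample()/imalloc_sample() work in terms of the usable size (usize) derived from the request. The public nallocx() exposes the same rounding without allocating; a minimal sketch (helper name is illustrative only):

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    void show_rounding(size_t request) {
        /* nallocx() reports the usable size mallocx() would return for this
         * request/flags pair, without performing an allocation. */
        size_t usize = nallocx(request, 0);
        printf("request %zu -> usable %zu\n", request, usize);
    }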
2032 imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd) { in imalloc_body() argument
2094 check_entry_exit_locking(tsd_tsdn(tsd)); in imalloc_body()
2100 reentrancy_level = tsd_reentrancy_level_get(tsd); in imalloc_body()
2121 tsd, usize, prof_active_get_unlocked(), true); in imalloc_body()
2128 sopts, dopts, tsd, usize, usize, ind); in imalloc_body()
2135 sopts, dopts, tsd, usize, ind); in imalloc_body()
2142 prof_alloc_rollback(tsd, tctx, true); in imalloc_body()
2145 prof_malloc(tsd_tsdn(tsd), allocation, usize, &alloc_ctx, tctx); in imalloc_body()
2153 allocation = imalloc_no_sample(sopts, dopts, tsd, size, usize, in imalloc_body()
2168 assert(usize == isalloc(tsd_tsdn(tsd), allocation)); in imalloc_body()
2169 *tsd_thread_allocatedp_get(tsd) += usize; in imalloc_body()
2177 check_entry_exit_locking(tsd_tsdn(tsd)); in imalloc_body()
2191 check_entry_exit_locking(tsd_tsdn(tsd)); in imalloc_body()
2222 check_entry_exit_locking(tsd_tsdn(tsd)); in imalloc_body()
2255 /* We always need the tsd. Let's grab it right away. */ in imalloc()
2256 tsd_t *tsd = tsd_fetch(); in imalloc() local
2257 assert(tsd); in imalloc()
2258 if (likely(tsd_fast(tsd))) { in imalloc()
2260 tsd_assert_fast(tsd); in imalloc()
2262 return imalloc_body(sopts, dopts, tsd); in imalloc()
2269 return imalloc_body(sopts, dopts, tsd); in imalloc()
2334 tsd_t *tsd = tsd_get(false); in JEMALLOC_ATTR() local
2335 if (unlikely(!tsd || !tsd_fast(tsd) || (size > SC_LOOKUP_MAXCLASS))) { in JEMALLOC_ATTR()
2339 tcache_t *tcache = tsd_tcachep_get(tsd); in JEMALLOC_ATTR()
2355 int64_t bytes_until_sample = tsd_bytes_until_sample_get(tsd); in JEMALLOC_ATTR()
2357 tsd_bytes_until_sample_set(tsd, bytes_until_sample); in JEMALLOC_ATTR()
2367 tsd_bytes_until_sample_set(tsd, SSIZE_MAX); in JEMALLOC_ATTR()
2379 *tsd_thread_allocatedp_get(tsd) += usize; in JEMALLOC_ATTR()
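The fast-path hits above bump *tsd_thread_allocatedp_get(tsd), the per-thread byte counter that is exposed through the documented "thread.allocated"/"thread.deallocated" mallctls. A minimal read-side sketch (helper name invented for illustration):

    #include <inttypes.h>
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    void print_thread_counters(void) {
        uint64_t alloc = 0, dealloc = 0;
        size_t sz = sizeof(uint64_t);

        /* Totals are per calling thread, monotonically increasing. */
        mallctl("thread.allocated", &alloc, &sz, NULL, 0);
        mallctl("thread.deallocated", &dealloc, &sz, NULL, 0);
        printf("thread allocated=%" PRIu64 " deallocated=%" PRIu64 "\n",
            alloc, dealloc);
    }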
2510 irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, in irealloc_prof_sample() argument
2518 p = iralloc(tsd, old_ptr, old_usize, in irealloc_prof_sample()
2523 arena_prof_promote(tsd_tsdn(tsd), p, usize); in irealloc_prof_sample()
2525 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false, in irealloc_prof_sample()
2533 irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, in irealloc_prof() argument
2540 old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx); in irealloc_prof()
2541 tctx = prof_alloc_prep(tsd, usize, prof_active, true); in irealloc_prof()
2543 p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx, in irealloc_prof()
2546 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false, in irealloc_prof()
2550 prof_alloc_rollback(tsd, tctx, true); in irealloc_prof()
2553 prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize, in irealloc_prof()
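irealloc_prof()/irealloc_prof_sample() only do real work when heap profiling is compiled in and active. Sampling can be toggled from the application via the documented "prof.active" mallctl; a hedged sketch, assuming a build configured with --enable-prof and run with profiling enabled in MALLOC_CONF (helper name is illustrative):

    #include <stdbool.h>
    #include <jemalloc/jemalloc.h>

    /* Flip the global profiling switch; a no-op error if the build lacks
     * profiling support. */
    int set_prof_active(bool active) {
        return mallctl("prof.active", NULL, NULL, &active, sizeof(active));
    }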
2560 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) { in ifree() argument
2562 tsd_assert_fast(tsd); in ifree()
2564 check_entry_exit_locking(tsd_tsdn(tsd)); in ifree()
2565 if (tsd_reentrancy_level_get(tsd) != 0) { in ifree()
2573 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in ifree()
2574 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in ifree()
2581 prof_free(tsd, ptr, usize, &alloc_ctx); in ifree()
2586 *tsd_thread_deallocatedp_get(tsd) += usize; in ifree()
2590 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, in ifree()
2593 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, in ifree()
2599 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) { in isfree() argument
2601 tsd_assert_fast(tsd); in isfree()
2603 check_entry_exit_locking(tsd_tsdn(tsd)); in isfree()
2604 if (tsd_reentrancy_level_get(tsd) != 0) { in isfree()
2623 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in isfree()
2624 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, in isfree()
2631 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in isfree()
2632 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in isfree()
2641 prof_free(tsd, ptr, usize, ctx); in isfree()
2644 *tsd_thread_deallocatedp_get(tsd) += usize; in isfree()
2648 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, false); in isfree()
2650 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, true); in isfree()
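ifree() is the unsized deallocation path (free()/dallocx()), while isfree() is the sized path reached via sdallocx(), where the caller supplies the allocation size up front. A small usage sketch of the sized variant (helper name invented for illustration):

    #include <jemalloc/jemalloc.h>

    void sized_free_example(void) {
        size_t sz = 128;
        void *p = mallocx(sz, 0);
        if (p != NULL) {
            /* Passing the size routes the free through the isfree() path. */
            sdallocx(p, sz, 0);
        }
    }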
2672 tsd_t *tsd = tsd_fetch(); in je_realloc() local
2674 check_entry_exit_locking(tsd_tsdn(tsd)); in je_realloc()
2681 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in je_realloc()
2682 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in je_realloc()
2686 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr)); in je_realloc()
2693 ret = irealloc_prof(tsd, ptr, old_usize, usize, in je_realloc()
2700 ret = iralloc(tsd, ptr, old_usize, size, 0, false, in je_realloc()
2703 tsdn = tsd_tsdn(tsd); in je_realloc()
2740 tsd_t *tsd; in je_realloc() local
2743 tsd = tsdn_tsd(tsdn); in je_realloc()
2744 *tsd_thread_allocatedp_get(tsd) += usize; in je_realloc()
2745 *tsd_thread_deallocatedp_get(tsd) += old_usize; in je_realloc()
2760 * We avoid setting up tsd fully (e.g. tcache, arena binding) in free_default()
2765 * fully-setup tsd won't be destructed properly. in free_default()
2767 tsd_t *tsd = tsd_fetch_min(); in free_default() local
2768 check_entry_exit_locking(tsd_tsdn(tsd)); in free_default()
2771 if (likely(tsd_fast(tsd))) { in free_default()
2772 tsd_assert_fast(tsd); in free_default()
2774 tcache = tsd_tcachep_get(tsd); in free_default()
2775 ifree(tsd, ptr, tcache, false); in free_default()
2777 if (likely(tsd_reentrancy_level_get(tsd) == 0)) { in free_default()
2778 tcache = tcache_get(tsd); in free_default()
2784 ifree(tsd, ptr, tcache, true); in free_default()
2786 check_entry_exit_locking(tsd_tsdn(tsd)); in free_default()
2792 tsd_t *tsd = tsd_get(false); in free_fastpath() local
2793 if (unlikely(!tsd || !tsd_fast(tsd))) { in free_fastpath()
2797 tcache_t *tcache = tsd_tcachep_get(tsd); in free_fastpath()
2807 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in free_fastpath()
2808 bool res = rtree_szind_slab_read_fast(tsd_tsdn(tsd), &extents_rtree, in free_fastpath()
2841 *tsd_thread_deallocatedp_get(tsd) += usize; in free_fastpath()
3159 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size, in irallocx_prof() argument
3167 old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx); in irallocx_prof()
3168 tctx = prof_alloc_prep(tsd, *usize, prof_active, false); in irallocx_prof()
3170 p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize, in irallocx_prof()
3173 p = iralloct(tsd_tsdn(tsd), old_ptr, old_usize, size, alignment, in irallocx_prof()
3177 prof_alloc_rollback(tsd, tctx, false); in irallocx_prof()
3190 *usize = isalloc(tsd_tsdn(tsd), p); in irallocx_prof()
3192 prof_realloc(tsd, p, *usize, tctx, prof_active, false, old_ptr, in irallocx_prof()
3203 tsd_t *tsd; in je_rallocx() local
3218 tsd = tsd_fetch(); in je_rallocx()
3219 check_entry_exit_locking(tsd_tsdn(tsd)); in je_rallocx()
3223 arena = arena_get(tsd_tsdn(tsd), arena_ind, true); in je_rallocx()
3235 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags)); in je_rallocx()
3238 tcache = tcache_get(tsd); in je_rallocx()
3242 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in je_rallocx()
3243 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in je_rallocx()
3247 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr)); in je_rallocx()
3258 p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize, in je_rallocx()
3264 p = iralloct(tsd_tsdn(tsd), ptr, old_usize, size, alignment, in je_rallocx()
3270 usize = isalloc(tsd_tsdn(tsd), p); in je_rallocx()
3276 *tsd_thread_allocatedp_get(tsd) += usize; in je_rallocx()
3277 *tsd_thread_deallocatedp_get(tsd) += old_usize; in je_rallocx()
3280 check_entry_exit_locking(tsd_tsdn(tsd)); in je_rallocx()
3290 check_entry_exit_locking(tsd_tsdn(tsd)); in je_rallocx()
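je_rallocx() is the flags-aware reallocation entry point behind the hits above. A minimal usage sketch of the public call (helper name is illustrative; rallocx() requires a non-NULL pointer and a non-zero size):

    #include <jemalloc/jemalloc.h>

    void *grow_aligned(void *p, size_t new_size) {
        /* Reallocate while preserving 64-byte alignment; on failure rallocx()
         * returns NULL and leaves the original allocation untouched. */
        return rallocx(p, new_size, MALLOCX_ALIGN(64));
    }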
3324 ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size, in ixallocx_prof() argument
3331 old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx); in ixallocx_prof()
3355 tctx = prof_alloc_prep(tsd, usize_max, prof_active, false); in ixallocx_prof()
3358 usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize, in ixallocx_prof()
3361 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size, in ixallocx_prof()
3365 prof_alloc_rollback(tsd, tctx, false); in ixallocx_prof()
3368 prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize, in ixallocx_prof()
3376 tsd_t *tsd; in je_xallocx() local
3388 tsd = tsd_fetch(); in je_xallocx()
3389 check_entry_exit_locking(tsd_tsdn(tsd)); in je_xallocx()
3392 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd); in je_xallocx()
3393 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in je_xallocx()
3397 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr)); in je_xallocx()
3416 usize = ixallocx_prof(tsd, ptr, old_usize, size, extra, in je_xallocx()
3419 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size, in je_xallocx()
3427 *tsd_thread_allocatedp_get(tsd) += usize; in je_xallocx()
3428 *tsd_thread_deallocatedp_get(tsd) += old_usize; in je_xallocx()
3431 if (unlikely(!tsd_fast(tsd))) { in je_xallocx()
3438 check_entry_exit_locking(tsd_tsdn(tsd)); in je_xallocx()
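je_xallocx() resizes an allocation in place and reports the resulting usable size; it never moves the object. A minimal sketch (helper name invented for illustration):

    #include <jemalloc/jemalloc.h>

    /* Try to grow p in place to at least size bytes, opportunistically up to
     * size+extra; the return value is the new usable size, which equals the
     * old usable size if nothing could be done. */
    size_t try_grow_in_place(void *p, size_t size, size_t extra) {
        return xallocx(p, size, extra, 0);
    }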
3478 tsd_t *tsd = tsd_fetch(); in je_dallocx() local
3479 bool fast = tsd_fast(tsd); in je_dallocx()
3480 check_entry_exit_locking(tsd_tsdn(tsd)); in je_dallocx()
3485 assert(tsd_reentrancy_level_get(tsd) == 0); in je_dallocx()
3489 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags)); in je_dallocx()
3493 tcache = tsd_tcachep_get(tsd); in je_dallocx()
3494 assert(tcache == tcache_get(tsd)); in je_dallocx()
3496 if (likely(tsd_reentrancy_level_get(tsd) == 0)) { in je_dallocx()
3497 tcache = tcache_get(tsd); in je_dallocx()
3506 tsd_assert_fast(tsd); in je_dallocx()
3507 ifree(tsd, ptr, tcache, false); in je_dallocx()
3511 ifree(tsd, ptr, tcache, true); in je_dallocx()
3513 check_entry_exit_locking(tsd_tsdn(tsd)); in je_dallocx()
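je_dallocx() decodes MALLOCX_TCACHE_GET(flags) and uses either the automatic thread cache, an explicit one, or none at all. A hedged sketch of the explicit-tcache variant, assuming the documented "tcache.create"/"tcache.destroy" mallctls (helper name is illustrative):

    #include <jemalloc/jemalloc.h>

    void explicit_tcache_roundtrip(void) {
        unsigned tc;
        size_t sz = sizeof(tc);

        if (mallctl("tcache.create", &tc, &sz, NULL, 0) != 0) {
            return;
        }
        void *p = mallocx(256, MALLOCX_TCACHE(tc));
        if (p != NULL) {
            dallocx(p, MALLOCX_TCACHE(tc));   /* same explicit cache */
        }
        /* Release the cache once it is no longer needed. */
        mallctl("tcache.destroy", NULL, NULL, &tc, sizeof(tc));
    }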
3537 tsd_t *tsd = tsd_fetch(); in sdallocx_default() local
3538 bool fast = tsd_fast(tsd); in sdallocx_default()
3539 size_t usize = inallocx(tsd_tsdn(tsd), size, flags); in sdallocx_default()
3540 assert(usize == isalloc(tsd_tsdn(tsd), ptr)); in sdallocx_default()
3541 check_entry_exit_locking(tsd_tsdn(tsd)); in sdallocx_default()
3546 assert(tsd_reentrancy_level_get(tsd) == 0); in sdallocx_default()
3550 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags)); in sdallocx_default()
3554 tcache = tsd_tcachep_get(tsd); in sdallocx_default()
3555 assert(tcache == tcache_get(tsd)); in sdallocx_default()
3557 if (likely(tsd_reentrancy_level_get(tsd) == 0)) { in sdallocx_default()
3558 tcache = tcache_get(tsd); in sdallocx_default()
3567 tsd_assert_fast(tsd); in sdallocx_default()
3568 isfree(tsd, ptr, usize, tcache, false); in sdallocx_default()
3572 isfree(tsd, ptr, usize, tcache, true); in sdallocx_default()
3574 check_entry_exit_locking(tsd_tsdn(tsd)); in sdallocx_default()
3633 tsd_t *tsd; in je_mallctl() local
3642 tsd = tsd_fetch(); in je_mallctl()
3643 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctl()
3644 ret = ctl_byname(tsd, name, oldp, oldlenp, newp, newlen); in je_mallctl()
3645 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctl()
3662 tsd_t *tsd = tsd_fetch(); in je_mallctlnametomib() local
3663 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctlnametomib()
3664 ret = ctl_nametomib(tsd, name, mibp, miblenp); in je_mallctlnametomib()
3665 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctlnametomib()
3675 tsd_t *tsd; in je_mallctlbymib() local
3684 tsd = tsd_fetch(); in je_mallctlbymib()
3685 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctlbymib()
3686 ret = ctl_bymib(tsd, mib, miblen, oldp, oldlenp, newp, newlen); in je_mallctlbymib()
3687 check_entry_exit_locking(tsd_tsdn(tsd)); in je_mallctlbymib()
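je_mallctl(), je_mallctlnametomib(), and je_mallctlbymib() all fetch tsd before entering ctl. The translate-once, query-many pattern from the manual, as a sketch (the loop bound nbins would normally come from "arenas.nbins"; helper name is illustrative):

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    void print_bin_sizes(unsigned nbins) {
        size_t mib[4];
        size_t miblen = sizeof(mib) / sizeof(mib[0]);

        /* Translate the name to a MIB once... */
        if (mallctlnametomib("arenas.bin.0.size", mib, &miblen) != 0) {
            return;
        }
        /* ...then patch the index and query repeatedly. */
        for (unsigned i = 0; i < nbins; i++) {
            size_t bin_size, sz = sizeof(bin_size);
            mib[2] = i;
            mallctlbymib(mib, miblen, &bin_size, &sz, NULL, 0);
            printf("bin %u: %zu bytes\n", i, bin_size);
        }
    }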
3869 tsd_t *tsd; in jemalloc_prefork() local
3880 tsd = tsd_fetch(); in jemalloc_prefork()
3884 witness_prefork(tsd_witness_tsdp_get(tsd)); in jemalloc_prefork()
3886 ctl_prefork(tsd_tsdn(tsd)); in jemalloc_prefork()
3887 tcache_prefork(tsd_tsdn(tsd)); in jemalloc_prefork()
3888 malloc_mutex_prefork(tsd_tsdn(tsd), &arenas_lock); in jemalloc_prefork()
3890 background_thread_prefork0(tsd_tsdn(tsd)); in jemalloc_prefork()
3892 prof_prefork0(tsd_tsdn(tsd)); in jemalloc_prefork()
3894 background_thread_prefork1(tsd_tsdn(tsd)); in jemalloc_prefork()
3899 if ((arena = arena_get(tsd_tsdn(tsd), j, false)) != in jemalloc_prefork()
3903 arena_prefork0(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3906 arena_prefork1(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3909 arena_prefork2(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3912 arena_prefork3(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3915 arena_prefork4(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3918 arena_prefork5(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3921 arena_prefork6(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3924 arena_prefork7(tsd_tsdn(tsd), arena); in jemalloc_prefork()
3931 prof_prefork1(tsd_tsdn(tsd)); in jemalloc_prefork()
3932 tsd_prefork(tsd); in jemalloc_prefork()
3943 tsd_t *tsd; in jemalloc_postfork_parent() local
3953 tsd = tsd_fetch(); in jemalloc_postfork_parent()
3955 tsd_postfork_parent(tsd); in jemalloc_postfork_parent()
3957 witness_postfork_parent(tsd_witness_tsdp_get(tsd)); in jemalloc_postfork_parent()
3962 if ((arena = arena_get(tsd_tsdn(tsd), i, false)) != NULL) { in jemalloc_postfork_parent()
3963 arena_postfork_parent(tsd_tsdn(tsd), arena); in jemalloc_postfork_parent()
3966 prof_postfork_parent(tsd_tsdn(tsd)); in jemalloc_postfork_parent()
3968 background_thread_postfork_parent(tsd_tsdn(tsd)); in jemalloc_postfork_parent()
3970 malloc_mutex_postfork_parent(tsd_tsdn(tsd), &arenas_lock); in jemalloc_postfork_parent()
3971 tcache_postfork_parent(tsd_tsdn(tsd)); in jemalloc_postfork_parent()
3972 ctl_postfork_parent(tsd_tsdn(tsd)); in jemalloc_postfork_parent()
3977 tsd_t *tsd; in jemalloc_postfork_child() local
3982 tsd = tsd_fetch(); in jemalloc_postfork_child()
3984 tsd_postfork_child(tsd); in jemalloc_postfork_child()
3986 witness_postfork_child(tsd_witness_tsdp_get(tsd)); in jemalloc_postfork_child()
3991 if ((arena = arena_get(tsd_tsdn(tsd), i, false)) != NULL) { in jemalloc_postfork_child()
3992 arena_postfork_child(tsd_tsdn(tsd), arena); in jemalloc_postfork_child()
3995 prof_postfork_child(tsd_tsdn(tsd)); in jemalloc_postfork_child()
3997 background_thread_postfork_child(tsd_tsdn(tsd)); in jemalloc_postfork_child()
3999 malloc_mutex_postfork_child(tsd_tsdn(tsd), &arenas_lock); in jemalloc_postfork_child()
4000 tcache_postfork_child(tsd_tsdn(tsd)); in jemalloc_postfork_child()
4001 ctl_postfork_child(tsd_tsdn(tsd)); in jemalloc_postfork_child()
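jemalloc_prefork()/jemalloc_postfork_parent()/jemalloc_postfork_child() quiesce every allocator lock around fork() so the child never inherits a held mutex. A simplified, standalone analogue of that pthread_atfork() pattern; this is not jemalloc's code, just the registration shape it relies on:

    #include <pthread.h>

    static pthread_mutex_t big_lock = PTHREAD_MUTEX_INITIALIZER;

    static void prefork(void)         { pthread_mutex_lock(&big_lock); }
    static void postfork_parent(void) { pthread_mutex_unlock(&big_lock); }

    static void postfork_child(void) {
        /* The child re-initializes rather than unlocking, roughly mirroring
         * what the allocator's *_postfork_child() hooks do for their own
         * mutexes. */
        pthread_mutex_init(&big_lock, NULL);
    }

    /* Call once at startup; the allocator registers its handlers similarly so
     * all of its mutexes are held across fork() and released afterwards. */
    void install_fork_handlers(void) {
        pthread_atfork(prefork, postfork_parent, postfork_child);
    }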