
Searched full:tsd (Results 1 – 25 of 106) sorted by relevance


/freebsd/contrib/jemalloc/src/
tsd.c
32 * have a dependency on tsd. So we define the struct here, and only refer to it
66 tsd_in_nominal_list(tsd_t *tsd) {
70 * We don't know that tsd is nominal; it might not be safe to get data in tsd_in_nominal_list()
75 if (tsd == tsd_list) { in tsd_in_nominal_list()
85 tsd_add_nominal(tsd_t *tsd) { in tsd_in_nominal_list()
86 assert(!tsd_in_nominal_list(tsd));
87 assert(tsd_state_get(tsd) <= tsd_state_nominal_max);
88 ql_elm_new(tsd, TSD_MANGLE(tsd_link)); in tsd_add_nominal() argument
89 malloc_mutex_lock(tsd_tsdn(tsd), &tsd_nominal_tsds_lock); in tsd_add_nominal()
90 ql_tail_insert(&tsd_nominal_tsds, tsd, TSD_MANGL in tsd_add_nominal()
69 tsd_in_nominal_list(tsd_t * tsd) tsd_in_nominal_list() argument
98 tsd_remove_nominal(tsd_t * tsd) tsd_remove_nominal() argument
145 tsd_local_slow(tsd_t * tsd) tsd_local_slow() argument
158 tsd_state_compute(tsd_t * tsd) tsd_state_compute() argument
171 tsd_slow_update(tsd_t * tsd) tsd_slow_update() argument
181 tsd_state_set(tsd_t * tsd,uint8_t new_state) tsd_state_set() argument
219 tsd_data_init(tsd_t * tsd) tsd_data_init() argument
240 assert_tsd_data_cleanup_done(tsd_t * tsd) assert_tsd_data_cleanup_done() argument
252 tsd_data_init_nocleanup(tsd_t * tsd) tsd_data_init_nocleanup() argument
270 tsd_fetch_slow(tsd_t * tsd,bool minimal) tsd_fetch_slow() argument
363 tsd_do_data_cleanup(tsd_t * tsd) tsd_do_data_cleanup() argument
374 tsd_t *tsd = (tsd_t *)arg; tsd_cleanup() local
419 tsd_t *tsd; malloc_tsd_boot0() local
437 tsd_t *tsd = tsd_fetch(); malloc_tsd_boot1() local
517 tsd_prefork(tsd_t * tsd) tsd_prefork() argument
522 tsd_postfork_parent(tsd_t * tsd) tsd_postfork_parent() argument
527 tsd_postfork_child(tsd_t * tsd) tsd_postfork_child() argument
[all...]
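
The tsd.c hits above revolve around jemalloc's "nominal list": each live thread's tsd_t is linked onto a global list (tsd_nominal_tsds) under tsd_nominal_tsds_lock and unlinked again when the TSD leaves the nominal state, so that fork handlers and state updates can reach every thread's TSD. The following is only a rough sketch of that bookkeeping pattern, not jemalloc's code: the struct, list, and function names are invented, and jemalloc uses its own ql_/malloc_mutex primitives rather than raw pthreads.

#include <pthread.h>
#include <stdbool.h>
#include <stddef.h>

/* Hypothetical per-thread record standing in for jemalloc's tsd_t. */
typedef struct my_tsd_s {
    struct my_tsd_s *prev;
    struct my_tsd_s *next;
    /* ... per-thread allocator state ... */
} my_tsd_t;

static my_tsd_t *nominal_head;                          /* global list of live TSDs */
static pthread_mutex_t nominal_lock = PTHREAD_MUTEX_INITIALIZER;

/* Register a thread's record on the global list (cf. tsd_add_nominal). */
static void
my_tsd_add_nominal(my_tsd_t *tsd) {
    pthread_mutex_lock(&nominal_lock);
    tsd->prev = NULL;
    tsd->next = nominal_head;
    if (nominal_head != NULL) {
        nominal_head->prev = tsd;
    }
    nominal_head = tsd;
    pthread_mutex_unlock(&nominal_lock);
}

/* Unlink it again when the TSD leaves the nominal state (cf. tsd_remove_nominal). */
static void
my_tsd_remove_nominal(my_tsd_t *tsd) {
    pthread_mutex_lock(&nominal_lock);
    if (tsd->prev != NULL) {
        tsd->prev->next = tsd->next;
    } else {
        nominal_head = tsd->next;
    }
    if (tsd->next != NULL) {
        tsd->next->prev = tsd->prev;
    }
    pthread_mutex_unlock(&nominal_lock);
}

/* Membership check by walking the list (cf. tsd_in_nominal_list). */
static bool
my_tsd_in_nominal_list(my_tsd_t *tsd) {
    bool found = false;
    pthread_mutex_lock(&nominal_lock);
    for (my_tsd_t *it = nominal_head; it != NULL; it = it->next) {
        if (it == tsd) {
            found = true;
            break;
        }
    }
    pthread_mutex_unlock(&nominal_lock);
    return found;
}
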
tcache.c
90 tcache_gc_new_event_wait(tsd_t *tsd) { in tcache_alloc_small_hard()
95 tcache_gc_postponed_event_wait(tsd_t *tsd) { in tcache_alloc_small_hard()
100 tcache_gc_dalloc_new_event_wait(tsd_t *tsd) { in tcache_alloc_small_hard()
105 tcache_gc_dalloc_postponed_event_wait(tsd_t *tsd) {
123 tcache_gc_small(tsd_t *tsd, tcache_slow_t *tcache_slow, tcache_t *tcache, in tbin_extents_lookup_size_check()
148 tcache_bin_flush_small(tsd, tcache, cache_bin, szind, in tcache_bin_flush_small()
162 tcache_gc_large(tsd_t *tsd, tcache_slow_t *tcache_slow, tcache_t *tcache, in tcache_bin_flush_small()
171 tcache_bin_flush_large(tsd, tcache, cache_bin, szind, in tcache_bin_flush_small()
176 tcache_event(tsd_t *tsd) { in tcache_bin_flush_small()
177 tcache_t *tcache = tcache_get(tsd); in tcache_bin_flush_small()
41 tcache_event_hard(tsd_t * tsd,tcache_t * tcache) tcache_event_hard() argument
134 tcache_bin_flush_small(tsd_t * tsd,tcache_t * tcache,cache_bin_t * tbin,szind_t binind,unsigned rem) tcache_bin_flush_small() argument
230 tcache_bin_flush_large(tsd_t * tsd,cache_bin_t * tbin,szind_t binind,unsigned rem,tcache_t * tcache) tcache_bin_flush_large() argument
395 tsd_tcache_enabled_data_init(tsd_t * tsd) tsd_tcache_enabled_data_init() argument
410 tcache_init(tsd_t * tsd,tcache_t * tcache,void * avail_stack) tcache_init() argument
444 tsd_tcache_data_init(tsd_t * tsd) tsd_tcache_data_init() argument
486 tcache_create_explicit(tsd_t * tsd) tcache_create_explicit() argument
512 tcache_flush_cache(tsd_t * tsd,tcache_t * tcache) tcache_flush_cache() argument
540 tcache_flush(tsd_t * tsd) tcache_flush() argument
546 tcache_destroy(tsd_t * tsd,tcache_t * tcache,bool tsd_tcache) tcache_destroy() argument
582 tcache_cleanup(tsd_t * tsd) tcache_cleanup() argument
625 tcaches_create_prep(tsd_t * tsd) tcaches_create_prep() argument
651 tcaches_create(tsd_t * tsd,unsigned * r_ind) tcaches_create() argument
689 tcaches_elm_remove(tsd_t * tsd,tcaches_t * elm,bool allow_reinit) tcaches_elm_remove() argument
709 tcaches_flush(tsd_t * tsd,unsigned ind) tcaches_flush() argument
720 tcaches_destroy(tsd_t * tsd,unsigned ind) tcaches_destroy() argument
[all...]
jemalloc.c
456 arena_bind(tsd_t *tsd, unsigned ind, bool internal) { in arena_tdata_get_hard()
457 arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_tdata_get_hard()
461 tsd_iarena_set(tsd, arena); in arena_tdata_get_hard()
463 tsd_arena_set(tsd, arena); in arena_tdata_get_hard()
466 tsd_binshards_t *bins = tsd_binshardsp_get(tsd); in arena_tdata_get_hard()
476 arena_migrate(tsd_t *tsd, arena_t *oldarena, arena_t *newarena) { in arena_tdata_get_hard()
482 tsd_arena_set(tsd, newarena); in arena_tdata_get_hard()
486 arena_decay(tsd_tsdn(tsd), oldarena, in arena_tdata_get_hard()
492 arena_unbind(tsd_t *tsd, unsigned ind, bool internal) { in arena_tdata_get_hard()
495 arena = arena_get(tsd_tsdn(tsd), in in arena_tdata_get_hard()
381 arena_bind(tsd_t * tsd,unsigned ind,bool internal) arena_bind() argument
401 arena_migrate(tsd_t * tsd,unsigned oldind,unsigned newind) arena_migrate() argument
412 arena_unbind(tsd_t * tsd,unsigned ind,bool internal) arena_unbind() argument
426 arena_tdata_get_hard(tsd_t * tsd,unsigned ind) arena_tdata_get_hard() argument
503 arena_choose_hard(tsd_t * tsd,bool internal) arena_choose_hard() argument
617 iarena_cleanup(tsd_t * tsd) iarena_cleanup() argument
627 arena_cleanup(tsd_t * tsd) arena_cleanup() argument
637 arenas_tdata_cleanup(tsd_t * tsd) arenas_tdata_cleanup() argument
695 tsd_t *tsd = tsdn_tsd(tsdn); check_entry_exit_locking() local
1730 tsd_t *tsd = tsdn_tsd(tsdn); malloc_init_hard_cleanup() local
1738 tsd_t *tsd; malloc_init_hard() local
1916 imalloc_no_sample(static_opts_t * sopts,dynamic_opts_t * dopts,tsd_t * tsd,size_t size,size_t usize,szind_t ind) imalloc_no_sample() argument
1958 imalloc_sample(static_opts_t * sopts,dynamic_opts_t * dopts,tsd_t * tsd,size_t usize,szind_t ind) imalloc_sample() argument
2032 imalloc_body(static_opts_t * sopts,dynamic_opts_t * dopts,tsd_t * tsd) imalloc_body() argument
2256 tsd_t *tsd = tsd_fetch(); imalloc() local
2334 tsd_t *tsd = tsd_get(false); JEMALLOC_ATTR() local
2510 irealloc_prof_sample(tsd_t * tsd,void * old_ptr,size_t old_usize,size_t usize,prof_tctx_t * tctx,hook_ralloc_args_t * hook_args) irealloc_prof_sample() argument
2533 irealloc_prof(tsd_t * tsd,void * old_ptr,size_t old_usize,size_t usize,alloc_ctx_t * alloc_ctx,hook_ralloc_args_t * hook_args) irealloc_prof() argument
2560 ifree(tsd_t * tsd,void * ptr,tcache_t * tcache,bool slow_path) ifree() argument
2599 isfree(tsd_t * tsd,void * ptr,size_t usize,tcache_t * tcache,bool slow_path) isfree() argument
2672 tsd_t *tsd = tsd_fetch(); je_realloc() local
2740 tsd_t *tsd; je_realloc() local
2767 tsd_t *tsd = tsd_fetch_min(); free_default() local
2792 tsd_t *tsd = tsd_get(false); free_fastpath() local
3159 irallocx_prof(tsd_t * tsd,void * old_ptr,size_t old_usize,size_t size,size_t alignment,size_t * usize,bool zero,tcache_t * tcache,arena_t * arena,alloc_ctx_t * alloc_ctx,hook_ralloc_args_t * hook_args) irallocx_prof() argument
3203 tsd_t *tsd; je_rallocx() local
3324 ixallocx_prof(tsd_t * tsd,void * ptr,size_t old_usize,size_t size,size_t extra,size_t alignment,bool zero,alloc_ctx_t * alloc_ctx) ixallocx_prof() argument
3376 tsd_t *tsd; je_xallocx() local
3478 tsd_t *tsd = tsd_fetch(); je_dallocx() local
3537 tsd_t *tsd = tsd_fetch(); sdallocx_default() local
3633 tsd_t *tsd; je_mallctl() local
3662 tsd_t *tsd = tsd_fetch(); je_mallctlnametomib() local
3675 tsd_t *tsd; je_mallctlbymib() local
3869 tsd_t *tsd; jemalloc_prefork() local
3943 tsd_t *tsd; jemalloc_postfork_parent() local
3977 tsd_t *tsd; jemalloc_postfork_child() local
[all...]
prof.c
83 prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx) {
86 if (tsd_reentrancy_level_get(tsd) > 0) {
92 malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
94 prof_tctx_try_destroy(tsd, tctx);
99 prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t size,
104 prof_sys_thread_name_fetch(tsd);
107 edata_t *edata = emap_edata_lookup(tsd_tsdn(tsd), &arena_emap_global,
109 prof_info_set(tsd, edata, tctx, size);
113 malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
136 bool record_recent = prof_recent_alloc_prepare(tsd, tct
365 prof_log_bt_index(tsd_t * tsd,prof_bt_t * bt) prof_log_bt_index() argument
407 prof_log_thr_index(tsd_t * tsd,uint64_t thr_uid,const char * name) prof_log_thr_index() argument
444 prof_try_log(tsd_t * tsd,const void * ptr,size_t usize,prof_tctx_t * tctx) prof_try_log() argument
521 prof_free_sampled_object(tsd_t * tsd,const void * ptr,size_t usize,prof_tctx_t * tctx) prof_free_sampled_object() argument
548 prof_enter(tsd_t * tsd,prof_tdata_t * tdata) prof_enter() argument
561 prof_leave(tsd_t * tsd,prof_tdata_t * tdata) prof_leave() argument
847 prof_gctx_try_destroy(tsd_t * tsd,prof_tdata_t * tdata_self,prof_gctx_t * gctx,prof_tdata_t * tdata) prof_gctx_try_destroy() argument
912 prof_tctx_destroy(tsd_t * tsd,prof_tctx_t * tctx) prof_tctx_destroy() argument
988 prof_lookup_global(tsd_t * tsd,prof_bt_t * bt,prof_tdata_t * tdata,void ** p_btkey,prof_gctx_t ** p_gctx,bool * p_new_gctx) prof_lookup_global() argument
1053 prof_lookup(tsd_t * tsd,prof_bt_t * bt) prof_lookup() argument
1210 tsd_t *tsd; prof_bt_count() local
1490 prof_gctx_finish(tsd_t * tsd,prof_gctx_tree_t * gctxs) prof_gctx_finish() argument
1823 prof_dump_prep(tsd_t * tsd,prof_tdata_t * tdata,struct prof_tdata_merge_iter_arg_s * prof_tdata_merge_iter_arg,struct prof_gctx_merge_iter_arg_s * prof_gctx_merge_iter_arg,prof_gctx_tree_t * gctxs) prof_dump_prep() argument
1865 prof_dump_file(tsd_t * tsd,bool propagate_err,const char * filename,bool leakcheck,prof_tdata_t * tdata,struct prof_tdata_merge_iter_arg_s * prof_tdata_merge_iter_arg,struct prof_gctx_merge_iter_arg_s * prof_gctx_merge_iter_arg,struct prof_gctx_dump_iter_arg_s * prof_gctx_dump_iter_arg,prof_gctx_tree_t * gctxs) prof_dump_file() argument
1906 prof_dump(tsd_t * tsd,bool propagate_err,const char * filename,bool leakcheck) prof_dump() argument
1948 tsd_t *tsd; prof_cnt_all() local
2013 tsd_t *tsd; prof_fdump() local
2050 tsd_t *tsd; prof_idump() local
2083 prof_mdump(tsd_t * tsd,const char * filename) prof_mdump() argument
2107 tsd_t *tsd; prof_gdump() local
2201 prof_tdata_init_impl(tsd_t * tsd,uint64_t thr_uid,uint64_t thr_discrim,char * thread_name,bool active) prof_tdata_init_impl() argument
2247 prof_tdata_init(tsd_t * tsd) prof_tdata_init() argument
2272 prof_tdata_destroy_locked(tsd_t * tsd,prof_tdata_t * tdata,bool even_if_attached) prof_tdata_destroy_locked() argument
2289 prof_tdata_destroy(tsd_t * tsd,prof_tdata_t * tdata,bool even_if_attached) prof_tdata_destroy() argument
2296 prof_tdata_detach(tsd_t * tsd,prof_tdata_t * tdata) prof_tdata_detach() argument
2321 prof_tdata_reinit(tsd_t * tsd,prof_tdata_t * tdata) prof_tdata_reinit() argument
2359 prof_reset(tsd_t * tsd,size_t lg_sample) prof_reset() argument
2386 prof_tdata_cleanup(tsd_t * tsd) prof_tdata_cleanup() argument
2554 tsd_t *tsd = tsd_fetch(); prof_log_stop_final() local
2581 prof_log_emit_threads(tsd_t * tsd,emitter_t * emitter) prof_log_emit_threads() argument
2605 prof_log_emit_traces(tsd_t * tsd,emitter_t * emitter) prof_log_emit_traces() argument
2634 prof_log_emit_allocs(tsd_t * tsd,emitter_t * emitter) prof_log_emit_allocs() argument
2701 tsd_t *tsd = tsdn_tsd(tsdn); prof_log_stop() local
2783 prof_thread_name_get(tsd_t * tsd) prof_thread_name_get() argument
2817 prof_thread_name_set(tsd_t * tsd,const char * thread_name) prof_thread_name_set() argument
2855 prof_thread_active_get(tsd_t * tsd) prof_thread_active_get() argument
2866 prof_thread_active_set(tsd_t * tsd,bool active) prof_thread_active_set() argument
2952 prof_boot2(tsd_t * tsd) prof_boot2() argument
[all...]
background_thread.c
53 bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
54 bool background_threads_enable(tsd_t *tsd) NOT_REACHED in background_thread_create() argument
55 bool background_threads_disable(tsd_t *tsd) NOT_REACHED in background_thread_create()
233 background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
235 malloc_mutex_assert_owner(tsd_tsdn(tsd),
238 malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
242 pre_reentrancy(tsd, NULL);
243 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
253 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
256 post_reentrancy(tsd);
[all...]
ctl.c
57 static int n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, \
982 arenas_i_impl(tsd_t *tsd, size_t i, bool compat, bool init) { in ctl_arena_stats_sdmerge()
995 (struct container_s *)base_alloc(tsd_tsdn(tsd), in ctl_arena_refresh()
1003 ret = (ctl_arena_t *)base_alloc(tsd_tsdn(tsd), b0get(),
1262 ctl_arena_init(tsd_t *tsd, const arena_config_t *config) { in ctl_lookup()
1275 if (arenas_i_impl(tsd, arena_ind, false, true) == NULL) { in ctl_lookup()
1280 if (arena_init(tsd_tsdn(tsd), arena_ind, config) == NULL) { in ctl_lookup()
1387 ctl_init(tsd_t *tsd) { in ctl_boot()
1389 tsdn_t *tsdn = tsd_tsdn(tsd); in ctl_boot()
1423 if ((ctl_sarena = arenas_i_impl(tsd, MALLCTL_ARENAS_AL
726 arenas_i_impl(tsd_t * tsd,size_t i,bool compat,bool init) arenas_i_impl() argument
1004 ctl_arena_init(tsd_t * tsd,extent_hooks_t * extent_hooks) ctl_arena_init() argument
1118 ctl_init(tsd_t * tsd) ctl_init() argument
1292 ctl_byname(tsd_t * tsd,const char * name,void * oldp,size_t * oldlenp,void * newp,size_t newlen) ctl_byname() argument
1324 ctl_nametomib(tsd_t * tsd,const char * name,size_t * mibp,size_t * miblenp) ctl_nametomib() argument
1338 ctl_bymib(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) ctl_bymib() argument
1635 background_thread_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) background_thread_ctl() argument
1687 max_background_threads_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) max_background_threads_ctl() argument
1890 thread_tcache_flush_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) thread_tcache_flush_ctl() argument
1911 thread_prof_name_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) thread_prof_name_ctl() argument
1943 thread_prof_active_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) thread_prof_active_ctl() argument
1974 tcache_create_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) tcache_create_ctl() argument
1992 tcache_flush_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) tcache_flush_ctl() argument
2012 tcache_destroy_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) tcache_destroy_ctl() argument
2034 arena_i_initialized_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_initialized_ctl() argument
2103 arena_i_decay_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_decay_ctl() argument
2119 arena_i_purge_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_purge_ctl() argument
2135 arena_i_reset_destroy_helper(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen,unsigned * arena_ind,arena_t ** arena) arena_i_reset_destroy_helper() argument
2156 arena_reset_prepare_background_thread(tsd_t * tsd,unsigned arena_ind) arena_reset_prepare_background_thread() argument
2172 arena_reset_finish_background_thread(tsd_t * tsd,unsigned arena_ind) arena_reset_finish_background_thread() argument
2187 arena_i_reset_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_reset_ctl() argument
2207 arena_i_destroy_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_destroy_ctl() argument
2248 arena_i_dss_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_dss_ctl() argument
2309 arena_i_decay_ms_ctl_impl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen,bool dirty) arena_i_decay_ms_ctl_impl() argument
2357 arena_i_dirty_decay_ms_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_dirty_decay_ms_ctl() argument
2364 arena_i_muzzy_decay_ms_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_muzzy_decay_ms_ctl() argument
2371 arena_i_extent_hooks_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_extent_hooks_ctl() argument
2426 arena_i_retain_grow_limit_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arena_i_retain_grow_limit_ctl() argument
2489 arenas_narenas_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arenas_narenas_ctl() argument
2510 arenas_decay_ms_ctl_impl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen,bool dirty) arenas_decay_ms_ctl_impl() argument
2538 arenas_dirty_decay_ms_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arenas_dirty_decay_ms_ctl() argument
2545 arenas_muzzy_decay_ms_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arenas_muzzy_decay_ms_ctl() argument
2582 arenas_create_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arenas_create_ctl() argument
2605 arenas_lookup_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) arenas_lookup_ctl() argument
2638 prof_thread_active_init_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_thread_active_init_ctl() argument
2666 prof_active_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_active_ctl() argument
2692 prof_dump_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_dump_ctl() argument
2715 prof_gdump_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_gdump_ctl() argument
2741 prof_reset_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_reset_ctl() argument
2791 prof_log_stop_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) prof_log_stop_ctl() argument
2954 stats_mutexes_reset_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) stats_mutexes_reset_ctl() argument
3120 experimental_hooks_install_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) experimental_hooks_install_ctl() argument
3148 experimental_hooks_remove_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) experimental_hooks_remove_ctl() argument
3231 experimental_utilization_query_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) experimental_utilization_query_ctl() argument
3356 experimental_utilization_batch_query_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) experimental_utilization_batch_query_ctl() argument
3401 experimental_arenas_i_pactivep_ctl(tsd_t * tsd,const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen) experimental_arenas_i_pactivep_ctl() argument
[all...]
ckh.c
52 static bool ckh_grow(tsd_t *tsd, ckh_t *ckh);
53 static void ckh_shrink(tsd_t *tsd, ckh_t *ckh);
256 ckh_grow(tsd_t *tsd, ckh_t *ckh) {
282 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, in ckh_grow()
283 true, NULL, true, arena_ichoose(tsd, NULL)); in ckh_grow()
295 idalloctm(tsd_tsdn(tsd), tab, NULL, NULL, true, true); in ckh_grow()
300 idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true); in ckh_grow()
311 ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
326 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL, in ckh_shrink()
327 true, arena_ichoose(tsd, NUL in ckh_shrink()
257 ckh_grow(tsd_t * tsd,ckh_t * ckh) ckh_grow() argument
312 ckh_shrink(tsd_t * tsd,ckh_t * ckh) ckh_shrink() argument
360 ckh_new(tsd_t * tsd,ckh_t * ckh,size_t minitems,ckh_hash_t * hash,ckh_keycomp_t * keycomp) ckh_new() argument
417 ckh_delete(tsd_t * tsd,ckh_t * ckh) ckh_delete() argument
467 ckh_insert(tsd_t * tsd,ckh_t * ckh,const void * key,const void * data) ckh_insert() argument
490 ckh_remove(tsd_t * tsd,ckh_t * ckh,const void * searchkey,void ** key,void ** data) ckh_remove() argument
[all...]
/freebsd/contrib/jemalloc/include/jemalloc/internal/
tsd.h
165 tsd_t *tsd_fetch_slow(tsd_t *tsd, bool internal);
166 void tsd_state_set(tsd_t *tsd, uint8_t new_state);
167 void tsd_slow_update(tsd_t *tsd);
168 void tsd_prefork(tsd_t *tsd);
169 void tsd_postfork_parent(tsd_t *tsd);
170 void tsd_postfork_child(tsd_t *tsd);
204 * in such scenarios, we need tsd, but set up in such a way that no
212 * What it says on the tin; tsd that hasn't been initialized. Note in tsd_state_get()
213 * that even when the tsd struct lives in TLS, when need to keep track in tsd_state_get()
221 * Some TSD accesse member
205 tsd_state_get(tsd_t * tsd) tsd_state_get() argument
225 tsd_tsdn(tsd_t * tsd) tsd_tsdn() argument
325 tsd_assert_fast(tsd_t * tsd) tsd_assert_fast() argument
336 tsd_fast(tsd_t * tsd) tsd_fast() argument
347 tsd_t *tsd = tsd_get(init); tsd_fetch_impl() local
372 tsd_t *tsd = tsd_fetch_min(); tsd_internal_fetch() local
385 tsd_nominal(tsd_t * tsd) tsd_nominal() argument
399 tsd_rtree_ctx(tsd_t * tsd) tsd_rtree_ctx() argument
[all...]
prof_inlines_b.h
tcache_inlines.h
12 tcache_enabled_get(tsd_t *tsd) { in tcache_enabled_get() argument
13 return tsd_tcache_enabled_get(tsd); in tcache_enabled_get()
17 tcache_enabled_set(tsd_t *tsd, bool enabled) { in tcache_enabled_set() argument
18 bool was_enabled = tsd_tcache_enabled_get(tsd); in tcache_enabled_set()
21 tsd_tcache_data_init(tsd); in tcache_enabled_set()
23 tcache_cleanup(tsd); in tcache_enabled_set()
26 tsd_tcache_enabled_set(tsd, enabled); in tcache_enabled_set()
27 tsd_slow_update(tsd); in tcache_enabled_set()
45 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, in tcache_alloc_small()
56 arena = arena_choose(tsd, aren in tcache_alloc_small()
31 tcache_event(tsd_t * tsd,tcache_t * tcache) tcache_event() argument
42 tcache_alloc_small(tsd_t * tsd,arena_t * arena,tcache_t * tcache,size_t size,szind_t binind,bool zero,bool slow_path) tcache_alloc_small() argument
104 tcache_alloc_large(tsd_t * tsd,arena_t * arena,tcache_t * tcache,size_t size,szind_t binind,bool zero,bool slow_path) tcache_alloc_large() argument
164 tcache_dalloc_small(tsd_t * tsd,tcache_t * tcache,void * ptr,szind_t binind,bool slow_path) tcache_dalloc_small() argument
189 tcache_dalloc_large(tsd_t * tsd,tcache_t * tcache,void * ptr,szind_t binind,bool slow_path) tcache_dalloc_large() argument
216 tcaches_get(tsd_t * tsd,unsigned ind) tcaches_get() argument
[all...]
jemalloc_internal_inlines_a.h
75 tcache_available(tsd_t *tsd) { in arena_tdata_get()
81 if (likely(tsd_tcache_enabled_get(tsd))) { in arena_tdata_get()
83 if (config_debug && tsd_tcache_slowp_get(tsd)->arena != NULL) {
84 tcache_assert_initialized(tsd_tcachep_get(tsd));
93 tcache_get(tsd_t *tsd) { in arena_get()
94 if (!tcache_available(tsd)) { in arena_get()
98 return tsd_tcachep_get(tsd); in arena_get()
102 tcache_slow_get(tsd_t *tsd) { in decay_ticker_get()
103 if (!tcache_available(tsd)) { in decay_ticker_get()
107 return tsd_tcache_slowp_get(tsd); in decay_ticker_get()
60 arena_tdata_get(tsd_t * tsd,unsigned ind,bool refresh_if_missing) arena_tdata_get() argument
101 decay_ticker_get(tsd_t * tsd,unsigned ind) decay_ticker_get() argument
124 tcache_available(tsd_t * tsd) tcache_available() argument
142 tcache_get(tsd_t * tsd) tcache_get() argument
151 pre_reentrancy(tsd_t * tsd,arena_t * arena) pre_reentrancy() argument
166 post_reentrancy(tsd_t * tsd) post_reentrancy() argument
[all...]
jemalloc_internal_inlines_b.h
7 percpu_arena_update(tsd_t *tsd, unsigned cpu) {
9 arena_t *oldarena = tsd_arena_get(tsd); in arena_choose_impl()
15 arena_t *newarena = arena_get(tsd_tsdn(tsd), newind, true); in arena_choose_impl()
19 arena_migrate(tsd, oldarena, newarena); in arena_choose_impl()
20 tcache_t *tcache = tcache_get(tsd); in arena_choose_impl()
22 tcache_slow_t *tcache_slow = tsd_tcache_slowp_get(tsd); in arena_choose_impl()
23 tcache_arena_reassociate(tsd_tsdn(tsd), tcache_slow, in arena_choose_impl()
32 arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) { in arena_choose_impl()
40 if (unlikely(tsd_reentrancy_level_get(tsd) > 0)) { in arena_choose_impl()
41 return arena_get(tsd_tsdn(tsd), in arena_choose_impl()
8 arena_choose_impl(tsd_t * tsd,arena_t * arena,bool internal) arena_choose_impl() argument
62 arena_choose(tsd_t * tsd,arena_t * arena) arena_choose() argument
67 arena_ichoose(tsd_t * tsd,arena_t * arena) arena_ichoose() argument
[all...]
tcache_externs.h
41 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
43 void tcache_bin_flush_large(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
45 void tcache_bin_flush_stashed(tsd_t *tsd, tcache_t *tcache, cache_bin_t *bin,
49 tcache_t *tcache_create_explicit(tsd_t *tsd);
50 void tcache_cleanup(tsd_t *tsd);
52 bool tcaches_create(tsd_t *tsd, base_t *base, unsigned *r_ind);
53 void tcaches_flush(tsd_t *tsd, unsigned ind);
54 void tcaches_destroy(tsd_t *tsd, unsigned ind);
61 void tcache_flush(tsd_t *tsd);
62 bool tsd_tcache_data_init(tsd_t *tsd);
[all...]
prof_externs.h
59 prof_tdata_t *prof_tdata_init(tsd_t *tsd);
60 prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
62 void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx);
63 void prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t size,
65 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_info_t *prof_info);
66 prof_tctx_t *prof_tctx_create(tsd_t *tsd);
68 bool prof_mdump(tsd_t *tsd, const char *filename);
71 void prof_tdata_cleanup(tsd_t *tsd);
74 const char *prof_thread_name_get(tsd_t *tsd);
75 int prof_thread_name_set(tsd_t *tsd, cons
[all...]
jemalloc_internal_externs.h
10 /* TSD checks this to set thread local slow state accordingly. */
64 arena_t *arena_choose_hard(tsd_t *tsd, bool internal);
65 void arena_migrate(tsd_t *tsd, arena_t *oldarena, arena_t *newarena);
66 void iarena_cleanup(tsd_t *tsd);
67 void arena_cleanup(tsd_t *tsd);
jemalloc_internal_inlines_c.h
63 ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) { in ialloc()
64 return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false, in ialloc()
95 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) { in ipalloc()
96 return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero, in ipalloc()
97 tcache_get(tsd), false, NULL);
124 idalloc(tsd_t *tsd, void *ptr) { in idalloc()
125 idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true);
199 iralloc(tsd_t *tsd, voi in iralloc()
61 ialloc(tsd_t * tsd,size_t size,szind_t ind,bool zero,bool slow_path) ialloc() argument
93 ipalloc(tsd_t * tsd,size_t usize,size_t alignment,bool zero) ipalloc() argument
122 idalloc(tsd_t * tsd,void * ptr) idalloc() argument
197 iralloc(tsd_t * tsd,void * ptr,size_t oldsize,size_t size,size_t alignment,bool zero,hook_ralloc_args_t * hook_args) iralloc() argument
[all...]
arena_inlines_a.h
36 percpu_arena_update(tsd_t * tsd,unsigned cpu) percpu_arena_update() argument
tsd_generic.h
2 #error This file should be included only once, by tsd.h.
13 /* Defined in tsd.c, to allow the mutex headers to have tsd dependencies. */
42 malloc_write("<jemalloc>: Error setting TSD\n"); in tsd_cleanup_wrapper()
59 malloc_write("<jemalloc>: Error setting TSD\n"); in tsd_wrapper_set()
85 malloc_write("<jemalloc>: Error allocating TSD\n"); in tsd_wrapper_get()
126 malloc_write("<jemalloc>: Error allocating TSD\n"); in tsd_boot()
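
The tsd_generic.h hits are jemalloc's generic, pthread-key based TSD backend (used when a faster native TLS path is not selected): the per-thread tsd lives in a heap-allocated wrapper reached through a pthread key, allocated lazily on first access, with a hard error when the key cannot be set or the wrapper cannot be allocated (the "Error setting/allocating TSD" messages above). Below is a minimal sketch of that lazy-wrapper idiom using plain pthreads and libc; the names are illustrative, not jemalloc's.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical payload standing in for the real tsd_t. */
typedef struct {
    int booted;
    /* ... thread-local allocator state ... */
} my_tsd_t;

static pthread_key_t my_tsd_key;

static void
my_tsd_cleanup(void *arg) {
    free(arg);                                /* key destructor runs at thread exit */
}

static void
my_tsd_boot(void) {
    if (pthread_key_create(&my_tsd_key, my_tsd_cleanup) != 0) {
        fputs("<sketch>: Error allocating TSD key\n", stderr);
        abort();
    }
}

/* Fetch the calling thread's wrapper, allocating it on first use. */
static my_tsd_t *
my_tsd_get(void) {
    my_tsd_t *tsd = pthread_getspecific(my_tsd_key);
    if (tsd == NULL) {
        tsd = calloc(1, sizeof(*tsd));
        if (tsd == NULL) {
            fputs("<sketch>: Error allocating TSD\n", stderr);
            abort();
        }
        if (pthread_setspecific(my_tsd_key, tsd) != 0) {
            fputs("<sketch>: Error setting TSD\n", stderr);
            abort();
        }
    }
    return tsd;
}

my_tsd_boot() would be called once during process initialization, before any thread calls my_tsd_get().
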
ckh.h
4 #include "jemalloc/internal/tsd.h"
70 bool ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash,
72 void ckh_delete(tsd_t *tsd, ckh_t *ckh);
90 bool ckh_insert(tsd_t *tsd, ckh_t *ckh, const void *key, const void *data);
91 bool ckh_remove(tsd_t *tsd, ckh_t *ckh, const void *searchkey, void **key,
/freebsd/contrib/llvm-project/compiler-rt/lib/scudo/standalone/
tsd_shared.h
12 #include "tsd.h"
39 TSD<Allocator> &operator*() { return *CurrentTSD; }
41 TSD<Allocator> *operator->() {
47 TSD<Allocator> *CurrentTSD;
111 // Not supported by the TSD Registry, but not an error either. in setOption()
124 // Theoretically, we want to mark TSD::lock()/TSD::unlock() with proper in getStats()
125 // thread annotations. However, given the TSD is only locked on shared in getStats()
129 Str->append(" Shared TSD[%zu]:\n", I); in getStats()
136 ALWAYS_INLINE TSD<Allocator> *getTSDAndLock() NO_THREAD_SAFETY_ANALYSIS { in getTSDAndLock()
137 TSD<Allocator> *TSD = getCurrentTSD(); in getTSDAndLock() local
[all …]
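
The tsd_shared.h hits outline scudo's shared TSD registry: instead of one TSD per thread, a fixed pool of TSDs is shared among threads, getTSDAndLock() hands back some slot already locked, and getStats() can iterate the pool ("Shared TSD[%zu]"). Here is a rough C rendering of that idea under the assumption of a simple trylock-then-probe policy; scudo's real registry is a C++ template over its Allocator with its own slot-assignment logic, and the pool size, names, and probing order below are invented.

#include <pthread.h>

#define NUM_TSDS 8                      /* assumed pool size; the real one is tuned per platform */

/* Hypothetical shared slot: a lock guarding per-thread cache state. */
typedef struct {
    pthread_mutex_t lock;
    /* ... allocator cache state ... */
} my_tsd_t;

static my_tsd_t tsd_pool[NUM_TSDS];
static _Thread_local unsigned my_slot;  /* slot this thread used last */

static void
my_tsd_pool_init(void) {                /* call once at startup */
    for (unsigned i = 0; i < NUM_TSDS; i++) {
        pthread_mutex_init(&tsd_pool[i].lock, NULL);
    }
}

/* Return some slot, locked; the caller unlocks it when done.  Prefer the slot
 * used last time, probe the others with trylock, and only block as a last
 * resort (cf. getTSDAndLock in the hits above). */
static my_tsd_t *
my_get_tsd_and_lock(void) {
    if (pthread_mutex_trylock(&tsd_pool[my_slot].lock) == 0) {
        return &tsd_pool[my_slot];
    }
    for (unsigned i = 1; i < NUM_TSDS; i++) {
        unsigned cand = (my_slot + i) % NUM_TSDS;
        if (pthread_mutex_trylock(&tsd_pool[cand].lock) == 0) {
            my_slot = cand;             /* stick to this slot next time */
            return &tsd_pool[cand];
        }
    }
    pthread_mutex_lock(&tsd_pool[my_slot].lock);    /* everything contended: block */
    return &tsd_pool[my_slot];
}
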
tsd_exclusive.h
12 #include "tsd.h"
43 TSD<Allocator> &operator*() { return *CurrentTSD; }
45 TSD<Allocator> *operator->() {
51 TSD<Allocator> *CurrentTSD;
102 // To disable the exclusive TSD registry, we effectively lock the fallback TSD
130 Str->append("Exclusive TSD don't support iterating each TSD\n"); in getStats()
134 ALWAYS_INLINE TSD<Allocator> *
163 TSD<Allocator> FallbackTSD;
166 static thread_local TSD<Allocator> ThreadTSD;
172 thread_local TSD<Allocator> TSDRegistryExT<Allocator>::ThreadTSD;
tsd.h
1 //===-- tsd.h ---------------------------------------------------*- C++ -*-===//
27 template <class Allocator> struct alignas(SCUDO_CACHE_LINE_SIZE) TSD { struct
28 using ThisT = TSD<Allocator>; argument
58 // As the comments attached to `getCache()`, the TSD doesn't always need to be argument
68 // current architecture of accessing TSD is not easy to cooperate with the argument
73 // TSD doesn't always require holding the lock. Add this assertion while the
/freebsd/contrib/llvm-project/compiler-rt/lib/memprof/
memprof_posix.cpp
26 // ---------------------- TSD ---------------- {{{1
30 void TSDInit(void (*destructor)(void *tsd)) { in TSDInit() argument
41 void TSDSet(void *tsd) { in TSDSet() argument
43 pthread_setspecific(tsd_key, tsd); in TSDSet()
46 void PlatformTSDDtor(void *tsd) { in PlatformTSDDtor() argument
47 MemprofThreadContext *context = (MemprofThreadContext *)tsd; in PlatformTSDDtor()
50 CHECK_EQ(0, pthread_setspecific(tsd_key, tsd)); in PlatformTSDDtor()
53 MemprofThread::TSDDtor(tsd); in PlatformTSDDtor()
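
The memprof hits (and the msan and dfsan ones below) all use the same POSIX TSD idiom: a pthread key created in TSDInit with a destructor, TSDSet attaching the thread's context to that key, and a PlatformTSDDtor/TSDDtor pair that may re-register the value via pthread_setspecific so the real teardown is postponed to a later key-destructor iteration (one of the dfsan hits notes that "some code may still be executing in later TSD destructors"). The following is a condensed, hedged sketch of that idiom with invented names; the sanitizer runtimes wrap it in their own thread/context classes.

#include <pthread.h>
#include <stdlib.h>

/* Hypothetical per-thread context standing in for MemprofThreadContext and friends. */
typedef struct {
    int destructor_iterations;          /* key-destructor rounds this context should survive */
    /* ... thread bookkeeping ... */
} my_thread_ctx_t;

static pthread_key_t tsd_key;

/* TSDInit-style setup: create the key once with the runtime's destructor. */
static void
my_tsd_init(void (*destructor)(void *)) {
    pthread_key_create(&tsd_key, destructor);
}

/* TSDSet-style attach: bind this thread's context to the key. */
static void
my_tsd_set(void *tsd) {
    pthread_setspecific(tsd_key, tsd);
}

/* PlatformTSDDtor-style destructor: if the context wants to linger, re-register
 * it so the key destructor fires again on the next iteration; otherwise tear down. */
static void
my_platform_tsd_dtor(void *tsd) {
    my_thread_ctx_t *ctx = tsd;
    if (ctx->destructor_iterations > 1) {
        ctx->destructor_iterations--;
        pthread_setspecific(tsd_key, tsd);   /* postpone the real destruction */
        return;
    }
    free(ctx);                               /* final teardown for this thread */
}

The runtime would call my_tsd_init(my_platform_tsd_dtor) once at startup, and each thread would call my_tsd_set(ctx) with destructor_iterations set to how many rounds it needs to outlive; POSIX allows key destructors to be re-run up to PTHREAD_DESTRUCTOR_ITERATIONS times, which is what makes the re-registration trick work.
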
/freebsd/contrib/llvm-project/compiler-rt/lib/msan/
msan_linux.cpp
218 // ---------------------- TSD ---------------- {{{1
222 // Reuse the MSan TSD API for compatibility with existing code
225 static void (*tsd_destructor)(void *tsd) = nullptr;
239 void MsanTSDInit(void (*destructor)(void *tsd)) { in MsanTSDInit() argument
249 void SetCurrentThread(MsanThread *tsd) { in SetCurrentThread() argument
251 CHECK(tsd); in SetCurrentThread()
253 key.key = tsd; in SetCurrentThread()
256 void MsanTSDDtor(void *tsd) { in MsanTSDDtor() argument
258 CHECK_EQ(key.key, tsd); in MsanTSDDtor()
262 MsanThread::TSDDtor(tsd); in MsanTSDDtor()
[all …]
/freebsd/contrib/llvm-project/compiler-rt/lib/dfsan/
dfsan_thread.cpp
51 void DFsanThread::TSDDtor(void *tsd) { in TSDDtor() argument
52 DFsanThread *t = (DFsanThread *)tsd; in TSDDtor()
59 // some code may still be executing in later TSD destructors in Destroy()
102 void DFsanTSDInit(void (*destructor)(void *tsd)) { in DFsanTSDInit() argument
121 void DFsanTSDDtor(void *tsd) { in DFsanTSDDtor() argument
122 DFsanThread *t = (DFsanThread *)tsd; in DFsanTSDDtor()
125 CHECK_EQ(0, pthread_setspecific(tsd_key, tsd)); in DFsanTSDDtor()
131 DFsanThread::TSDDtor(tsd); in DFsanTSDDtor()
