Lines Matching full:tsd in jemalloc's src/background_thread.c
54 bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED in background_thread_create() argument
55 bool background_threads_enable(tsd_t *tsd) NOT_REACHED in background_threads_enable() argument
56 bool background_threads_disable(tsd_t *tsd) NOT_REACHED in background_threads_disable() argument
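
The three hits above are the stubs compiled when background threads are unsupported: NOT_REACHED is a macro that expands to an entire function body that must never execute. A minimal standalone sketch of the same macro trick; not_reached() here is a stand-in for jemalloc's internal assert, and the added return true; exists only because this stand-in is not declared noreturn:

    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in for jemalloc's not_reached(): abort if a stub ever runs. */
    static void not_reached(void) { fputs("not reached\n", stderr); abort(); }

    /* The macro supplies the whole function body, as in the hits above. */
    #define NOT_REACHED { not_reached(); return true; }

    typedef struct tsd_s tsd_t;     /* opaque stand-in for jemalloc's tsd_t */

    bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
    bool background_threads_enable(tsd_t *tsd) NOT_REACHED
    bool background_threads_disable(tsd_t *tsd) NOT_REACHED

    int
    main(void) {
        puts("stubs compiled; calling any of them would abort");
        return 0;
    }
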
311 background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
313 malloc_mutex_assert_owner(tsd_tsdn(tsd),
316 malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
320 pre_reentrancy(tsd, NULL);
321 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
331 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
334 post_reentrancy(tsd);
339 post_reentrancy(tsd);
344 post_reentrancy(tsd);
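
background_threads_disable_single brackets the shutdown handshake with pre_reentrancy (line 320) and pairs it with a post_reentrancy on each of the three exit paths (lines 334, 339, 344). A standalone model of that bracket, with hypothetical handshake details standing in for the real worker signalling:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Model of the reentrancy guard: a per-thread depth counter that the
     * guarded region must restore on every exit path. */
    static __thread unsigned reentrancy_level;

    static void pre_reentrancy(void)  { reentrancy_level++; }
    static void post_reentrancy(void) { reentrancy_level--; }

    typedef struct {
        pthread_mutex_t mtx;
        pthread_t       thd;
        bool            started;
    } worker_info_t;

    static void *worker(void *arg) { (void)arg; return NULL; }

    /* Mirrors the shape of background_threads_disable_single: enter the
     * guard, handshake under info->mtx, leave the guard on each return. */
    static bool
    worker_disable_single(worker_info_t *info) {
        pre_reentrancy();
        pthread_mutex_lock(&info->mtx);
        bool has_thread = info->started;
        info->started = false;
        pthread_mutex_unlock(&info->mtx);
        if (!has_thread) {
            post_reentrancy();          /* exit path 1: nothing to stop */
            return false;
        }
        if (pthread_join(info->thd, NULL) != 0) {
            post_reentrancy();          /* exit path 2: join failed */
            return true;
        }
        post_reentrancy();              /* exit path 3: success */
        return false;
    }

    int
    main(void) {
        worker_info_t info;
        pthread_mutex_init(&info.mtx, NULL);
        pthread_create(&info.thd, NULL, worker, NULL);
        info.started = true;
        printf("disable failed: %d\n", worker_disable_single(&info));
        return 0;
    }
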
384 check_background_thread_creation(tsd_t *tsd, unsigned *n_created,
391 tsdn_t *tsdn = tsd_tsdn(tsd);
409 pre_reentrancy(tsd, NULL);
412 post_reentrancy(tsd);
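
The hit at line 391 shows the usual conversion between the two thread-state handle types: tsd_t is a live, non-NULL thread's data, while tsdn_t is its nullable counterpart that the mutex APIs accept, so the same locking code also serves callers running before TSD is initialized. A compact model of that split; field names are illustrative:

    #include <stdio.h>

    /* Model of jemalloc's tsd/tsdn split: a non-NULL tsd_t pointer can be
     * viewed as a tsdn_t ("tsd or NULL"), which is what the lock and
     * assert APIs take. */
    typedef struct tsd_s { unsigned reentrancy_level; } tsd_t;
    typedef struct tsdn_s tsdn_t;           /* opaque: NULL or a tsd_t */

    static tsdn_t *
    tsd_tsdn(tsd_t *tsd) {
        return (tsdn_t *)tsd;               /* always valid: tsd != NULL */
    }

    static tsd_t *
    tsdn_tsd(tsdn_t *tsdn) {
        return (tsd_t *)tsdn;               /* only legal if tsdn != NULL */
    }

    static void
    mutex_lock(tsdn_t *tsdn) {
        if (tsdn == NULL) {
            puts("lock: no tsd yet, skip per-thread bookkeeping");
            return;
        }
        printf("lock: reentrancy depth %u\n",
            tsdn_tsd(tsdn)->reentrancy_level);
    }

    int
    main(void) {
        tsd_t tsd = { 0 };
        mutex_lock(tsd_tsdn(&tsd));         /* the pattern in the hits */
        mutex_lock(NULL);                   /* caller without TSD */
        return 0;
    }
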
434 background_thread0_work(tsd_t *tsd) {
444 if (background_thread_pause_check(tsd_tsdn(tsd),
448 if (check_background_thread_creation(tsd, &n_created,
452 background_work_sleep_once(tsd_tsdn(tsd),
465 background_threads_disable_single(tsd, info);
467 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
475 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
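
Lines 434-475 outline background_thread0_work: thread 0 runs the same pause-check/sleep cycle as the other workers, but additionally launches workers whose creation was deferred, and on the way out shuts the other threads down. A sketch of that loop shape; every helper here is a hypothetical stand-in, not jemalloc's:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_bool paused;
    static atomic_uint n_deferred;

    static bool
    pause_check(void) {
        return atomic_load(&paused);        /* true => leave the loop */
    }

    static void
    check_worker_creation(unsigned *n_created) {
        /* Thread 0 creates workers whose arenas appeared after enable. */
        unsigned pending = atomic_exchange(&n_deferred, 0);
        if (pending > 0) {
            *n_created += pending;
            printf("created %u deferred workers\n", pending);
        }
    }

    static void
    work_sleep_once(void) {
        static unsigned iters;
        if (++iters >= 3) {
            atomic_store(&paused, true);    /* demo only: stop after 3 */
        }
    }

    static void
    thread0_work(void) {
        unsigned n_created = 1;             /* thread 0 itself */
        while (true) {
            if (pause_check()) {
                break;
            }
            check_worker_creation(&n_created);
            work_sleep_once();
        }
        /* The real code also shuts the other workers down on exit. */
        printf("exiting with %u workers\n", n_created);
    }

    int
    main(void) {
        atomic_store(&n_deferred, 3);
        thread0_work();
        return 0;
    }
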
483 background_work(tsd_t *tsd, unsigned ind) {
486 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
487 background_thread_wakeup_time_set(tsd_tsdn(tsd), info,
490 background_thread0_work(tsd);
493 if (background_thread_pause_check(tsd_tsdn(tsd),
497 background_work_sleep_once(tsd_tsdn(tsd), info, ind);
501 background_thread_wakeup_time_set(tsd_tsdn(tsd), info, 0);
502 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
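
background_work (lines 483-502) appears to publish the worker's next wakeup time under info->mtx on entry and reset it to 0 before unlocking on exit, which would let other threads tell a sleeping worker from an exited one. A model of that bookkeeping; the constant and the zero-means-exited convention are assumptions:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define WAKEUP_INDEFINITE UINT64_MAX    /* stand-in constant */

    typedef struct {
        pthread_mutex_t mtx;
        uint64_t        wakeup_time;        /* 0 => worker not running */
    } worker_info_t;

    static void
    wakeup_time_set(worker_info_t *info, uint64_t t) {
        /* caller must hold info->mtx, as the asserts in the hits enforce */
        info->wakeup_time = t;
    }

    static bool
    pause_check(void) {
        static unsigned calls;
        return ++calls > 2;                 /* demo: pause after two passes */
    }

    static void
    work(worker_info_t *info) {
        pthread_mutex_lock(&info->mtx);
        wakeup_time_set(info, WAKEUP_INDEFINITE);
        while (!pause_check()) {
            /* one bounded sleep per pass; republish the wakeup time */
            wakeup_time_set(info, WAKEUP_INDEFINITE);
        }
        wakeup_time_set(info, 0);           /* "worker has exited" */
        pthread_mutex_unlock(&info->mtx);
    }

    int
    main(void) {
        worker_info_t info = { PTHREAD_MUTEX_INITIALIZER, 1 };
        work(&info);
        printf("wakeup_time after exit: %llu\n",
            (unsigned long long)info.wakeup_time);
        return 0;
    }
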
518 * Start periodic background work. We use internal tsd which avoids
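
The comment fragment at line 518 points at a subtle choice: the worker runs on internal tsd so that its own allocator activity cannot trigger side effects such as creating a new arena, which would in turn spawn another background thread. A toy model of flagging internal threads so hot paths can skip such side effects; names and mechanism are illustrative, not jemalloc's:

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy "internal tsd": threads owned by the allocator carry a flag so
     * hot paths avoid side effects (like lazily creating an arena, which
     * would spawn yet another background thread). */
    typedef struct {
        bool     internal;      /* true for allocator-internal threads */
        unsigned arena_ind;     /* arena the thread is bound to */
    } tsd_model_t;

    static unsigned
    arena_choose(tsd_model_t *tsd) {
        if (tsd->internal) {
            /* Internal threads stay on arena 0: no lazy creation, hence
             * no recursive background-thread creation either. */
            return 0;
        }
        /* An application thread might trigger arena creation here. */
        return tsd->arena_ind;
    }

    int
    main(void) {
        tsd_model_t worker = { .internal = true,  .arena_ind = 7 };
        tsd_model_t app    = { .internal = false, .arena_ind = 7 };
        printf("internal -> arena %u, app -> arena %u\n",
            arena_choose(&worker), arena_choose(&app));
        return 0;
    }
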
530 background_thread_init(tsd_t *tsd, background_thread_info_t *info) {
531 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
533 background_thread_info_init(tsd_tsdn(tsd), info);
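
background_thread_init (line 531) asserts, rather than merely documents, its locking precondition via malloc_mutex_assert_owner. A portable model of an owner-tracking debug mutex that supports the same style of assertion:

    #include <assert.h>
    #include <pthread.h>
    #include <stdbool.h>

    /* Model of malloc_mutex_assert_owner: in debug builds the mutex
     * remembers its owner, so helpers can assert "caller holds the lock". */
    typedef struct {
        pthread_mutex_t mtx;
        pthread_t       owner;
        bool            owned;
    } dbg_mutex_t;

    static void
    dbg_mutex_lock(dbg_mutex_t *m) {
        pthread_mutex_lock(&m->mtx);
        m->owner = pthread_self();
        m->owned = true;
    }

    static void
    dbg_mutex_unlock(dbg_mutex_t *m) {
        m->owned = false;
        pthread_mutex_unlock(&m->mtx);
    }

    static void
    dbg_mutex_assert_owner(dbg_mutex_t *m) {
        assert(m->owned && pthread_equal(m->owner, pthread_self()));
    }

    static dbg_mutex_t background_lock = { .mtx = PTHREAD_MUTEX_INITIALIZER };

    static void
    thread_init_locked(void) {
        dbg_mutex_assert_owner(&background_lock);   /* precondition */
        /* ... initialize per-worker info ... */
    }

    int
    main(void) {
        dbg_mutex_lock(&background_lock);
        thread_init_locked();
        dbg_mutex_unlock(&background_lock);
        return 0;
    }
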
538 background_thread_create_locked(tsd_t *tsd, unsigned arena_ind) {
540 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
547 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
551 background_thread_init(tsd, info);
553 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
560 malloc_mutex_lock(tsd_tsdn(tsd), &t0->mtx);
563 malloc_mutex_unlock(tsd_tsdn(tsd), &t0->mtx);
568 pre_reentrancy(tsd, NULL);
575 post_reentrancy(tsd);
580 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
583 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
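
Lines 538-583 trace background_thread_create_locked: the info mutex is taken and released around slot setup, and the reentrancy guard (lines 568/575) brackets what is presumably the pthread_create call, since creating a thread can itself allocate and re-enter the allocator. A standalone model of that ordering; helper names are stand-ins:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    static __thread unsigned reentrancy_level;

    typedef struct {
        pthread_mutex_t mtx;
        pthread_t       thd;
        bool            started;
    } worker_info_t;

    static void *worker(void *arg) { (void)arg; return NULL; }

    static bool
    worker_create_locked(worker_info_t *info) {
        pthread_mutex_lock(&info->mtx);
        if (info->started) {                /* already running */
            pthread_mutex_unlock(&info->mtx);
            return false;
        }
        pthread_mutex_unlock(&info->mtx);

        /* No internal lock is held across pthread_create: it may allocate
         * and re-enter the allocator, hence the reentrancy bracket. */
        reentrancy_level++;
        pthread_t thd;
        int err = pthread_create(&thd, NULL, worker, NULL);
        reentrancy_level--;
        if (err != 0) {
            return true;                    /* creation failed */
        }

        pthread_mutex_lock(&info->mtx);     /* retake to publish result */
        info->thd = thd;
        info->started = true;
        pthread_mutex_unlock(&info->mtx);
        return false;
    }

    int
    main(void) {
        static worker_info_t info = { .mtx = PTHREAD_MUTEX_INITIALIZER };
        printf("create failed: %d\n", worker_create_locked(&info));
        if (info.started) {
            pthread_join(info.thd, NULL);
        }
        return 0;
    }
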
593 background_thread_create(tsd_t *tsd, unsigned arena_ind) {
597 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock);
598 ret = background_thread_create_locked(tsd, arena_ind);
599 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock);
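
background_thread_create (lines 593-599) is the usual foo()/foo_locked() split: the public entry point only acquires background_thread_lock and delegates, so internal callers that already hold the lock, like background_threads_enable at line 637, call the _locked variant directly without self-deadlocking. A minimal sketch of the convention:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    static pthread_mutex_t big_lock = PTHREAD_MUTEX_INITIALIZER;

    static bool
    thread_create_locked(unsigned ind) {
        /* precondition: big_lock held by the caller */
        printf("creating worker %u\n", ind);
        return false;
    }

    /* Public entry point: manages the lock, then delegates. */
    static bool
    thread_create(unsigned ind) {
        pthread_mutex_lock(&big_lock);
        bool ret = thread_create_locked(ind);
        pthread_mutex_unlock(&big_lock);
        return ret;
    }

    /* A caller already under the lock uses the _locked variant. */
    static bool
    threads_enable(void) {
        pthread_mutex_lock(&big_lock);
        /* ... per-arena setup under the lock, then: */
        bool ret = thread_create_locked(0); /* no recursive locking */
        pthread_mutex_unlock(&big_lock);
        return ret;
    }

    int
    main(void) {
        thread_create(1);
        threads_enable();
        return 0;
    }
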
605 background_threads_enable(tsd_t *tsd) {
608 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
622 arena_get(tsd_tsdn(tsd), i, false) == NULL) {
627 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
629 background_thread_init(tsd, info);
630 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
637 return background_thread_create_locked(tsd, 0);
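
background_threads_enable (lines 605-637) walks the arena slots, skipping indices where arena_get returns NULL (line 622), initializes each worker's info under its own mutex, and then creates only thread 0; the remaining workers are left to the deferred-creation check thread 0 performs. A toy version of that loop, with the structure inferred from the hits rather than copied:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_ARENAS 4

    typedef struct { bool exists; } arena_model_t;
    typedef struct {
        pthread_mutex_t mtx;
        bool            initialized;
    } worker_info_t;

    /* Two arenas exist; slots 2 and 3 are empty, like a late arena_get. */
    static arena_model_t arenas[MAX_ARENAS] = { { true }, { true } };
    static worker_info_t infos[MAX_ARENAS] = {
        { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
        { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    };

    static bool
    workers_enable(void) {
        for (unsigned i = 0; i < MAX_ARENAS; i++) {
            if (!arenas[i].exists) {
                continue;           /* mirrors the arena_get NULL check */
            }
            pthread_mutex_lock(&infos[i].mtx);
            infos[i].initialized = true;
            pthread_mutex_unlock(&infos[i].mtx);
        }
        printf("spawning worker 0 only; the rest are deferred\n");
        return false;               /* would be thread_create_locked(0) */
    }

    int
    main(void) {
        return workers_enable() ? 1 : 0;
    }
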
641 background_threads_disable(tsd_t *tsd) {
643 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
646 if (background_threads_disable_single(tsd,