Lines matching defs:tsd (jemalloc, src/background_thread.c)
53 bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
54 bool background_threads_enable(tsd_t *tsd) NOT_REACHED
55 bool background_threads_disable(tsd_t *tsd) NOT_REACHED
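In the jemalloc source these three declarations sit in the #ifndef JEMALLOC_BACKGROUND_THREAD branch: on builds without background-thread support, each public entry point is stubbed out with a NOT_REACHED macro so that an accidental call aborts instead of silently succeeding. A compilable sketch of that pattern (the _Noreturn helper stands in for jemalloc's internal not_reached()):

    #include <stdbool.h>
    #include <stdlib.h>

    typedef struct tsd_s tsd_t; /* opaque stand-in for jemalloc's per-thread state */

    static _Noreturn void not_reached(void) { abort(); }

    /* Each stub aborts if ever called on a build without
     * background-thread support. */
    #define NOT_REACHED { not_reached(); }

    bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
    bool background_threads_enable(tsd_t *tsd) NOT_REACHED
    bool background_threads_disable(tsd_t *tsd) NOT_REACHED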
233 background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
235 malloc_mutex_assert_owner(tsd_tsdn(tsd),
238 malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
242 pre_reentrancy(tsd, NULL);
243 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
253 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
256 post_reentrancy(tsd);
261 post_reentrancy(tsd);
266 post_reentrancy(tsd);
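The three post_reentrancy calls at 256, 261, and 266 mark distinct exit paths of background_threads_disable_single: after the paired owner/not-owner asserts at 235/238, pre_reentrancy at 242 raises the calling thread's reentrancy level before anything that can block or re-enter the allocator (notably joining the worker), and every way out of the function must lower it again. A self-contained sketch of that bracket shape; the _Thread_local counter and helper names are illustrative, not jemalloc's actual internals:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stddef.h>

    /* Illustrative per-thread reentrancy counter. */
    static _Thread_local int reentrancy_level;
    static void pre_reentrancy(void)  { reentrancy_level++; }
    static void post_reentrancy(void) { reentrancy_level--; }

    /* Bracket shape: the guard goes up before anything that can block
     * or re-enter the allocator, and comes down on every exit path. */
    static bool
    disable_single_sketch(pthread_t worker, bool has_thread) {
        pre_reentrancy();
        if (!has_thread) {
            post_reentrancy();        /* exit path 1: nothing to stop */
            return false;
        }
        if (pthread_join(worker, NULL) != 0) {
            post_reentrancy();        /* exit path 2: join failed */
            return true;
        }
        post_reentrancy();            /* exit path 3: clean shutdown */
        return false;
    }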
306 check_background_thread_creation(tsd_t *tsd, unsigned *n_created,
313 tsdn_t *tsdn = tsd_tsdn(tsd);
331 pre_reentrancy(tsd, NULL);
334 post_reentrancy(tsd);
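Line 313 shows the one place in this listing where the conversion is cached: tsd_tsdn turns a known-valid tsd_t * into a tsdn_t *, the nullable-handle type that most internal APIs accept, and binding it once avoids the repeated inline tsd_tsdn(tsd) calls seen elsewhere. An illustrative version of the conversion (in jemalloc it is an inline cast; the types here are reduced to opaque stand-ins):

    typedef struct tsd_s tsd_t;     /* per-thread state, known non-NULL */
    typedef struct tsdn_s tsdn_t;   /* same storage viewed as "tsd or NULL" */

    /* A non-NULL tsd can always be passed where a nullable tsdn is
     * expected; the conversion is just a pointer reinterpretation. */
    static inline tsdn_t *
    tsd_tsdn(tsd_t *tsd) {
        return (tsdn_t *)tsd;
    }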
356 background_thread0_work(tsd_t *tsd) {
366 if (background_thread_pause_check(tsd_tsdn(tsd),
370 if (check_background_thread_creation(tsd, &n_created,
374 background_work_sleep_once(tsd_tsdn(tsd),
387 background_threads_disable_single(tsd, info);
389 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
397 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
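Lines 356-397 cover thread 0's special duties. While background threads stay enabled, its loop alternates a pause check (366), a check for pending worker-creation requests (370), and one sleep/work cycle (374); once disabled, it reaps every worker it created via background_threads_disable_single (387) and, for slots that never got a thread, briefly takes the slot mutex (389, 397) to clear leftover state. A reduced, compilable sketch of that shape, with the jemalloc calls replaced by stand-ins:

    #include <stdbool.h>

    #define MAX_WORKERS 4

    /* Stand-ins for the real checks and work. */
    static bool enabled(void)               { return false; }
    static bool pause_check(void)           { return false; }
    static bool creation_check(unsigned *n) { (void)n; return false; }
    static void sleep_once(void)            {}
    static void stop_worker(unsigned i)     { (void)i; }
    static void clear_slot(unsigned i)      { (void)i; }

    static void
    thread0_work_sketch(bool created[MAX_WORKERS]) {
        unsigned n_created = 1;             /* thread 0 counts itself */
        while (enabled()) {
            if (pause_check())              { continue; }
            if (creation_check(&n_created)) { continue; }
            sleep_once();
        }
        /* Shutdown: reap workers this thread created; reset the rest. */
        for (unsigned i = 1; i < MAX_WORKERS; i++) {
            if (created[i]) {
                stop_worker(i);             /* joins the thread */
            } else {
                clear_slot(i);              /* cycle the mutex, clear state */
            }
        }
    }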
405 background_work(tsd_t *tsd, unsigned ind) {
408 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
409 background_thread_wakeup_time_set(tsd_tsdn(tsd), info,
412 background_thread0_work(tsd);
415 if (background_thread_pause_check(tsd_tsdn(tsd),
419 background_work_sleep_once(tsd_tsdn(tsd), info, ind);
423 background_thread_wakeup_time_set(tsd_tsdn(tsd), info, 0);
424 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
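background_work (405-424) is the body every worker runs: the slot mutex taken at 408 is held for the thread's whole lifetime (the condition-wait inside the sleep releases it while blocked, in the usual condvar pattern), and the wakeup-time writes at 409 and 423 bracket the loop so observers can distinguish a live thread from one that has exited. The exit condition of the non-zero loop falls between the listed lines, so the sketch below reconstructs it as a started-state check; treat that detail as an assumption:

    #include <stdbool.h>

    /* Stand-ins for the slot mutex, wakeup bookkeeping, and real work. */
    static void lock_info(void)       {}
    static void unlock_info(void)     {}
    static void set_wakeup(long when) { (void)when; }
    static bool started(void)         { return false; }
    static bool pause_check(void)     { return false; }
    static void sleep_once(void)      {}
    static void thread0_work(void)    {}

    #define INDEFINITE_SLEEP (-1L) /* stand-in for the "sleeping forever" marker */

    static void
    background_work_sketch(unsigned ind) {
        lock_info();
        set_wakeup(INDEFINITE_SLEEP);    /* mark: alive, possibly sleeping long */
        if (ind == 0) {
            thread0_work();              /* thread 0 also manages its peers */
        } else {
            while (started()) {          /* assumed exit condition (see above) */
                if (pause_check()) {
                    continue;            /* paused; on resume, re-check state */
                }
                sleep_once();            /* one timed sleep + deferred work */
            }
        }
        set_wakeup(0);                   /* mark: this worker has exited */
        unlock_info();
    }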
440 * Start periodic background work. We use internal tsd which avoids
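The comment at 440 belongs to the thread entry function. In the jemalloc source the freshly created worker fetches its thread-specific data with tsd_internal_fetch() rather than the ordinary fetch, so that initializing TSD for the worker cannot itself trigger arena creation (which would recursively request yet another background thread). A sketch of the entry shape; everything but the internal-fetch idea is scaffolding:

    #include <stdint.h>
    #include <stddef.h>

    typedef struct tsd_s tsd_t;

    /* Stand-ins for jemalloc's tsd module and the work loop above. */
    static tsd_t *tsd_internal_fetch(void) { return NULL; }
    static void background_work(tsd_t *tsd, unsigned ind) { (void)tsd; (void)ind; }

    static void *
    background_thread_entry_sketch(void *ind_arg) {
        unsigned thread_ind = (unsigned)(uintptr_t)ind_arg;
        /*
         * Internal tsd avoids side effects during TSD initialization,
         * e.g. triggering arena creation and, through it, creation of
         * another background thread.
         */
        background_work(tsd_internal_fetch(), thread_ind);
        return NULL;
    }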
452 background_thread_init(tsd_t *tsd, background_thread_info_t *info) {
453 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
455 background_thread_info_init(tsd_tsdn(tsd), info);
460 background_thread_create_locked(tsd_t *tsd, unsigned arena_ind) {
462 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
469 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
473 background_thread_init(tsd, info);
475 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
482 malloc_mutex_lock(tsd_tsdn(tsd), &t0->mtx);
485 malloc_mutex_unlock(tsd_tsdn(tsd), &t0->mtx);
490 pre_reentrancy(tsd, NULL);
497 post_reentrancy(tsd);
502 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
505 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
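Lines 460-505 show background_thread_create_locked's two creation paths. For a non-zero arena_ind it only pokes thread 0 under t0->mtx (482-485) and lets thread 0 spawn the worker asynchronously; only the thread-0 case calls into pthread_create directly, bracketed by the reentrancy guard at 490 and 497, with a locked rollback of the slot state (502-505) if creation fails. A compilable sketch of those paths; the slot layout and names are illustrative, and the mutexes/condvars are assumed to be initialized elsewhere:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stddef.h>

    typedef struct {
        pthread_mutex_t mtx;   /* assumed initialized at startup */
        pthread_cond_t  cond;
        pthread_t       thread;
        bool            started;
    } worker_slot;

    static worker_slot slots[4];

    static void *worker_entry(void *arg) { (void)arg; return NULL; }

    static bool
    create_locked_sketch(unsigned ind) {
        worker_slot *info = &slots[ind];
        if (ind != 0) {
            /* Non-zero workers are created asynchronously by thread 0:
             * just wake it up under its own slot mutex. */
            worker_slot *t0 = &slots[0];
            pthread_mutex_lock(&t0->mtx);
            pthread_cond_signal(&t0->cond);
            pthread_mutex_unlock(&t0->mtx);
            return false;
        }
        /* pre_reentrancy(tsd, NULL) would bracket the create here. */
        int err = pthread_create(&info->thread, NULL, worker_entry, NULL);
        /* post_reentrancy(tsd) */
        if (err != 0) {
            pthread_mutex_lock(&info->mtx);
            info->started = false;      /* roll back slot state under lock */
            pthread_mutex_unlock(&info->mtx);
            return true;                /* report failure */
        }
        return false;
    }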
515 background_thread_create(tsd_t *tsd, unsigned arena_ind) {
519 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock);
520 ret = background_thread_create_locked(tsd, arena_ind);
521 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock);
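The public background_thread_create (515-521) is just the locking wrapper: take the global background_thread_lock, delegate to the _locked variant, release. A minimal sketch of that convention:

    #include <pthread.h>
    #include <stdbool.h>

    static pthread_mutex_t background_lock = PTHREAD_MUTEX_INITIALIZER;

    static bool create_locked_stub(unsigned ind) { (void)ind; return false; }

    static bool
    create_sketch(unsigned ind) {
        pthread_mutex_lock(&background_lock);
        bool ret = create_locked_stub(ind);  /* all real work happens here */
        pthread_mutex_unlock(&background_lock);
        return ret;
    }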
527 background_threads_enable(tsd_t *tsd) {
530 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
544 arena_get(tsd_tsdn(tsd), i, false) == NULL) {
549 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
551 background_thread_init(tsd, info);
552 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
559 bool err = background_thread_create_locked(tsd, 0);
564 arena_t *arena = arena_get(tsd_tsdn(tsd), i, false);
566 pa_shard_set_deferral_allowed(tsd_tsdn(tsd),
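background_threads_enable (527-566) runs with background_thread_lock already held (530). It walks the arenas, skipping slots whose arena does not exist (544), initializes each worker slot under its own mutex (549-552), creates only thread 0 (559) since thread 0 spawns the rest, and finally allows deferred work on every live arena (564-566). A reduced sketch of that order of operations; the counts and helper names are stand-ins:

    #include <pthread.h>
    #include <stdbool.h>

    #define MAX_WORKERS 4

    typedef struct {
        pthread_mutex_t mtx;    /* assumed initialized at startup */
        bool initialized;
    } enable_slot;

    static enable_slot enable_slots[MAX_WORKERS];

    static bool create_worker0(void) { return false; }   /* stand-in */
    static void set_deferral(unsigned arena, bool on) { (void)arena; (void)on; }

    static bool
    enable_sketch(unsigned narenas) {
        for (unsigned i = 0; i < MAX_WORKERS; i++) {
            pthread_mutex_lock(&enable_slots[i].mtx);
            enable_slots[i].initialized = true;   /* per-slot init under its lock */
            pthread_mutex_unlock(&enable_slots[i].mtx);
        }
        if (create_worker0()) {
            return true;                /* thread 0 failed; nothing else to do */
        }
        for (unsigned i = 0; i < narenas; i++) {
            set_deferral(i, true);      /* arenas may now defer purging */
        }
        return false;
    }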
574 background_threads_disable(tsd_t *tsd) {
576 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
579 if (background_threads_disable_single(tsd,
586 arena_t *arena = arena_get(tsd_tsdn(tsd), i, false);
588 pa_shard_set_deferral_allowed(tsd_tsdn(tsd),
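background_threads_disable (574-588) mirrors enable, also under background_thread_lock (576): it stops thread 0 via background_threads_disable_single (579), whose shutdown path in turn reaps the remaining workers, and then turns deferred work back off for every arena (586-588). A sketch of that mirrored order, with the same kind of stand-ins as above:

    #include <stdbool.h>

    static bool stop_worker0(void) { return false; }  /* joins thread 0 (stand-in) */
    static void clear_deferral(unsigned arena) { (void)arena; }

    static bool
    disable_sketch(unsigned narenas) {
        if (stop_worker0()) {
            return true;              /* could not stop; leave state unchanged */
        }
        for (unsigned i = 0; i < narenas; i++) {
            clear_deferral(i);        /* arenas purge inline again */
        }
        return false;
    }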