Lines Matching defs:slow

2240 bool slow;
2257 static_opts->slow = false;
2326 zero_get(bool guarantee, bool slow) {
2327 if (config_fill && slow && unlikely(opt_zero)) {
2335 tcache_get_from_ind(tsd_t *tsd, unsigned tcache_ind, bool slow, bool is_alloc) {
2338 if (likely(!slow)) {
2389 sopts->slow, /* is_alloc */ true);
2403 arena, sopts->slow);
2498 /* Reentrancy is only checked on slow path. */
2513 dopts->zero = zero_get(dopts->zero, sopts->slow);
2531 if (sopts->slow && unlikely(reentrancy_level > 0)) {
2596 if (config_fill && sopts->slow && !dopts->zero
2601 if (sopts->slow) {
2611 if (unlikely(sopts->slow) && config_xmalloc && unlikely(opt_xmalloc)) {
2616 if (sopts->slow) {
2647 if (sopts->slow) {
2690 sopts->slow = false;
2697 sopts->slow = true;
2729 * the check on tsd_fast that sets sopts.slow.
2731 if (sopts.slow) {
2779 if (sopts.slow) {
2822 if (sopts.slow) {
2857 if (sopts.slow) {
3016 TCACHE_IND_AUTOMATIC, /* slow */ false,
3018 ifree(tsd, ptr, tcache, /* slow */ false);
3021 TCACHE_IND_AUTOMATIC, /* slow */ true,
3025 ifree(tsd, ptr, tcache, /* slow */ true);
3142 /* slow */ false, /* is_alloc */ false);
3208 if (sopts.slow) {
3247 if (sopts.slow) {
3429 if (sopts.slow) {
3511 bool zero = zero_get(MALLOCX_ZERO_GET(flags), /* slow */ true);
3520 /* slow */ true, /* is_alloc */ true);
3604 TCACHE_IND_AUTOMATIC, /* slow */ true,
3658 if (sopts.slow) {
3761 bool zero = zero_get(MALLOCX_ZERO_GET(flags), /* slow */ true);
4155 bool zero = zero_get(MALLOCX_ZERO_GET(flags), /* slow */ true);
4214 tcache_ind, /* slow */ true,
4225 * tcache; in such cases, we go through the slow path,
4233 * In such cases, we rely on the slow path,
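
Read together, these matches trace one pattern: each public entry point decides once whether it is on the fast path (slow == false) or the slow path (slow == true, e.g. after the tsd_fast-style check around lines 2690-2729 or in the non-fastpath free/dallocx wrappers), and then threads that single flag down so that optional work (reentrancy checks, opt_zero/junk fill in zero_get, xmalloc-style abort, errno and stats handling) is only paid for on the slow path, while tcache_get_from_ind and the /* slow */ false call sites stay minimal. The fragment below is a minimal illustrative sketch of that shape, not jemalloc's actual code; the names my_static_opts_t, my_zero_get, my_alloc_body, my_malloc, and the tsd_is_fast parameter are hypothetical stand-ins for the real static_opts_t, zero_get, imalloc body, and tsd fast-state check.

/*
 * Sketch only: shows the "decide slow once, branch on it everywhere"
 * pattern visible in the matches above. Names are hypothetical.
 */
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
	bool slow;       /* mirrors static_opts_t.slow / sopts->slow */
	bool may_abort;  /* stand-in for opt_xmalloc-style behavior */
} my_static_opts_t;

/* Mirrors zero_get(): only the slow path honors the opt_zero override. */
static bool
my_zero_get(bool guarantee, bool slow, bool opt_zero) {
	if (slow && opt_zero) {
		return true;
	}
	return guarantee;
}

static void *
my_alloc_body(my_static_opts_t *sopts, size_t size, bool zero_req) {
	bool zero = my_zero_get(zero_req, sopts->slow, /* opt_zero */ false);

	void *ret = malloc(size);       /* stand-in for the tcache/arena path */
	if (ret == NULL) {
		if (sopts->slow && sopts->may_abort) {
			abort();        /* xmalloc-style abort, slow path only */
		}
		return NULL;
	}
	if (zero) {
		memset(ret, 0, size);
	}
	return ret;
}

void *
my_malloc(size_t size, bool tsd_is_fast) {
	my_static_opts_t sopts;
	sopts.slow = false;             /* default, as in the static_opts init */
	sopts.may_abort = false;
	if (!tsd_is_fast) {
		sopts.slow = true;      /* the tsd_fast-style check flips it */
	}
	return my_alloc_body(&sopts, size, /* zero_req */ false);
}

The point of the split is that the common fast path (the /* slow */ false call sites around lines 3016-3018 and 3142) never even tests the optional features; all of that cost, plus the "if (sopts.slow)" epilogues scattered through the entry points, is confined to the slow == true paths.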