Lines Matching refs:usize

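Throughout these hits, usize is jemalloc's usable size: the requested size rounded up to a size class. All hits are from src/jemalloc.c, with the source line number on the left and the enclosing function on the right. The notes between clusters are illustrative sketches, not jemalloc's actual code; as a grounding example, the stable public API already exposes usize directly (nallocx, mallocx, sallocx, and dallocx are real functions; the 112 is a typical result, not guaranteed):

    /* Usable size via the public API: nallocx() predicts the size class
     * a request would land in; sallocx() reports it for a live pointer.
     * Both surface the internal usize threaded through the hits below. */
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void) {
        size_t usize = nallocx(100, 0);   /* e.g. 112 on common builds */
        void *p = mallocx(100, 0);
        printf("predicted %zu, actual %zu\n", usize, sallocx(p, 0));
        dallocx(p, 0);
        return 0;
    }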
1864 bool usize; static_opts_s member
1878 static_opts->usize = false; in static_opts_init()
1893 size_t usize; dynamic_opts_s member
1905 dynamic_opts->usize = 0; in dynamic_opts_init()
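Two distinct fields share the name here: static_opts_s carries a bool asking that the usable size be reported back (set only by smallocx, lines 3042-3071 below), while dynamic_opts_s carries the computed size_t that imalloc_body fills in. A minimal sketch of that request/result split, with hypothetical _sketch names standing in for the real structs:

    #include <stdbool.h>
    #include <stddef.h>

    typedef struct {
        bool usize;    /* request: report the usable size */
    } static_opts_sketch_t;

    typedef struct {
        size_t usize;  /* result: filled in during allocation */
    } dynamic_opts_sketch_t;

    static void static_opts_sketch_init(static_opts_sketch_t *s) {
        s->usize = false;    /* mirrors static_opts_init() above */
    }

    static void dynamic_opts_sketch_init(dynamic_opts_sketch_t *d) {
        d->usize = 0;        /* mirrors dynamic_opts_init() above */
    }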
1917 size_t size, size_t usize, szind_t ind) { in imalloc_no_sample() argument
1949 return ipalloct(tsd_tsdn(tsd), usize, dopts->alignment, in imalloc_no_sample()
1959 size_t usize, szind_t ind) { in imalloc_sample() argument
1967 size_t bumped_usize = usize; in imalloc_sample()
1969 if (usize <= SC_SMALL_MAXCLASS) { in imalloc_sample()
1981 arena_prof_promote(tsd_tsdn(tsd), ret, usize); in imalloc_sample()
1983 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind); in imalloc_sample()
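In imalloc_sample, a sampled request that would land in a small size class is bumped (bumped_usize) to a large one so per-allocation profiling metadata can be attached; arena_prof_promote then records the original usize so stats stay truthful. A sketch of the promotion decision, with assumed boundary values (the real values depend on the size-class configuration):

    #include <stddef.h>

    #define SMALL_MAXCLASS_SKETCH ((size_t)14336) /* assumed boundary */
    #define LARGE_MINCLASS_SKETCH ((size_t)16384) /* assumed smallest
                                                   * large class */

    /* Returns the size actually requested from the arena for a sampled
     * allocation; the caller still reports the original usize. */
    static size_t
    sampled_request_size_sketch(size_t usize) {
        if (usize <= SMALL_MAXCLASS_SKETCH) {
            return LARGE_MINCLASS_SKETCH;   /* promote; jemalloc then
                                             * calls arena_prof_promote
                                             * with the true usize */
        }
        return usize;
    }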
2047 size_t usize = 0; in imalloc_body() local
2070 if (config_stats || (config_prof && opt_prof) || sopts->usize) { in imalloc_body()
2071 usize = sz_index2size(ind); in imalloc_body()
2072 dopts->usize = usize; in imalloc_body()
2073 assert(usize > 0 && usize in imalloc_body()
2082 usize = sz_sa2u(size, dopts->alignment); in imalloc_body()
2083 dopts->usize = usize; in imalloc_body()
2084 if (unlikely(usize == 0 in imalloc_body()
2085 || usize > SC_LARGE_MAXCLASS)) { in imalloc_body()
2121 tsd, usize, prof_active_get_unlocked(), true); in imalloc_body()
2125 alloc_ctx.slab = (usize in imalloc_body()
2128 sopts, dopts, tsd, usize, usize, ind); in imalloc_body()
2135 sopts, dopts, tsd, usize, ind); in imalloc_body()
2145 prof_malloc(tsd_tsdn(tsd), allocation, usize, &alloc_ctx, tctx); in imalloc_body()
2153 allocation = imalloc_no_sample(sopts, dopts, tsd, size, usize, in imalloc_body()
2168 assert(usize == isalloc(tsd_tsdn(tsd), allocation)); in imalloc_body()
2169 *tsd_thread_allocatedp_get(tsd) += usize; in imalloc_body()
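imalloc_body computes usize one of two ways: sz_index2size(ind) when there is no alignment constraint, or sz_sa2u(size, alignment) when there is; usize == 0 or usize > SC_LARGE_MAXCLASS marks an unsatisfiable request (the assert at 2073, the checks at 2084-2085). A simplified sketch of the aligned path's rounding and overflow check (the real sz_sa2u also rounds to a size class):

    #include <stddef.h>

    /* Round size up to a power-of-two alignment; 0 signals overflow,
     * matching the usize == 0 failure checks above. */
    static size_t
    sa2u_sketch(size_t size, size_t alignment) {
        size_t usize = (size + alignment - 1) & ~(alignment - 1);
        if (usize < size) {
            return 0;   /* wrapped around: request too large */
        }
        return usize;
    }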
2346 size_t usize; in je_malloc() local
2348 usize = sz_index2size(ind); in je_malloc()
2356 bytes_until_sample -= usize; in je_malloc()
2379 *tsd_thread_allocatedp_get(tsd) += usize; in je_malloc()
2383 tcache->prof_accumbytes += usize; in je_malloc()
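Lines 2346-2383 are je_malloc's fast path: every allocation subtracts its usize from a per-thread sampling budget, and the same usize feeds the thread's allocated-bytes counter and the tcache's profiling accumulator. A sketch of the countdown, with a hypothetical starting budget (jemalloc derives the real interval from opt.lg_prof_sample):

    #include <stddef.h>

    static _Thread_local long bytes_until_sample_sketch = 1L << 19;

    /* Returns nonzero when this allocation must take the slow path and
     * be sampled; mirrors the bytes_until_sample -= usize at 2356. */
    static int
    consume_sample_budget_sketch(size_t usize) {
        bytes_until_sample_sketch -= (long)usize;
        return bytes_until_sample_sketch < 0;
    }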
2510 irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, in irealloc_prof_sample() argument
2517 if (usize <= SC_SMALL_MAXCLASS) { in irealloc_prof_sample()
2523 arena_prof_promote(tsd_tsdn(tsd), p, usize); in irealloc_prof_sample()
2525 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false, in irealloc_prof_sample()
2533 irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, in irealloc_prof() argument
2541 tctx = prof_alloc_prep(tsd, usize, prof_active, true); in irealloc_prof()
2543 p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx, in irealloc_prof()
2546 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false, in irealloc_prof()
2553 prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize, in irealloc_prof()
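irealloc_prof mirrors the allocation split above: prof_alloc_prep decides whether this reallocation is sampled, the sampled path goes through the promoting variant, and prof_realloc records the old/new pair either way. The shape, sketched with libc realloc standing in for iralloc and a stand-in tctx type:

    #include <stdlib.h>

    typedef struct { int sampled; } prof_tctx_sketch_t;

    static void *
    realloc_prof_sketch(void *old_ptr, size_t old_usize, size_t usize,
        prof_tctx_sketch_t *tctx) {
        void *p;
        if (tctx != NULL && tctx->sampled) {
            /* jemalloc: irealloc_prof_sample(), promoting if small */
            p = realloc(old_ptr, usize);
        } else {
            /* jemalloc: plain iralloc() */
            p = realloc(old_ptr, usize);
        }
        if (p != NULL) {
            /* jemalloc: prof_realloc(tsd, p, usize, tctx, ...,
             * old_ptr, old_usize, ...) updates profiling state for
             * both the old and new objects. */
            (void)old_usize;
        }
        return p;
    }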
2578 size_t usize; in ifree() local
2580 usize = sz_index2size(alloc_ctx.szind); in ifree()
2581 prof_free(tsd, ptr, usize, &alloc_ctx); in ifree()
2583 usize = sz_index2size(alloc_ctx.szind); in ifree()
2586 *tsd_thread_deallocatedp_get(tsd) += usize; in ifree()
2599 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) { in isfree() argument
2618 alloc_ctx.szind = sz_size2index(usize); in isfree()
2634 assert(alloc_ctx.szind == sz_size2index(usize)); in isfree()
2641 prof_free(tsd, ptr, usize, ctx); in isfree()
2644 *tsd_thread_deallocatedp_get(tsd) += usize; in isfree()
2648 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, false); in isfree()
2650 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, true); in isfree()
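On the free side, usize is recovered from the size-class index (sz_index2size(alloc_ctx.szind)) rather than stored per object, and the sized variant isfree trusts the caller's size, asserting that its index matches the metadata. The public face of that path is sdallocx(), a real API:

    #include <jemalloc/jemalloc.h>

    /* Sized deallocation: passing the original request size lets
     * jemalloc derive usize without the metadata lookup ifree() does
     * (isfree() above asserts the derived index matches). */
    void
    sized_free_demo(void) {
        void *p = mallocx(100, 0);
        if (p != NULL) {
            sdallocx(p, 100, 0);  /* size must match the request */
        }
    }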
2660 size_t usize JEMALLOC_CC_SILENCE_INIT(0); in je_realloc()
2688 usize = sz_s2u(size); in je_realloc()
2689 if (unlikely(usize == 0 in je_realloc()
2690 || usize > SC_LARGE_MAXCLASS)) { in je_realloc()
2693 ret = irealloc_prof(tsd, ptr, old_usize, usize, in je_realloc()
2698 usize = sz_s2u(size); in je_realloc()
2742 assert(usize == isalloc(tsdn, ret)); in je_realloc()
2744 *tsd_thread_allocatedp_get(tsd) += usize; in je_realloc()
2840 size_t usize = sz_index2size(alloc_ctx.szind); in free_fastpath() local
2841 *tsd_thread_deallocatedp_get(tsd) += usize; in free_fastpath()
3042 sopts.usize = true; in JEMALLOC_SMALLOCX_CONCAT_HELPER2()
3070 assert(dopts.usize == je_nallocx(size, flags)); in JEMALLOC_SMALLOCX_CONCAT_HELPER2()
3071 ret.size = dopts.usize; in JEMALLOC_SMALLOCX_CONCAT_HELPER2()
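smallocx is the experimental pointer-plus-size allocator: setting sopts.usize = true asks imalloc_body to fill dopts->usize (lines 2070-2085 above), which becomes the returned size and is asserted equal to nallocx(size, flags). The CONCAT_HELPER macros exist because the exported symbol embeds a version hash. The return shape, roughly:

    #include <stddef.h>

    /* Sketch of the smallocx result: the pointer and its usable size
     * in one return, saving the follow-up sallocx() call. */
    typedef struct {
        void *ptr;
        size_t size;   /* dopts.usize */
    } smallocx_return_sketch_t;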
3135 size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, in irallocx_prof_sample() argument
3142 if (usize <= SC_SMALL_MAXCLASS) { in irallocx_prof_sample()
3149 arena_prof_promote(tsdn, p, usize); in irallocx_prof_sample()
3151 p = iralloct(tsdn, old_ptr, old_usize, usize, alignment, zero, in irallocx_prof_sample()
3160 size_t alignment, size_t *usize, bool zero, tcache_t *tcache, in irallocx_prof() argument
3168 tctx = prof_alloc_prep(tsd, *usize, prof_active, false); in irallocx_prof()
3171 *usize, alignment, zero, tcache, arena, tctx, hook_args); in irallocx_prof()
3190 *usize = isalloc(tsd_tsdn(tsd), p); in irallocx_prof()
3192 prof_realloc(tsd, p, *usize, tctx, prof_active, false, old_ptr, in irallocx_prof()
3204 size_t usize; in je_rallocx() local
3252 usize = (alignment == 0) ? in je_rallocx()
3254 if (unlikely(usize == 0 in je_rallocx()
3255 || usize > SC_LARGE_MAXCLASS)) { in je_rallocx()
3258 p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize, in je_rallocx()
3270 usize = isalloc(tsd_tsdn(tsd), p); in je_rallocx()
3276 *tsd_thread_allocatedp_get(tsd) += usize; in je_rallocx()
3312 size_t usize; in ixallocx_prof_sample() local
3317 usize = ixallocx_helper(tsdn, ptr, old_usize, size, extra, alignment, in ixallocx_prof_sample()
3320 return usize; in ixallocx_prof_sample()
3326 size_t usize_max, usize; in ixallocx_prof() local
3358 usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize, in ixallocx_prof()
3361 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size, in ixallocx_prof()
3364 if (usize == old_usize) { in ixallocx_prof()
3366 return usize; in ixallocx_prof()
3368 prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize, in ixallocx_prof()
3371 return usize; in ixallocx_prof()
3377 size_t usize, old_usize; in je_xallocx() local
3408 usize = old_usize; in je_xallocx()
3416 usize = ixallocx_prof(tsd, ptr, old_usize, size, extra, in je_xallocx()
3419 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size, in je_xallocx()
3422 if (unlikely(usize == old_usize)) { in je_xallocx()
3427 *tsd_thread_allocatedp_get(tsd) += usize; in je_xallocx()
3434 usize, (uintptr_t)usize, args); in je_xallocx()
3440 LOG("core.xallocx.exit", "result: %zu", usize); in je_xallocx()
3441 return usize; in je_xallocx()
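xallocx() resizes strictly in place and returns the resulting usable size, so usize == old_usize is the "nothing happened" signal that lines 3364 and 3422 check. Usage against the real public API:

    #include <jemalloc/jemalloc.h>

    /* Try to grow an allocation without moving it. Returns nonzero on
     * success; on failure the caller must rallocx() (which may move). */
    int
    try_grow_in_place(void *p, size_t want) {
        size_t usize = xallocx(p, want, 0, 0);
        return usize >= want;
    }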
3447 size_t usize; in je_sallocx() local
3459 usize = ivsalloc(tsdn, ptr); in je_sallocx()
3460 assert(force_ivsalloc || usize != 0); in je_sallocx()
3462 usize = isalloc(tsdn, ptr); in je_sallocx()
3467 LOG("core.sallocx.exit", "result: %zu", usize); in je_sallocx()
3468 return usize; in je_sallocx()
3522 size_t usize; in inallocx() local
3524 usize = sz_s2u(size); in inallocx()
3526 usize = sz_sa2u(size, MALLOCX_ALIGN_GET_SPECIFIED(flags)); in inallocx()
3529 return usize; in inallocx()
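inallocx is the flags-aware size calculator behind nallocx and sdallocx_default: no MALLOCX_ALIGN bits means plain sz_s2u rounding, otherwise sz_sa2u with the decoded alignment. A simplified sketch (the real helpers round to jemalloc's size classes; the 0x3f mask matches MALLOCX_LG_ALIGN_MASK's encoding of lg(alignment) in the low flag bits):

    #include <stddef.h>

    #define LG_ALIGN_MASK_SKETCH 0x3f  /* low bits hold lg(alignment) */

    static size_t
    inallocx_sketch(size_t size, int flags) {
        unsigned lg_align = (unsigned)(flags & LG_ALIGN_MASK_SKETCH);
        if (lg_align == 0) {
            return size;          /* sz_s2u path, rounding elided */
        }
        size_t alignment = (size_t)1 << lg_align;
        size_t usize = (size + alignment - 1) & ~(alignment - 1);
        return usize >= size ? usize : 0;   /* sz_sa2u path; 0 = fail */
    }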
3539 size_t usize = inallocx(tsd_tsdn(tsd), size, flags); in sdallocx_default() local
3540 assert(usize == isalloc(tsd_tsdn(tsd), ptr)); in sdallocx_default()
3568 isfree(tsd, ptr, usize, tcache, false); in sdallocx_default()
3572 isfree(tsd, ptr, usize, tcache, true); in sdallocx_default()
3605 size_t usize; in je_nallocx() local
3618 usize = inallocx(tsdn, size, flags); in je_nallocx()
3619 if (unlikely(usize > SC_LARGE_MAXCLASS)) { in je_nallocx()
3625 LOG("core.nallocx.exit", "result: %zu", usize); in je_nallocx()
3626 return usize; in je_nallocx()
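nallocx returns 0 for a request no size class can satisfy; the usize > SC_LARGE_MAXCLASS check at 3619 is that guard. For example:

    #include <assert.h>
    #include <stdint.h>
    #include <jemalloc/jemalloc.h>

    void
    nallocx_overflow_demo(void) {
        assert(nallocx(1, 0) > 0);          /* smallest size class */
        assert(nallocx(SIZE_MAX, 0) == 0);  /* unsatisfiable request */
    }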
3777 size_t usize = je_xallocx(*ptr, size, extra, flags); in je_rallocm() local
3778 ret = (usize >= size) ? ALLOCM_SUCCESS : ALLOCM_ERR_NOT_MOVED; in je_rallocm()
3780 *rsize = usize; in je_rallocm()
3812 size_t usize = je_nallocx(size, flags); in je_nallocm() local
3813 if (usize == 0) { in je_nallocm()
3817 *rsize = usize; in je_nallocm()
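je_rallocm and je_nallocm are shims for the retired experimental *allocm interface, built on xallocx/nallocx: the usize result is copied into the rsize out-parameter and translated into ALLOCM_* status codes, as line 3778's not-moved test shows. The pattern, sketched with stand-in status constants:

    #include <stddef.h>
    #include <jemalloc/jemalloc.h>

    #define ALLOCM_SUCCESS_SKETCH 0
    #define ALLOCM_ERR_OOM_SKETCH 1

    static int
    nallocm_sketch(size_t *rsize, size_t size, int flags) {
        size_t usize = nallocx(size, flags);  /* real public API */
        if (usize == 0) {
            return ALLOCM_ERR_OOM_SKETCH;
        }
        if (rsize != NULL) {
            *rsize = usize;
        }
        return ALLOCM_SUCCESS_SKETCH;
    }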