Lines matching defs:bin

12  * cache_bin_array_descriptor_t objects and reading out per-bin stats it
18 * The size in bytes of each cache bin stack. We also use this to indicate
24 * Leave a noticeable mark pattern on the cache bin stack boundaries, in case a
36 * individual bin. The cache bins track their bounds looking just at the low
46 * alongside it, but is otherwise not modified by any cache bin operations.
53 * bin.
84 * The stack grows down. Whenever the bin is nonempty, the head points
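The comments above describe tracking the bin's bounds through only the low 16 bits of addresses. A minimal standalone sketch (not jemalloc code) of why unsigned 16-bit subtraction recovers the right distance even across a 64 KiB wraparound, assuming the bin stack itself is smaller than 64 KiB:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
    void *stack[8];
    /* Truncate two positions in the stack to their low 16 bits. */
    uint16_t head  = (uint16_t)(uintptr_t)&stack[2];
    uint16_t empty = (uint16_t)(uintptr_t)&stack[8]; /* one past the end */
    /*
     * Modular uint16_t subtraction yields the byte distance even when
     * the full addresses straddle a 64 KiB boundary.
     */
    uint16_t diff = (uint16_t)(empty - head);
    assert(diff == 6 * sizeof(void *));
    printf("ncached = %zu\n", (size_t)(diff / sizeof(void *)));
    return 0;
}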
184 cache_bin_assert_earlier(cache_bin_t *bin, uint16_t earlier, uint16_t later) {
186 assert(bin->low_bits_full > bin->low_bits_empty);
197 cache_bin_diff(cache_bin_t *bin, uint16_t earlier, uint16_t later, bool racy) {
199 * When it's racy, bin->low_bits_full can be modified concurrently. It
201 * bin->low_bits_empty at the time of the check.
204 cache_bin_assert_earlier(bin, earlier, later);
210 * Number of items currently cached in the bin, without checking ncached_max.
215 cache_bin_ncached_get_internal(cache_bin_t *bin, bool racy) {
216 cache_bin_sz_t diff = cache_bin_diff(bin,
217 (uint16_t)(uintptr_t)bin->stack_head, bin->low_bits_empty, racy);
228 assert(n == 0 || *(bin->stack_head) != NULL || racy);
233 * Number of items currently cached in the bin, with checking ncached_max. The
238 cache_bin_ncached_get_local(cache_bin_t *bin, cache_bin_info_t *info) {
239 cache_bin_sz_t n = cache_bin_ncached_get_internal(bin,
250 * Do not call if racy, because both 'bin->stack_head' and 'bin->low_bits_full'
254 cache_bin_empty_position_get(cache_bin_t *bin) {
255 cache_bin_sz_t diff = cache_bin_diff(bin,
256 (uint16_t)(uintptr_t)bin->stack_head, bin->low_bits_empty,
258 uintptr_t empty_bits = (uintptr_t)bin->stack_head + diff;
261 assert(ret >= bin->stack_head);
269 * Calculates low bits of the lower bound of the usable cache bin's range (see
277 cache_bin_low_bits_low_bound_get(cache_bin_t *bin, cache_bin_info_t *info) {
278 return (uint16_t)bin->low_bits_empty -
288 cache_bin_low_bound_get(cache_bin_t *bin, cache_bin_info_t *info) {
290 void **ret = cache_bin_empty_position_get(bin) - ncached_max;
291 assert(ret <= bin->stack_head);
298 * batch fill a nonempty cache bin.
301 cache_bin_assert_empty(cache_bin_t *bin, cache_bin_info_t *info) {
302 assert(cache_bin_ncached_get_local(bin, info) == 0);
303 assert(cache_bin_empty_position_get(bin) == bin->stack_head);
312 cache_bin_low_water_get_internal(cache_bin_t *bin) {
313 return cache_bin_diff(bin, bin->low_bits_low_water,
314 bin->low_bits_empty, /* racy */ false) / sizeof(void *);
319 cache_bin_low_water_get(cache_bin_t *bin, cache_bin_info_t *info) {
320 cache_bin_sz_t low_water = cache_bin_low_water_get_internal(bin);
322 assert(low_water <= cache_bin_ncached_get_local(bin, info));
324 cache_bin_assert_earlier(bin, (uint16_t)(uintptr_t)bin->stack_head,
325 bin->low_bits_low_water);
331 * Indicates that the current cache bin position should be the low water mark
335 cache_bin_low_water_set(cache_bin_t *bin) {
336 bin->low_bits_low_water = (uint16_t)(uintptr_t)bin->stack_head;
340 cache_bin_low_water_adjust(cache_bin_t *bin) {
341 if (cache_bin_ncached_get_internal(bin, /* racy */ false)
342 < cache_bin_low_water_get_internal(bin)) {
343 cache_bin_low_water_set(bin);
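A hedged sketch of how a periodic trimmer might consume the low-water mark (in jemalloc, the tcache GC plays this role); flush_some() is hypothetical, standing in for the flush helpers further down:

/* Sketch, assuming jemalloc's internal cache_bin.h. */
static void
trim_sketch(cache_bin_t *bin, cache_bin_info_t *info) {
    cache_bin_sz_t low_water = cache_bin_low_water_get(bin, info);
    if (low_water > 0) {
        /*
         * low_water items went untouched for the whole interval;
         * release some of them back to the central allocator.
         */
        flush_some(bin, info, low_water); /* hypothetical */
    }
    /* Start a fresh observation interval at the current position. */
    cache_bin_low_water_set(bin);
}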
348 cache_bin_alloc_impl(cache_bin_t *bin, bool *success, bool adjust_low_water) {
361 void *ret = *bin->stack_head;
362 uint16_t low_bits = (uint16_t)(uintptr_t)bin->stack_head;
363 void **new_head = bin->stack_head + 1;
369 if (likely(low_bits != bin->low_bits_low_water)) {
370 bin->stack_head = new_head;
383 if (likely(low_bits != bin->low_bits_empty)) {
384 bin->stack_head = new_head;
385 bin->low_bits_low_water = (uint16_t)(uintptr_t)new_head;
394 * Allocate an item out of the bin, failing if we're at the low-water mark.
397 cache_bin_alloc_easy(cache_bin_t *bin, bool *success) {
399 return cache_bin_alloc_impl(bin, success, false);
403 * Allocate an item out of the bin, even if we're currently at the low-water
404 * mark (and failing only if the bin is empty).
407 cache_bin_alloc(cache_bin_t *bin, bool *success) {
408 return cache_bin_alloc_impl(bin, success, true);
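Caller-side sketch of the two variants, using the signatures above; refill_and_alloc() is a hypothetical slow path:

static void *
alloc_sketch(cache_bin_t *bin) {
    bool success;
    /* Fast path: refuses to dip below the low-water mark. */
    void *ret = cache_bin_alloc_easy(bin, &success);
    if (!success) {
        /* Dig past the mark; fails only if the bin is empty. */
        ret = cache_bin_alloc(bin, &success);
    }
    if (!success) {
        ret = refill_and_alloc(bin); /* hypothetical */
    }
    return ret;
}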
412 cache_bin_alloc_batch(cache_bin_t *bin, size_t num, void **out) {
413 cache_bin_sz_t n = cache_bin_ncached_get_internal(bin,
418 memcpy(out, bin->stack_head, n * sizeof(void *));
419 bin->stack_head += n;
420 cache_bin_low_water_adjust(bin);
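Usage sketch: the batch variant copies out up to num pointers, advances the head, and reports how many it actually delivered; use_pointers() is hypothetical:

static void
batch_sketch(cache_bin_t *bin) {
    void *batch[16];
    cache_bin_sz_t got = cache_bin_alloc_batch(bin, 16, batch);
    /* 'got' may be anywhere in [0, 16]; only that many slots are valid. */
    use_pointers(batch, got); /* hypothetical */
}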
426 cache_bin_full(cache_bin_t *bin) {
427 return ((uint16_t)(uintptr_t)bin->stack_head == bin->low_bits_full);
431 * Free an object into the given bin. Fails only if the bin is full.
434 cache_bin_dalloc_easy(cache_bin_t *bin, void *ptr) {
435 if (unlikely(cache_bin_full(bin))) {
439 bin->stack_head--;
440 *bin->stack_head = ptr;
441 cache_bin_assert_earlier(bin, bin->low_bits_full,
442 (uint16_t)(uintptr_t)bin->stack_head);
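Caller-side sketch of the dalloc fast path; flush_some() is hypothetical, standing in for the flush machinery below, after which the retry must succeed:

static void
dalloc_sketch(cache_bin_t *bin, cache_bin_info_t *info, void *ptr) {
    if (!cache_bin_dalloc_easy(bin, ptr)) {
        /* Bin full: make room first, then retry. */
        flush_some(bin, info); /* hypothetical */
        cache_bin_dalloc_easy(bin, ptr);
    }
}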
447 /* Returns false if the stash failed (i.e. the bin is full). */
449 cache_bin_stash(cache_bin_t *bin, void *ptr) {
450 if (cache_bin_full(bin)) {
455 uint16_t low_bits_head = (uint16_t)(uintptr_t)bin->stack_head;
457 uint16_t diff = cache_bin_diff(bin, bin->low_bits_full, low_bits_head,
459 *(void **)((uintptr_t)bin->stack_head - diff) = ptr;
461 assert(!cache_bin_full(bin));
462 bin->low_bits_full += sizeof(void *);
463 cache_bin_assert_earlier(bin, bin->low_bits_full, low_bits_head);
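Usage sketch: stashing withholds a freed pointer from immediate reuse (the stashed region grows up from the full position and is drained only by the stashed-flush path below); dalloc_fallback() is hypothetical:

static void
stash_sketch(cache_bin_t *bin, void *ptr) {
    if (!cache_bin_stash(bin, ptr)) {
        /* No room in the stashed region; take an ordinary free path. */
        dalloc_fallback(ptr); /* hypothetical */
    }
}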
472 * important to keep in mind that 'bin->stack_head' and 'bin->low_bits_full' can
477 cache_bin_nstashed_get_internal(cache_bin_t *bin, cache_bin_info_t *info,
480 uint16_t low_bits_low_bound = cache_bin_low_bits_low_bound_get(bin,
483 cache_bin_sz_t n = cache_bin_diff(bin, low_bits_low_bound,
484 bin->low_bits_full, racy) / sizeof(void *);
489 void **low_bound = cache_bin_low_bound_get(bin, info);
505 cache_bin_nstashed_get_local(cache_bin_t *bin, cache_bin_info_t *info) {
506 cache_bin_sz_t n = cache_bin_nstashed_get_internal(bin, info,
513 * Obtain a racy view of the number of items currently in the cache bin, in the
517 cache_bin_nitems_get_remote(cache_bin_t *bin, cache_bin_info_t *info,
519 cache_bin_sz_t n = cache_bin_ncached_get_internal(bin, /* racy */ true);
523 n = cache_bin_nstashed_get_internal(bin, info, /* racy */ true);
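Sketch of a remote stats reader; the two out-parameters are an assumption based on the partial signature above:

static size_t
nitems_sketch(cache_bin_t *bin, cache_bin_info_t *info) {
    cache_bin_sz_t ncached, nstashed;
    cache_bin_nitems_get_remote(bin, info, &ncached, &nstashed);
    /* Racy snapshot: fine for stats reporting, not for correctness decisions. */
    return (size_t)ncached + (size_t)nstashed;
}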
556 * cache_bin_ptr_array_init_... call, since we reuse the cache bin stack memory.
566 * Start a fill. The bin must be empty, and this must be followed by a
567 * finish_fill call before doing any alloc/dalloc operations on the bin.
570 cache_bin_init_ptr_array_for_fill(cache_bin_t *bin, cache_bin_info_t *info,
572 cache_bin_assert_empty(bin, info);
573 arr->ptr = cache_bin_empty_position_get(bin) - nfill;
582 cache_bin_finish_fill(cache_bin_t *bin, cache_bin_info_t *info,
584 cache_bin_assert_empty(bin, info);
585 void **empty_position = cache_bin_empty_position_get(bin);
590 bin->stack_head = empty_position - nfilled;
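The fill protocol sketched end to end; CACHE_BIN_PTR_ARRAY_DECLARE is the header's companion macro, and central_fill() is a hypothetical stand-in for the arena-side refill:

static void
fill_sketch(cache_bin_t *bin, cache_bin_info_t *info) {
    cache_bin_sz_t nfill = 16;
    /* The bin must be empty at this point. */
    CACHE_BIN_PTR_ARRAY_DECLARE(arr, nfill);
    cache_bin_init_ptr_array_for_fill(bin, info, &arr, nfill);
    /* A central source writes up to nfill pointers into arr.ptr. */
    cache_bin_sz_t nfilled = central_fill(arr.ptr, nfill); /* hypothetical */
    cache_bin_finish_fill(bin, info, &arr, nfilled);
}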
598 cache_bin_init_ptr_array_for_flush(cache_bin_t *bin, cache_bin_info_t *info,
600 arr->ptr = cache_bin_empty_position_get(bin) - nflush;
601 assert(cache_bin_ncached_get_local(bin, info) == 0
606 cache_bin_finish_flush(cache_bin_t *bin, cache_bin_info_t *info,
608 unsigned rem = cache_bin_ncached_get_local(bin, info) - nflushed;
609 memmove(bin->stack_head + nflushed, bin->stack_head,
611 bin->stack_head = bin->stack_head + nflushed;
612 cache_bin_low_water_adjust(bin);
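The matching flush protocol; the ptr array covers the nflush items nearest the empty position (the oldest ones), and central_return() is hypothetical:

static void
flush_sketch(cache_bin_t *bin, cache_bin_info_t *info) {
    cache_bin_sz_t nflush = cache_bin_ncached_get_local(bin, info);
    CACHE_BIN_PTR_ARRAY_DECLARE(arr, nflush);
    cache_bin_init_ptr_array_for_flush(bin, info, &arr, nflush);
    /* Return arr.ptr[0..nflush) to the central allocator. */
    central_return(arr.ptr, nflush); /* hypothetical */
    cache_bin_finish_flush(bin, info, &arr, nflush);
}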
616 cache_bin_init_ptr_array_for_stashed(cache_bin_t *bin, szind_t binind,
620 assert(cache_bin_nstashed_get_local(bin, info) == nstashed);
622 void **low_bound = cache_bin_low_bound_get(bin, info);
628 cache_bin_finish_flush_stashed(cache_bin_t *bin, cache_bin_info_t *info) {
629 void **low_bound = cache_bin_low_bound_get(bin, info);
631 /* Reset the bin local full position. */
632 bin->low_bits_full = (uint16_t)(uintptr_t)low_bound;
633 assert(cache_bin_nstashed_get_local(bin, info) == 0);
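Sketch of draining the stashed region; the parameter list of the init call past binind is an assumption extrapolated from the partial match above, and central_return() is hypothetical:

static void
flush_stashed_sketch(cache_bin_t *bin, cache_bin_info_t *info,
    szind_t binind) {
    cache_bin_sz_t nstashed = cache_bin_nstashed_get_local(bin, info);
    if (nstashed == 0) {
        return;
    }
    CACHE_BIN_PTR_ARRAY_DECLARE(arr, nstashed);
    cache_bin_init_ptr_array_for_stashed(bin, binind, info, &arr, nstashed);
    central_return(arr.ptr, nstashed); /* hypothetical */
    /* Resets the bin-local full position back to the low bound. */
    cache_bin_finish_flush_stashed(bin, info);
}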
652 * preincrement, call init once for each bin and info, and then call
660 void cache_bin_init(cache_bin_t *bin, cache_bin_info_t *info, void *alloc,
664 * If a cache bin was zero initialized (either because it lives in static or
668 bool cache_bin_still_zero_initialized(cache_bin_t *bin);
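End-to-end init sketch following the protocol the comment above describes; cache_bin_info_init, cache_bin_info_compute_alloc, and the pre/postincrement helpers are companion functions from the same header (not shown in this match list), and backing_alloc() is hypothetical:

static void
init_sketch(void) {
    cache_bin_info_t info;
    cache_bin_info_init(&info, /* ncached_max */ 32);

    size_t size, alignment;
    cache_bin_info_compute_alloc(&info, /* ninfos */ 1, &size, &alignment);
    void *alloc = backing_alloc(size, alignment); /* hypothetical */

    size_t cur_offset = 0;
    cache_bin_preincrement(&info, 1, alloc, &cur_offset);
    cache_bin_t bin;
    cache_bin_init(&bin, &info, alloc, &cur_offset);
    cache_bin_postincrement(&info, 1, alloc, &cur_offset);
    assert(!cache_bin_still_zero_initialized(&bin));
}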