Lines Matching refs:inuse (SLUB, mm/slub.c)
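All of these hits cover two distinct fields that happen to share a name: the per-cache `kmem_cache.inuse` (the offset at which each object's debug metadata begins, i.e. the bytes of the object actually in use) and the per-slab `slab.inuse` (how many of the slab's objects are currently allocated). For orientation, a minimal sketch of both; the fields are abridged (the kernel packs the slab counters into a union), and the `sketch_*` helpers used in the notes below are illustrative, not kernel API:

    #include <stdbool.h>
    #include <string.h>

    /* Simplified sketch of the two different "inuse" fields matched here. */
    struct kmem_cache {
        unsigned int object_size;  /* size the caller asked for */
        unsigned int inuse;        /* end of the used part of each object;
                                      metadata starts here */
        unsigned int offset;       /* free-pointer offset inside the object */
        unsigned int size;         /* full per-object stride */
    };

    struct slab {
        unsigned int objects;      /* total objects in this slab */
        unsigned int inuse;        /* objects currently allocated */
        void *freelist;            /* first free object, or NULL */
    };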
557 return s->offset >= s->inuse; in freeptr_outside_object()
567 return s->inuse + sizeof(void *); in get_info_end()
569 return s->inuse; in get_info_end()
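The `freeptr_outside_object()`/`get_info_end()` hits encode the object layout rule: tracking info starts at `s->inuse`, shifted past the freelist pointer when that pointer is stored outside the object proper. A minimal model of the arithmetic, reusing the sketch types above:

    /* Sketch of get_info_end(): tracking info begins at s->inuse, plus
     * one word when the free pointer lives outside the object
     * (freeptr_outside_object(), i.e. s->offset >= s->inuse). */
    static unsigned int sketch_info_end(const struct kmem_cache *s)
    {
        if (s->offset >= s->inuse)
            return s->inuse + sizeof(void *);
        return s->inuse;
    }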
1011 slab, slab->objects, slab->inuse, slab->freelist, in print_slab_info()
1072 s->inuse - s->object_size); in print_trailer()
1166 s->inuse - poison_size); in init_object()
1325 endobject, val, s->inuse - s->object_size)) in check_object()
1339 if ((s->flags & SLAB_POISON) && s->object_size < s->inuse) { in check_object()
1342 s->inuse - s->object_size)) in check_object()
1411 if (slab->inuse > slab->objects) { in check_slab()
1413 slab->inuse, slab->objects); in check_slab()
1449 slab->inuse = slab->objects; in on_freelist()
1470 if (slab->inuse != slab->objects - nr) { in on_freelist()
1472 slab->inuse, slab->objects - nr); in on_freelist()
1473 slab->inuse = slab->objects - nr; in on_freelist()
1486 object, slab->inuse, in trace()
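The hits in `check_slab()` and `on_freelist()` enforce the core consistency invariant: `inuse` plus the length of the freelist must equal `objects`, and the counter is repaired when the check fails. A simplified model, assuming the freelist link sits in the first word of each free object (the real walk honours `s->offset` and reports the mismatch before restoring):

    /* Sketch of the check_slab()/on_freelist() counter check. */
    static void sketch_check_counters(struct slab *slab)
    {
        unsigned int nr = 0;
        void *p = slab->freelist;

        while (p && nr < slab->objects) {  /* cap the walk: survive cycles */
            nr++;
            p = *(void **)p;
        }

        if (slab->inuse != slab->objects - nr)
            slab->inuse = slab->objects - nr;  /* repair, as on_freelist() does */
    }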
1599 slab->inuse = slab->objects; in alloc_debug_processing()
2336 unsigned int inuse, orig_size; in slab_free_hook() local
2338 inuse = get_info_end(s); in slab_free_hook()
2343 memset((char *)kasan_reset_tag(x) + inuse, 0, in slab_free_hook()
2344 s->size - inuse - rsize); in slab_free_hook()
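The `slab_free_hook()` hits are the init_on_free wipe: on free, everything from the end of the used part of the object (`get_info_end()`) up to the trailing red zone is zeroed, hence the `s->size - inuse - rsize` length. A hedged model, with `rsize` standing in for the red-zone width and the KASAN tag handling omitted:

    /* Sketch of the wipe-on-free step in slab_free_hook().
     * rsize models the right red-zone width; 0 when red-zoning is off. */
    static void sketch_wipe_on_free(const struct kmem_cache *s, void *x,
                                    unsigned int rsize)
    {
        unsigned int inuse = sketch_info_end(s);

        memset((char *)x + inuse, 0, s->size - inuse - rsize);
    }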
2602 slab->inuse = 0; in allocate_slab()
2751 slab->inuse++; in alloc_single_from_partial()
2759 if (slab->inuse == slab->objects) { in alloc_single_from_partial()
2783 slab->inuse = 1; in alloc_single_from_new_slab()
2795 if (slab->inuse == slab->objects) in alloc_single_from_new_slab()
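The `alloc_single_from_partial()`/`alloc_single_from_new_slab()` hits are the debug-cache allocation path: objects are handed out one at a time, `inuse` grows by one per allocation, and the slab moves to the node's full list the moment `inuse == objects`. Simplified, with an illustrative out-parameter in place of the real list manipulation:

    /* Sketch of alloc_single_from_partial(): pop one object and tell
     * the caller when the slab has filled up. */
    static void *sketch_alloc_single(struct slab *slab, bool *now_full)
    {
        void *object = slab->freelist;

        if (!object)
            return NULL;

        slab->freelist = *(void **)object;  /* advance past this object */
        slab->inuse++;
        *now_full = (slab->inuse == slab->objects);  /* caller moves lists */
        return object;
    }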
3088 new.inuse -= free_delta; in deactivate_slab()
3102 if (!new.inuse && n->nr_partial >= s->min_partial) { in deactivate_slab()
3136 if (unlikely(!slab->inuse && n->nr_partial >= s->min_partial)) { in __put_partials()
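`deactivate_slab()` and `__put_partials()` share one retention policy: a slab whose `inuse` has dropped to zero is given back to the page allocator only if the node already holds at least `min_partial` partial slabs; otherwise it stays cached for reuse. The test, in a standalone form with the node counters passed explicitly:

    /* Sketch of the shared discard test in deactivate_slab(),
     * __put_partials() and __slab_free(). */
    static bool sketch_discard_empty(const struct slab *slab,
                                     unsigned long nr_partial,
                                     unsigned long min_partial)
    {
        return slab->inuse == 0 && nr_partial >= min_partial;
    }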
3395 return slab->objects - slab->inuse; in count_free()
3417 if (slab->inuse < *bulk_cnt) { in free_debug_processing()
3419 slab->inuse, *bulk_cnt); in free_debug_processing()
3490 x += slab->objects - slab->inuse; in count_partial_free_approx()
3500 x += slab->objects - slab->inuse; in count_partial_free_approx()
3505 x += slab->objects - slab->inuse; in count_partial_free_approx()
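The counting helpers (`count_free()`, `free_debug_processing()`, `count_partial_free_approx()`) all derive free counts as `objects - inuse`; the approximate variant samples a long partial list from both ends instead of walking it in full. The per-slab computation:

    /* Sketch of count_free(): free objects are capacity minus the
     * allocated count. */
    static unsigned int sketch_count_free(const struct slab *slab)
    {
        return slab->objects - slab->inuse;
    }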
3599 new.inuse = slab->objects; in get_freelist()
3626 new.inuse = slab->objects; in freeze_slab()
3859 slab->inuse = slab->objects; in ___slab_alloc()
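The `get_freelist()`/`freeze_slab()`/`___slab_alloc()` hits detach a slab's entire freelist for one CPU, so the slab-side accounting treats every object as allocated at once: `inuse` jumps straight to `objects` and per-object bookkeeping continues on the CPU freelist. A sketch of the effect (the real update runs inside a cmpxchg loop on the packed slab counters):

    /* Sketch of taking the whole freelist, per get_freelist()/freeze_slab(). */
    static void *sketch_take_freelist(struct slab *slab)
    {
        void *freelist = slab->freelist;

        slab->freelist = NULL;
        slab->inuse = slab->objects;  /* all objects now owned by the CPU */
        return freelist;
    }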
4362 slab->inuse -= cnt; in free_to_partial_list()
4371 if (slab->inuse == 0 && n->nr_partial >= s->min_partial) in free_to_partial_list()
4441 new.inuse -= cnt; in __slab_free()
4442 if ((!new.inuse || !prior) && !was_frozen) { in __slab_free()
4495 if (unlikely(!new.inuse && n->nr_partial >= s->min_partial)) in __slab_free()
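`free_to_partial_list()` and `__slab_free()` are the inverse: `inuse` drops by the number of objects returned, a previously full slab (`!prior`) is re-added to the partial list, and a slab that empties is discarded under the same `min_partial` policy as above. Condensed, with `cnt` objects pre-chained from `head` to `tail` as in the bulk-free path:

    /* Sketch of the __slab_free() counter update; returns whether the
     * slab is now empty so the caller can apply the min_partial test. */
    static bool sketch_free_to_slab(struct slab *slab, void *head, void *tail,
                                    unsigned int cnt)
    {
        *(void **)tail = slab->freelist;  /* splice old freelist after chain */
        slab->freelist = head;
        slab->inuse -= cnt;
        return slab->inuse == 0;
    }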
5340 slab->inuse = 1; in early_kmem_cache_node_alloc()
5473 s->inuse = size; in calculate_sizes()
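The `calculate_sizes()` hit is where the per-cache `inuse` is established: the object size rounded up to a word boundary (with a further red-zone adjustment in debug builds), marking where the free pointer, tracking data and padding begin. A hedged reconstruction of the alignment step only:

    /* Sketch of the step that sets s->inuse in calculate_sizes();
     * debug-build red-zone adjustments are omitted. */
    static unsigned int sketch_calc_inuse(unsigned int object_size)
    {
        /* Round up so metadata after the object lands word-aligned. */
        return (object_size + sizeof(void *) - 1) & ~(sizeof(void *) - 1);
    }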
5611 if (!slab->inuse) { in free_partial()
5859 int free = slab->objects - slab->inuse; in __kmem_cache_do_shrink()
6140 s->inuse = max(s->inuse, ALIGN(size, sizeof(void *))); in __kmem_cache_alias()
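The `__kmem_cache_alias()` hit is cache merging: when a new cache is folded into an existing compatible one, the survivor's `inuse` is raised to at least the word-aligned size of the aliased objects, so the merged layout covers the larger of the two. The remaining hits (`count_inuse()`, `show_slab_objects()`) just report `slab->inuse` to the validation and sysfs code.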
6238 return slab->inuse; in count_inuse()
6538 x = slab->inuse; in show_slab_objects()