/linux/tools/testing/shared/
linux.c
  15   int nr_allocated;  (variable)
  36   return cachep->size * cachep->nr_allocated;  in kmem_cache_set_callback()
  41   return cachep->nr_allocated;  in kmem_cache_set_private()
  98   uatomic_inc(&cachep->nr_allocated);  in kmem_cache_alloc_lru()
  99   uatomic_inc(&nr_allocated);  in kmem_cache_alloc_lru()
  122  uatomic_dec(&nr_allocated);  in __kmem_cache_free_locked()
  123  uatomic_dec(&cachep->nr_allocated);  in __kmem_cache_free_locked()
  220  uatomic_inc(&nr_allocated);  in kmem_cache_alloc_bulk()
  221  uatomic_inc(&cachep->nr_allocated);  in kmem_cache_alloc_bulk()
  242  ret->nr_allocated  in kmem_cache_create()
  27   unsigned long nr_allocated;  (member)
  [all...]
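Note: the shared test shim keeps both a global nr_allocated and a per-cache nr_allocated, bumping them on every allocation path and dropping them on free, so a test that leaks objects ends with a nonzero count. Below is a minimal userspace sketch of that counting scheme, not the shim itself; it uses C11 atomics in place of the liburcu uatomic_* helpers and a simplified cache type.

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

static atomic_int nr_allocated;            /* global count of outstanding objects */

struct cache {
    size_t size;                           /* object size */
    atomic_int nr_allocated;               /* outstanding objects in this cache */
};

static void *cache_alloc(struct cache *c)
{
    void *p = malloc(c->size);

    if (p) {
        atomic_fetch_add(&c->nr_allocated, 1);
        atomic_fetch_add(&nr_allocated, 1);
    }
    return p;
}

static void cache_free(struct cache *c, void *p)
{
    if (!p)
        return;
    free(p);
    atomic_fetch_sub(&c->nr_allocated, 1);
    atomic_fetch_sub(&nr_allocated, 1);
}

int main(void)
{
    struct cache c = { .size = 64 };
    void *a = cache_alloc(&c);
    void *b = cache_alloc(&c);

    cache_free(&c, a);
    cache_free(&c, b);
    printf("nr_allocated = %d\n", atomic_load(&nr_allocated));  /* 0 when balanced */
    return 0;
}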
/linux/tools/testing/radix-tree/
main.c
  244, 248, 252, 256, 260, 264, 269, 273, 281  nr_allocated, preempt_count);  in single_thread_tests()
  324  nr_allocated, preempt_count);  in main()
tag_check.c
  53   printv(2, "before item_kill_tree: %d allocated\n", nr_allocated);  in simple_checks()
  56   printv(2, "after item_kill_tree: %d allocated\n", nr_allocated);  in simple_checks()
  300, 302, 304, 306  printv(2, "%d: nr_allocated=%d\n", __LINE__, nr_allocated);  in __leak_check()
  340  printv(2, "after extend_checks: %d allocated\n", nr_allocated);  in tag_check()
  344  printv(2, "after leak_check: %d allocated\n", nr_allocated);  in tag_check()
  347  printv(2, "after simple_checks: %d allocated\n", nr_allocated);  in tag_check()
  350  printv(2, "after thrash_tags: %d allocated\n", nr_allocated);  in tag_check()
xarray.c
  26   if (nr_allocated)  in main()
  27   printf("nr_allocated = %d\n", nr_allocated);  in main()
test.h
  50   extern int nr_allocated;
idr-test.c
  619  if (nr_allocated)  in main()
  620  printf("nr_allocated = %d\n", nr_allocated);  in main()
maple.c
  36224  if (nr_allocated)  in main()
  36225  printf("nr_allocated = %d\n", nr_allocated);  in main()
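Note: xarray.c, idr-test.c and maple.c all end main() the same way: if the global allocation counter is still nonzero once the tests finish, it is printed as a leak report. A standalone sketch of that end-of-run check, assuming a counter kept balanced by an allocation shim like the one above:

#include <stdio.h>

int nr_allocated;   /* in the real harness: extern int nr_allocated (test.h), updated by the shim */

int main(void)
{
    /* ... run tests that allocate and free through the counting shim ... */

    if (nr_allocated)   /* anything left over is a leak */
        printf("nr_allocated = %d\n", nr_allocated);
    return 0;
}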
/linux/drivers/media/common/videobuf2/
frame_vector.c
  49   if (WARN_ON_ONCE(nr_frames > vec->nr_allocated))  in get_vaddr_frames()
  50   nr_frames = vec->nr_allocated;  in get_vaddr_frames()
  179  vec->nr_allocated = nr_frames;  in frame_vector_create()
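Note: here nr_allocated records the capacity chosen at creation time, and get_vaddr_frames() clamps any larger request to it after a one-time warning. A userspace sketch of the same clamp, with fprintf standing in for WARN_ON_ONCE and a simplified frame_vector:

#include <stdio.h>
#include <stdlib.h>

struct frame_vector {
    unsigned int nr_allocated;   /* number of frames we have space for */
    unsigned int nr_frames;      /* number of frames currently stored */
    void *ptrs[];                /* frame storage */
};

static struct frame_vector *frame_vector_create(unsigned int nr_frames)
{
    struct frame_vector *vec =
        malloc(sizeof(*vec) + nr_frames * sizeof(void *));

    if (vec) {
        vec->nr_allocated = nr_frames;   /* remember the capacity */
        vec->nr_frames = 0;
    }
    return vec;
}

static unsigned int clamp_request(const struct frame_vector *vec,
                                  unsigned int nr_frames)
{
    /* Never hand out more slots than were allocated. */
    if (nr_frames > vec->nr_allocated) {
        fprintf(stderr, "request exceeds capacity, clamping\n");
        nr_frames = vec->nr_allocated;
    }
    return nr_frames;
}

int main(void)
{
    struct frame_vector *vec = frame_vector_create(32);

    if (!vec)
        return 1;
    printf("usable frames: %u\n", clamp_request(vec, 100));  /* clamped to 32 */
    free(vec);
    return 0;
}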
/linux/io_uring/
memmap.c
  158  unsigned long nr_allocated;  in io_region_allocate_pages()  (local)
  170  nr_allocated = alloc_pages_bulk_node(gfp, NUMA_NO_NODE,  in io_region_allocate_pages()
  172  if (nr_allocated != mr->nr_pages) {  in io_region_allocate_pages()
  173  if (nr_allocated)  in io_region_allocate_pages()
  174  release_pages(pages, nr_allocated);  in io_region_allocate_pages()
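Note: io_region_allocate_pages() treats a short return from the bulk allocator as failure: whatever was handed back is released and the call errors out. A userspace sketch of that all-or-nothing policy, with malloc/free standing in for alloc_pages_bulk_node()/release_pages():

#include <stddef.h>
#include <stdlib.h>

/* Allocate exactly nr buffers of the given size into bufs[]. Returns 0 on
 * success; on a partial allocation everything already obtained is freed and
 * -1 is returned. */
static int allocate_region(void **bufs, size_t nr, size_t size)
{
    size_t nr_allocated = 0;

    while (nr_allocated < nr) {
        bufs[nr_allocated] = malloc(size);
        if (!bufs[nr_allocated])
            break;
        nr_allocated++;
    }

    if (nr_allocated != nr) {
        while (nr_allocated)             /* release what we did get */
            free(bufs[--nr_allocated]);
        return -1;
    }
    return 0;
}

int main(void)
{
    void *bufs[16];

    if (allocate_region(bufs, 16, 4096) == 0) {
        for (size_t i = 0; i < 16; i++)
            free(bufs[i]);
    }
    return 0;
}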
/linux/lib/
test_vmalloc.c
  389  unsigned long nr_allocated;  in vm_map_ram_test()  (local)
  400  nr_allocated = alloc_pages_bulk(GFP_KERNEL, map_nr_pages, pages);  in vm_map_ram_test()
  401  if (nr_allocated != map_nr_pages)  in vm_map_ram_test()
  412  for (i = 0; i < nr_allocated; i++)  in vm_map_ram_test()
  418  return nr_allocated != map_nr_pages;  in vm_map_ram_test()
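Note: vm_map_ram_test() uses nr_allocated both as the cleanup bound (only pages that were actually obtained get freed) and as the pass/fail signal (a short bulk allocation fails the test). A userspace sketch of that shape, with malloc/free in place of alloc_pages_bulk() and the page-freeing loop:

#include <stddef.h>
#include <stdlib.h>

#define MAP_NR_PAGES 64

static int map_test(void)
{
    void *pages[MAP_NR_PAGES];
    size_t nr_allocated = 0;
    size_t i;

    /* "Bulk" allocation: grab as many buffers as possible up front. */
    for (; nr_allocated < MAP_NR_PAGES; nr_allocated++) {
        pages[nr_allocated] = malloc(4096);
        if (!pages[nr_allocated])
            break;
    }

    if (nr_allocated == MAP_NR_PAGES) {
        /* ... map and exercise the pages here ... */
    }

    /* Cleanup is bounded by nr_allocated, never by the request size. */
    for (i = 0; i < nr_allocated; i++)
        free(pages[i]);

    /* Nonzero means the allocation, and hence the test, fell short. */
    return nr_allocated != MAP_NR_PAGES;
}

int main(void)
{
    return map_test();
}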
/linux/drivers/md/persistent-data/
dm-space-map-common.h
  80   dm_block_t nr_allocated;  (member)
  103  __le64 nr_allocated;  (member)
dm-space-map-disk.c
  62   *count = (smd->old_ll.nr_blocks - smd->old_ll.nr_allocated) - smd->nr_allocated_this_transaction;  in sm_disk_get_nr_free()
  186  root_le.nr_allocated = cpu_to_le64(smd->ll.nr_allocated);  in sm_disk_copy_root()
dm-space-map-metadata.c
  296  *count = smm->old_ll.nr_blocks - smm->old_ll.nr_allocated -  in sm_metadata_get_nr_free()
  549  root_le.nr_allocated = cpu_to_le64(smm->ll.nr_allocated);  in sm_metadata_copy_root()
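Note: both space-map flavours derive the free count as nr_blocks minus nr_allocated (minus whatever was handed out in the current transaction) and serialise nr_allocated as a little-endian 64-bit field when copying the root out. A small sketch of those two steps, with htole64() from <endian.h> (glibc/Linux) standing in for cpu_to_le64() and simplified, hypothetical struct names:

#include <endian.h>    /* htole64() */
#include <stdint.h>
#include <stdio.h>

/* In-memory bookkeeping, loosely modelled on the structs in dm-space-map-common.h. */
struct space_map {
    uint64_t nr_blocks;                      /* total managed blocks */
    uint64_t nr_allocated;                   /* blocks already allocated */
    uint64_t nr_allocated_this_transaction;  /* allocated but not yet committed */
};

/* On-disk root: fixed-width little-endian fields. */
struct space_map_root_le {
    uint64_t nr_blocks;
    uint64_t nr_allocated;
};

static uint64_t sm_get_nr_free(const struct space_map *sm)
{
    /* Free = total - committed allocations - allocations in this transaction. */
    return sm->nr_blocks - sm->nr_allocated - sm->nr_allocated_this_transaction;
}

static void sm_copy_root(const struct space_map *sm, struct space_map_root_le *root)
{
    root->nr_blocks = htole64(sm->nr_blocks);
    root->nr_allocated = htole64(sm->nr_allocated);
}

int main(void)
{
    struct space_map sm = { 1000, 100, 5 };
    struct space_map_root_le root;

    sm_copy_root(&sm, &root);
    printf("free blocks: %llu\n", (unsigned long long)sm_get_nr_free(&sm));  /* 895 */
    return 0;
}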
/linux/mm/
mempolicy.c
  2599  unsigned long nr_allocated;  in alloc_pages_bulk_interleave()  (local)
  2608  nr_allocated = alloc_pages_bulk_noprof(gfp,  in alloc_pages_bulk_interleave()
  2614  nr_allocated = alloc_pages_bulk_noprof(gfp,  in alloc_pages_bulk_interleave()
  2619  page_array += nr_allocated;  in alloc_pages_bulk_interleave()
  2620  total_allocated += nr_allocated;  in alloc_pages_bulk_interleave()
  2634  unsigned long nr_allocated = 0;  in alloc_pages_bulk_weighted_interleave()  (local)
  2665  nr_allocated = __alloc_pages_bulk(gfp, node, NULL, node_pages,  in alloc_pages_bulk_weighted_interleave()
  2667  page_array += nr_allocated;  in alloc_pages_bulk_weighted_interleave()
  2668  total_allocated += nr_allocated;  in alloc_pages_bulk_weighted_interleave()
  2730  nr_allocated = __alloc_pages_bulk(gfp, node, NULL, node_pages,  in alloc_pages_bulk_weighted_interleave()
  [all …]
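Note: in both interleave helpers the per-node bulk result is used twice: the output cursor (page_array) advances by nr_allocated so the next node fills in right behind the previous one, and total_allocated accumulates the grand total returned to the caller. A sketch of that accumulation loop; fill_from_node() is a hypothetical stand-in for __alloc_pages_bulk():

#include <stddef.h>
#include <stdio.h>

/* Hypothetical per-node bulk source: writes up to want items tagged with the
 * node id into out[] and returns how many it produced. */
static size_t fill_from_node(int node, size_t want, int *out)
{
    size_t n = want > 4 ? 4 : want;     /* pretend a node yields at most 4 */

    for (size_t i = 0; i < n; i++)
        out[i] = node;
    return n;
}

static size_t bulk_interleave(const int *nodes, const size_t *node_pages,
                              size_t nr_nodes, int *page_array, size_t capacity)
{
    size_t total_allocated = 0;

    for (size_t i = 0; i < nr_nodes && total_allocated < capacity; i++) {
        size_t want = node_pages[i];
        size_t nr_allocated;

        if (want > capacity - total_allocated)
            want = capacity - total_allocated;

        nr_allocated = fill_from_node(nodes[i], want, page_array);
        page_array += nr_allocated;        /* next node continues here */
        total_allocated += nr_allocated;   /* running total for the caller */
    }
    return total_allocated;
}

int main(void)
{
    int pages[8];
    int nodes[] = { 0, 1 };
    size_t node_pages[] = { 4, 4 };

    printf("allocated %zu pages\n", bulk_interleave(nodes, node_pages, 2, pages, 8));
    return 0;
}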
vmalloc.c
  3631  unsigned int nr_allocated = 0;  in vm_area_alloc_pages()  (local)
  3660  pages[nr_allocated + i] = page + i;  in vm_area_alloc_pages()
  3662  nr_allocated += 1U << large_order;  in vm_area_alloc_pages()
  3663  nr_remaining = nr_pages - nr_allocated;  in vm_area_alloc_pages()
  3676  while (nr_allocated < nr_pages) {  in vm_area_alloc_pages()
  3685  nr_pages_request = min(100U, nr_pages - nr_allocated);  in vm_area_alloc_pages()
  3695  pages + nr_allocated);  in vm_area_alloc_pages()
  3699  pages + nr_allocated);  in vm_area_alloc_pages()
  3701  nr_allocated += nr;  in vm_area_alloc_pages()
  3713  while (nr_allocated < nr_pages) {  in vm_area_alloc_pages()
  [all …]
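Note: vm_area_alloc_pages() asks the bulk allocator for capped batches (min(100U, nr_pages - nr_allocated)) and keeps looping until nr_allocated reaches nr_pages, with a second loop picking up whatever the bulk path did not deliver. A userspace sketch of the batching loop only, with malloc standing in for the page allocator:

#include <stddef.h>
#include <stdlib.h>

/* Pretend bulk allocator: fills out[] with up to want buffers, may stop early. */
static size_t bulk_alloc(size_t want, void **out)
{
    size_t n;

    for (n = 0; n < want; n++) {
        out[n] = malloc(4096);
        if (!out[n])
            break;
    }
    return n;
}

static size_t area_alloc_pages(void **pages, size_t nr_pages)
{
    size_t nr_allocated = 0;

    while (nr_allocated < nr_pages) {
        /* Cap each request so a huge area is filled in modest batches. */
        size_t request = nr_pages - nr_allocated;
        size_t nr;

        if (request > 100)
            request = 100;

        nr = bulk_alloc(request, pages + nr_allocated);
        nr_allocated += nr;

        /* A short batch means the allocator is struggling; stop here rather
         * than spin (the kernel switches to single-page allocation instead). */
        if (nr != request)
            break;
    }
    return nr_allocated;
}

int main(void)
{
    void *pages[256];
    size_t got = area_alloc_pages(pages, 256);

    while (got)
        free(pages[--got]);
    return 0;
}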
/linux/include/media/
frame_vector.h
  7    unsigned int nr_allocated; /* Number of frames we have space for */  (member)
/linux/tools/perf/
builtin-trace.c
  2265  size_t nr_used = 0, nr_allocated = strlist__nr_entries(trace->ev_qualifier);  in trace__validate_ev_qualifier()  (local)
  2267  trace->ev_qualifier_ids.entries = malloc(nr_allocated *  in trace__validate_ev_qualifier()
  2309  if (nr_allocated == nr_used) {  in trace__validate_ev_qualifier()
  2312  nr_allocated += 8;  in trace__validate_ev_qualifier()
  2314  nr_allocated * sizeof(trace->ev_qualifier_ids.entries[0]));  in trace__validate_ev_qualifier()
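Note: trace__validate_ev_qualifier() sizes the id array from the qualifier list up front and, whenever nr_used catches up with nr_allocated, grows the array by 8 entries (a realloc in the original). A standalone sketch of that growth policy with hypothetical type and function names:

#include <stdio.h>
#include <stdlib.h>

struct id_array {
    int *entries;
    size_t nr_used;
    size_t nr_allocated;
};

/* Append one id, growing the backing array by 8 slots whenever it is full. */
static int id_array_add(struct id_array *a, int id)
{
    if (a->nr_allocated == a->nr_used) {
        size_t nr_allocated = a->nr_allocated + 8;
        int *entries = realloc(a->entries, nr_allocated * sizeof(a->entries[0]));

        if (!entries)
            return -1;            /* the old array stays valid on failure */
        a->entries = entries;
        a->nr_allocated = nr_allocated;
    }
    a->entries[a->nr_used++] = id;
    return 0;
}

int main(void)
{
    struct id_array a = { NULL, 0, 0 };

    for (int i = 0; i < 20; i++)
        if (id_array_add(&a, i))
            break;
    printf("%zu used, %zu allocated\n", a.nr_used, a.nr_allocated);
    free(a.entries);
    return 0;
}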