| /linux/tools/testing/shared/ |
| linux.c |
  106  void __kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)    in __kmem_cache_free_locked()
  108      assert(objp);                                                        in __kmem_cache_free_locked()
  110      memset(objp, POISON_FREE, cachep->size);                             in __kmem_cache_free_locked()
  111      free(objp);                                                          in __kmem_cache_free_locked()
  113      struct radix_tree_node *node = objp;                                 in __kmem_cache_free_locked()
  120  void kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)      in kmem_cache_free_locked() argument
  125      printf("Freeing %p to slab\n", objp);                                in kmem_cache_free_locked()
  126      __kmem_cache_free_locked(cachep, objp);                              in kmem_cache_free_locked()
  129  void kmem_cache_free(struct kmem_cache *cachep, void *objp)             in kmem_cache_free()
  132      kmem_cache_free_locked(cachep, objp);                                in kmem_cache_free()
  134  kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)           kmem_cache_free_locked() argument
  143  kmem_cache_free(struct kmem_cache *cachep, void *objp)                  kmem_cache_free() argument
  [all...]
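The hits above come from the userspace radix-tree/maple-tree test harness, which reimplements a minimal kmem_cache on top of malloc/free and poisons objects on free. Below is a minimal sketch of that poison-on-free idea, assuming a simplified, hypothetical cache struct (`fake_cache`) rather than the harness's real types:

```c
/* Sketch only: simplified stand-in for the test harness's cache, not the
 * real struct kmem_cache from tools/testing/shared/linux.c. */
#include <assert.h>
#include <pthread.h>
#include <stdlib.h>
#include <string.h>

#define POISON_FREE 0x6b	/* same poison byte the kernel uses for freed objects */

struct fake_cache {
	pthread_mutex_t lock;
	size_t size;		/* object size */
};

/* Poison the object before handing it back to libc, so a use-after-free
 * shows up as 0x6b garbage rather than stale-but-plausible data. */
static void fake_cache_free(struct fake_cache *cachep, void *objp)
{
	assert(objp);
	pthread_mutex_lock(&cachep->lock);
	memset(objp, POISON_FREE, cachep->size);
	pthread_mutex_unlock(&cachep->lock);
	free(objp);
}
```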
| /linux/fs/notify/ |
| mark.c |
  377  static void fsnotify_drop_object(unsigned int type, void *objp)    in fsnotify_drop_object() argument
  379      if (!objp)                                                      in fsnotify_drop_object()
  384      fsnotify_put_inode_ref(objp);                                   in fsnotify_drop_object()
  390      void *objp = NULL;                                              in fsnotify_put_mark() local
  410      objp = fsnotify_detach_connector_from_object(conn, &type);      in fsnotify_put_mark()
  418      objp = __fsnotify_recalc_mask(conn);                            in fsnotify_put_mark()
  424      fsnotify_drop_object(type, objp);                               in fsnotify_put_mark()
  933      void *objp;                                                     in fsnotify_destroy_marks() local
  960      objp = fsnotify_detach_connector_from_object(conn, &type);      in fsnotify_destroy_marks()
  964      fsnotify_drop_object(type, objp);                               in fsnotify_destroy_marks()
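Both fsnotify_put_mark() and fsnotify_destroy_marks() detach the connector from its object while locked and only drop the object reference afterwards via fsnotify_drop_object(). The sketch below illustrates that general shape only; the type constants and put helpers are hypothetical stand-ins, not the real fsnotify internals:

```c
/* Illustrative only: generic "detach under lock, drop reference later"
 * pattern; all names here are made up for the example. */
#include <pthread.h>
#include <stddef.h>

enum obj_type { OBJ_NONE, OBJ_INODE, OBJ_MOUNT };

struct connector {
	pthread_mutex_t lock;
	enum obj_type type;
	void *obj;
};

static void put_inode_ref(void *objp) { (void)objp; /* drop inode refcount (stub) */ }
static void put_mount_ref(void *objp) { (void)objp; /* drop mount refcount (stub) */ }

/* Same shape as fsnotify_drop_object(): NULL means nothing was detached. */
static void drop_object(enum obj_type type, void *objp)
{
	if (!objp)
		return;
	if (type == OBJ_INODE)
		put_inode_ref(objp);
	else if (type == OBJ_MOUNT)
		put_mount_ref(objp);
}

/* Detach while holding the lock, defer the reference drop until after
 * the lock is released. */
static void detach_and_drop(struct connector *conn)
{
	void *objp;
	enum obj_type type;

	pthread_mutex_lock(&conn->lock);
	objp = conn->obj;
	type = conn->type;
	conn->obj = NULL;
	conn->type = OBJ_NONE;
	pthread_mutex_unlock(&conn->lock);

	drop_object(type, objp);
}
```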
|
| /linux/rust/helpers/ |
| slab.c |
  6  rust_helper_krealloc_node_align(const void *objp, size_t new_size, unsigned long align,    in rust_helper_krealloc_node_align() argument
  9      return krealloc_node_align(objp, new_size, align, flags, node);                        in rust_helper_krealloc_node_align()
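This Rust helper simply forwards to krealloc_node_align(). Assuming the parameter order shown in the hit above (pointer, new size, alignment, gfp flags, NUMA node), a C caller might look like the hedged sketch below; the helper function name is made up:

```c
#include <linux/slab.h>
#include <linux/numa.h>

/* Sketch: grow a buffer while keeping 64-byte alignment and no NUMA node
 * preference. Assumes the krealloc_node_align() signature shown above. */
static void *grow_buffer(void *buf, size_t new_size)
{
	void *p = krealloc_node_align(buf, new_size, 64, GFP_KERNEL, NUMA_NO_NODE);

	if (!p)
		return NULL;	/* old buffer remains valid on failure */
	return p;
}
```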
|
| /linux/mm/ |
| mm_slot.h |
  31  static inline void mm_slot_free(struct kmem_cache *cache, void *objp)    in mm_slot_free() argument
  33      kmem_cache_free(cache, objp);                                         in mm_slot_free()
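mm_slot_free() is a thin wrapper around kmem_cache_free() for the per-mm slot structures used by khugepaged and KSM. A minimal sketch of the dedicated-cache pattern it supports, using a hypothetical slot type (`demo_slot`) rather than the real struct mm_slot:

```c
#include <linux/slab.h>

/* Hypothetical slot type; real users embed struct mm_slot from mm/mm_slot.h. */
struct demo_slot {
	struct mm_struct *mm;
};

static struct kmem_cache *demo_slot_cache;

static int demo_init(void)
{
	/* One dedicated cache for all slots, created once at init time. */
	demo_slot_cache = KMEM_CACHE(demo_slot, 0);
	return demo_slot_cache ? 0 : -ENOMEM;
}

static struct demo_slot *demo_slot_alloc(void)
{
	return kmem_cache_zalloc(demo_slot_cache, GFP_KERNEL);
}

static void demo_slot_free(struct demo_slot *slot)
{
	/* Same shape as mm_slot_free(): hand the object back to its cache. */
	kmem_cache_free(demo_slot_cache, slot);
}
```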
|
| nommu.c |
  65  unsigned int kobjsize(const void *objp)               in kobjsize() argument
  73      if (!objp || !virt_addr_valid(objp))               in kobjsize()
  76      folio = virt_to_folio(objp);                       in kobjsize()
  83      return ksize(objp);                                in kobjsize()
  94      vma = find_vma(current->mm, (unsigned long)objp);  in kobjsize()
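kobjsize() exists only on !MMU kernels: given an arbitrary pointer it reports the size of whatever backs it, preferring the slab allocator's answer (ksize()) and falling back to the covering VMA. The following is a rough, hedged paraphrase of the decision ladder visible in the hits above, not the real function, which has additional cases:

```c
#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/slab.h>

/* Simplified paraphrase of the nommu kobjsize() ladder shown above. */
static unsigned int demo_kobjsize(const void *objp)
{
	struct folio *folio;
	struct vm_area_struct *vma;

	/* NULL or non-linear-mapped pointers have no meaningful object size. */
	if (!objp || !virt_addr_valid(objp))
		return 0;

	folio = virt_to_folio(objp);

	/* Slab-backed object: the slab allocator knows its exact size. */
	if (folio_test_slab(folio))
		return ksize(objp);

	/* Otherwise fall back to the VMA that covers the address, if any. */
	vma = find_vma(current->mm, (unsigned long)objp);
	if (vma)
		return vma->vm_end - vma->vm_start;

	/* Last resort: the size of the backing folio. */
	return folio_size(folio);
}
```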
|
| slab_common.c |
  1238  size_t ksize(const void *objp)                                         in ksize() argument
  1255      if (unlikely(ZERO_OR_NULL_PTR(objp)) || !kasan_check_byte(objp))   in ksize()
  1258      return kfence_ksize(objp) ?: __ksize(objp);                        in ksize()
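ksize() reports the usable size of a kmalloc()ed object, which can be larger than the requested size because allocations are rounded up to a kmalloc bucket (and, as the snippet shows, KFENCE and KASAN get a say). A small usage sketch; the exact reported size depends on the slab configuration and debug options:

```c
#include <linux/slab.h>
#include <linux/printk.h>

/* Sketch: ask how much was actually allocated for a small request.
 * On common configs a 17-byte request lands in a larger bucket. */
static void demo_ksize(void)
{
	char *buf = kmalloc(17, GFP_KERNEL);

	if (!buf)
		return;

	pr_info("requested 17 bytes, usable size is %zu\n", ksize(buf));
	kfree(buf);
}
```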
|
| slub.c |
  5291  bool kmem_cache_charge(void *objp, gfp_t gfpflags)                          in kmem_cache_charge() argument
  5296      return memcg_slab_post_charge(objp, gfpflags);                          in kmem_cache_charge()
  8100      void *objp;                                                             in __kmem_obj_info()
  8111      objp = restore_red_left(s, objp0);                                      in __kmem_obj_info()
  8113      objp = objp0;                                                           in __kmem_obj_info()
  8115      objnr = obj_to_index(s, slab, objp);                                    in __kmem_obj_info()
  8116      kpp->kp_data_offset = (unsigned long)((char *)objp0 - (char *)objp);    in __kmem_obj_info()
  8117      objp = base + s->size * objnr;                                          in __kmem_obj_info()
  8118      kpp->kp_objp = objp;                                                    in __kmem_obj_info()
  8119      if (WARN_ON_ONCE(objp < bas ...                                         in __kmem_obj_info()
  8098      void *objp;                                                             __kmem_obj_info() local
  [all...]
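Two different users of objp show up here: kmem_cache_charge() charges an already-allocated slab object to the current memory cgroup after the fact, instead of paying for accounting at allocation time with SLAB_ACCOUNT/__GFP_ACCOUNT, while __kmem_obj_info() maps a pointer back to its slab, object index and offsets for debug reports. A hedged sketch of the deferred-charging idea, assuming a hypothetical cache created without SLAB_ACCOUNT and the kmem_cache_charge() signature shown above:

```c
#include <linux/slab.h>

struct demo_obj {
	int id;
};

/* Assumed to be created elsewhere without SLAB_ACCOUNT (creation omitted). */
static struct kmem_cache *demo_cache;

/* Allocate cheaply (no memcg accounting), then charge the object to the
 * current cgroup only once we know it will actually be kept around. */
static struct demo_obj *demo_alloc_and_keep(void)
{
	struct demo_obj *obj = kmem_cache_zalloc(demo_cache, GFP_KERNEL);

	if (!obj)
		return NULL;

	if (!kmem_cache_charge(obj, GFP_KERNEL)) {
		/* Charging failed; give the object back. */
		kmem_cache_free(demo_cache, obj);
		return NULL;
	}
	return obj;
}
```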
| slab.h |
  558  size_t __ksize(const void *objp);
|
| /linux/include/linux/ |
| slab.h |
  499  void * __must_check krealloc_node_align_noprof(const void *objp, size_t new_size,
  507  void kfree(const void *objp);
  508  void kfree_nolock(const void *objp);
  509  void kfree_sensitive(const void *objp);
  510  size_t __ksize(const void *objp);
  527  size_t ksize(const void *objp);
  809  bool kmem_cache_charge(void *objp, gfp_t gfpflags);
  810  void kmem_cache_free(struct kmem_cache *s, void *objp);
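These declarations are the user-facing half of the objp plumbing above: kfree()/kfree_sensitive() for kmalloc memory, kmem_cache_free() for dedicated caches, ksize() for the usable size. One hedged example contrasting kfree() and kfree_sensitive(); the function name is made up:

```c
#include <linux/slab.h>
#include <linux/string.h>

/* Sketch: key material should be wiped before the memory is recycled, so use
 * kfree_sensitive() (which zeroes the buffer) rather than plain kfree(). */
static int demo_handle_key(const u8 *key, size_t len)
{
	u8 *copy = kmemdup(key, len, GFP_KERNEL);

	if (!copy)
		return -ENOMEM;

	/* ... use the key copy ... */

	kfree_sensitive(copy);	/* memzero_explicit() + kfree() under the hood */
	return 0;
}
```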
|
| mm.h |
  271  extern unsigned int kobjsize(const void *objp);
|
| /linux/arch/arm/include/asm/ |
| pgtable-nommu.h |
  56  extern unsigned int kobjsize(const void *objp);
|