Searched refs:slab_cache (Results 1 – 9 of 9) sorted by relevance
/linux/mm/kasan/

  common.c
    464  poison_kmalloc_redzone(slab->slab_cache, object, size, flags);  in __kasan_krealloc()
    519  if (check_slab_allocation(slab->slab_cache, ptr, ip))  in __kasan_mempool_poison_object()
    522  poison_slab_object(slab->slab_cache, ptr, false, false);  in __kasan_mempool_poison_object()
    547  unpoison_slab_object(slab->slab_cache, ptr, flags, false);  in __kasan_mempool_unpoison_object()
    550  if (is_kmalloc_cache(slab->slab_cache))  in __kasan_mempool_unpoison_object()
    551  poison_kmalloc_redzone(slab->slab_cache, ptr, size, flags);  in __kasan_mempool_unpoison_object()

  quarantine.c
    131  return virt_to_slab(qlink)->slab_cache;  in qlink_to_cache()

  report.c
    541  info->cache = slab->slab_cache;  in complete_report_info()
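The KASAN hits above all follow the same pattern: take an object address, resolve the struct slab that backs it, and read the owning cache from slab->slab_cache (quarantine.c's qlink_to_cache() is the most compact form; report.c stashes the same pointer in its report info). A minimal sketch of that lookup, assuming the mm-internal helpers from mm/slab.h (virt_to_slab(), struct slab) and a hypothetical wrapper name:

#include "slab.h"        /* mm/slab.h: struct slab, virt_to_slab(); mm-internal */

/* Hypothetical helper: which kmem_cache does @object belong to? */
static struct kmem_cache *cache_of_object(const void *object)
{
        struct slab *slab = virt_to_slab(object);

        /* virt_to_slab() returns NULL for non-slab memory, e.g. large kmalloc pages. */
        if (!slab)
                return NULL;

        return slab->slab_cache;
}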
/linux/drivers/gpu/drm/i915/

  i915_active.c
      24  static struct kmem_cache *slab_cache;  variable
     172  kmem_cache_free(slab_cache, it);  in __active_retire()
     320  node = kmem_cache_alloc(slab_cache, GFP_ATOMIC);  in active_instance()
     748  kmem_cache_free(slab_cache, ref->cache);  in i915_active_fini()
     868  node = kmem_cache_alloc(slab_cache, GFP_KERNEL);  in i915_active_acquire_preallocate_barrier()
     916  kmem_cache_free(slab_cache, node);  in i915_active_acquire_preallocate_barrier()
    1180  kmem_cache_destroy(slab_cache);  in i915_active_module_exit()
    1185  slab_cache = KMEM_CACHE(active_node, SLAB_HWCACHE_ALIGN);  in i915_active_module_init()
    1186  if (!slab_cache)  in i915_active_module_init()
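Unlike the mm/ results, the i915 hits are not the struct slab field at all: slab_cache here is a file-scoped struct kmem_cache pointer. The references trace the usual lifecycle: KMEM_CACHE() creates the cache in i915_active_module_init(), kmem_cache_alloc()/kmem_cache_free() service the hot paths, and kmem_cache_destroy() tears it down in i915_active_module_exit(). A self-contained sketch of that pattern, with a hypothetical my_node type standing in for i915's active_node:

#include <linux/module.h>
#include <linux/slab.h>
#include <linux/list.h>
#include <linux/types.h>

struct my_node {                        /* hypothetical; i915 caches struct active_node */
        struct list_head link;
        u64 value;
};

static struct kmem_cache *slab_cache;   /* file-scoped, as in i915_active.c line 24 */

static struct my_node *my_node_alloc(gfp_t gfp)
{
        /* i915 passes GFP_ATOMIC on some call sites and GFP_KERNEL on others. */
        return kmem_cache_zalloc(slab_cache, gfp);
}

static void my_node_free(struct my_node *node)
{
        kmem_cache_free(slab_cache, node);
}

static int __init my_module_init(void)
{
        struct my_node *node;

        /* KMEM_CACHE() derives the cache name, size and alignment from the type. */
        slab_cache = KMEM_CACHE(my_node, SLAB_HWCACHE_ALIGN);
        if (!slab_cache)
                return -ENOMEM;

        /* Exercise the cache once. */
        node = my_node_alloc(GFP_KERNEL);
        if (node)
                my_node_free(node);
        return 0;
}

static void __exit my_module_exit(void)
{
        kmem_cache_destroy(slab_cache);
}

module_init(my_module_init);
module_exit(my_module_exit);
MODULE_LICENSE("GPL");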
/linux/Documentation/translations/zh_CN/mm/

  split_page_table_lock.rst
    62  Make sure the architecture does not use the slab allocator for page tables: slab uses page->slab_cache for its pages.
/linux/tools/cgroup/

  memcg_slabinfo.py
    198  cache = slab.slab_cache
/linux/mm/

  slub.c
    1622  if (unlikely(s != slab->slab_cache)) {  in free_consistency_checks()
    1626  } else if (!slab->slab_cache) {  in free_consistency_checks()
    1907  unsigned int offs = obj_to_index(obj_exts_slab->slab_cache,  in mark_objexts_empty()
    2213  s = slab->slab_cache;  in memcg_slab_post_charge()
    2607  slab->slab_cache = s;  in allocate_slab()
    2662  __free_slab(slab->slab_cache, slab);  in rcu_free_slab()
    4654  s = slab->slab_cache;  in slab_free_after_rcu_debug()
    4678  return slab->slab_cache;  in virt_to_cache()
    4756  s = slab->slab_cache;  in kfree()
    4788  s = folio_slab(folio)->slab_cache;  in __do_krealloc()
    [all …]
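slub.c is where the field is both written and read: allocate_slab() stores the owning cache in slab->slab_cache, and the free and realloc paths (virt_to_cache(), kfree(), __do_krealloc()) read it back. A hedged sketch of the front half of a kfree()-style path, using the mm-internal helpers these hits rely on; free_large_kmalloc() and slab_free() are slub.c-internal, so their names are shown for shape only, not for reuse:

#include <linux/mm.h>    /* virt_to_folio(), folio_test_slab() */
#include "slab.h"        /* mm/slab.h: struct slab, folio_slab(); mm-internal */

/* Illustrative only: mirrors the structure of the lookup, not the exact slub.c code. */
static void sketch_free(const void *object)
{
        struct folio *folio;
        struct slab *slab;
        struct kmem_cache *s;

        if (ZERO_OR_NULL_PTR(object))
                return;

        folio = virt_to_folio(object);
        if (!folio_test_slab(folio)) {
                /* Large kmalloc allocations are plain pages, not slabs. */
                free_large_kmalloc(folio, (void *)object);
                return;
        }

        slab = folio_slab(folio);
        s = slab->slab_cache;            /* written once in allocate_slab() */
        slab_free(s, slab, (void *)object, _RET_IP_);
}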
  slab_common.c
    1017  skip_orig_size_check(folio_slab(folio)->slab_cache, object);  in __ksize()
    1020  return slab_ksize(folio_slab(folio)->slab_cache);  in __ksize()
    1275  return slab ? slab->slab_cache : NULL;  in bpf_get_kmem_cache()
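__ksize() is the backend of ksize(): it resolves the backing cache via folio_slab(folio)->slab_cache and reports that cache's usable object size, which can be larger than what the caller originally asked kmalloc() for. A small caller-side sketch, assuming a kernel context where kmalloc(), ksize() and pr_info() are available; the 32-byte figure in the comment is the typical bucket, not a guarantee:

#include <linux/slab.h>
#include <linux/printk.h>

static void sketch_ksize_demo(void)
{
        /* Ask for 17 bytes; the allocation comes from the next kmalloc
         * size bucket up (typically 32 bytes). */
        void *p = kmalloc(17, GFP_KERNEL);

        if (!p)
                return;

        /* ksize() reports the cache's usable size, not the 17 requested. */
        pr_info("requested 17, usable %zu\n", ksize(p));
        kfree(p);
}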
  memcontrol.c
    2436  off = obj_to_index(slab->slab_cache, slab, p);  in mem_cgroup_from_obj_folio()
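The memcontrol.c hit uses slab->slab_cache indirectly: obj_to_index() needs the cache's per-object stride to turn a pointer into a slot number, and mem_cgroup_from_obj_folio() then uses that slot to pick the object's entry out of the slab's per-object extension array. A sketch of the arithmetic behind that call; the real obj_to_index() in mm/slab.h uses reciprocal division (cache->reciprocal_size) rather than a plain divide, and the wrapper name here is hypothetical:

#include "slab.h"        /* mm/slab.h: slab_address(), struct kmem_cache; mm-internal */

/* Hypothetical wrapper: which slot of its slab does @obj occupy? */
static unsigned int sketch_obj_to_index(const struct kmem_cache *cache,
                                        const struct slab *slab,
                                        const void *obj)
{
        void *base = slab_address(slab);        /* base address of the slab's pages */

        /* cache->size is the per-object stride, including metadata. */
        return (obj - base) / cache->size;
}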