Lines matching +full:128 +full:ma, taken from the BPF memory allocator (kernel/bpf/memalloc.c). The number at the start of each entry is the line number within that file; the trailing "in <function>() argument/local" tag notes whether the matched identifier ma is a function argument or a local variable at that point.
20 * 16 32 64 96 128 196 256 512 1024 2048 4096
23 * 16 32 64 96 128 196 256 512 1024 2048 4096
57 6, /* 128 */
126 static const u16 sizes[NUM_CACHES] = {96, 192, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096};
457 * 64*16 + 64*32 + 64*64 + 64*96 + 64*128 + 64*196 + 64*256 + 32*512 + 16*1024 + 8*2048 + 4*4096
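For scale, the prefill estimate in the comment above sums to 64*(16+32+64+96+128+196+256) = 50,432 bytes for the seven small buckets plus 32*512 + 16*1024 + 8*2048 + 4*4096 = 65,536 bytes for the four large ones, i.e. 115,968 bytes (roughly 113 KiB) of prefilled objects per CPU.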
509 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
522 ma->percpu = percpu; in bpf_mem_alloc_init()
537 ma->objcg = objcg; in bpf_mem_alloc_init()
548 ma->cache = pc; in bpf_mem_alloc_init()
558 ma->objcg = objcg; in bpf_mem_alloc_init()
573 ma->caches = pcc; in bpf_mem_alloc_init()
577 int bpf_mem_alloc_percpu_init(struct bpf_mem_alloc *ma, struct obj_cgroup *objcg) in bpf_mem_alloc_percpu_init() argument
585 ma->caches = pcc; in bpf_mem_alloc_percpu_init()
586 ma->objcg = objcg; in bpf_mem_alloc_percpu_init()
587 ma->percpu = true; in bpf_mem_alloc_percpu_init()
591 int bpf_mem_alloc_percpu_unit_init(struct bpf_mem_alloc *ma, int size) in bpf_mem_alloc_percpu_unit_init() argument
606 objcg = ma->objcg; in bpf_mem_alloc_percpu_unit_init()
607 pcc = ma->caches; in bpf_mem_alloc_percpu_unit_init()
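The init entry points above are the setup half of the allocator's lifecycle: bpf_mem_alloc_init() builds either a single fixed-size per-CPU cache (size != 0) or the full bucket array (size == 0), while the *_percpu_* variants set up the per-CPU-object flavour, with individual unit sizes brought online on demand. A minimal lifecycle sketch, assuming a caller that embeds the allocator in its own object; the my_map and my_elem names are made up for illustration and are not part of memalloc.c:

#include <linux/bpf_mem_alloc.h>
#include <linux/types.h>

struct my_elem {                     /* hypothetical element type */
	struct rcu_head rcu;
	u64 payload;
};

struct my_map {                      /* hypothetical map-like object */
	struct bpf_mem_alloc ma;
};

static int my_map_setup(struct my_map *m)
{
	/* size != 0: one per-CPU cache of sizeof(struct my_elem) objects.
	 * percpu == false: the objects handed out are ordinary memory,
	 * not per-CPU allocations.
	 */
	return bpf_mem_alloc_init(&m->ma, sizeof(struct my_elem), false);
}

static void my_map_teardown(struct my_map *m)
{
	/* Drains and frees the per-CPU caches; check_leaked_objs()
	 * further down in the listing warns about objects that are
	 * still outstanding at this point.
	 */
	bpf_mem_alloc_destroy(&m->ma);
}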
658 static void check_leaked_objs(struct bpf_mem_alloc *ma) in check_leaked_objs() argument
664 if (ma->cache) { in check_leaked_objs()
666 c = per_cpu_ptr(ma->cache, cpu); in check_leaked_objs()
670 if (ma->caches) { in check_leaked_objs()
672 cc = per_cpu_ptr(ma->caches, cpu); in check_leaked_objs()
681 static void free_mem_alloc_no_barrier(struct bpf_mem_alloc *ma) in free_mem_alloc_no_barrier() argument
683 check_leaked_objs(ma); in free_mem_alloc_no_barrier()
684 free_percpu(ma->cache); in free_mem_alloc_no_barrier()
685 free_percpu(ma->caches); in free_mem_alloc_no_barrier()
686 ma->cache = NULL; in free_mem_alloc_no_barrier()
687 ma->caches = NULL; in free_mem_alloc_no_barrier()
690 static void free_mem_alloc(struct bpf_mem_alloc *ma) in free_mem_alloc() argument
706 free_mem_alloc_no_barrier(ma); in free_mem_alloc()
711 struct bpf_mem_alloc *ma = container_of(work, struct bpf_mem_alloc, work); in free_mem_alloc_deferred() local
713 free_mem_alloc(ma); in free_mem_alloc_deferred()
714 kfree(ma); in free_mem_alloc_deferred()
717 static void destroy_mem_alloc(struct bpf_mem_alloc *ma, int rcu_in_progress) in destroy_mem_alloc() argument
725 free_mem_alloc_no_barrier(ma); in destroy_mem_alloc()
729 copy = kmemdup(ma, sizeof(*ma), GFP_KERNEL); in destroy_mem_alloc()
732 free_mem_alloc(ma); in destroy_mem_alloc()
737 memset(ma, 0, sizeof(*ma)); in destroy_mem_alloc()
742 void bpf_mem_alloc_destroy(struct bpf_mem_alloc *ma) in bpf_mem_alloc_destroy() argument
748 if (ma->cache) { in bpf_mem_alloc_destroy()
751 c = per_cpu_ptr(ma->cache, cpu); in bpf_mem_alloc_destroy()
758 obj_cgroup_put(ma->objcg); in bpf_mem_alloc_destroy()
759 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
761 if (ma->caches) { in bpf_mem_alloc_destroy()
764 cc = per_cpu_ptr(ma->caches, cpu); in bpf_mem_alloc_destroy()
774 obj_cgroup_put(ma->objcg); in bpf_mem_alloc_destroy()
775 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
889 void notrace *bpf_mem_alloc(struct bpf_mem_alloc *ma, size_t size) in bpf_mem_alloc() argument
897 if (!ma->percpu) in bpf_mem_alloc()
903 ret = unit_alloc(this_cpu_ptr(ma->caches)->cache + idx); in bpf_mem_alloc()
907 void notrace bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free() argument
920 unit_free(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free()
923 void notrace bpf_mem_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free_rcu() argument
936 unit_free_rcu(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free_rcu()
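bpf_mem_alloc(), bpf_mem_free() and bpf_mem_free_rcu() are the size-bucketed half of the API, used when the allocator was initialized with size == 0 and therefore carries ma->caches rather than a single ma->cache. A hedged usage sketch; demo_bucketed_alloc() and the 128-byte request are made up for illustration:

#include <linux/bpf_mem_alloc.h>

static int demo_bucketed_alloc(void)
{
	struct bpf_mem_alloc ma;
	void *obj;
	int err;

	/* size == 0 selects the multi-bucket mode backed by ma->caches. */
	err = bpf_mem_alloc_init(&ma, 0, false);
	if (err)
		return err;

	/* The request is rounded up (plus a small llist header for
	 * non-percpu objects) to one of the bucket sizes listed at the
	 * top of the file; oversized requests return NULL.
	 */
	obj = bpf_mem_alloc(&ma, 128);
	if (obj)
		bpf_mem_free(&ma, obj);

	/* bpf_mem_free_rcu() is the variant that defers reuse of the
	 * object until an RCU grace period has passed.
	 */
	bpf_mem_alloc_destroy(&ma);
	return 0;
}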
939 void notrace *bpf_mem_cache_alloc(struct bpf_mem_alloc *ma) in bpf_mem_cache_alloc() argument
943 ret = unit_alloc(this_cpu_ptr(ma->cache)); in bpf_mem_cache_alloc()
947 void notrace bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free() argument
952 unit_free(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free()
955 void notrace bpf_mem_cache_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free_rcu() argument
960 unit_free_rcu(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free_rcu()
985 void notrace *bpf_mem_cache_alloc_flags(struct bpf_mem_alloc *ma, gfp_t flags) in bpf_mem_cache_alloc_flags() argument
990 c = this_cpu_ptr(ma->cache); in bpf_mem_cache_alloc_flags()
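The bpf_mem_cache_*() entry points are the fixed-size counterparts: the unit size was chosen at init time, so there is no size argument and no bucket lookup, just this_cpu_ptr(ma->cache). Continuing the hypothetical my_map example from the earlier sketch:

static struct my_elem *my_map_alloc_elem(struct my_map *m)
{
	/* Unit size was fixed by bpf_mem_alloc_init(), so no size here. */
	return bpf_mem_cache_alloc(&m->ma);
}

static void my_map_free_elem(struct my_map *m, struct my_elem *e)
{
	/* Returns the element to the per-CPU free list for immediate
	 * reuse; callers whose readers may still hold RCU-protected
	 * references would use bpf_mem_cache_free_rcu() instead.
	 */
	bpf_mem_cache_free(&m->ma, e);
}

bpf_mem_cache_alloc_flags() appears to exist for callers that can pass GFP_KERNEL: when the per-CPU free list is empty, it can fall back to a direct allocation instead of returning NULL.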