Lines Matching full:area

68 struct vm_struct *area; in execmem_vmap() local
70 area = __get_vm_area_node(size, range->alignment, PAGE_SHIFT, VM_ALLOC, in execmem_vmap()
73 if (!area && range->fallback_start) in execmem_vmap()
74 area = __get_vm_area_node(size, range->alignment, PAGE_SHIFT, VM_ALLOC, in execmem_vmap()
78 return area; in execmem_vmap()
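
The execmem_vmap() hits above show a two-step reservation: __get_vm_area_node() is tried against the range's primary window first, and the call is repeated against range->fallback_start only when the first attempt returns NULL and a fallback window exists. Below is a minimal userspace sketch of that primary-then-fallback shape; struct addr_range and reserve_in_window() are invented stand-ins, not kernel interfaces.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct addr_range {                             /* illustrative, not the kernel's */
	uintptr_t start, end;                   /* preferred window */
	uintptr_t fallback_start, fallback_end; /* 0 when there is no fallback */
};

/* Pretend reservation: fails whenever the window is too small. */
static void *reserve_in_window(size_t size, uintptr_t start, uintptr_t end)
{
	if (end - start < size)
		return NULL;
	return malloc(size);            /* stand-in for the real mapping */
}

static void *range_vmap(struct addr_range *range, size_t size)
{
	void *area = reserve_in_window(size, range->start, range->end);

	/* same shape as execmem_vmap(): consult the fallback window only
	 * when the primary attempt failed and a fallback is configured */
	if (!area && range->fallback_start)
		area = reserve_in_window(size, range->fallback_start,
					 range->fallback_end);
	return area;
}

int main(void)
{
	struct addr_range r = { 0x1000, 0x2000, 0x100000, 0x200000 };
	void *p = range_vmap(&r, 0x8000); /* too big for primary, fits fallback */

	printf(p ? "allocated via fallback window\n" : "allocation failed\n");
	free(p);
	return 0;
}
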
163 void *area; in execmem_cache_clean() local
166 mas_for_each(&mas, area, ULONG_MAX) { in execmem_cache_clean()
171 struct vm_struct *vm = find_vm_area(area); in execmem_cache_clean()
175 vfree(area); in execmem_cache_clean()
189 void *area = NULL; in execmem_cache_add_locked() local
194 area = mas_walk(&mas); in execmem_cache_add_locked()
195 if (area && mas.last == addr - 1) in execmem_cache_add_locked()
198 area = mas_next(&mas, ULONG_MAX); in execmem_cache_add_locked()
199 if (area && mas.index == addr + size) in execmem_cache_add_locked()
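
execmem_cache_add_locked() checks the freed range [addr, addr + size) against its neighbours in the free-areas maple tree: mas_walk() positions the iterator at addr - 1, and mas.last == addr - 1 means the previous free range ends immediately below, while mas_next() followed by mas.index == addr + size means the next free range starts immediately above, so the new range can be merged with either or both. The userspace sketch below reproduces only that coalescing logic over a sorted array; struct free_range, cache[] and cache_add() are stand-ins for the kernel's maple-tree bookkeeping, not its code.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct free_range { uintptr_t start; size_t size; };

#define MAX_RANGES 16                 /* no overflow handling in this sketch */
static struct free_range cache[MAX_RANGES];
static int nranges;

static void cache_add(uintptr_t addr, size_t size)
{
	int i, pos = 0;

	/* find the first cached range at or above addr (array is sorted) */
	while (pos < nranges && cache[pos].start < addr)
		pos++;

	/* previous range ends right below addr (last byte at addr - 1): extend it */
	if (pos > 0 && cache[pos - 1].start + cache[pos - 1].size == addr) {
		cache[pos - 1].size += size;
		/* next range starts exactly at addr + size: absorb it too */
		if (pos < nranges &&
		    cache[pos].start == cache[pos - 1].start + cache[pos - 1].size) {
			cache[pos - 1].size += cache[pos].size;
			for (i = pos; i < nranges - 1; i++)
				cache[i] = cache[i + 1];
			nranges--;
		}
		return;
	}

	/* next range starts exactly at addr + size: grow it downwards */
	if (pos < nranges && cache[pos].start == addr + size) {
		cache[pos].start = addr;
		cache[pos].size += size;
		return;
	}

	/* no neighbour to merge with: insert a new entry */
	for (i = nranges; i > pos; i--)
		cache[i] = cache[i - 1];
	cache[pos] = (struct free_range){ addr, size };
	nranges++;
}

int main(void)
{
	cache_add(0x3000, 0x1000);
	cache_add(0x1000, 0x1000);
	cache_add(0x2000, 0x1000); /* bridges the two, collapses into one range */

	for (int i = 0; i < nranges; i++)
		printf("[%#lx, %#lx)\n", (unsigned long)cache[i].start,
		       (unsigned long)(cache[i].start + cache[i].size));
	return 0;
}
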
236 void *area, *ptr = NULL; in __execmem_cache_alloc() local
240 mas_for_each(&mas_free, area, ULONG_MAX) { in __execmem_cache_alloc()
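
Only the declaration and the start of the free-areas walk are visible for __execmem_cache_alloc(), so the allocation policy itself is not shown in this listing. The sketch below assumes a simple first-fit carve-out from the cached free ranges purely for illustration; the policy, the array and the cache_alloc() name are guesses, not the kernel's implementation.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct free_range { uintptr_t start; size_t size; };

/* invented sample contents of the free-areas cache */
static struct free_range free_ranges[] = {
	{ 0x1000, 0x0800 },
	{ 0x4000, 0x4000 },
};
static const int nfree = 2;

static uintptr_t cache_alloc(size_t size)
{
	for (int i = 0; i < nfree; i++) {
		if (free_ranges[i].size < size)
			continue;
		/* take the low end of the range and shrink what is left */
		uintptr_t addr = free_ranges[i].start;

		free_ranges[i].start += size;
		free_ranges[i].size  -= size;
		return addr;
	}
	return 0;	/* nothing cached was big enough */
}

int main(void)
{
	uintptr_t a = cache_alloc(0x1000);	/* skips the 0x800 range */

	printf("got %#lx\n", (unsigned long)a);	/* expect 0x4000 */
	return 0;
}
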
381 void *area; in execmem_cache_free_slow() local
388 mas_for_each(&mas, area, ULONG_MAX) { in execmem_cache_free_slow()
389 if (!is_pending_free(area)) in execmem_cache_free_slow()
392 area = pending_free_clear(area); in execmem_cache_free_slow()
393 if (__execmem_cache_free(&mas, area, GFP_KERNEL)) in execmem_cache_free_slow()
410 void *area; in execmem_cache_free() local
415 area = mas_walk(&mas); in execmem_cache_free()
416 if (!area) in execmem_cache_free()
419 err = __execmem_cache_free(&mas, area, GFP_KERNEL | __GFP_NORETRY); in execmem_cache_free()
422 * mas points to exact slot we've got the area from, nothing in execmem_cache_free()
427 area = pending_free_set(area); in execmem_cache_free()
428 mas_store_gfp(&mas, area, GFP_KERNEL); in execmem_cache_free()
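
The execmem_cache_free_slow() and execmem_cache_free() hits together show a two-phase free: the fast path tries __execmem_cache_free() with GFP_KERNEL | __GFP_NORETRY and, on failure, tags the stored pointer with pending_free_set() and writes it back with mas_store_gfp(), while the slow path walks the tree, skips entries for which is_pending_free() is false, strips the tag with pending_free_clear() and retries with plain GFP_KERNEL. The userspace sketch below models that tag as the pointer's lowest bit, a common trick that is only an assumption here; the three helpers are reimplemented for illustration, not copied from the kernel.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Low-bit pointer tag for "pending free": an assumed encoding for this
 * sketch, not necessarily what the kernel helpers do internally. */
#define PENDING_FREE_BIT 0x1UL

static void *pending_free_set(void *p)
{
	return (void *)((uintptr_t)p | PENDING_FREE_BIT);
}

static void *pending_free_clear(void *p)
{
	return (void *)((uintptr_t)p & ~PENDING_FREE_BIT);
}

static bool is_pending_free(void *p)
{
	return (uintptr_t)p & PENDING_FREE_BIT;
}

/* Stand-in for __execmem_cache_free(): the cheap, non-blocking attempt
 * fails, the later blocking attempt succeeds. */
static bool try_free(void *p, bool may_block)
{
	(void)p;
	return may_block;
}

int main(void)
{
	static int object;
	void *slot = &object;	/* what the busy-areas tree would store */

	/* fast path: cheap attempt failed, so tag the slot for later */
	if (!try_free(slot, false))
		slot = pending_free_set(slot);

	/* slow path: revisit tagged slots, strip the tag and retry */
	if (is_pending_free(slot)) {
		void *area = pending_free_clear(slot);

		if (try_free(area, true))
			printf("deferred free of %p succeeded\n", area);
	}
	return 0;
}
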