Home
last modified time | relevance | path

Searched refs:vmi (Results 1 – 25 of 49) sorted by relevance

12

/linux/mm/
H A Dvma.h35 struct vma_iterator *vmi; member
71 struct vma_iterator *vmi; member
175 struct vma_iterator *vmi, struct vm_area_struct *vma) in unmap_all_init() argument
177 unmap->mas = &vmi->mas; in unmap_all_init()
199 struct vma_iterator *vmi) in unmap_pgtable_init() argument
201 vma_iter_set(vmi, unmap->tree_reset); in unmap_pgtable_init()
239 .vmi = vmi_, \
250 .vmi = vmi_, \
273 __must_check int vma_shrink(struct vma_iterator *vmi,
277 static inline int vma_iter_store_gfp(struct vma_iterator *vmi, in vma_iter_store_gfp() argument
[all …]
H A Dvma.c12 struct vma_iterator *vmi; member
50 .vmi = vmi_, \
63 .vmi = (map_)->vmi, \
335 static void vma_complete(struct vma_prepare *vp, struct vma_iterator *vmi, in vma_complete() argument
356 vma_iter_store_new(vmi, vp->insert); in vma_complete()
497 __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in __split_vma() argument
525 vma_iter_config(vmi, new->vm_start, new->vm_end); in __split_vma()
526 if (vma_iter_prealloc(vmi, new)) in __split_vma()
566 vma_complete(&vp, vmi, vma->vm_mm); in __split_vma()
571 vma_next(vmi); in __split_vma()
[all …]
H A Dmseal.c43 VMA_ITERATOR(vmi, current->mm, start); in range_contains_unmapped()
45 for_each_vma_range(vmi, vma, end) { in range_contains_unmapped()
60 VMA_ITERATOR(vmi, mm, start); in mseal_apply()
63 vma = vma_iter_load(&vmi); in mseal_apply()
64 prev = vma_prev(&vmi); in mseal_apply()
68 for_each_vma_range(vmi, vma, end) { in mseal_apply()
74 vma = vma_modify_flags(&vmi, prev, vma, curr_start, in mseal_apply()
H A Dvma_exec.c38 VMA_ITERATOR(vmi, mm, new_start); in relocate_vma_down()
39 VMG_STATE(vmg, mm, &vmi, new_start, old_end, 0, vma->vm_pgoff); in relocate_vma_down()
50 if (vma != vma_next(&vmi)) in relocate_vma_down()
53 vma_iter_prev_range(&vmi); in relocate_vma_down()
70 next = vma_next(&vmi); in relocate_vma_down()
89 vma_prev(&vmi); in relocate_vma_down()
91 return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff); in relocate_vma_down()
H A Dmmap.c124 struct vma_iterator vmi; in SYSCALL_DEFINE1() local
164 vma_iter_init(&vmi, mm, newbrk); in SYSCALL_DEFINE1()
165 brkvma = vma_find(&vmi, oldbrk); in SYSCALL_DEFINE1()
174 if (do_vmi_align_munmap(&vmi, brkvma, mm, newbrk, oldbrk, &uf, in SYSCALL_DEFINE1()
188 vma_iter_init(&vmi, mm, oldbrk); in SYSCALL_DEFINE1()
189 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap); in SYSCALL_DEFINE1()
193 brkvma = vma_prev_limit(&vmi, mm->start_brk); in SYSCALL_DEFINE1()
195 if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0) in SYSCALL_DEFINE1()
929 VMA_ITERATOR(vmi, mm, addr); in find_vma_prev()
931 vma = vma_iter_load(&vmi); in find_vma_prev()
[all …]
H A Ddebug.c256 vmg->vmi, vmg->vmi ? vma_iter_addr(vmg->vmi) : 0, in dump_vmg()
257 vmg->vmi ? vma_iter_end(vmg->vmi) : 0, in dump_vmg()
301 if (vmg->vmi) { in dump_vmg()
303 vma_iter_dump_tree(vmg->vmi); in dump_vmg()
356 void vma_iter_dump_tree(const struct vma_iterator *vmi) in vma_iter_dump_tree() argument
359 mas_dump(&vmi->mas); in vma_iter_dump_tree()
360 mt_dump(vmi->mas.tree, mt_dump_hex); in vma_iter_dump_tree()
H A Dmmap_lock.c345 struct vma_iterator *vmi, in lock_next_vma()
356 vma_iter_set(vmi, from_addr); in lock_next_vma()
357 vma = vma_next(vmi); in lock_next_vma()
370 struct vma_iterator *vmi, in lock_next_vma()
381 vma = vma_next(vmi); in lock_next_vma()
395 vma_iter_set(vmi, from_addr); in get_mmap_lock_carefully()
415 vma_iter_set(vmi, from_addr); in upgrade_mmap_lock_carefully()
416 if (vma != vma_next(vmi)) in upgrade_mmap_lock_carefully()
427 vma = lock_next_vma_under_mmap_lock(mm, vmi, from_addr);
430 vma_iter_set(vmi, IS_ERR_OR_NULL
294 lock_next_vma_under_mmap_lock(struct mm_struct * mm,struct vma_iterator * vmi,unsigned long from_addr) lock_next_vma_under_mmap_lock() argument
319 lock_next_vma(struct mm_struct * mm,struct vma_iterator * vmi,unsigned long from_addr) lock_next_vma() argument
[all...]
H A Dnommu.c599 VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start); in delete_vma_from_mm()
601 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
602 if (vma_iter_prealloc(&vmi, NULL)) { in delete_vma_from_mm()
610 vma_iter_clear(&vmi); in delete_vma_from_mm()
642 VMA_ITERATOR(vmi, mm, addr); in find_vma()
644 return vma_iter_load(&vmi); in find_vma()
673 VMA_ITERATOR(vmi, mm, addr); in find_vma_exact()
675 vma = vma_iter_load(&vmi); in find_vma_exact()
1027 VMA_ITERATOR(vmi, current->mm, 0); in do_mmap()
1197 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in do_mmap()
[all …]
H A Dmlock.c466 static int mlock_fixup(struct vma_iterator *vmi, struct vm_area_struct *vma, in mlock_fixup() argument
481 vma = vma_modify_flags(vmi, *prev, vma, start, end, &newflags); in mlock_fixup()
519 VMA_ITERATOR(vmi, current->mm, start); in apply_vma_lock_flags()
528 vma = vma_iter_load(&vmi); in apply_vma_lock_flags()
532 prev = vma_prev(&vmi); in apply_vma_lock_flags()
538 for_each_vma_range(vmi, vma, end) { in apply_vma_lock_flags()
551 error = mlock_fixup(&vmi, vma, &prev, nstart, tmp, newflags); in apply_vma_lock_flags()
554 tmp = vma_iter_end(&vmi); in apply_vma_lock_flags()
577 VMA_ITERATOR(vmi, mm, start); in count_mm_mlocked_page_nr()
585 for_each_vma_range(vmi, vma, end) { in count_mm_mlocked_page_nr()
[all …]
H A Dmprotect.c695 mprotect_fixup(struct vma_iterator *vmi, struct mmu_gather *tlb, in mprotect_fixup() argument
756 vma = vma_modify_flags(vmi, *pprev, vma, start, end, &newflags); in mprotect_fixup()
811 struct vma_iterator vmi; in do_mprotect_pkey() local
843 vma_iter_init(&vmi, current->mm, start); in do_mprotect_pkey()
844 vma = vma_find(&vmi, end); in do_mprotect_pkey()
867 prev = vma_prev(&vmi); in do_mprotect_pkey()
874 for_each_vma_range(vmi, vma, end) { in do_mprotect_pkey()
930 error = mprotect_fixup(&vmi, &tlb, vma, &prev, nstart, tmp, newflags); in do_mprotect_pkey()
934 tmp = vma_iter_end(&vmi); in do_mprotect_pkey()
H A Dmremap.c1088 VMA_ITERATOR(vmi, mm, addr); in unmap_source_vma()
1126 err = do_vmi_munmap(&vmi, mm, addr, len, vrm->uf_unmap, /* unlock= */false); in unmap_source_vma()
1164 struct vm_area_struct *prev = vma_prev(&vmi); in unmap_source_vma()
1170 struct vm_area_struct *next = vma_next(&vmi); in unmap_source_vma()
1337 VMA_ITERATOR(vmi, mm, unmap_start); in shrink_vma()
1341 res = do_vmi_munmap(&vmi, mm, unmap_start, unmap_bytes, in shrink_vma()
1460 VMA_ITERATOR(vmi, mm, vma->vm_end); in expand_vma_in_place()
1474 vma = vma_merge_extend(&vmi, vma, vrm->delta); in expand_vma_in_place()
1837 VMA_ITERATOR(vmi, current->mm, start); in remap_move()
1844 for_each_vma_range(vmi, vma, end) { in remap_move()
[all …]
H A Duserfaultfd.c945 VMA_ITERATOR(vmi, dst_mm, start); in mwriteprotect_range()
969 for_each_vma_range(vmi, dst_vma, end) { in mwriteprotect_range()
1971 struct vm_area_struct *userfaultfd_clear_vma(struct vma_iterator *vmi, in userfaultfd_reset_ctx()
1991 ret = vma_modify_flags_uffd(vmi, prev, vma, start, end, in userfaultfd_clear_vma()
2013 VMA_ITERATOR(vmi, ctx->mm, start); in userfaultfd_register_range()
2014 struct vm_area_struct *prev = vma_prev(&vmi); in userfaultfd_register_range()
2021 for_each_vma_range(vmi, vma, end) { in userfaultfd_register_range()
2042 vma = vma_modify_flags_uffd(&vmi, prev, vma, start, vma_end, in userfaultfd_register_range()
2071 VMA_ITERATOR(vmi, mm, 0); in userfaultfd_release_new()
2075 for_each_vma(vmi, vma) in userfaultfd_release_new()
1975 userfaultfd_clear_vma(struct vma_iterator * vmi,struct vm_area_struct * prev,struct vm_area_struct * vma,unsigned long start,unsigned long end) userfaultfd_clear_vma() argument
[all...]
/linux/tools/testing/vma/tests/
H A Dmerge.c11 vmg->next = vma_next(vmg->vmi); in merge_new()
12 vmg->prev = vma_prev(vmg->vmi); in merge_new()
13 vma_iter_next_range(vmg->vmi); in merge_new()
38 vma_iter_set(vmg->vmi, start); in vmg_set_range()
102 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
105 .vmi = &vmi, in test_simple_merge()
135 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_modify()
144 vma = vma_modify_flags(&vmi, init_vma, init_vma, in test_simple_modify()
159 vma_iter_set(&vmi, 0); in test_simple_modify()
160 vma = vma_iter_load(&vmi); in test_simple_modify()
[all …]
H A Dmmap.c8 VMA_ITERATOR(vmi, &mm, 0); in test_mmap_region_basic()
38 for_each_vma(vmi, vma) { in test_mmap_region_basic()
50 cleanup_mm(&mm, &vmi); in test_mmap_region_basic()
H A Dvma.c28 VMA_ITERATOR(vmi, &mm, 0); in test_copy_vma()
41 cleanup_mm(&mm, &vmi); in test_copy_vma()
52 cleanup_mm(&mm, &vmi); in test_copy_vma()
/linux/tools/testing/vma/include/
H A Ddup.h723 static inline void vma_iter_invalidate(struct vma_iterator *vmi) in vma_iter_invalidate() argument
725 mas_pause(&vmi->mas); in vma_iter_invalidate()
953 static inline struct vm_area_struct *vma_next(struct vma_iterator *vmi) in vma_next() argument
959 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
1049 struct vm_area_struct *vma_find(struct vma_iterator *vmi, unsigned long max) in vma_find() argument
1051 return mas_find(&vmi->mas, max - 1); in vma_find()
1054 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
1057 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
1058 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
1059 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
[all …]
/linux/fs/proc/
H A Dtask_nommu.c23 VMA_ITERATOR(vmi, mm, 0); in task_mem()
29 for_each_vma(vmi, vma) { in task_mem()
83 VMA_ITERATOR(vmi, mm, 0); in task_vsize()
88 for_each_vma(vmi, vma) in task_vsize()
98 VMA_ITERATOR(vmi, mm, 0); in task_statm()
104 for_each_vma(vmi, vma) { in task_statm()
/linux/arch/xtensa/kernel/
H A Dsyscall.c62 struct vma_iterator vmi; in arch_get_unmapped_area() local
84 vma_iter_init(&vmi, current->mm, addr); in arch_get_unmapped_area()
85 for_each_vma(vmi, vmm) { in arch_get_unmapped_area()
/linux/tools/testing/vma/
H A Dshared.h43 #define vma_iter_prealloc(vmi, vma) \ argument
44 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
99 int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi);
H A Dshared.c69 int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
77 vma_iter_set(vmi, 0); in cleanup_mm()
78 for_each_vma(*vmi, vma) { in cleanup_mm()
/linux/arch/powerpc/mm/book3s32/
H A Dtlb.c84 VMA_ITERATOR(vmi, mm, 0); in hash__flush_tlb_mm()
92 for_each_vma(vmi, mp) in hash__flush_tlb_mm()
/linux/include/linux/
H A Dmm.h1312 struct vm_area_struct *vma_find(struct vma_iterator *vmi, unsigned long max) in vma_find() argument
1314 return mas_find(&vmi->mas, max - 1); in vma_find()
1317 static inline struct vm_area_struct *vma_next(struct vma_iterator *vmi) in vma_next() argument
1323 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
1327 struct vm_area_struct *vma_iter_next_range(struct vma_iterator *vmi) in vma_iter_next_range() argument
1329 return mas_next_range(&vmi->mas, ULONG_MAX); in vma_iter_next_range()
1333 static inline struct vm_area_struct *vma_prev(struct vma_iterator *vmi) in vma_prev() argument
1335 return mas_prev(&vmi->mas, 0); in vma_prev()
1338 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
1341 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
[all …]
/linux/lib/vdso/
H A Ddatastore.c119 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
122 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux/fs/
H A Duserfaultfd.c1273 struct vma_iterator vmi; in userfaultfd_register() local
1318 vma_iter_init(&vmi, mm, start); in userfaultfd_register()
1319 vma = vma_find(&vmi, end); in userfaultfd_register()
1397 } for_each_vma_range(vmi, cur, end); in userfaultfd_register()
1445 struct vma_iterator vmi; in userfaultfd_unregister() local
1466 vma_iter_init(&vmi, mm, start); in userfaultfd_unregister()
1467 vma = vma_find(&vmi, end); in userfaultfd_unregister()
1512 } for_each_vma_range(vmi, cur, end); in userfaultfd_unregister()
1515 vma_iter_set(&vmi, start); in userfaultfd_unregister()
1516 prev = vma_prev(&vmi); in userfaultfd_unregister()
[all …]
/linux/arch/um/kernel/
H A Dtlb.c221 VMA_ITERATOR(vmi, mm, 0); in flush_tlb_mm()
223 for_each_vma(vmi, vma) in flush_tlb_mm()

12