
Searched refs:vma (Results 1 – 25 of 621) sorted by relevance


/linux/include/linux/
mmap_lock.h
   82  #define __vma_lockdep_map(vma) (&vma->vmlock_dep_map)
   84  #define __vma_lockdep_map(vma) NULL
  106  #define __vma_lockdep_acquire_read(vma) \
  107      lock_acquire_shared(__vma_lockdep_map(vma), 0, 1, NULL, _RET_IP_)
  108  #define __vma_lockdep_release_read(vma) \
  109      lock_release(__vma_lockdep_map(vma), _RET_IP_)
  110  #define __vma_lockdep_acquire_exclusive(vma) \
  111      lock_acquire_exclusive(__vma_lockdep_map(vma), 0, 0, NULL, _RET_IP_)
  112  #define __vma_lockdep_release_exclusive(vma) \
  113      lock_release(__vma_lockdep_map(vma), _RET_IP_)
  [all …]
/linux/drivers/gpu/drm/i915/
i915_gem_evict.c
   41  static bool dying_vma(struct i915_vma *vma)
   43  return !kref_read(&vma->obj->base.refcount);
   67  static bool grab_vma(struct i915_vma *vma, struct i915_gem_ww_ctx *ww)    in grab_vma()
   73  if (i915_gem_object_get_rcu(vma->obj)) {    in grab_vma()
   74  if (!i915_gem_object_trylock(vma->obj, ww)) {    in grab_vma()
   75  i915_gem_object_put(vma->obj);    in grab_vma()
   80  atomic_and(~I915_VMA_PIN_MASK, &vma->flags);    in grab_vma()
   86  static void ungrab_vma(struct i915_vma *vma)    in ungrab_vma()
   88  if (dying_vma(vma))    in ungrab_vma()
   91  i915_gem_object_unlock(vma->obj);    in ungrab_vma()
  [all …]
/linux/tools/testing/vma/tests/
merge.c
    6  struct vm_area_struct *vma;    in merge_new()
   15  vma = vma_merge_new_range(vmg);    in merge_new()
   16  if (vma)    in merge_new()
   17  vma_assert_attached(vma);    in merge_new()
   19  return vma;    in merge_new()
   97  struct vm_area_struct *vma;    in test_simple_merge()
  116  vma = merge_new(&vmg);    in test_simple_merge()
  117  ASSERT_NE(vma, NULL);    in test_simple_merge()
  119  ASSERT_EQ(vma->vm_start, 0);    in test_simple_merge()
  120  ASSERT_EQ(vma …    in test_simple_merge()
  131  struct vm_area_struct *vma;    in test_simple_modify()
  194  struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, vm_flags);    in test_simple_expand()
  222  struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags);    in test_simple_shrink()
  264  struct vm_area_struct *vma, *vma_a, *vma_b, *vma_c, *vma_d;    in __test_merge_new()
  482  struct vm_area_struct *vma_left, *vma;    in test_vma_merge_special_flags()
  554  struct vm_area_struct *vma_prev, *vma_next, *vma;    in test_vma_merge_with_close()
  765  struct vm_area_struct *vma;    in test_vma_merge_new_with_close()
  813  struct vm_area_struct *vma, *vma_prev, *vma_next;    in __test_merge_existing()
 1073  struct vm_area_struct *vma, *vma_prev, *vma_next;    in test_anon_vma_non_mergeable()
 1167  struct vm_area_struct *vma_prev, *vma_next, *vma;    in test_dup_anon_vma()
 1325  struct vm_area_struct *vma_prev, *vma;    in test_vmi_prealloc_fail()
 1386  struct vm_area_struct *vma;    in test_merge_extend()
 1416  struct vm_area_struct *vma_prev, *vma;    in test_expand_only_mode()
  [all …]
/linux/mm/
vma.c
   74  static bool vma_is_fork_child(struct vm_area_struct *vma)    in vma_is_fork_child()
   81  return vma && vma->anon_vma && !list_is_singular(&vma->anon_vma_chain);    in vma_is_fork_child()
   86  struct vm_area_struct *vma = merge_next ? vmg->next : vmg->prev;    in is_mergeable_vma()
   89  if (!mpol_equal(vmg->policy, vma_policy(vma)))    in is_mergeable_vma()
   92  diff = vma_flags_diff_pair(&vma->flags, &vmg->vma_flags);    in is_mergeable_vma()
   97  if (vma->vm_file != vmg->file)    in is_mergeable_vma()
   99  if (!is_mergeable_vm_userfaultfd_ctx(vma, vmg->uffd_ctx))    in is_mergeable_vma()
  101  if (!anon_vma_name_eq(anon_vma_name(vma), vmg->anon_name))    in is_mergeable_vma()
  146  struct vm_area_struct *vma,    in init_multi_vma_prep()
  153  vp->vma = vma;    in init_multi_vma_prep()
  [all …]
memory.c
  107  if (!userfaultfd_wp(vmf->vma))    in vmf_orig_pte_uffd_wp()
  377  struct vm_area_struct *vma = unmap->first;    in free_pgtables()
  390  unsigned long addr = vma->vm_start;    in free_pgtables()
  400  vma_start_write(vma);    in free_pgtables()
  401  unlink_anon_vmas(vma);    in free_pgtables()
  404  unlink_file_vma_batch_add(&vb, vma);    in free_pgtables()
  409  while (next && next->vm_start <= vma->vm_end + PMD_SIZE) {    in free_pgtables()
  410  vma = next;    in free_pgtables()
  413  vma_start_write(vma);    in free_pgtables()
  414  unlink_anon_vmas(vma);    in free_pgtables()
  [all …]
madvise.c
   80  struct vm_area_struct *vma;    (struct member)
  110  struct anon_vma_name *anon_vma_name(struct vm_area_struct *vma)    in anon_vma_name()
  112  vma_assert_stabilised(vma);    in anon_vma_name()
  113  return vma->anon_name;    in anon_vma_name()
  117  static int replace_anon_vma_name(struct vm_area_struct *vma,    in replace_anon_vma_name()
  120  struct anon_vma_name *orig_name = anon_vma_name(vma);    in replace_anon_vma_name()
  123  vma->anon_name = NULL;    in replace_anon_vma_name()
  131  vma->anon_name = anon_vma_name_reuse(anon_name);    in replace_anon_vma_name()
  137  static int replace_anon_vma_name(struct vm_area_struct *vma,    in replace_anon_vma_name()
  153  struct vm_area_struct *vma = madv_behavior->vma;    in madvise_update_vma()
  [all …]
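
The anon_vma_name()/replace_anon_vma_name() hits above are the kernel side of named anonymous mappings. A minimal userspace sketch, assuming a kernel built with CONFIG_ANON_VMA_NAME (the PR_SET_VMA constants are duplicated as fallbacks for older headers); the named region then shows up as "[anon:demo-buffer]" in /proc/<pid>/maps:

```c
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/prctl.h>

#ifndef PR_SET_VMA
#define PR_SET_VMA 0x53564d41            /* fallback for older uapi headers */
#endif
#ifndef PR_SET_VMA_ANON_NAME
#define PR_SET_VMA_ANON_NAME 0
#endif

int main(void)
{
    size_t len = 4 * 4096;
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED)
        return 1;

    /* Fails with EINVAL if the kernel lacks CONFIG_ANON_VMA_NAME. */
    if (prctl(PR_SET_VMA, PR_SET_VMA_ANON_NAME,
              (unsigned long)p, len, "demo-buffer"))
        perror("PR_SET_VMA_ANON_NAME");

    /* The named region is now visible in /proc/self/maps. */
    char line[256];
    FILE *f = fopen("/proc/self/maps", "r");
    while (f && fgets(line, sizeof(line), f))
        if (strstr(line, "demo-buffer"))
            fputs(line, stdout);
    if (f)
        fclose(f);
    munmap(p, len);
    return 0;
}
```
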
rmap.c
  150  static void anon_vma_chain_assign(struct vm_area_struct *vma,    in anon_vma_chain_assign()
  154  avc->vma = vma;    in anon_vma_chain_assign()
  156  list_add(&avc->same_vma, &vma->anon_vma_chain);    in anon_vma_chain_assign()
  185  int __anon_vma_prepare(struct vm_area_struct *vma)    in __anon_vma_prepare()
  187  struct mm_struct *mm = vma->vm_mm;    in __anon_vma_prepare()
  198  anon_vma = find_mergeable_anon_vma(vma);    in __anon_vma_prepare()
  211  if (likely(!vma->anon_vma)) {    in __anon_vma_prepare()
  212  vma->anon_vma = anon_vma;    in __anon_vma_prepare()
  213  anon_vma_chain_assign(vma, avc, anon_vma);    in __anon_vma_prepare()
  290  static void cleanup_partial_anon_vmas(struct vm_area_struct *vma);
  [all …]
mseal.c
   41  struct vm_area_struct *vma;    in range_contains_unmapped()
   45  for_each_vma_range(vmi, vma, end) {    in range_contains_unmapped()
   46  if (vma->vm_start > prev_end)    in range_contains_unmapped()
   49  prev_end = vma->vm_end;    in range_contains_unmapped()
   58  struct vm_area_struct *vma, *prev;    in mseal_apply()
   62  vma = vma_iter_load(&vmi);    in mseal_apply()
   64  if (start > vma->vm_start)    in mseal_apply()
   65  prev = vma;    in mseal_apply()
   67  for_each_vma_range(vmi, vma, end) {    in mseal_apply()
   68  const unsigned long curr_start = MAX(vma->vm_start, start);    in mseal_apply()
  [all …]
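
The mseal_apply() path above is driven by the mseal(2) syscall. A hedged sketch of the userspace side, sealing a mapping so that later mprotect()/munmap() calls on it fail with EPERM (needs Linux 6.10 or newer; the __NR_mseal fallback of 462 is an x86-64 assumption):

```c
#include <stdio.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

#ifndef __NR_mseal
#define __NR_mseal 462                    /* x86-64; adjust per architecture */
#endif

int main(void)
{
    size_t len = 2 * 4096;
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED)
        return 1;

    /* flags must currently be 0; the range must be fully mapped. */
    if (syscall(__NR_mseal, p, len, 0))
        perror("mseal");

    /* Both of these are now rejected on a sealed vma: */
    if (mprotect(p, len, PROT_READ))
        perror("mprotect");
    if (munmap(p, len))
        perror("munmap");
    return 0;
}
```
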
pagewalk.c
   47  update_mmu_cache(walk->vma, addr, pte);    in walk_pte_range_inner()
  168  if (walk->vma)    in walk_pmd_range()
  169  split_huge_pmd(walk->vma, pmd, addr);    in walk_pmd_range()
  234  if (walk->vma)    in walk_pud_range()
  235  split_huge_pud(walk->vma, pud, addr);    in walk_pud_range()
  341  struct vm_area_struct *vma = walk->vma;    in walk_hugetlb_range()
  342  struct hstate *h = hstate_vma(vma);    in walk_hugetlb_range()
  350  hugetlb_vma_lock_read(vma);    in walk_hugetlb_range()
  353  pte = hugetlb_walk(vma, addr & hmask, sz);    in walk_hugetlb_range()
  361  hugetlb_vma_unlock_read(vma);    in walk_hugetlb_range()
  [all …]
hugetlb.c
  118  static void hugetlb_vma_lock_free(struct vm_area_struct *vma);
  119  static void __hugetlb_vma_unlock_write_free(struct vm_area_struct *vma);
  120  static void hugetlb_unshare_pmds(struct vm_area_struct *vma,
  122  static struct resv_map *vma_resv_map(struct vm_area_struct *vma);
  274  static inline struct hugepage_subpool *subpool_vma(struct vm_area_struct *vma)    in subpool_vma()
  276  return subpool_inode(file_inode(vma->vm_file));    in subpool_vma()
  282  void hugetlb_vma_lock_read(struct vm_area_struct *vma)    in hugetlb_vma_lock_read()
  284  if (__vma_shareable_lock(vma)) {    in hugetlb_vma_lock_read()
  285  struct hugetlb_vma_lock *vma_lock = vma->vm_private_data;    in hugetlb_vma_lock_read()
  288  } else if (__vma_private_lock(vma)) {    in hugetlb_vma_lock_read()
  [all …]
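
The hugetlb_vma_lock_read()/subpool_vma() hits above operate on hugetlb VMAs, which userspace typically creates with MAP_HUGETLB. A rough sketch, assuming 2 MiB huge pages have been reserved (for example via vm.nr_hugepages):

```c
#define _GNU_SOURCE
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
    size_t len = 2UL << 20;               /* one 2 MiB huge page */
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_HUGETLB, -1, 0);
    if (p == MAP_FAILED) {
        perror("mmap(MAP_HUGETLB)");      /* ENOMEM if no huge pages are reserved */
        return 1;
    }
    memset(p, 0xab, len);                 /* fault the huge page in */
    munmap(p, len);
    return 0;
}
```
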
userfaultfd.c
   29  struct vm_area_struct *vma;    (struct member)
   35  static bool anon_can_userfault(struct vm_area_struct *vma, vm_flags_t vm_flags)    in anon_can_userfault()
   43  static struct folio *anon_alloc_folio(struct vm_area_struct *vma,    in anon_alloc_folio()
   46  struct folio *folio = vma_alloc_folio(GFP_HIGHUSER_MOVABLE, 0, vma,    in anon_alloc_folio()
   52  if (mem_cgroup_charge(folio, vma->vm_mm, GFP_KERNEL)) {    in anon_alloc_folio()
   65  static const struct vm_uffd_ops *vma_uffd_ops(struct vm_area_struct *vma)    in vma_uffd_ops()
   67  if (vma_is_anonymous(vma))    in vma_uffd_ops()
   69  return vma->vm_ops ? vma->vm_ops->uffd_ops : NULL;    in vma_uffd_ops()
   94  struct vm_area_struct *vma;    in find_vma_and_prepare_anon()
   97  vma = vma_lookup(mm, addr);    in find_vma_and_prepare_anon()
  [all …]
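
mm/userfaultfd.c above implements the kernel half of userfaultfd(2). A hedged sketch of the userspace half, registering an anonymous range for missing-page events (may need privilege or vm.unprivileged_userfaultfd=1; a real user would then service the events from another thread with UFFDIO_COPY or UFFDIO_ZEROPAGE):

```c
#include <fcntl.h>
#include <linux/userfaultfd.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

int main(void)
{
    int uffd = syscall(__NR_userfaultfd, O_CLOEXEC | O_NONBLOCK);
    if (uffd < 0) {
        perror("userfaultfd");
        return 1;
    }

    struct uffdio_api api = { .api = UFFD_API };
    if (ioctl(uffd, UFFDIO_API, &api)) {
        perror("UFFDIO_API");
        return 1;
    }

    size_t len = 4096;
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED)
        return 1;

    struct uffdio_register reg = {
        .range = { .start = (unsigned long)p, .len = len },
        .mode  = UFFDIO_REGISTER_MODE_MISSING,
    };
    if (ioctl(uffd, UFFDIO_REGISTER, &reg)) {
        perror("UFFDIO_REGISTER");
        return 1;
    }

    printf("registered %p (+%zu) for missing-page events\n", p, len);
    close(uffd);
    return 0;
}
```
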
huge_memory.c
   85  static inline bool file_thp_enabled(struct vm_area_struct *vma)    in file_thp_enabled()
   92  if (!vma->vm_file)    in file_thp_enabled()
   95  inode = file_inode(vma->vm_file);    in file_thp_enabled()
  104  static bool vma_is_special_huge(const struct vm_area_struct *vma)    in vma_is_special_huge()
  106  if (vma_is_dax(vma))    in vma_is_special_huge()
  108  return vma_test_any(vma, VMA_PFNMAP_BIT, VMA_MIXEDMAP_BIT);    in vma_is_special_huge()
  111  unsigned long __thp_vma_allowable_orders(struct vm_area_struct *vma,    in __thp_vma_allowable_orders()
  122  if (vma_is_anonymous(vma))    in __thp_vma_allowable_orders()
  124  else if (vma_is_dax(vma) || vma_is_special_huge(vma))    in __thp_vma_allowable_orders()
  133  if (!vma->vm_mm) /* vdso */    in __thp_vma_allowable_orders()
  [all …]
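
__thp_vma_allowable_orders() above decides THP eligibility per VMA; the usual userspace way to opt a range in is madvise(MADV_HUGEPAGE). A rough sketch (a hint only, honored when THP is enabled, and only 2 MiB aligned subranges can actually be mapped with PMD-sized pages):

```c
#define _GNU_SOURCE
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
    size_t len = 4UL << 20;               /* 4 MiB of anonymous memory */
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED)
        return 1;

    if (madvise(p, len, MADV_HUGEPAGE))   /* mark the vma THP-eligible */
        perror("madvise(MADV_HUGEPAGE)");

    memset(p, 0, len);                    /* faults may now use huge pages */
    munmap(p, len);
    return 0;
}
```
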
/linux/tools/testing/vma/
shared.c
   19  struct vm_area_struct *vma = vm_area_alloc(mm);    in alloc_vma()
   21  if (vma == NULL)    in alloc_vma()
   24  vma->vm_start = start;    in alloc_vma()
   25  vma->vm_end = end;    in alloc_vma()
   26  vma->vm_pgoff = pgoff;    in alloc_vma()
   27  vma->flags = vma_flags;    in alloc_vma()
   28  vma_assert_detached(vma);    in alloc_vma()
   30  return vma;    in alloc_vma()
   33  void detach_free_vma(struct vm_area_struct *vma)    in detach_free_vma()
   35  vma_mark_detached(vma);    in detach_free_vma()
   43  struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags);    in alloc_and_link_vma()
   71  struct vm_area_struct *vma;    in cleanup_mm()
   88  vma_write_started(struct vm_area_struct *vma)    in vma_write_started()
   99  __vma_set_dummy_anon_vma(struct vm_area_struct *vma, struct anon_vma_chain *avc, struct anon_vma *anon_vma)    in __vma_set_dummy_anon_vma()
  108  vma_set_dummy_anon_vma(struct vm_area_struct *vma, struct anon_vma_chain *avc)    in vma_set_dummy_anon_vma()
  124  vma_set_range(struct vm_area_struct *vma, unsigned long start, unsigned long end, pgoff_t pgoff)    in vma_set_range()
  [all …]
/linux/drivers/gpu/drm/nouveau/
nouveau_vmm.c
   29  nouveau_vma_unmap(struct nouveau_vma *vma)    in nouveau_vma_unmap()
   31  if (vma->mem) {    in nouveau_vma_unmap()
   32  nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);    in nouveau_vma_unmap()
   33  vma->mem = NULL;    in nouveau_vma_unmap()
   38  nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)    in nouveau_vma_map()
   40  struct nvif_vma tmp = { .addr = vma->addr };    in nouveau_vma_map()
   41  int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);    in nouveau_vma_map()
   44  vma->mem = mem;    in nouveau_vma_map()
   51  struct nouveau_vma *vma;    in nouveau_vma_find()
   53  list_for_each_entry(vma, &nvbo->vma_list, head) {    in nouveau_vma_find()
  [all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
  826  struct nvkm_vma *vma = kzalloc_obj(*vma);    in nvkm_vma_new()
  827  if (vma) {    in nvkm_vma_new()
  828  vma->addr = addr;    in nvkm_vma_new()
  829  vma->size = size;    in nvkm_vma_new()
  830  vma->page = NVKM_VMA_PAGE_NONE;    in nvkm_vma_new()
  831  vma->refd = NVKM_VMA_PAGE_NONE;    in nvkm_vma_new()
  833  return vma;    in nvkm_vma_new()
  837  nvkm_vma_tail(struct nvkm_vma *vma, u64 tail)    in nvkm_vma_tail()
  841  BUG_ON(vma->size == tail);    in nvkm_vma_tail()
  843  if (!(new = nvkm_vma_new(vma->addr + (vma->size - tail), tail)))    in nvkm_vma_tail()
  [all …]
uvmm.c
  113  struct nvkm_vma *vma;    in nvkm_uvmm_mthd_unmap()
  126  vma = nvkm_vmm_node_search(vmm, addr);    in nvkm_uvmm_mthd_unmap()
  127  if (ret = -ENOENT, !vma || vma->addr != addr) {    in nvkm_uvmm_mthd_unmap()
  129  addr, vma ? vma->addr : ~0ULL);    in nvkm_uvmm_mthd_unmap()
  133  if (ret = -ENOENT, vma->busy) {    in nvkm_uvmm_mthd_unmap()
  134  VMM_DEBUG(vmm, "denied %016llx: %d", addr, vma->busy);    in nvkm_uvmm_mthd_unmap()
  138  if (ret = -EINVAL, !vma->memory) {    in nvkm_uvmm_mthd_unmap()
  143  nvkm_vmm_unmap_locked(vmm, vma, false);    in nvkm_uvmm_mthd_unmap()
  159  struct nvkm_vma *vma;    in nvkm_uvmm_mthd_map()
  181  if (ret = -ENOENT, !(vma = nvkm_vmm_node_search(vmm, addr))) {    in nvkm_uvmm_mthd_map()
  [all …]
/linux/drivers/gpu/drm/i915/selftests/
i915_gem_gtt.c
  398  struct i915_vma *vma;    in close_object_list()
  400  vma = i915_vma_instance(obj, vm, NULL);    in close_object_list()
  401  if (!IS_ERR(vma))    in close_object_list()
  402  ignored = i915_vma_unbind_unlocked(vma);    in close_object_list()
  421  struct i915_vma *vma;    in fill_hole()
  463  vma = i915_vma_instance(obj, vm, NULL);    in fill_hole()
  464  if (IS_ERR(vma))    in fill_hole()
  473  err = i915_vma_pin(vma, 0, 0, offset | flags);    in fill_hole()
  480  if (!drm_mm_node_allocated(&vma->node) ||    in fill_hole()
  481  i915_vma_misplaced(vma, 0, 0, offset | flags)) {    in fill_hole()
  [all …]
/linux/drivers/gpu/drm/xe/
xe_trace_bo.h
   21  #define __dev_name_vma(vma) __dev_name_vm(xe_vma_vm(vma))
   89  TP_PROTO(struct xe_vma *vma),
   90  TP_ARGS(vma),
   93  __string(dev, __dev_name_vma(vma))
   94  __field(struct xe_vma *, vma)
  104  __entry->vma = vma;
  105  __entry->vm = xe_vma_vm(vma);
  106  __entry->asid = xe_vma_vm(vma)->usm.asid;
  107  __entry->start = xe_vma_start(vma);
  108  __entry->end = xe_vma_end(vma) - 1;
  [all …]
/linux/tools/testing/vma/include/
dup.h
    7  static inline void vma_start_write(struct vm_area_struct *vma);
  493  int (*success_hook)(const struct vm_area_struct *vma);
  651  void (*open)(struct vm_area_struct *vma);
  656  void (*close)(struct vm_area_struct *vma);
  675  int (*may_split)(struct vm_area_struct *vma, unsigned long addr);
  676  int (*mremap)(struct vm_area_struct *vma);
  682  int (*mprotect)(struct vm_area_struct *vma, unsigned long start,
  688  unsigned long (*pagesize)(struct vm_area_struct *vma);
  701  int (*access)(struct vm_area_struct *vma, unsigned long addr,
  707  const char *(*name)(struct vm_area_struct *vma);
  [all …]
stubs.h
   28  #define vma_policy(vma) NULL
   67  static inline void vma_numab_state_init(struct vm_area_struct *vma)    in vma_numab_state_init()
   71  static inline void vma_numab_state_free(struct vm_area_struct *vma)    in vma_numab_state_free()
   80  static inline void free_anon_vma_name(struct vm_area_struct *vma)    in free_anon_vma_name()
   89  static inline int mmap_action_complete(struct vm_area_struct *vma,    in mmap_action_complete()
   96  static inline void fixup_hugetlb_reservations(struct vm_area_struct *vma)    in fixup_hugetlb_reservations()
  115  static inline int remap_pfn_range_complete(struct vm_area_struct *vma, unsigned long addr,    in remap_pfn_range_complete()
  143  static inline int userfaultfd_unmap_prep(struct vm_area_struct *vma,    in userfaultfd_unmap_prep()
  186  static inline void khugepaged_enter_vma(struct vm_area_struct *vma,    in khugepaged_enter_vma()
  196  static inline bool is_vm_hugetlb_page(struct vm_area_struct *vma)    in is_vm_hugetlb_page()
  [all …]
/linux/drivers/pci/
mmap.c
   25  struct vm_area_struct *vma,    in pci_mmap_resource_range()
   32  if (vma->vm_pgoff + vma_pages(vma) > size)    in pci_mmap_resource_range()
   36  vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);    in pci_mmap_resource_range()
   38  vma->vm_page_prot = pgprot_device(vma->vm_page_prot);    in pci_mmap_resource_range()
   41  ret = pci_iobar_pfn(pdev, bar, vma);    in pci_mmap_resource_range()
   45  vma->vm_pgoff += (pci_resource_start(pdev, bar) >> PAGE_SHIFT);    in pci_mmap_resource_range()
   47  vma->vm_ops = &pci_phys_vm_ops;    in pci_mmap_resource_range()
   49  return io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff,    in pci_mmap_resource_range()
   50  vma->vm_end - vma->vm_start,    in pci_mmap_resource_range()
   51  vma->vm_page_prot);    in pci_mmap_resource_range()
  [all …]
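
pci_mmap_resource_range() above is what ends up backing mmap() on a BAR's sysfs resourceN file. A hedged userspace sketch (the PCI address below is a placeholder; needs root and a memory BAR):

```c
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void)
{
    /* Placeholder device; substitute a real one from lspci -D. */
    const char *bar = "/sys/bus/pci/devices/0000:03:00.0/resource0";
    int fd = open(bar, O_RDWR | O_SYNC);
    if (fd < 0) {
        perror("open BAR");
        return 1;
    }

    size_t len = 4096;                    /* must not exceed the BAR size */
    volatile uint32_t *regs = mmap(NULL, len, PROT_READ | PROT_WRITE,
                                   MAP_SHARED, fd, 0);
    if ((void *)regs == MAP_FAILED) {
        perror("mmap BAR");
        close(fd);
        return 1;
    }

    printf("reg[0] = 0x%08x\n", regs[0]); /* MMIO read through the mapping */

    munmap((void *)regs, len);
    close(fd);
    return 0;
}
```
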
/linux/drivers/gpu/drm/i915/gt/
intel_ring.c
   37  struct i915_vma *vma = ring->vma;    in intel_ring_pin()
   46  flags = PIN_OFFSET_BIAS | i915_ggtt_pin_bias(vma);    in intel_ring_pin()
   48  if (i915_gem_object_is_stolen(vma->obj))    in intel_ring_pin()
   53  ret = i915_ggtt_pin(vma, ww, 0, flags);    in intel_ring_pin()
   57  if (i915_vma_is_map_and_fenceable(vma) && !HAS_LLC(vma->vm->i915)) {    in intel_ring_pin()
   58  addr = (void __force *)i915_vma_pin_iomap(vma);    in intel_ring_pin()
   60  int type = intel_gt_coherent_map_type(vma->vm->gt, vma->obj, false);    in intel_ring_pin()
   62  addr = i915_gem_object_pin_map(vma->obj, type);    in intel_ring_pin()
   70  i915_vma_make_unshrinkable(vma);    in intel_ring_pin()
   79  i915_vma_unpin(vma);    in intel_ring_pin()
  [all …]
/linux/arch/powerpc/mm/book3s64/
radix_hugetlbpage.c
   10  void radix__flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr)    in radix__flush_hugetlb_page()
   13  struct hstate *hstate = hstate_file(vma->vm_file);    in radix__flush_hugetlb_page()
   16  radix__flush_tlb_page_psize(vma->vm_mm, vmaddr, psize);    in radix__flush_hugetlb_page()
   19  void radix__local_flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr)    in radix__local_flush_hugetlb_page()
   22  struct hstate *hstate = hstate_file(vma->vm_file);    in radix__local_flush_hugetlb_page()
   25  radix__local_flush_tlb_page_psize(vma->vm_mm, vmaddr, psize);    in radix__local_flush_hugetlb_page()
   28  void radix__flush_hugetlb_tlb_range(struct vm_area_struct *vma, unsigned long start,    in radix__flush_hugetlb_tlb_range()
   32  struct hstate *hstate = hstate_file(vma->vm_file);    in radix__flush_hugetlb_tlb_range()
   39  radix__flush_tlb_pwc_range_psize(vma->vm_mm, start, end, psize);    in radix__flush_hugetlb_tlb_range()
   41  radix__flush_tlb_range_psize(vma->vm_mm, start, end, psize);    in radix__flush_hugetlb_tlb_range()
  [all …]
/linux/drivers/gpu/drm/i915/gem/
i915_gem_mman.c
   29  __vma_matches(struct vm_area_struct *vma, struct file *filp,    in __vma_matches()
   32  if (vma->vm_file != filp)    in __vma_matches()
   35  return vma->vm_start == addr &&    in __vma_matches()
   36  (vma->vm_end - vma->vm_start) == PAGE_ALIGN(size);    in __vma_matches()
  106  struct vm_area_struct *vma;    in i915_gem_mmap_ioctl()
  112  vma = find_vma(mm, addr);    in i915_gem_mmap_ioctl()
  113  if (vma && __vma_matches(vma, obj->base.filp, addr, args->size))    in i915_gem_mmap_ioctl()
  114  vma->vm_page_prot =    in i915_gem_mmap_ioctl()
  115  pgprot_writecombine(vm_get_page_prot(vma->vm_flags));    in i915_gem_mmap_ioctl()
  254  struct vm_area_struct *area = vmf->vma;    in vm_fault_cpu()
  [all …]
/linux/fs/proc/
task_nommu.c
   24  struct vm_area_struct *vma;    in task_mem()
   29  for_each_vma(vmi, vma) {    in task_mem()
   30  bytes += kobjsize(vma);    in task_mem()
   32  region = vma->vm_region;    in task_mem()
   37  size = vma->vm_end - vma->vm_start;    in task_mem()
   41  is_nommu_shared_mapping(vma->vm_flags)) {    in task_mem()
   46  slack = region->vm_end - vma->vm_end;    in task_mem()
   84  struct vm_area_struct *vma;    in task_vsize()
   88  for_each_vma(vmi, vma)    in task_vsize()
   89  vsize += vma->vm_end - vma->vm_start;    in task_vsize()
  [all …]
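
task_vsize() above sums vm_end - vm_start over every VMA of a task; much the same figure can be recovered from userspace by parsing /proc/<pid>/maps:

```c
#include <stdio.h>

int main(void)
{
    unsigned long start, end, vsize = 0;
    char line[512];
    FILE *f = fopen("/proc/self/maps", "r");
    if (!f)
        return 1;

    while (fgets(line, sizeof(line), f))
        if (sscanf(line, "%lx-%lx", &start, &end) == 2)
            vsize += end - start;         /* same sum as task_vsize() */

    fclose(f);
    printf("vsize: %lu bytes\n", vsize);
    return 0;
}
```
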
