
Searched refs: vm_start (results 1 – 25 of 216, sorted by relevance)


/linux/mm/
nommu.c 96 return vma->vm_end - vma->vm_start; in kobjsize()
448 BUG_ON(last->vm_end <= last->vm_start); in validate_nommu_regions()
455 BUG_ON(region->vm_end <= region->vm_start); in validate_nommu_regions()
457 BUG_ON(region->vm_start < last->vm_top); in validate_nommu_regions()
483 if (region->vm_start < pregion->vm_start) in add_nommu_region()
485 else if (region->vm_start > pregion->vm_start) in add_nommu_region()
536 if (region->vm_top > region->vm_start) in __put_nommu_region()
546 free_page_series(region->vm_start, region->vm_top); in __put_nommu_region()
599 VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start); in delete_vma_from_mm()
601 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
[all …]
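
The nommu.c matches above lean on the basic size idiom: a VMA covers the half-open range [vm_start, vm_end), so its length is vm_end - vm_start. A minimal userspace sketch of that arithmetic (the struct below is a hypothetical stand-in for struct vm_area_struct, not kernel code):

/* Hypothetical stand-in for struct vm_area_struct, for illustration only. */
#include <assert.h>
#include <stdio.h>

struct fake_vma {
        unsigned long vm_start; /* first byte of the mapping */
        unsigned long vm_end;   /* one past the last byte */
};

/* Length of the mapping, as in kobjsize(): vm_end - vm_start. */
static unsigned long vma_size(const struct fake_vma *vma)
{
        return vma->vm_end - vma->vm_start;
}

int main(void)
{
        struct fake_vma vma = { .vm_start = 0x1000, .vm_end = 0x5000 };

        /* The range is half-open, so vm_end must be strictly greater than
         * vm_start, mirroring the BUG_ON(vm_end <= vm_start) checks above. */
        assert(vma.vm_end > vma.vm_start);
        printf("size: %lu bytes\n", vma_size(&vma)); /* prints 16384 */
        return 0;
}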
vma.h 164 return vma->vm_pgoff + PHYS_PFN(addr - vma->vm_start); in vma_pgoff_offset()
213 ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start))) in vma_iter_store_gfp()
216 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
523 vmi->mas.index > vma->vm_start)) { in vma_iter_store_overwrite()
525 vmi->mas.index, vma->vm_start, vma->vm_start, in vma_iter_store_overwrite()
529 vmi->mas.last < vma->vm_start)) { in vma_iter_store_overwrite()
531 vmi->mas.last, vma->vm_start, vma->vm_start, vma->vm_end, in vma_iter_store_overwrite()
537 ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start))) in vma_iter_store_overwrite()
540 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_overwrite()
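
vma_pgoff_offset() in the vma.h matches converts an address inside a mapping to a page offset in the backing object: the distance from vm_start, in pages, added to vm_pgoff. A hedged userspace sketch of that conversion (hypothetical struct, 4 KiB pages assumed):

#include <assert.h>
#include <stdio.h>

#define PAGE_SHIFT 12 /* assumed 4 KiB pages, for illustration */

struct fake_vma {
        unsigned long vm_start; /* start of the mapping */
        unsigned long vm_pgoff; /* offset into the backing object, in pages */
};

/* Page offset of addr in the backing object, mirroring vma_pgoff_offset():
 * vm_pgoff plus the number of whole pages between vm_start and addr. */
static unsigned long pgoff_of(const struct fake_vma *vma, unsigned long addr)
{
        return vma->vm_pgoff + ((addr - vma->vm_start) >> PAGE_SHIFT);
}

int main(void)
{
        struct fake_vma vma = { .vm_start = 0x400000, .vm_pgoff = 8 };

        /* 0x403000 is three pages past vm_start, so its pgoff is 8 + 3. */
        assert(pgoff_of(&vma, 0x403000) == 11);
        printf("pgoff: %lu\n", pgoff_of(&vma, 0x403000));
        return 0;
}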
vma.c 283 uprobe_munmap(vp->vma, vp->vma->vm_start, vp->vma->vm_end); in vma_prepare()
286 uprobe_munmap(vp->adj_next, vp->adj_next->vm_start, in vma_prepare()
374 uprobe_munmap(vp->remove, vp->remove->vm_start, in vma_complete()
434 if (!next || vmg->end != next->vm_start || !can_vma_merge_before(vmg)) in can_vma_merge_right()
478 unmap_vmas(&tlb, mas, vma, vma->vm_start, vma->vm_end, vma->vm_end); in unmap_region()
481 next ? next->vm_start : USER_PGTABLES_CEILING, in unmap_region()
499 WARN_ON(vma->vm_start >= addr); in __split_vma()
515 new->vm_start = addr; in __split_vma()
516 new->vm_pgoff += ((addr - vma->vm_start) >> PAGE_SHIFT); in __split_vma()
520 vma_iter_config(vmi, new->vm_start, new->vm_end); in __split_vma()
[all …]
vma_exec.c 33 unsigned long old_start = vma->vm_start; in relocate_vma_down()
76 next ? next->vm_start : USER_PGTABLES_CEILING); in relocate_vma_down()
85 next ? next->vm_start : USER_PGTABLES_CEILING); in relocate_vma_down()
140 vma->vm_start = vma->vm_end - PAGE_SIZE; in create_init_stack_vma()
msync.c 73 if (start < vma->vm_start) { in SYSCALL_DEFINE3()
76 start = vma->vm_start; in SYSCALL_DEFINE3()
88 fstart = (start - vma->vm_start) + in SYSCALL_DEFINE3()
mremap.c 651 if (!pmc->for_stack && vma->vm_start != addr_to_align) in can_align_down()
655 if (pmc->for_stack && addr_masked >= vma->vm_start) in can_align_down()
662 return find_vma_intersection(vma->vm_mm, addr_masked, vma->vm_start) == NULL; in can_align_down()
951 pgoff_t internal_pgoff = (vrm->addr - vma->vm_start) >> PAGE_SHIFT; in vrm_set_new_addr()
1051 if (vma->vm_start != old_addr) in prep_move_vma()
1090 unsigned long vm_start; in unmap_source_vma() local
1122 vm_start = vma->vm_start; in unmap_source_vma()
1163 if (vm_start < addr) { in unmap_source_vma()
1188 unsigned long internal_offset = vrm->addr - vrm->vma->vm_start; in copy_vma_and_data()
1254 unsigned long old_start = vrm->vma->vm_start; in dontunmap_complete()
[all …]
mlock.c 533 if (start > vma->vm_start) in apply_vma_lock_flags()
537 tmp = vma->vm_start; in apply_vma_lock_flags()
542 if (vma->vm_start != tmp) in apply_vma_lock_flags()
587 if (start > vma->vm_start) in count_mm_mlocked_page_nr()
588 count -= (start - vma->vm_start); in count_mm_mlocked_page_nr()
590 count += end - vma->vm_start; in count_mm_mlocked_page_nr()
593 count += vma->vm_end - vma->vm_start; in count_mm_mlocked_page_nr()
734 error = mlock_fixup(&vmi, vma, &prev, vma->vm_start, vma->vm_end, in apply_mlockall_flags()
mmap_lock.c 281 if (unlikely(address < vma->vm_start || address >= vma->vm_end)) { in lock_vma_under_rcu()
361 if (from_addr < vma->vm_start) { in lock_next_vma()
454 if (likely(vma && (vma->vm_start <= addr))) in lock_mm_and_find_vma()
482 if (vma->vm_start <= addr) in lock_mm_and_find_vma()
/linux/tools/testing/vma/
vma.c 77 vma->vm_start = start; in alloc_vma()
326 ASSERT_EQ(vma->vm_start, 0); in test_simple_merge()
358 ASSERT_EQ(vma->vm_start, 0x1000); in test_simple_modify()
370 ASSERT_EQ(vma->vm_start, 0); in test_simple_modify()
379 ASSERT_EQ(vma->vm_start, 0x1000); in test_simple_modify()
388 ASSERT_EQ(vma->vm_start, 0x2000); in test_simple_modify()
416 ASSERT_EQ(vma->vm_start, 0); in test_simple_expand()
437 ASSERT_EQ(vma->vm_start, 0); in test_simple_shrink()
529 ASSERT_EQ(vma->vm_start, 0); in __test_merge_new()
548 ASSERT_EQ(vma->vm_start, 0); in __test_merge_new()
[all …]
/linux/mm/damon/tests/
vaddr-kunit.h 28 mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1); in __link_vmas()
72 (struct vm_area_struct) {.vm_start = 10, .vm_end = 20}, in damon_test_three_regions_in_vmas()
73 (struct vm_area_struct) {.vm_start = 20, .vm_end = 25}, in damon_test_three_regions_in_vmas()
74 (struct vm_area_struct) {.vm_start = 200, .vm_end = 210}, in damon_test_three_regions_in_vmas()
75 (struct vm_area_struct) {.vm_start = 210, .vm_end = 220}, in damon_test_three_regions_in_vmas()
76 (struct vm_area_struct) {.vm_start = 300, .vm_end = 305}, in damon_test_three_regions_in_vmas()
77 (struct vm_area_struct) {.vm_start = 307, .vm_end = 330}, in damon_test_three_regions_in_vmas()
/linux/tools/testing/selftests/bpf/progs/
iters_task_vma.c 13 __u64 vm_start; member
34 vm_ranges[seen].vm_start = vma->vm_start; in iter_task_vma_for_each()
find_vma_fail1.c 5 #define vm_flags vm_start
17 vma->vm_start = 0xffffffffff600000; in write_vma()
/linux/fs/proc/
task_nommu.c 35 size += region->vm_end - region->vm_start; in task_mem()
37 size = vma->vm_end - vma->vm_start; in task_mem()
89 vsize += vma->vm_end - vma->vm_start; in task_vsize()
109 size += region->vm_end - region->vm_start; in task_statm()
149 vma->vm_start, in nommu_vma_show()
184 *ppos = vma->vm_start; in proc_get_vma()
/linux/drivers/android/
binder_alloc.c 65 return alloc->vm_start + alloc->buffer_size - buffer->user_data; in binder_alloc_buffer_size()
204 index = (page_addr - alloc->vm_start) / PAGE_SIZE; in binder_lru_freelist_add()
341 alloc->pid, addr - alloc->vm_start); in binder_install_single_page()
353 alloc->pid, __func__, addr - alloc->vm_start, ret); in binder_install_single_page()
375 index = (page_addr - alloc->vm_start) / PAGE_SIZE; in binder_install_buffer_pages()
404 index = (page_addr - alloc->vm_start) / PAGE_SIZE; in binder_lru_freelist_del()
757 BUG_ON(buffer->user_data < alloc->vm_start); in binder_free_buf_locked()
758 BUG_ON(buffer->user_data > alloc->vm_start + alloc->buffer_size); in binder_free_buf_locked()
817 (buffer->user_data - alloc->vm_start); in binder_alloc_get_page()
913 alloc->buffer_size = min_t(unsigned long, vma->vm_end - vma->vm_start, in binder_alloc_mmap_handler()
[all …]
/linux/scripts/coccinelle/api/
vma_pages.cocci 22 * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
32 - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
44 (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
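
The Coccinelle rule above exists because (vm_end - vm_start) >> PAGE_SHIFT is exactly what the vma_pages() helper computes, so open-coded copies can be replaced. A small sketch of the equivalence, using the same hypothetical struct and an assumed 4 KiB page size:

#include <assert.h>
#include <stdio.h>

#define PAGE_SHIFT 12 /* assumed 4 KiB pages, for illustration */

struct fake_vma {
        unsigned long vm_start;
        unsigned long vm_end;
};

/* What vma_pages() returns: the number of pages the mapping spans. */
static unsigned long fake_vma_pages(const struct fake_vma *vma)
{
        return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;
}

int main(void)
{
        struct fake_vma vma = { .vm_start = 0x10000, .vm_end = 0x14000 };

        assert(fake_vma_pages(&vma) == 4); /* 16 KiB mapping, 4 pages */
        printf("pages: %lu\n", fake_vma_pages(&vma));
        return 0;
}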
/linux/arch/parisc/mm/
fault.c 130 if (tree->vm_start > addr) {
136 if (prev->vm_next->vm_start > addr)
262 vma->vm_start, vma->vm_end); in show_signal_msg()
296 if (!vma || address < vma->vm_start) { in do_page_fault()
377 address < vma->vm_start || address >= vma->vm_end) { in do_page_fault()
/linux/arch/arc/kernel/
arc_hostlink.c 22 if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff, in arc_hl_mmap()
23 vma->vm_end - vma->vm_start, in arc_hl_mmap()
troubleshoot.c 103 vma->vm_start < TASK_UNMAPPED_BASE ? in show_faulting_vma()
104 address : address - vma->vm_start, in show_faulting_vma()
105 nm, vma->vm_start, vma->vm_end); in show_faulting_vma()
/linux/arch/powerpc/platforms/book3s/
vas-api.c 438 fault = vmf_insert_pfn(vma, vma->vm_start, in vas_mmap_fault()
516 if ((vma->vm_end - vma->vm_start) > PAGE_SIZE) { in coproc_mmap()
518 (vma->vm_end - vma->vm_start), PAGE_SIZE); in coproc_mmap()
573 rc = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff, in coproc_mmap()
574 vma->vm_end - vma->vm_start, prot); in coproc_mmap()
577 vma->vm_start, rc); in coproc_mmap()
/linux/drivers/xen/xenfs/
xenstored.c 36 size_t size = vma->vm_end - vma->vm_start; in xsd_kva_mmap()
41 if (remap_pfn_range(vma, vma->vm_start, in xsd_kva_mmap()
/linux/kernel/bpf/
sysfs_btf.c 23 size_t vm_size = vma->vm_end - vma->vm_start; in btf_sysfs_vmlinux_mmap()
43 return remap_pfn_range(vma, vma->vm_start, pfn, vm_size, vma->vm_page_prot); in btf_sysfs_vmlinux_mmap()
/linux/include/trace/events/
fs_dax.h 16 __field(unsigned long, vm_start)
29 __entry->vm_start = vmf->vma->vm_start;
46 __entry->vm_start,
/linux/drivers/media/common/videobuf2/
videobuf2-memops.c 95 __func__, h, refcount_read(h->refcount), vma->vm_start, in vb2_common_vm_open()
113 __func__, h, refcount_read(h->refcount), vma->vm_start, in vb2_common_vm_close()
/linux/drivers/xen/xenbus/
xenbus_dev_backend.c 94 size_t size = vma->vm_end - vma->vm_start; in xenbus_backend_mmap()
102 if (remap_pfn_range(vma, vma->vm_start, in xenbus_backend_mmap()
/linux/drivers/pci/
mmap.c 49 return io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff, in pci_mmap_resource_range()
50 vma->vm_end - vma->vm_start, in pci_mmap_resource_range()
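
Several driver hits above (arc_hostlink.c, xenstored.c, xenbus_dev_backend.c, sysfs_btf.c, pci/mmap.c) share one mmap pattern: take the requested length as vm_end - vm_start and remap the whole range starting at vm_start. A minimal, hypothetical handler sketching that pattern (not taken from any of the files listed; bounds and permission checks omitted):

#include <linux/fs.h>
#include <linux/mm.h>

/* Hypothetical mmap handler illustrating the shared idiom above. */
static int example_mmap(struct file *filp, struct vm_area_struct *vma)
{
        size_t size = vma->vm_end - vma->vm_start; /* requested length */
        unsigned long pfn = vma->vm_pgoff;         /* frame chosen by the caller */

        /* Map the whole [vm_start, vm_end) range onto pfn, keeping the
         * protection bits the core mm set up for this VMA. */
        return remap_pfn_range(vma, vma->vm_start, pfn, size,
                               vma->vm_page_prot);
}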
