Home
last modified time | relevance | path

Searched full:vm_end (Results 1 – 25 of 181) sorted by relevance

1 2 3 4 5 6 7 8

/linux/mm/
H A Dnommu.c96 return vma->vm_end - vma->vm_start; in kobjsize()
448 BUG_ON(last->vm_end <= last->vm_start); in validate_nommu_regions()
449 BUG_ON(last->vm_top < last->vm_end); in validate_nommu_regions()
455 BUG_ON(region->vm_end <= region->vm_start); in validate_nommu_regions()
456 BUG_ON(region->vm_top < region->vm_end); in validate_nommu_regions()
601 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
680 if (vma->vm_end != end) in find_vma_exact()
899 vma->vm_region->vm_top = vma->vm_region->vm_end; in do_mmap_shared_file()
935 vma->vm_region->vm_top = vma->vm_region->vm_end; in do_mmap_private()
968 region->vm_end = region->vm_start + len; in do_mmap_private()
[all …]
H A Dmsync.c72 /* Here start < vma->vm_end. */ in SYSCALL_DEFINE3()
81 /* Here vma->vm_start <= start < vma->vm_end. */ in SYSCALL_DEFINE3()
90 fend = fstart + (min(end, vma->vm_end) - start) - 1; in SYSCALL_DEFINE3()
91 start = vma->vm_end; in SYSCALL_DEFINE3()
107 vma = find_vma(mm, vma->vm_end); in SYSCALL_DEFINE3()
H A Dvma.c254 * Before updating the vma's vm_start / vm_end / vm_pgoff fields, the
289 uprobe_munmap(vp->vma, vp->vma->vm_start, vp->vma->vm_end); in vma_prepare()
293 vp->adj_next->vm_end); in vma_prepare()
381 vp->remove->vm_end); in vma_complete()
389 WARN_ON_ONCE(vp->vma->vm_end < vp->remove->vm_end); in vma_complete()
423 return vmg->prev && vmg->prev->vm_end == vmg->start && in can_vma_merge_left()
484 unmap_vmas(&tlb, mas, vma, vma->vm_start, vma->vm_end, vma->vm_end, in unmap_region()
486 mas_set(mas, vma->vm_end); in unmap_region()
487 free_pgtables(&tlb, mas, vma, prev ? prev->vm_end : FIRST_USER_ADDRESS, in unmap_region()
507 WARN_ON(vma->vm_end <= addr); in __split_vma()
[all …]
H A Dvma_exec.c14 * shift, vma->vm_end - shift).
34 unsigned long old_end = vma->vm_end; in relocate_vma_down()
138 vma->vm_end = STACK_TOP_MAX; in create_init_stack_vma()
139 vma->vm_start = vma->vm_end - PAGE_SIZE; in create_init_stack_vma()
150 *top_mem_p = vma->vm_end - sizeof(void *); in create_init_stack_vma()
H A Dmremap.c1049 if (!err && vma->vm_end != old_addr + old_len) in prep_move_vma()
1087 unsigned long vm_end; in unmap_source_vma() local
1119 vm_end = vma->vm_end; in unmap_source_vma()
1165 if (vm_end > end) { in unmap_source_vma()
1251 unsigned long old_end = vrm->vma->vm_end; in dontunmap_complete()
1417 unsigned long end = vma->vm_end + delta; in vma_expandable()
1419 if (end < vma->vm_end) /* overflow */ in vma_expandable()
1421 if (find_vma_intersection(vma->vm_mm, vma->vm_end, end)) in vma_expandable()
1433 unsigned long suffix_bytes = vrm->vma->vm_end - vrm->addr; in vrm_can_expand_in_place()
1456 VMA_ITERATOR(vmi, mm, vma->vm_end); in expand_vma_in_place()
[all …]
H A Dvma.h134 * (and thereby, vmg->prev->vm_end).
139 * (and thereby, vmg->middle->vm_end).
216 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
466 vma->vm_end, vmi->mas.index, vmi->mas.last); in vma_iter_store_overwrite()
471 vmi->mas.last, vma->vm_start, vma->vm_start, vma->vm_end, in vma_iter_store_overwrite()
480 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_overwrite()
H A Dvma_init.c47 dest->vm_end = src->vm_end; in vm_area_init_from()
H A Dmadvise.c820 if (range.start >= vma->vm_end) in madvise_free_single_vma()
822 range.end = min(vma->vm_end, end_addr); in madvise_free_single_vma()
933 if (range->end > vma->vm_end) { in madvise_dontneed_free()
935 * Don't fail if end > vma->vm_end. If the old in madvise_dontneed_free()
943 * end-vma->vm_end range, but the manager can in madvise_dontneed_free()
946 range->end = vma->vm_end; in madvise_dontneed_free()
1595 if (madv_behavior->range.end > vma->vm_end || current->mm != mm || in try_vma_read_lock()
1649 /* Here start < (last_end|vma->vm_end). */ in madvise_walk_vmas()
1663 /* Here vma->vm_start <= range->start < (last_end|vma->vm_end) */ in madvise_walk_vmas()
1664 range->end = min(vma->vm_end, last_end); in madvise_walk_vmas()
[all …]
H A Dmseal.c49 prev_end = vma->vm_end; in range_contains_unmapped()
69 unsigned long curr_end = MIN(vma->vm_end, end); in mseal_apply()
/linux/tools/testing/vma/
H A Dvma.c76 ret->vm_end = end; in alloc_vma()
325 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_merge()
356 ASSERT_EQ(vma->vm_end, 0x2000); in test_simple_modify()
368 ASSERT_EQ(vma->vm_end, 0x1000); in test_simple_modify()
377 ASSERT_EQ(vma->vm_end, 0x2000); in test_simple_modify()
386 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_modify()
414 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_expand()
435 ASSERT_EQ(vma->vm_end, 0x1000); in test_simple_shrink()
518 ASSERT_EQ(vma->vm_end, 0x4000); in test_merge_new()
535 ASSERT_EQ(vma->vm_end, in test_merge_new()
[all …]
H A Dvma_internal.h322 /* VMA covers [vm_start; vm_end) addresses within mm */
324 unsigned long vm_end; member
664 vma->vm_end = end; in vma_set_range()
768 return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT; in vma_pages()
1152 unsigned long vm_end = vma->vm_end; in vm_end_gap() local
1155 vm_end += stack_guard_gap; in vm_end_gap()
1156 if (vm_end < vma->vm_end) in vm_end_gap()
1157 vm_end = -PAGE_SIZE; in vm_end_gap()
1159 return vm_end; in vm_end_gap()
1336 .end = vma->vm_end, in __compat_vma_mmap_prepare()
/linux/fs/proc/
H A Dtask_nommu.c35 size += region->vm_end - region->vm_start; in task_mem()
37 size = vma->vm_end - vma->vm_start; in task_mem()
46 slack = region->vm_end - vma->vm_end; in task_mem()
89 vsize += vma->vm_end - vma->vm_start; in task_vsize()
109 size += region->vm_end - region->vm_start; in task_statm()
150 vma->vm_end, in nommu_vma_show()
/linux/tools/testing/selftests/bpf/progs/
H A Diters_task_vma.c14 __u64 vm_end; member
35 vm_ranges[seen].vm_end = vma->vm_end; in iter_task_vma_for_each()
/linux/include/trace/events/
H A Dmmap.h
H A Dfs_dax.h17 __field(unsigned long, vm_end)
30 __entry->vm_end = vmf->vma->vm_end;
39 "%#lx vm_end %#lx pgoff %#lx max_pgoff %#lx %s",
47 __entry->vm_end,
/linux/mm/damon/tests/
H A Dvaddr-kunit.h28 mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1); in __link_vmas()
72 (struct vm_area_struct) {.vm_start = 10, .vm_end = 20}, in damon_test_three_regions_in_vmas()
73 (struct vm_area_struct) {.vm_start = 20, .vm_end = 25}, in damon_test_three_regions_in_vmas()
74 (struct vm_area_struct) {.vm_start = 200, .vm_end = 210}, in damon_test_three_regions_in_vmas()
75 (struct vm_area_struct) {.vm_start = 210, .vm_end = 220}, in damon_test_three_regions_in_vmas()
76 (struct vm_area_struct) {.vm_start = 300, .vm_end = 305}, in damon_test_three_regions_in_vmas()
77 (struct vm_area_struct) {.vm_start = 307, .vm_end = 330}, in damon_test_three_regions_in_vmas()
/linux/scripts/coccinelle/api/
H A Dvma_pages.cocci22 * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
32 - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
44 (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
/linux/arch/x86/um/
H A Dmem_32.c18 gate_vma.vm_end = FIXADDR_USER_END; in gate_vma_init()
49 return (addr >= vma->vm_start) && (addr < vma->vm_end); in in_gate_area()
/linux/arch/xtensa/kernel/
H A Dsyscall.c86 /* At this point: (addr < vmm->vm_end). */ in arch_get_unmapped_area()
90 addr = vmm->vm_end; in arch_get_unmapped_area()
/linux/drivers/media/common/videobuf2/
H A Dvideobuf2-memops.c96 vma->vm_end); in vb2_common_vm_open()
114 vma->vm_end); in vb2_common_vm_close()
/linux/arch/riscv/kvm/
H A Dmmu.c209 hva_t vm_end; in kvm_arch_prepare_memory_region() local
225 vm_end = min(reg_end, vma->vm_end); in kvm_arch_prepare_memory_region()
234 hva = vm_end; in kvm_arch_prepare_memory_region()
/linux/Documentation/bpf/
H A Dprog_lsm.rst36 unsigned long vm_start, vm_end;
80 vma->vm_end <= vma->vm_mm->brk);
/linux/drivers/sbus/char/
H A Dflash.c66 if (vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)) > size) in flash_mmap()
67 size = vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)); in flash_mmap()
/linux/kernel/bpf/
H A Dtask_iter.c484 info->prev_vm_end = curr_vma->vm_end; in task_vma_seq_get_next()
530 curr_vma = find_vma(curr_mm, curr_vma->vm_end); in task_vma_seq_get_next()
543 curr_vma->vm_end == info->prev_vm_end) in task_vma_seq_get_next()
544 curr_vma = find_vma(curr_mm, curr_vma->vm_end); in task_vma_seq_get_next()
644 info->prev_vm_end = info->vma->vm_end; in task_vma_seq_stop()
777 if (vma && vma->vm_start <= start && vma->vm_end > start) { in BPF_CALL_5()
/linux/drivers/infiniband/hw/hfi1/
H A Dfile_ops.c315 vma->vm_end - vma->vm_start, vma->vm_flags); in mmap_cdbg()
415 if ((vma->vm_end - vma->vm_start) != memlen) { in hfi1_file_mmap()
417 (vma->vm_end - vma->vm_start), memlen); in hfi1_file_mmap()
434 vm_end_save = vma->vm_end; in hfi1_file_mmap()
435 vma->vm_end = vma->vm_start; in hfi1_file_mmap()
440 vma->vm_end += memlen; in hfi1_file_mmap()
447 vma->vm_end = vm_end_save; in hfi1_file_mmap()
453 vma->vm_end = vm_end_save; in hfi1_file_mmap()
553 if ((vma->vm_end - vma->vm_start) != memlen) { in hfi1_file_mmap()
556 (vma->vm_end - vma->vm_start), memlen); in hfi1_file_mmap()

1 2 3 4 5 6 7 8