1 // SPDX-License-Identifier: GPL-2.0-or-later
7 #include "generated/bit-length.h"
9 #include "maple-shared.h"
13 #include "../../../mm/vma.h"
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
29 * provides userland-equivalent functionality for everything vma.c uses.
31 #include "../../../mm/vma_init.c"
32 #include "../../../mm/vma_exec.c"
33 #include "../../../mm/vma.c"
46 } while (0)
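A minimal sketch of how the fail_prealloc override above is exercised (fail_prealloc, expand_existing(), cleanup_mm() and the ASSERT_* macros all appear in fragments later in this listing; the exact call site shown here is illustrative, not a quote from the file):

	/* Force the next vma_iter_prealloc() to report failure, then check
	 * that the operation under test propagates -ENOMEM. */
	fail_prealloc = true;
	ASSERT_EQ(expand_existing(&vmg), -ENOMEM);
	/* cleanup_mm() also resets fail_prealloc for the next test. */
	cleanup_mm(&mm, &vmi);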
60 return (unsigned long)-1;
64 static struct vm_area_struct *alloc_vma(struct mm_struct *mm,
70 struct vm_area_struct *ret = vm_area_alloc(mm);
75 ret->vm_start = start;
76 ret->vm_end = end;
77 ret->vm_pgoff = pgoff;
78 ret->__vm_flags = vm_flags;
85 static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma)
89 res = vma_link(mm, vma);
102 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
108 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags);
113 if (attach_vma(mm, vma)) {
123 vma->vm_lock_seq = UINT_MAX;
136 vmg->next = vma_next(vmg->vmi);
137 vmg->prev = vma_prev(vmg->vmi);
138 vma_iter_next_range(vmg->vmi);
177 vma_iter_set(vmg->vmi, start);
179 vmg->prev = NULL;
180 vmg->middle = NULL;
181 vmg->next = NULL;
182 vmg->target = NULL;
184 vmg->start = start;
185 vmg->end = end;
186 vmg->pgoff = pgoff;
187 vmg->vm_flags = vm_flags;
189 vmg->just_expand = false;
190 vmg->__remove_middle = false;
191 vmg->__remove_next = false;
192 vmg->__adjust_middle_start = false;
193 vmg->__adjust_next_start = false;
202 vmg->anon_vma = anon_vma;
211 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
224 ASSERT_EQ(vmg->state, VMA_MERGE_SUCCESS);
230 ASSERT_EQ(vmg->state, VMA_MERGE_NOMERGE);
232 return alloc_and_link_vma(mm, start, end, pgoff, vm_flags);
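A usage sketch for the try_merge_new_vma() helper above, assembled from the calls visible further down (the assertions on 'merged' are assumed rather than matched lines): the caller describes a proposed range and learns via the out-parameter whether a merge actually took place.

	bool merged;
	struct vm_area_struct *vma;

	/* Propose 0x2000-0x3000 (pgoff 2); in the new-VMA merge test below
	 * this bridges two existing VMAs, so a merge is expected and the
	 * resulting VMA spans 0 to 0x4000. */
	vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged);
	ASSERT_TRUE(merged);
	ASSERT_EQ(vma->vm_start, 0);
	ASSERT_EQ(vma->vm_end, 0x4000);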
249 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
252 int count = 0;
257 vma_iter_set(vmi, 0);
263 mtree_destroy(&mm->mm_mt);
264 mm->map_count = 0;
271 int seq = vma->vm_lock_seq;
274 vma->vm_lock_seq = UINT_MAX;
277 return seq > -1;
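The fragment above captures vm_lock_seq, resets it to UINT_MAX and reports whether it was ever set, i.e. whether the VMA write lock was started during the preceding operation. A hedged usage sketch; the helper's name is assumed here to be vma_write_started(), since its signature is not among the matched lines:

	/* After an operation expected to write-lock vma_prev, the probe
	 * returns true and resets the state so the same VMA can be checked
	 * again after the next operation. */
	ASSERT_TRUE(vma_write_started(vma_prev));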
280 /* Helper function providing a dummy vm_ops->close() method. */
289 vma->anon_vma = anon_vma;
290 INIT_LIST_HEAD(&vma->anon_vma_chain);
291 list_add(&avc->same_vma, &vma->anon_vma_chain);
292 avc->anon_vma = vma->anon_vma;
305 struct mm_struct mm = {};
306 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, vm_flags);
307 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, vm_flags);
308 VMA_ITERATOR(vmi, &mm, 0x1000);
310 .mm = &mm,
312 .start = 0x1000,
313 .end = 0x2000,
318 ASSERT_FALSE(attach_vma(&mm, vma_left));
319 ASSERT_FALSE(attach_vma(&mm, vma_right));
324 ASSERT_EQ(vma->vm_start, 0);
325 ASSERT_EQ(vma->vm_end, 0x3000);
326 ASSERT_EQ(vma->vm_pgoff, 0);
327 ASSERT_EQ(vma->vm_flags, vm_flags);
330 mtree_destroy(&mm.mm_mt);
339 struct mm_struct mm = {};
340 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags);
341 VMA_ITERATOR(vmi, &mm, 0x1000);
343 ASSERT_FALSE(attach_vma(&mm, init_vma));
350 0x1000, 0x2000, VM_READ | VM_MAYREAD);
355 ASSERT_EQ(vma->vm_start, 0x1000);
356 ASSERT_EQ(vma->vm_end, 0x2000);
357 ASSERT_EQ(vma->vm_pgoff, 1);
364 vma_iter_set(&vmi, 0);
367 ASSERT_EQ(vma->vm_start, 0);
368 ASSERT_EQ(vma->vm_end, 0x1000);
369 ASSERT_EQ(vma->vm_pgoff, 0);
376 ASSERT_EQ(vma->vm_start, 0x1000);
377 ASSERT_EQ(vma->vm_end, 0x2000);
378 ASSERT_EQ(vma->vm_pgoff, 1);
385 ASSERT_EQ(vma->vm_start, 0x2000);
386 ASSERT_EQ(vma->vm_end, 0x3000);
387 ASSERT_EQ(vma->vm_pgoff, 2);
390 mtree_destroy(&mm.mm_mt);
398 struct mm_struct mm = {};
399 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, vm_flags);
400 VMA_ITERATOR(vmi, &mm, 0);
404 .start = 0,
405 .end = 0x3000,
406 .pgoff = 0,
409 ASSERT_FALSE(attach_vma(&mm, vma));
413 ASSERT_EQ(vma->vm_start, 0);
414 ASSERT_EQ(vma->vm_end, 0x3000);
415 ASSERT_EQ(vma->vm_pgoff, 0);
418 mtree_destroy(&mm.mm_mt);
426 struct mm_struct mm = {};
427 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags);
428 VMA_ITERATOR(vmi, &mm, 0);
430 ASSERT_FALSE(attach_vma(&mm, vma));
432 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
434 ASSERT_EQ(vma->vm_start, 0);
435 ASSERT_EQ(vma->vm_end, 0x1000);
436 ASSERT_EQ(vma->vm_pgoff, 0);
439 mtree_destroy(&mm.mm_mt);
447 struct mm_struct mm = {};
448 VMA_ITERATOR(vmi, &mm, 0);
450 .mm = &mm,
476 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);
479 INIT_LIST_HEAD(&vma_a->anon_vma_chain);
480 list_add(&dummy_anon_vma_chain_a.same_vma, &vma_a->anon_vma_chain);
482 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags);
484 INIT_LIST_HEAD(&vma_b->anon_vma_chain);
485 list_add(&dummy_anon_vma_chain_b.same_vma, &vma_b->anon_vma_chain);
487 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, vm_flags);
489 INIT_LIST_HEAD(&vma_c->anon_vma_chain);
490 list_add(&dummy_anon_vma_chain_c.same_vma, &vma_c->anon_vma_chain);
498 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, vm_flags, &merged);
500 INIT_LIST_HEAD(&vma_d->anon_vma_chain);
501 list_add(&dummy_anon_vma_chain_d.same_vma, &vma_d->anon_vma_chain);
503 ASSERT_EQ(mm.map_count, 4);
511 vma_a->vm_ops = &vm_ops; /* This should have no impact. */
512 vma_b->anon_vma = &dummy_anon_vma;
513 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged);
517 ASSERT_EQ(vma->vm_start, 0);
518 ASSERT_EQ(vma->vm_end, 0x4000);
519 ASSERT_EQ(vma->vm_pgoff, 0);
520 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
522 ASSERT_EQ(mm.map_count, 3);
530 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, vm_flags, &merged);
534 ASSERT_EQ(vma->vm_start, 0);
535 ASSERT_EQ(vma->vm_end, 0x5000);
536 ASSERT_EQ(vma->vm_pgoff, 0);
537 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
539 ASSERT_EQ(mm.map_count, 3);
547 vma_d->anon_vma = &dummy_anon_vma;
548 vma_d->vm_ops = &vm_ops; /* This should have no impact. */
549 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, vm_flags, &merged);
553 ASSERT_EQ(vma->vm_start, 0x6000);
554 ASSERT_EQ(vma->vm_end, 0x9000);
555 ASSERT_EQ(vma->vm_pgoff, 6);
556 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
558 ASSERT_EQ(mm.map_count, 3);
566 vma_d->vm_ops = NULL; /* This would otherwise degrade the merge. */
567 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, vm_flags, &merged);
571 ASSERT_EQ(vma->vm_start, 0);
572 ASSERT_EQ(vma->vm_end, 0x9000);
573 ASSERT_EQ(vma->vm_pgoff, 0);
574 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
576 ASSERT_EQ(mm.map_count, 2);
584 vma_c->anon_vma = &dummy_anon_vma;
585 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, vm_flags, &merged);
589 ASSERT_EQ(vma->vm_start, 0xa000);
590 ASSERT_EQ(vma->vm_end, 0xc000);
591 ASSERT_EQ(vma->vm_pgoff, 0xa);
592 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
594 ASSERT_EQ(mm.map_count, 2);
602 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, vm_flags, &merged);
606 ASSERT_EQ(vma->vm_start, 0);
607 ASSERT_EQ(vma->vm_end, 0xc000);
608 ASSERT_EQ(vma->vm_pgoff, 0);
609 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
611 ASSERT_EQ(mm.map_count, 1);
620 count = 0;
621 vma_iter_set(&vmi, 0);
624 ASSERT_EQ(vma->vm_start, 0);
625 ASSERT_EQ(vma->vm_end, 0xc000);
626 ASSERT_EQ(vma->vm_pgoff, 0);
627 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
636 mtree_destroy(&mm.mm_mt);
643 struct mm_struct mm = {};
644 VMA_ITERATOR(vmi, &mm, 0);
646 .mm = &mm,
650 vm_flags_t all_special_flags = 0;
655 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
664 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
675 vmg_set_range(&vmg, 0x3000, 0x4000, 3, vm_flags);
676 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
679 vma_left->__vm_flags = vm_flags | special_flag;
694 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags);
698 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
701 vma_left->__vm_flags = vm_flags | special_flag;
708 cleanup_mm(&mm, &vmi);
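For reference, VM_SPECIAL in the kernel is VM_IO | VM_DONTEXPAND | VM_PFNMAP | VM_MIXEDMAP, so the special_flags[] array iterated above presumably contains those four flags (its initializer is not among the matched lines). A sketch of what the loop asserts, under that assumption:

	/* Assumed contents: each VM_SPECIAL constituent must, on its own,
	 * prevent the otherwise-compatible adjacent VMAs from merging. */
	vm_flags_t special_flags[] = { VM_IO, VM_DONTEXPAND, VM_PFNMAP, VM_MIXEDMAP };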
715 struct mm_struct mm = {};
716 VMA_ITERATOR(vmi, &mm, 0);
718 .mm = &mm,
728 * vm_ops->close() hook.
742 * has a vm_ops->close() callback that will need to be called when
753 * vm_ops->close: - - !NULL
762 * vm_ops->close: - !NULL
771 * - !NULL NULL
774 * Cannot occur, because vma->vm_ops being the same implies the same
775 * vma->vm_file, and therefore this would mean that next->vm_ops->close
781 * is one where both the previous and next VMAs are merged - in this
789 * ->
794 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
795 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags);
796 vma_next->vm_ops = &vm_ops;
798 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
801 ASSERT_EQ(vma_prev->vm_start, 0);
802 ASSERT_EQ(vma_prev->vm_end, 0x5000);
803 ASSERT_EQ(vma_prev->vm_pgoff, 0);
805 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
819 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
820 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
821 vma->vm_ops = &vm_ops;
823 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
834 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
847 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
848 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags);
849 vma->vm_ops = &vm_ops;
851 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
861 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
875 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
876 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
877 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags);
878 vma->vm_ops = &vm_ops;
880 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
887 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
896 * ->
901 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
902 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
903 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags);
904 vma_next->vm_ops = &vm_ops;
906 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
912 ASSERT_EQ(vma_prev->vm_start, 0);
913 ASSERT_EQ(vma_prev->vm_end, 0x5000);
914 ASSERT_EQ(vma_prev->vm_pgoff, 0);
916 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
924 struct mm_struct mm = {};
925 VMA_ITERATOR(vmi, &mm, 0);
927 .mm = &mm,
930 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);
931 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, vm_flags);
939 * surrounding VMAs have vm_ops->close() hooks (but are otherwise
943 * A v-------v B
944 * |-----| |-----|
953 * |------------||-----|
957 /* Give both prev and next a vm_ops->close() hook. */
958 vma_prev->vm_ops = &vm_ops;
959 vma_next->vm_ops = &vm_ops;
961 vmg_set_range(&vmg, 0x2000, 0x5000, 2, vm_flags);
965 ASSERT_EQ(vma->vm_start, 0);
966 ASSERT_EQ(vma->vm_end, 0x5000);
967 ASSERT_EQ(vma->vm_pgoff, 0);
968 ASSERT_EQ(vma->vm_ops, &vm_ops);
970 ASSERT_EQ(mm.map_count, 2);
972 cleanup_mm(&mm, &vmi);
979 struct mm_struct mm = {};
980 VMA_ITERATOR(vmi, &mm, 0);
983 .mm = &mm,
992 * Merge right case - partial span.
994 * <->
997 * ->
1001 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags);
1002 vma->vm_ops = &vm_ops; /* This should have no impact. */
1003 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, vm_flags);
1004 vma_next->vm_ops = &vm_ops; /* This should have no impact. */
1005 vmg_set_range_anon_vma(&vmg, 0x3000, 0x6000, 3, vm_flags, &dummy_anon_vma);
1011 ASSERT_EQ(vma_next->vm_start, 0x3000);
1012 ASSERT_EQ(vma_next->vm_end, 0x9000);
1013 ASSERT_EQ(vma_next->vm_pgoff, 3);
1014 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
1015 ASSERT_EQ(vma->vm_start, 0x2000);
1016 ASSERT_EQ(vma->vm_end, 0x3000);
1017 ASSERT_EQ(vma->vm_pgoff, 2);
1020 ASSERT_EQ(mm.map_count, 2);
1023 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1026 * Merge right case - full span.
1028 * <-->
1031 * ->
1035 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags);
1036 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, vm_flags);
1037 vma_next->vm_ops = &vm_ops; /* This should have no impact. */
1038 vmg_set_range_anon_vma(&vmg, 0x2000, 0x6000, 2, vm_flags, &dummy_anon_vma);
1043 ASSERT_EQ(vma_next->vm_start, 0x2000);
1044 ASSERT_EQ(vma_next->vm_end, 0x9000);
1045 ASSERT_EQ(vma_next->vm_pgoff, 2);
1046 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
1048 ASSERT_EQ(mm.map_count, 1);
1051 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1054 * Merge left case - partial span.
1056 * <->
1059 * ->
1063 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1064 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1065 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1066 vma->vm_ops = &vm_ops; /* This should have no impact. */
1067 vmg_set_range_anon_vma(&vmg, 0x3000, 0x6000, 3, vm_flags, &dummy_anon_vma);
1073 ASSERT_EQ(vma_prev->vm_start, 0);
1074 ASSERT_EQ(vma_prev->vm_end, 0x6000);
1075 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1076 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1077 ASSERT_EQ(vma->vm_start, 0x6000);
1078 ASSERT_EQ(vma->vm_end, 0x7000);
1079 ASSERT_EQ(vma->vm_pgoff, 6);
1082 ASSERT_EQ(mm.map_count, 2);
1085 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1088 * Merge left case - full span.
1090 * <-->
1093 * ->
1097 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1098 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1099 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1100 vmg_set_range_anon_vma(&vmg, 0x3000, 0x7000, 3, vm_flags, &dummy_anon_vma);
1106 ASSERT_EQ(vma_prev->vm_start, 0);
1107 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1108 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1109 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1111 ASSERT_EQ(mm.map_count, 1);
1114 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1119 * <-->
1122 * ->
1126 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1127 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1128 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1129 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags);
1130 vmg_set_range_anon_vma(&vmg, 0x3000, 0x7000, 3, vm_flags, &dummy_anon_vma);
1136 ASSERT_EQ(vma_prev->vm_start, 0);
1137 ASSERT_EQ(vma_prev->vm_end, 0x9000);
1138 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1139 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1141 ASSERT_EQ(mm.map_count, 1);
1144 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1147 * Non-merge ranges. The modified VMA merge operation assumes that the
1151 * -
1152 * -
1153 * -
1154 * <->
1161 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1162 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags);
1163 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, vm_flags);
1165 vmg_set_range(&vmg, 0x4000, 0x5000, 4, vm_flags);
1171 vmg_set_range(&vmg, 0x5000, 0x6000, 5, vm_flags);
1177 vmg_set_range(&vmg, 0x6000, 0x7000, 6, vm_flags);
1183 vmg_set_range(&vmg, 0x4000, 0x7000, 4, vm_flags);
1189 vmg_set_range(&vmg, 0x4000, 0x6000, 4, vm_flags);
1195 vmg_set_range(&vmg, 0x5000, 0x6000, 5, vm_flags);
1201 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
1209 struct mm_struct mm = {};
1210 VMA_ITERATOR(vmi, &mm, 0);
1213 .mm = &mm,
1225 * <-->
1228 * ->
1232 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1233 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1234 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags);
1238 * merge with the NULL vmg->anon_vma.
1242 vmg_set_range_anon_vma(&vmg, 0x3000, 0x7000, 3, vm_flags, NULL);
1250 ASSERT_EQ(vma_prev->vm_start, 0);
1251 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1252 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1257 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1263 * <-->
1266 * ->
1270 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1271 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags);
1273 vmg_set_range_anon_vma(&vmg, 0x3000, 0x7000, 3, vm_flags, NULL);
1281 ASSERT_EQ(vma_prev->vm_start, 0);
1282 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1283 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1288 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1296 struct mm_struct mm = {};
1297 VMA_ITERATOR(vmi, &mm, 0);
1299 .mm = &mm,
1316 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1317 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1318 vma_next->anon_vma = &dummy_anon_vma;
1320 vmg_set_range(&vmg, 0, 0x5000, 0, vm_flags);
1324 ASSERT_EQ(expand_existing(&vmg), 0);
1327 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1328 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1331 cleanup_mm(&mm, &vmi);
1336 * |<----->|
1337 * |-------*********-------|
1342 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1343 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1344 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags);
1347 INIT_LIST_HEAD(&vma_next->anon_vma_chain);
1348 list_add(&dummy_anon_vma_chain.same_vma, &vma_next->anon_vma_chain);
1350 vma_next->anon_vma = &dummy_anon_vma;
1351 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
1358 ASSERT_EQ(vma_prev->vm_start, 0);
1359 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1361 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1362 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1364 cleanup_mm(&mm, &vmi);
1369 * |<----->|
1370 * |-------*********-------|
1375 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1376 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1377 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags);
1380 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
1387 ASSERT_EQ(vma_prev->vm_start, 0);
1388 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1390 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1391 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1393 cleanup_mm(&mm, &vmi);
1398 * |<----->|
1399 * |-------*************
1404 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1405 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags);
1408 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
1415 ASSERT_EQ(vma_prev->vm_start, 0);
1416 ASSERT_EQ(vma_prev->vm_end, 0x5000);
1418 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1419 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1421 cleanup_mm(&mm, &vmi);
1426 * |<----->|
1427 * *************-------|
1432 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, vm_flags);
1433 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags);
1436 vmg_set_range(&vmg, 0x3000, 0x5000, 3, vm_flags);
1443 ASSERT_EQ(vma_next->vm_start, 0x3000);
1444 ASSERT_EQ(vma_next->vm_end, 0x8000);
1446 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
1447 ASSERT_TRUE(vma_next->anon_vma->was_cloned);
1449 cleanup_mm(&mm, &vmi);
1456 struct mm_struct mm = {};
1457 VMA_ITERATOR(vmi, &mm, 0);
1459 .mm = &mm,
1471 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1472 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1473 vma->anon_vma = &dummy_anon_vma;
1475 vmg_set_range_anon_vma(&vmg, 0x3000, 0x5000, 3, vm_flags, &dummy_anon_vma);
1486 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1491 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */
1499 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags);
1500 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1501 vma->anon_vma = &dummy_anon_vma;
1503 vmg_set_range(&vmg, 0, 0x5000, 3, vm_flags);
1508 ASSERT_EQ(expand_existing(&vmg), -ENOMEM);
1511 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1515 cleanup_mm(&mm, &vmi);
1522 struct mm_struct mm = {};
1523 VMA_ITERATOR(vmi, &mm, 0x1000);
1526 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, vm_flags);
1527 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags);
1533 * <->
1538 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1539 ASSERT_EQ(vma->vm_start, 0);
1540 ASSERT_EQ(vma->vm_end, 0x4000);
1541 ASSERT_EQ(vma->vm_pgoff, 0);
1543 ASSERT_EQ(mm.map_count, 1);
1545 cleanup_mm(&mm, &vmi);
1552 struct mm_struct mm = {};
1554 VMA_ITERATOR(vmi, &mm, 0);
1559 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1560 vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);
1562 ASSERT_EQ(vma_new->vm_start, 0);
1563 ASSERT_EQ(vma_new->vm_end, 0x2000);
1564 ASSERT_EQ(vma_new->vm_pgoff, 0);
1567 cleanup_mm(&mm, &vmi);
1571 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);
1572 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, vm_flags);
1573 vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);
1578 cleanup_mm(&mm, &vmi);
1585 struct mm_struct mm = {};
1586 VMA_ITERATOR(vmi, &mm, 0);
1588 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, vm_flags, 5);
1596 alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);
1600 * 0x9000.
1602 vma_iter_set(&vmi, 0x3000);
1603 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1611 ASSERT_EQ(vma->vm_start, 0x3000);
1612 ASSERT_EQ(vma->vm_end, 0x9000);
1613 ASSERT_EQ(vma->vm_pgoff, 3);
1615 ASSERT_EQ(vma_iter_addr(&vmi), 0x3000);
1618 cleanup_mm(&mm, &vmi);
1624 struct mm_struct mm = {};
1627 VMA_ITERATOR(vmi, &mm, 0);
1629 current->mm = &mm;
1631 /* Map at 0x300000, length 0x3000. */
1632 addr = __mmap_region(NULL, 0x300000, 0x3000,
1634 0x300, NULL);
1635 ASSERT_EQ(addr, 0x300000);
1637 /* Map at 0x250000, length 0x3000. */
1638 addr = __mmap_region(NULL, 0x250000, 0x3000,
1640 0x250, NULL);
1641 ASSERT_EQ(addr, 0x250000);
1643 /* Map at 0x303000, merging with the 0x300000 mapping to form a 0x6000-long VMA. */
1644 addr = __mmap_region(NULL, 0x303000, 0x3000,
1646 0x303, NULL);
1647 ASSERT_EQ(addr, 0x303000);
1649 /* Map at 0x24d000, merging with the 0x250000 mapping to form a 0x6000-long VMA. */
1650 addr = __mmap_region(NULL, 0x24d000, 0x3000,
1652 0x24d, NULL);
1653 ASSERT_EQ(addr, 0x24d000);
1655 ASSERT_EQ(mm.map_count, 2);
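A note on the pgoff values in this test: assuming the usual 4 KiB page size, pgoff is the file offset in pages, and these mappings pick it to equal the virtual address shifted right by 12, e.g. 0x300000 >> 12 == 0x300 and 0x24d000 >> 12 == 0x24d. That offset compatibility is what lets the mappings at 0x303000 (pgoff 0x303) and 0x24d000 (pgoff 0x24d) merge with their neighbours, leaving only the two VMAs checked here.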
1658 if (vma->vm_start == 0x300000) {
1659 ASSERT_EQ(vma->vm_end, 0x306000);
1660 ASSERT_EQ(vma->vm_pgoff, 0x300);
1661 } else if (vma->vm_start == 0x24d000) {
1662 ASSERT_EQ(vma->vm_end, 0x253000);
1663 ASSERT_EQ(vma->vm_pgoff, 0x24d);
1669 cleanup_mm(&mm, &vmi);
1675 int num_tests = 0, num_fail = 0;
1687 } while (0)
1712 num_tests, num_tests - num_fail, num_fail);
1714 return num_fail == 0 ? EXIT_SUCCESS : EXIT_FAILURE;