Lines Matching defs:vma

13 #include "../../../mm/vma.h"
18 #define vma_iter_prealloc(vmi, vma) \
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
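
The override above wraps maple-tree preallocation so a test can force the -ENOMEM path on demand. A minimal sketch of how the flag might be exercised; vma_expand() is the mm/vma.c entry point assumed here to reach vma_iter_prealloc(), and the helper name is hypothetical:

    static bool fail_prealloc;  /* the flag consulted by the macro above */

    /* Hypothetical helper: force the next preallocation to fail and
     * confirm the operation backs out with an error. */
    static bool expand_fails_oom(struct vma_merge_struct *vmg)
    {
            bool failed;

            fail_prealloc = true;
            failed = vma_expand(vmg) != 0;
            fail_prealloc = false;

            return failed;
    }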
29 * provides userland-equivalent functionality for everything vma.c uses.
33 #include "../../../mm/vma.c"
85 static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma)
89 res = vma_link(mm, vma);
91 vma_assert_attached(vma);
95 static void detach_free_vma(struct vm_area_struct *vma)
97 vma_mark_detached(vma);
98 vm_area_free(vma);
108 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags);
110 if (vma == NULL)
113 if (attach_vma(mm, vma)) {
114 detach_free_vma(vma);
123 vma->vm_lock_seq = UINT_MAX;
125 return vma;
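
Together these helpers give every test the same VMA lifecycle: allocate and link into a dummy mm, exercise the operation under test, then detach and free. A skeleton under the assumption that the harness's stubbed mm_struct and ASSERT_* macros are in scope (teardown of the whole mm is handled by the cleanup helper captured near line 251):

    static bool example_lifecycle(void)
    {
            struct mm_struct mm = {};
            struct vm_area_struct *vma;

            /* [0x1000, 0x2000) at page offset 1. */
            vma = alloc_and_link_vma(&mm, 0x1000, 0x2000, 1, VM_READ | VM_WRITE);
            ASSERT_NE(vma, NULL);

            /* ... exercise merge/split/modify here ... */

            detach_free_vma(vma);   /* mark detached, then vm_area_free() */
            return true;
    }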
131 struct vm_area_struct *vma;
140 vma = vma_merge_new_range(vmg);
141 if (vma)
142 vma_assert_attached(vma);
144 return vma;
153 struct vm_area_struct *vma;
155 vma = vma_merge_existing_range(vmg);
156 if (vma)
157 vma_assert_attached(vma);
158 return vma;
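
merge_new() and merge_existing() are thin wrappers over the mm/vma.c entry points that additionally assert the returned VMA is attached. Both consume a struct vma_merge_struct describing the proposed range; the sketch below shows the fields the fragments in this listing actually set (the initializer shape and the .start/.end/.pgoff names follow current mm/vma.h and should be treated as assumptions):

    struct vma_merge_struct vmg = {
            .mm = &mm,
            .vmi = &vmi,
            .start = 0x1000,
            .end = 0x2000,
            .pgoff = 1,
            .vm_flags = vm_flags,
    };

    vmg.prev = vma_prev;    /* VMA just below the range, if any */
    vmg.middle = vma;       /* existing VMA spanning the range */
    vmg.next = vma_next;    /* VMA just above the range, if any */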
251 struct vm_area_struct *vma;
258 for_each_vma(*vmi, vma) {
259 detach_free_vma(vma);
269 static bool vma_write_started(struct vm_area_struct *vma)
271 int seq = vma->vm_lock_seq;
274 vma->vm_lock_seq = UINT_MAX;
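
These two lines are the heart of write detection: the userland stubs make vma_start_write() move vm_lock_seq away from UINT_MAX, so seeing any other value means a write lock was taken, and resetting makes each check one-shot. A hedged reconstruction of the surrounding helper:

    static bool vma_write_started(struct vm_area_struct *vma)
    {
            int seq = vma->vm_lock_seq;

            /* We reset after each check. */
            vma->vm_lock_seq = UINT_MAX;

            /* UINT_MAX reads back as -1 in an int; anything greater
             * means vma_start_write() ran since the last reset. */
            return seq > -1;
    }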
285 static void __vma_set_dummy_anon_vma(struct vm_area_struct *vma,
289 vma->anon_vma = anon_vma;
290 INIT_LIST_HEAD(&vma->anon_vma_chain);
291 list_add(&avc->same_vma, &vma->anon_vma_chain);
292 avc->anon_vma = vma->anon_vma;
295 static void vma_set_dummy_anon_vma(struct vm_area_struct *vma,
298 __vma_set_dummy_anon_vma(vma, avc, &dummy_anon_vma);
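
These helpers hang the shared dummy_anon_vma off a VMA through a caller-supplied anon_vma_chain, letting merge tests check anon_vma propagation without real rmap state. A typical pattern, pieced together from fragments later in this listing (the surrounding setup is assumed):

    struct anon_vma_chain avc = {};

    vma_set_dummy_anon_vma(vma, &avc);      /* middle VMA gets dummy_anon_vma */
    vmg.prev = vma_prev;
    vmg.middle = vma;

    /* A successful merge must propagate it to the survivor. */
    ASSERT_EQ(merge_existing(&vmg), vma_prev);
    ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);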
303 struct vm_area_struct *vma;
321 vma = merge_new(&vmg);
322 ASSERT_NE(vma, NULL);
324 ASSERT_EQ(vma->vm_start, 0);
325 ASSERT_EQ(vma->vm_end, 0x3000);
326 ASSERT_EQ(vma->vm_pgoff, 0);
327 ASSERT_EQ(vma->vm_flags, vm_flags);
329 detach_free_vma(vma);
337 struct vm_area_struct *vma;
349 vma = vma_modify_flags(&vmi, init_vma, init_vma,
351 ASSERT_NE(vma, NULL);
353 ASSERT_EQ(vma, init_vma);
355 ASSERT_EQ(vma->vm_start, 0x1000);
356 ASSERT_EQ(vma->vm_end, 0x2000);
357 ASSERT_EQ(vma->vm_pgoff, 1);
365 vma = vma_iter_load(&vmi);
367 ASSERT_EQ(vma->vm_start, 0);
368 ASSERT_EQ(vma->vm_end, 0x1000);
369 ASSERT_EQ(vma->vm_pgoff, 0);
371 detach_free_vma(vma);
374 vma = vma_next(&vmi);
376 ASSERT_EQ(vma->vm_start, 0x1000);
377 ASSERT_EQ(vma->vm_end, 0x2000);
378 ASSERT_EQ(vma->vm_pgoff, 1);
380 detach_free_vma(vma);
383 vma = vma_next(&vmi);
385 ASSERT_EQ(vma->vm_start, 0x2000);
386 ASSERT_EQ(vma->vm_end, 0x3000);
387 ASSERT_EQ(vma->vm_pgoff, 2);
389 detach_free_vma(vma);
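
The three blocks above encode the pgoff bookkeeping for a split: each piece keeps pgoff consistent with its byte offset into the original mapping, i.e. pgoff(piece) = pgoff(orig) + ((piece_start - orig_start) >> PAGE_SHIFT). With 4 KiB pages, splitting [0, 0x3000) at 0x1000 and 0x2000 gives 0 + (0x0 >> 12) = 0, 0 + (0x1000 >> 12) = 1 and 0 + (0x2000 >> 12) = 2, exactly the pgoffs asserted above.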
399 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, vm_flags);
403 .target = vma,
409 ASSERT_FALSE(attach_vma(&mm, vma));
413 ASSERT_EQ(vma->vm_start, 0);
414 ASSERT_EQ(vma->vm_end, 0x3000);
415 ASSERT_EQ(vma->vm_pgoff, 0);
417 detach_free_vma(vma);
427 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags);
430 ASSERT_FALSE(attach_vma(&mm, vma));
432 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
434 ASSERT_EQ(vma->vm_start, 0);
435 ASSERT_EQ(vma->vm_end, 0x1000);
436 ASSERT_EQ(vma->vm_pgoff, 0);
438 detach_free_vma(vma);
469 struct vm_area_struct *vma, *vma_a, *vma_b, *vma_c, *vma_d;
513 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged);
514 ASSERT_EQ(vma, vma_a);
517 ASSERT_EQ(vma->vm_start, 0);
518 ASSERT_EQ(vma->vm_end, 0x4000);
519 ASSERT_EQ(vma->vm_pgoff, 0);
520 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
521 ASSERT_TRUE(vma_write_started(vma));
530 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, vm_flags, &merged);
531 ASSERT_EQ(vma, vma_a);
534 ASSERT_EQ(vma->vm_start, 0);
535 ASSERT_EQ(vma->vm_end, 0x5000);
536 ASSERT_EQ(vma->vm_pgoff, 0);
537 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
538 ASSERT_TRUE(vma_write_started(vma));
549 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, vm_flags, &merged);
550 ASSERT_EQ(vma, vma_d);
553 ASSERT_EQ(vma->vm_start, 0x6000);
554 ASSERT_EQ(vma->vm_end, 0x9000);
555 ASSERT_EQ(vma->vm_pgoff, 6);
556 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
557 ASSERT_TRUE(vma_write_started(vma));
567 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, vm_flags, &merged);
568 ASSERT_EQ(vma, vma_a);
571 ASSERT_EQ(vma->vm_start, 0);
572 ASSERT_EQ(vma->vm_end, 0x9000);
573 ASSERT_EQ(vma->vm_pgoff, 0);
574 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
575 ASSERT_TRUE(vma_write_started(vma));
585 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, vm_flags, &merged);
586 ASSERT_EQ(vma, vma_c);
589 ASSERT_EQ(vma->vm_start, 0xa000);
590 ASSERT_EQ(vma->vm_end, 0xc000);
591 ASSERT_EQ(vma->vm_pgoff, 0xa);
592 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
593 ASSERT_TRUE(vma_write_started(vma));
602 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, vm_flags, &merged);
603 ASSERT_EQ(vma, vma_a);
606 ASSERT_EQ(vma->vm_start, 0);
607 ASSERT_EQ(vma->vm_end, 0xc000);
608 ASSERT_EQ(vma->vm_pgoff, 0);
609 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
610 ASSERT_TRUE(vma_write_started(vma));
622 for_each_vma(vmi, vma) {
623 ASSERT_NE(vma, NULL);
624 ASSERT_EQ(vma->vm_start, 0);
625 ASSERT_EQ(vma->vm_end, 0xc000);
626 ASSERT_EQ(vma->vm_pgoff, 0);
627 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
629 detach_free_vma(vma);
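
Every step of this test funnels through try_merge_new_vma(), whose body is not captured by the match. A plausible reconstruction, assuming a vmg_set_range() helper that re-primes the range fields (names not shown in the listing are assumptions):

    static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
                    struct vma_merge_struct *vmg,
                    unsigned long start, unsigned long end,
                    pgoff_t pgoff, vm_flags_t vm_flags, bool *was_merged)
    {
            struct vm_area_struct *merged;

            vmg_set_range(vmg, start, end, pgoff, vm_flags);

            merged = merge_new(vmg);
            if (merged) {
                    *was_merged = true;
                    return merged;
            }

            *was_merged = false;
            /* No merge possible: fall back to a fresh, linked VMA. */
            return alloc_and_link_vma(mm, start, end, pgoff, vm_flags);
    }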
652 struct vm_area_struct *vma_left, *vma;
681 vma = merge_new(&vmg);
682 ASSERT_EQ(vma, NULL);
694 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags);
695 ASSERT_NE(vma, NULL);
696 vmg.middle = vma;
703 vma = merge_existing(&vmg);
704 ASSERT_EQ(vma, NULL);
724 struct vm_area_struct *vma_prev, *vma_next, *vma;
733 * [ prev ][ vma ][ next ]
736 * mergeable, then prev will be extended with one or both of vma and
754 * [ prev ][ vma ][ next ]
763 * [ prev ][ vma ]
765 * Where prev and vma are present and mergeable.
772 * [ prev ][ vma ][ next ]
774 * Cannot occur, because vma->vm_ops being the same implies the same
775 * vma->vm_file, and therefore this would mean that next->vm_ops->close
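
The case analysis above hinges on whether a VMA carries a vm_ops->close handler. The tests that follow only need a vm_ops table whose .close is non-NULL; a minimal sketch (the dummy_close name is an assumption, not from the listing):

    static void dummy_close(struct vm_area_struct *vma)
    {
    }

    static const struct vm_operations_struct vm_ops = {
            .close = dummy_close,
    };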
815 * In this instance, if vma has a close hook, the merge simply cannot
820 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
821 vma->vm_ops = &vm_ops;
825 vmg.middle = vma;
843 * In this instance, if vma has a close hook, the merge simply cannot
847 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
849 vma->vm_ops = &vm_ops;
852 vmg.middle = vma;
867 * The first variant is where vma has a close hook. In this instance, no
876 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
878 vma->vm_ops = &vm_ops;
882 vmg.middle = vma;
891 * we reduce the operation to a merge between prev and vma.
902 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
908 vmg.middle = vma;
935 struct vm_area_struct *vma;
962 vma = merge_new(&vmg);
963 ASSERT_NE(vma, NULL);
965 ASSERT_EQ(vma->vm_start, 0);
966 ASSERT_EQ(vma->vm_end, 0x5000);
967 ASSERT_EQ(vma->vm_pgoff, 0);
968 ASSERT_EQ(vma->vm_ops, &vm_ops);
969 ASSERT_TRUE(vma_write_started(vma));
981 struct vm_area_struct *vma, *vma_prev, *vma_next;
1001 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags);
1002 vma->vm_ops = &vm_ops; /* This should have no impact. */
1006 vmg.middle = vma;
1007 vmg.prev = vma;
1008 vma_set_dummy_anon_vma(vma, &avc);
1015 ASSERT_EQ(vma->vm_start, 0x2000);
1016 ASSERT_EQ(vma->vm_end, 0x3000);
1017 ASSERT_EQ(vma->vm_pgoff, 2);
1018 ASSERT_TRUE(vma_write_started(vma));
1035 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags);
1039 vmg.middle = vma;
1040 vma_set_dummy_anon_vma(vma, &avc);
1050 /* Clear down and reset. We should have deleted vma. */
1065 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1066 vma->vm_ops = &vm_ops; /* This should have no impact. */
1069 vmg.middle = vma;
1070 vma_set_dummy_anon_vma(vma, &avc);
1077 ASSERT_EQ(vma->vm_start, 0x6000);
1078 ASSERT_EQ(vma->vm_end, 0x7000);
1079 ASSERT_EQ(vma->vm_pgoff, 6);
1081 ASSERT_TRUE(vma_write_started(vma));
1099 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1102 vmg.middle = vma;
1103 vma_set_dummy_anon_vma(vma, &avc);
1113 /* Clear down and reset. We should have deleted vma. */
1128 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1132 vmg.middle = vma;
1133 vma_set_dummy_anon_vma(vma, &avc);
1162 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags);
1166 vmg.prev = vma;
1167 vmg.middle = vma;
1172 vmg.prev = vma;
1173 vmg.middle = vma;
1178 vmg.prev = vma;
1179 vmg.middle = vma;
1184 vmg.prev = vma;
1185 vmg.middle = vma;
1190 vmg.prev = vma;
1191 vmg.middle = vma;
1196 vmg.prev = vma;
1197 vmg.middle = vma;
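
The six identical prev/middle resets above read oddly in isolation: each sub-case points both vmg.prev and vmg.middle at the same VMA, varies only the probed range on the unlisted lines in between, and expects the merge to be refused. A hedged sketch of one such sub-case (range values illustrative):

    vmg.start = 0x4000;
    vmg.end = 0x5000;
    vmg.pgoff = 4;
    vmg.prev = vma;
    vmg.middle = vma;
    ASSERT_EQ(merge_existing(&vmg), NULL);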
1211 struct vm_area_struct *vma, *vma_prev, *vma_next;
1233 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags);
1244 vmg.middle = vma;
1305 struct vm_area_struct *vma_prev, *vma_next, *vma;
1338 * prev vma next
1343 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1353 vmg.middle = vma;
1367 * vma has anon_vma, we assign to prev.
1371 * prev vma next
1376 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1379 vma_set_dummy_anon_vma(vma, &dummy_anon_vma_chain);
1382 vmg.middle = vma;
1396 * vma has anon_vma, we assign to prev.
1400 * prev vma
1405 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags);
1407 vma_set_dummy_anon_vma(vma, &dummy_anon_vma_chain);
1410 vmg.middle = vma;
1424 * vma has anon_vma, we assign to next.
1428 * vma next
1432 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, vm_flags);
1435 vma_set_dummy_anon_vma(vma, &dummy_anon_vma_chain);
1437 vmg.prev = vma;
1438 vmg.middle = vma;
1463 struct vm_area_struct *vma_prev, *vma;
1466 * We are merging vma into prev, with vma possessing an anon_vma, which
1472 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1473 vma->anon_vma = &dummy_anon_vma;
1477 vmg.middle = vma;
1478 vma_set_dummy_anon_vma(vma, &avc);
1500 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1501 vma->anon_vma = &dummy_anon_vma;
1505 vmg.next = vma;
1524 struct vm_area_struct *vma;
1526 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, vm_flags);
1538 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1539 ASSERT_EQ(vma->vm_start, 0);
1540 ASSERT_EQ(vma->vm_end, 0x4000);
1541 ASSERT_EQ(vma->vm_pgoff, 0);
1542 ASSERT_TRUE(vma_write_started(vma));
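
Reading the assertions together: vma_merge_extend() grows the VMA by the given delta and then merges with whatever now abuts it. The 0-0x1000 VMA extended by 0x2000 ends at 0x3000, so the final 0x4000 end implies a merge with a neighbour at 0x3000-0x4000 allocated on a line this match does not capture. That is an inference from the asserted geometry, not something shown in the listing.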
1555 struct vm_area_struct *vma, *vma_new, *vma_next;
1559 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags);
1560 vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);
1561 ASSERT_NE(vma_new, vma);
1571 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags);
1573 vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);
1587 struct vm_area_struct *vma_prev, *vma;
1607 vma = vma_merge_new_range(&vmg);
1608 ASSERT_NE(vma, NULL);
1609 ASSERT_EQ(vma, vma_prev);
1611 ASSERT_EQ(vma->vm_start, 0x3000);
1612 ASSERT_EQ(vma->vm_end, 0x9000);
1613 ASSERT_EQ(vma->vm_pgoff, 3);
1614 ASSERT_TRUE(vma_write_started(vma));
1616 vma_assert_attached(vma);
1626 struct vm_area_struct *vma;
1657 for_each_vma(vmi, vma) {
1658 if (vma->vm_start == 0x300000) {
1659 ASSERT_EQ(vma->vm_end, 0x306000);
1660 ASSERT_EQ(vma->vm_pgoff, 0x300);
1661 } else if (vma->vm_start == 0x24d000) {
1662 ASSERT_EQ(vma->vm_end, 0x253000);
1663 ASSERT_EQ(vma->vm_pgoff, 0x24d);