Lines matching defs:vma (from tools/testing/vma/vma.c, the userland harness for the kernel's VMA code)

13 #include "../../../mm/vma.h"
18 #define vma_iter_prealloc(vmi, vma) \
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
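
The fail_prealloc flag wrapped into this macro is the harness's fault-injection switch: when set, vma_iter_prealloc() reports -ENOMEM instead of delegating to mas_preallocate(), so tests can exercise allocation-failure paths deterministically. A minimal sketch of the idiom, assuming vma_expand() propagates the preallocation failure (the helper name is illustrative, not from the harness):

	static bool assert_prealloc_failure_handled(struct vma_merge_struct *vmg)
	{
		fail_prealloc = true;              /* next vma_iter_prealloc() fails */
		ASSERT_EQ(vma_expand(vmg), -ENOMEM);
		fail_prealloc = false;             /* restore normal preallocation */
		return true;
	}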
29 * provides userland-equivalent functionality for everything vma.c uses.
31 #include "../../../mm/vma.c"
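
Including mm/vma.c directly, rather than linking against a build of it, is what lets kernel code run as an ordinary userland binary: the source compiles into the test program against stub headers supplying the kernel APIs it expects, and even its static functions become directly callable. A sketch of the same pattern, with hypothetical file and function names:

	/* test.c: include-the-source unit testing (all names illustrative) */
	#include "kernel_stubs.h"    /* userland stand-ins for kernel APIs */
	#include "../src/subject.c"  /* code under test, statics and all */

	int main(void)
	{
		return subject_selftest() ? 0 : 1;  /* call straight into it */
	}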
88 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags);
90 if (vma == NULL)
93 if (vma_link(mm, vma)) {
94 vm_area_free(vma);
103 vma->vm_lock_seq = UINT_MAX;
105 return vma;
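
This helper bundles the common setup steps: allocate a vma over a range, link it into the test mm, and reset vm_lock_seq to UINT_MAX so the vma starts out "not write-locked" for later vma_write_started() checks. The typical call, as used throughout the tests below:

	/* allocate and link a vma spanning [0x1000, 0x2000) at pgoff 1 */
	struct vm_area_struct *vma = alloc_and_link_vma(&mm, 0x1000, 0x2000, 1, flags);
	ASSERT_NE(vma, NULL);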
151 vmg->vma = NULL;
205 struct vm_area_struct *vma;
212 for_each_vma(*vmi, vma) {
213 vm_area_free(vma);
223 static bool vma_write_started(struct vm_area_struct *vma)
225 int seq = vma->vm_lock_seq;
228 vma->vm_lock_seq = UINT_MAX;
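
vma_write_started() consumes the marker it reads: it reports whether the vma has been write-locked since the last check, then resets vm_lock_seq to UINT_MAX so each assertion observes exactly one operation. Sketch of the intended use, assuming the harness's vma_start_write() stub moves vm_lock_seq away from UINT_MAX:

	vma_start_write(vma);                 /* operation under test write-locks vma */
	ASSERT_TRUE(vma_write_started(vma));  /* observed once; the check resets it */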
241 struct vm_area_struct *vma;
259 vma = merge_new(&vmg);
260 ASSERT_NE(vma, NULL);
262 ASSERT_EQ(vma->vm_start, 0);
263 ASSERT_EQ(vma->vm_end, 0x3000);
264 ASSERT_EQ(vma->vm_pgoff, 0);
265 ASSERT_EQ(vma->vm_flags, flags);
267 vm_area_free(vma);
275 struct vm_area_struct *vma;
287 vma = vma_modify_flags(&vmi, init_vma, init_vma,
289 ASSERT_NE(vma, NULL);
291 ASSERT_EQ(vma, init_vma);
293 ASSERT_EQ(vma->vm_start, 0x1000);
294 ASSERT_EQ(vma->vm_end, 0x2000);
295 ASSERT_EQ(vma->vm_pgoff, 1);
303 vma = vma_iter_load(&vmi);
305 ASSERT_EQ(vma->vm_start, 0);
306 ASSERT_EQ(vma->vm_end, 0x1000);
307 ASSERT_EQ(vma->vm_pgoff, 0);
309 vm_area_free(vma);
312 vma = vma_next(&vmi);
314 ASSERT_EQ(vma->vm_start, 0x1000);
315 ASSERT_EQ(vma->vm_end, 0x2000);
316 ASSERT_EQ(vma->vm_pgoff, 1);
318 vm_area_free(vma);
321 vma = vma_next(&vmi);
323 ASSERT_EQ(vma->vm_start, 0x2000);
324 ASSERT_EQ(vma->vm_end, 0x3000);
325 ASSERT_EQ(vma->vm_pgoff, 2);
327 vm_area_free(vma);
337 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags);
341 .vma = vma,
347 ASSERT_FALSE(vma_link(&mm, vma));
351 ASSERT_EQ(vma->vm_start, 0);
352 ASSERT_EQ(vma->vm_end, 0x3000);
353 ASSERT_EQ(vma->vm_pgoff, 0);
355 vm_area_free(vma);
365 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
368 ASSERT_FALSE(vma_link(&mm, vma));
370 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
372 ASSERT_EQ(vma->vm_start, 0);
373 ASSERT_EQ(vma->vm_end, 0x1000);
374 ASSERT_EQ(vma->vm_pgoff, 0);
376 vm_area_free(vma);
407 struct vm_area_struct *vma, *vma_a, *vma_b, *vma_c, *vma_d;
451 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
452 ASSERT_EQ(vma, vma_a);
455 ASSERT_EQ(vma->vm_start, 0);
456 ASSERT_EQ(vma->vm_end, 0x4000);
457 ASSERT_EQ(vma->vm_pgoff, 0);
458 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
459 ASSERT_TRUE(vma_write_started(vma));
468 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged);
469 ASSERT_EQ(vma, vma_a);
472 ASSERT_EQ(vma->vm_start, 0);
473 ASSERT_EQ(vma->vm_end, 0x5000);
474 ASSERT_EQ(vma->vm_pgoff, 0);
475 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
476 ASSERT_TRUE(vma_write_started(vma));
487 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged);
488 ASSERT_EQ(vma, vma_d);
491 ASSERT_EQ(vma->vm_start, 0x6000);
492 ASSERT_EQ(vma->vm_end, 0x9000);
493 ASSERT_EQ(vma->vm_pgoff, 6);
494 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
495 ASSERT_TRUE(vma_write_started(vma));
505 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged);
506 ASSERT_EQ(vma, vma_a);
509 ASSERT_EQ(vma->vm_start, 0);
510 ASSERT_EQ(vma->vm_end, 0x9000);
511 ASSERT_EQ(vma->vm_pgoff, 0);
512 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
513 ASSERT_TRUE(vma_write_started(vma));
523 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged);
524 ASSERT_EQ(vma, vma_c);
527 ASSERT_EQ(vma->vm_start, 0xa000);
528 ASSERT_EQ(vma->vm_end, 0xc000);
529 ASSERT_EQ(vma->vm_pgoff, 0xa);
530 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
531 ASSERT_TRUE(vma_write_started(vma));
540 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged);
541 ASSERT_EQ(vma, vma_a);
544 ASSERT_EQ(vma->vm_start, 0);
545 ASSERT_EQ(vma->vm_end, 0xc000);
546 ASSERT_EQ(vma->vm_pgoff, 0);
547 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
548 ASSERT_TRUE(vma_write_started(vma));
560 for_each_vma(vmi, vma) {
561 ASSERT_NE(vma, NULL);
562 ASSERT_EQ(vma->vm_start, 0);
563 ASSERT_EQ(vma->vm_end, 0xc000);
564 ASSERT_EQ(vma->vm_pgoff, 0);
565 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
567 vm_area_free(vma);
590 struct vm_area_struct *vma_left, *vma;
619 vma = merge_new(&vmg);
620 ASSERT_EQ(vma, NULL);
632 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
633 ASSERT_NE(vma, NULL);
634 vmg.vma = vma;
641 vma = merge_existing(&vmg);
642 ASSERT_EQ(vma, NULL);
662 struct vm_area_struct *vma_prev, *vma_next, *vma;
671 * [ prev ][ vma ][ next ]
674 * mergeable, then prev will be extended with one or both of vma and
692 * [ prev ][ vma ][ next ]
701 * [ prev ][ vma ]
703 * Where prev and vma are present and mergeable.
710 * [ prev ][ vma ][ next ]
712 * Cannot occur, because vma->vm_ops being the same implies the same
713 * vma->vm_file, and therefore this would mean that next->vm_ops->close
753 * In this instance, if vma has a close hook, the merge simply cannot
758 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
759 vma->vm_ops = &vm_ops;
763 vmg.vma = vma;
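
The rule exercised in this cluster: a vma whose vm_ops carries a close hook may not be deleted by a merge, so any merge that would remove it is refused outright. The tests install a stub ops table with only .close set and assert the refusal; a sketch of that pattern, mirroring the vm_ops and dummy_close names the tests reference:

	static void dummy_close(struct vm_area_struct *vma) { }  /* no-op stub */
	static const struct vm_operations_struct vm_ops = {
		.close = dummy_close,  /* presence alone blocks merges deleting vma */
	};

	vma->vm_ops = &vm_ops;
	ASSERT_EQ(merge_existing(&vmg), NULL);  /* merge refused, vma preserved */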
781 * In this instance, if vma has a close hook, the merge simply cannot
785 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
787 vma->vm_ops = &vm_ops;
790 vmg.vma = vma;
805 * The first variant is where vma has a close hook. In this instance, no
814 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
816 vma->vm_ops = &vm_ops;
820 vmg.vma = vma;
829 * we reduce the operation to a merge between prev and vma.
840 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
846 vmg.vma = vma;
873 struct vm_area_struct *vma;
900 vma = merge_new(&vmg);
901 ASSERT_NE(vma, NULL);
903 ASSERT_EQ(vma->vm_start, 0);
904 ASSERT_EQ(vma->vm_end, 0x5000);
905 ASSERT_EQ(vma->vm_pgoff, 0);
906 ASSERT_EQ(vma->vm_ops, &vm_ops);
907 ASSERT_TRUE(vma_write_started(vma));
919 struct vm_area_struct *vma, *vma_prev, *vma_next;
938 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
939 vma->vm_ops = &vm_ops; /* This should have no impact. */
943 vmg.vma = vma;
944 vmg.prev = vma;
945 vma->anon_vma = &dummy_anon_vma;
952 ASSERT_EQ(vma->vm_start, 0x2000);
953 ASSERT_EQ(vma->vm_end, 0x3000);
954 ASSERT_EQ(vma->vm_pgoff, 2);
955 ASSERT_TRUE(vma_write_started(vma));
972 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
976 vmg.vma = vma;
977 vma->anon_vma = &dummy_anon_vma;
987 /* Clear down and reset. We should have deleted vma. */
1002 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1003 vma->vm_ops = &vm_ops; /* This should have no impact. */
1006 vmg.vma = vma;
1007 vma->anon_vma = &dummy_anon_vma;
1015 ASSERT_EQ(vma->vm_start, 0x6000);
1016 ASSERT_EQ(vma->vm_end, 0x7000);
1017 ASSERT_EQ(vma->vm_pgoff, 6);
1019 ASSERT_TRUE(vma_write_started(vma));
1037 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1040 vmg.vma = vma;
1041 vma->anon_vma = &dummy_anon_vma;
1051 /* Clear down and reset. We should have deleted vma. */
1066 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1070 vmg.vma = vma;
1071 vma->anon_vma = &dummy_anon_vma;
1100 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1104 vmg.prev = vma;
1105 vmg.vma = vma;
1110 vmg.prev = vma;
1111 vmg.vma = vma;
1116 vmg.prev = vma;
1117 vmg.vma = vma;
1122 vmg.prev = vma;
1123 vmg.vma = vma;
1128 vmg.prev = vma;
1129 vmg.vma = vma;
1134 vmg.prev = vma;
1135 vmg.vma = vma;
1149 struct vm_area_struct *vma, *vma_prev, *vma_next;
1174 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1200 vmg.vma = vma;
1264 struct vm_area_struct *vma_prev, *vma_next, *vma;
1280 vmg.vma = vma_prev;
1297 * prev vma next
1302 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1312 vmg.vma = vma;
1326 * vma has anon_vma, we assign to prev.
1330 * prev vma next
1335 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1338 vma->anon_vma = &dummy_anon_vma;
1341 vmg.vma = vma;
1355 * vma has anon_vma, we assign to prev.
1359 * prev vma
1364 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1366 vma->anon_vma = &dummy_anon_vma;
1369 vmg.vma = vma;
1383 * vma has anon_vma, we assign to next.
1387 * vma next
1391 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags);
1394 vma->anon_vma = &dummy_anon_vma;
1396 vmg.prev = vma;
1397 vmg.vma = vma;
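
The four layouts above all probe anon_vma propagation: when a merge deletes the middle vma and that vma alone carries an anon_vma, the surviving neighbour (prev or next, per the comments) must inherit it so reverse-mapping state is not lost. Condensed sketch of the assertion pattern, using the same names as the tests:

	vma->anon_vma = &dummy_anon_vma;       /* only the vma being removed has one */
	ASSERT_EQ(merge_existing(&vmg), vma_prev);
	ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);  /* survivor inherited it */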
1421 struct vm_area_struct *vma_prev, *vma;
1424 * We are merging vma into prev, with vma possessing an anon_vma, which
1430 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1431 vma->anon_vma = &dummy_anon_vma;
1435 vmg.vma = vma;
1457 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1458 vma->anon_vma = &dummy_anon_vma;
1461 vmg.vma = vma_prev;
1462 vmg.next = vma;
1481 struct vm_area_struct *vma;
1483 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags);
1495 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1496 ASSERT_EQ(vma->vm_start, 0);
1497 ASSERT_EQ(vma->vm_end, 0x4000);
1498 ASSERT_EQ(vma->vm_pgoff, 0);
1499 ASSERT_TRUE(vma_write_started(vma));
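
vma_merge_extend() grows a vma by a delta and merges it with whatever the growth makes it adjacent to. Here extending [0, 0x1000) by 0x2000 must have met a neighbour at [0x3000, 0x4000), since the merged result checked above ends at 0x4000:

	/* before:  [0,0x1000)            [0x3000,0x4000)
	 * extend:  [0,0x3000), now adjacent, so the two merge
	 * after:   [0,0x4000)
	 */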
1512 struct vm_area_struct *vma, *vma_new, *vma_next;
1516 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1517 vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);
1519 ASSERT_NE(vma_new, vma);
1528 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
1530 vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);
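
copy_vma() models an mremap()-style move: it either clones the vma into the new range, as in the first case above where vma_new != vma, or, when the new range lands flush against an existing mapping, merges into it and returns that mapping. The second call presumably sets up the merge case with vma_next; a sketch of the check that would imply:

	ASSERT_EQ(vma_new, vma_next);  /* the moved range merged into the neighbour */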
1543 struct vm_area_struct *vma_prev, *vma;
1563 vma = vma_merge_new_range(&vmg);
1564 ASSERT_NE(vma, NULL);
1565 ASSERT_EQ(vma, vma_prev);
1567 ASSERT_EQ(vma->vm_start, 0x3000);
1568 ASSERT_EQ(vma->vm_end, 0x9000);
1569 ASSERT_EQ(vma->vm_pgoff, 3);
1570 ASSERT_TRUE(vma_write_started(vma));
1581 struct vm_area_struct *vma;
1612 for_each_vma(vmi, vma) {
1613 if (vma->vm_start == 0x300000) {
1614 ASSERT_EQ(vma->vm_end, 0x306000);
1615 ASSERT_EQ(vma->vm_pgoff, 0x300);
1616 } else if (vma->vm_start == 0x24d000) {
1617 ASSERT_EQ(vma->vm_end, 0x253000);
1618 ASSERT_EQ(vma->vm_pgoff, 0x24d);