Lines matching refs:mm (cross-reference hits for the symbol mm, apparently in the Linux kernel's userland VMA test suite, tools/testing/vma/vma.c). Each entry gives the source line number, the matched code, the enclosing function, and whether mm is a function argument or a local variable there.

66 static struct vm_area_struct *alloc_vma(struct mm_struct *mm,  in alloc_vma()  argument
72 struct vm_area_struct *vma = vm_area_alloc(mm); in alloc_vma()
87 static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma) in attach_vma() argument
91 res = vma_link(mm, vma); in attach_vma()
104 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm, in alloc_and_link_vma() argument
110 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags); in alloc_and_link_vma()
115 if (attach_vma(mm, vma)) { in alloc_and_link_vma()
213 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm, in try_merge_new_vma() argument
234 return alloc_and_link_vma(mm, start, end, pgoff, vm_flags); in try_merge_new_vma()
251 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
265 mtree_destroy(&mm->mm_mt); in cleanup_mm()
266 mm->map_count = 0; in cleanup_mm()
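
The five helpers above are the scaffolding every test below leans on: alloc_vma() builds a detached VMA, attach_vma() links it into the mm via vma_link(), alloc_and_link_vma() combines the two, try_merge_new_vma() attempts a merge and falls back to linking a fresh VMA, and cleanup_mm() tears everything down, returning how many VMAs it freed (which is why later tests assert on its return value). A minimal userspace sketch of that contract, with invented *_model names and a fixed array standing in for the maple tree mm->mm_mt:

    #include <stdlib.h>

    struct vma_model { unsigned long start, end, pgoff, flags; };
    struct mm_model  { struct vma_model *vmas[32]; int map_count; };

    static struct vma_model *alloc_vma_model(unsigned long start, unsigned long end,
                                             unsigned long pgoff, unsigned long flags)
    {
            struct vma_model *v = calloc(1, sizeof(*v));

            if (!v)
                    return NULL;
            v->start = start;
            v->end = end;
            v->pgoff = pgoff;
            v->flags = flags;
            return v;
    }

    static int attach_vma_model(struct mm_model *mm, struct vma_model *v)
    {
            /* vma_link() inserts into the maple tree; an array stands in here. */
            if (mm->map_count >= 32)
                    return -1;
            mm->vmas[mm->map_count++] = v;
            return 0;
    }

    static int cleanup_mm_model(struct mm_model *mm)
    {
            int i, n = mm->map_count;

            for (i = 0; i < n; i++)
                    free(mm->vmas[i]);
            mm->map_count = 0;      /* mirrors "mm->map_count = 0" at line 266 */
            return n;               /* the tests assert on this count */
    }

    int main(void)
    {
            struct mm_model mm = { 0 };
            struct vma_model *v = alloc_vma_model(0, 0x1000, 0, 0);

            if (!v || attach_vma_model(&mm, v))
                    return 1;
            return cleanup_mm_model(&mm) == 1 ? 0 : 1;
    }
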
307 struct mm_struct mm = {}; in test_simple_merge() local
308 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, vm_flags); in test_simple_merge()
309 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, vm_flags); in test_simple_merge()
310 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
312 .mm = &mm, in test_simple_merge()
320 ASSERT_FALSE(attach_vma(&mm, vma_left)); in test_simple_merge()
321 ASSERT_FALSE(attach_vma(&mm, vma_right)); in test_simple_merge()
332 mtree_destroy(&mm.mm_mt); in test_simple_merge()
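
The hits at lines 307-332 show test_simple_merge() arranging two VMAs with a one-page hole between them, then merging a new page into the hole so all three coalesce into [0, 0x3000). The precondition being exercised appears to be the classic one: the ranges must abut, the flags must match, and the file offsets must run continuously across the boundary. A sketch of that predicate, assuming 4 KiB pages (the >> 12):

    #include <assert.h>
    #include <stdbool.h>

    struct range { unsigned long start, end, pgoff, flags; };

    static bool can_merge(const struct range *a, const struct range *b)
    {
            return a->end == b->start &&
                   a->flags == b->flags &&
                   a->pgoff + ((a->end - a->start) >> 12) == b->pgoff;
    }

    int main(void)
    {
            struct range left  = { 0x0000, 0x1000, 0, 0 };
            struct range mid   = { 0x1000, 0x2000, 1, 0 };
            struct range right = { 0x2000, 0x3000, 2, 0 };

            /* Filling the hole fuses left, mid and right into [0, 0x3000). */
            assert(can_merge(&left, &mid) && can_merge(&mid, &right));
            return 0;
    }
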
341 struct mm_struct mm = {}; in test_simple_modify() local
342 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags); in test_simple_modify()
343 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_modify()
346 ASSERT_FALSE(attach_vma(&mm, init_vma)); in test_simple_modify()
393 mtree_destroy(&mm.mm_mt); in test_simple_modify()
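
test_simple_modify() (lines 341-393) starts from a single VMA spanning [0, 0x3000) and changes attributes on its middle page, which forces a three-way split rather than a merge. The pgoff bookkeeping is the subtle part: each upper fragment's offset advances by the number of pages below the split point. A sketch of that arithmetic, again assuming 4 KiB pages and invented names:

    #include <assert.h>

    struct range { unsigned long start, end, pgoff, flags; };

    /* Split r at addr, returning the upper fragment. */
    static struct range split_at(struct range *r, unsigned long addr)
    {
            struct range hi = {
                    addr, r->end,
                    r->pgoff + ((addr - r->start) >> 12),
                    r->flags,
            };

            r->end = addr;
            return hi;
    }

    int main(void)
    {
            struct range lo  = { 0, 0x3000, 0, 0 };
            struct range mid = split_at(&lo, 0x1000);   /* [0x1000, 0x3000), pgoff 1 */
            struct range hi  = split_at(&mid, 0x2000);  /* [0x2000, 0x3000), pgoff 2 */

            assert(lo.end == 0x1000 && mid.end == 0x2000 && hi.pgoff == 2);
            return 0;
    }
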
401 struct mm_struct mm = {}; in test_simple_expand() local
402 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, vm_flags); in test_simple_expand()
403 VMA_ITERATOR(vmi, &mm, 0); in test_simple_expand()
412 ASSERT_FALSE(attach_vma(&mm, vma)); in test_simple_expand()
421 mtree_destroy(&mm.mm_mt); in test_simple_expand()
429 struct mm_struct mm = {}; in test_simple_shrink() local
430 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, vm_flags); in test_simple_shrink()
431 VMA_ITERATOR(vmi, &mm, 0); in test_simple_shrink()
433 ASSERT_FALSE(attach_vma(&mm, vma)); in test_simple_shrink()
442 mtree_destroy(&mm.mm_mt); in test_simple_shrink()
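
test_simple_expand() and test_simple_shrink() (lines 401-442) are mirror images: a single attached VMA has its end moved outward to [0, 0x3000) or back inward to [0, 0x1000), with start and pgoff untouched. A trivial model of that resize step:

    #include <assert.h>

    struct range { unsigned long start, end, pgoff; };

    static void resize(struct range *r, unsigned long new_end)
    {
            assert(new_end > r->start);     /* a VMA can never become empty */
            r->end = new_end;
    }

    int main(void)
    {
            struct range r = { 0, 0x1000, 0 };

            resize(&r, 0x3000);     /* expand, as test_simple_expand() does */
            resize(&r, 0x1000);     /* shrink back, as test_simple_shrink() does */
            assert(r.start == 0 && r.end == 0x1000 && r.pgoff == 0);
            return 0;
    }
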
450 struct mm_struct mm = {}; in __test_merge_new() local
451 VMA_ITERATOR(vmi, &mm, 0); in __test_merge_new()
453 .mm = &mm, in __test_merge_new()
482 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in __test_merge_new()
490 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in __test_merge_new()
497 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, vm_flags); in __test_merge_new()
510 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, vm_flags, &merged); in __test_merge_new()
515 ASSERT_EQ(mm.map_count, 4); in __test_merge_new()
525 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, vm_flags, &merged); in __test_merge_new()
534 ASSERT_EQ(mm.map_count, 3); in __test_merge_new()
544 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, vm_flags, &merged); in __test_merge_new()
553 ASSERT_EQ(mm.map_count, 3); in __test_merge_new()
565 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, vm_flags, &merged); in __test_merge_new()
574 ASSERT_EQ(mm.map_count, 3); in __test_merge_new()
585 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, vm_flags, &merged); in __test_merge_new()
594 ASSERT_EQ(mm.map_count, 2); in __test_merge_new()
605 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, vm_flags, &merged); in __test_merge_new()
614 ASSERT_EQ(mm.map_count, 2); in __test_merge_new()
624 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, vm_flags, &merged); in __test_merge_new()
633 ASSERT_EQ(mm.map_count, 1); in __test_merge_new()
660 mtree_destroy(&mm.mm_mt); in __test_merge_new()
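
__test_merge_new() (lines 450-660) walks the full case analysis for inserting a new range between existing VMAs: it can merge with the VMA before it, with the one after it, with both at once (a three-way merge that removes a VMA, which is why mm.map_count steps 4, 3, 3, 3, 2, 2, 1 through the listing), or with neither. A sketch of that classification, with invented names and the same 4 KiB-page pgoff rule as above:

    #include <assert.h>
    #include <stdbool.h>

    struct range { unsigned long start, end, pgoff, flags; };

    static bool abuts_compat(const struct range *a, const struct range *b)
    {
            return a && b && a->end == b->start && a->flags == b->flags &&
                   a->pgoff + ((a->end - a->start) >> 12) == b->pgoff;
    }

    enum merge_case { MERGE_NONE, MERGE_PREV, MERGE_NEXT, MERGE_BOTH };

    static enum merge_case classify(const struct range *prev,
                                    const struct range *new_r,
                                    const struct range *next)
    {
            bool p = abuts_compat(prev, new_r);
            bool n = abuts_compat(new_r, next);

            if (p && n)
                    return MERGE_BOTH;      /* hole filled: one VMA removed */
            if (p)
                    return MERGE_PREV;      /* prev expands rightwards */
            if (n)
                    return MERGE_NEXT;      /* next expands leftwards */
            return MERGE_NONE;              /* a fresh VMA must be linked */
    }

    int main(void)
    {
            struct range a   = { 0x0000, 0x2000, 0, 0 };
            struct range gap = { 0x2000, 0x3000, 2, 0 };
            struct range b   = { 0x3000, 0x4000, 3, 0 };

            assert(classify(&a, &gap, &b) == MERGE_BOTH);
            return 0;
    }
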
681 struct mm_struct mm = {}; in test_vma_merge_special_flags() local
682 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_special_flags()
684 .mm = &mm, in test_vma_merge_special_flags()
702 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_special_flags()
732 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in test_vma_merge_special_flags()
746 cleanup_mm(&mm, &vmi); in test_vma_merge_special_flags()
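
test_vma_merge_special_flags() (lines 681-746) checks that VM_SPECIAL flags veto merging even when everything else lines up; in the kernel, VM_SPECIAL covers VM_IO, VM_DONTEXPAND, VM_PFNMAP and VM_MIXEDMAP. A sketch of the veto, with illustrative bit values that do not match the kernel's actual layout:

    #include <assert.h>
    #include <stdbool.h>

    #define VM_IO_M         0x1UL
    #define VM_DONTEXPAND_M 0x2UL
    #define VM_PFNMAP_M     0x4UL
    #define VM_MIXEDMAP_M   0x8UL
    #define VM_SPECIAL_M    (VM_IO_M | VM_DONTEXPAND_M | VM_PFNMAP_M | VM_MIXEDMAP_M)

    static bool flags_mergeable(unsigned long a, unsigned long b)
    {
            if ((a | b) & VM_SPECIAL_M)
                    return false;           /* special mappings never merge */
            return a == b;
    }

    int main(void)
    {
            assert(!flags_mergeable(VM_IO_M, VM_IO_M)); /* identical yet vetoed */
            assert(flags_mergeable(0, 0));
            return 0;
    }
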
753 struct mm_struct mm = {}; in test_vma_merge_with_close() local
754 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_with_close()
756 .mm = &mm, in test_vma_merge_with_close()
832 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
833 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
843 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
857 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
858 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
872 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
885 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
886 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
899 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
913 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
914 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
915 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
925 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_vma_merge_with_close()
939 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vma_merge_with_close()
940 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vma_merge_with_close()
941 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, vm_flags); in test_vma_merge_with_close()
954 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
962 struct mm_struct mm = {}; in test_vma_merge_new_with_close() local
963 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_new_with_close()
965 .mm = &mm, in test_vma_merge_new_with_close()
968 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_vma_merge_new_with_close()
969 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, vm_flags); in test_vma_merge_new_with_close()
1008 ASSERT_EQ(mm.map_count, 2); in test_vma_merge_new_with_close()
1010 cleanup_mm(&mm, &vmi); in test_vma_merge_new_with_close()
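
test_vma_merge_with_close() (lines 753-954) and test_vma_merge_new_with_close() (lines 962-1010) probe a different veto: a merge must not delete a VMA whose vm_operations provide a close() hook, since the hook's bookkeeping would be skipped for the pages it covers. That is why the expected VMA counts alternate between 2 and 3 across the subcases; only merges that keep every close()-carrying VMA alive are allowed. A sketch of the rule, with invented names:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct vm_ops_model { void (*close)(void *vma); };
    struct vma_model { unsigned long start, end; const struct vm_ops_model *ops; };

    /* A VMA that the merge would delete must not carry a close() hook. */
    static bool may_remove(const struct vma_model *v)
    {
            return !(v && v->ops && v->ops->close);
    }

    static void noop_close(void *vma) { (void)vma; }

    int main(void)
    {
            const struct vm_ops_model ops = { .close = noop_close };
            struct vma_model mid   = { 0x3000, 0x5000, &ops };
            struct vma_model plain = { 0x5000, 0x9000, NULL };

            assert(!may_remove(&mid));      /* three-way merge refused */
            assert(may_remove(&plain));
            return 0;
    }
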
1019 struct mm_struct mm = {}; in __test_merge_existing() local
1020 VMA_ITERATOR(vmi, &mm, 0); in __test_merge_existing()
1023 .mm = &mm, in __test_merge_existing()
1048 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags); in __test_merge_existing()
1050 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, next_flags); in __test_merge_existing()
1067 ASSERT_EQ(mm.map_count, 2); in __test_merge_existing()
1072 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in __test_merge_existing()
1084 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, vm_flags); in __test_merge_existing()
1085 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, next_flags); in __test_merge_existing()
1097 ASSERT_EQ(mm.map_count, 1); in __test_merge_existing()
1102 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in __test_merge_existing()
1114 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, prev_flags); in __test_merge_existing()
1116 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in __test_merge_existing()
1133 ASSERT_EQ(mm.map_count, 2); in __test_merge_existing()
1138 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in __test_merge_existing()
1150 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, prev_flags); in __test_merge_existing()
1152 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in __test_merge_existing()
1164 ASSERT_EQ(mm.map_count, 1); in __test_merge_existing()
1169 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in __test_merge_existing()
1181 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, prev_flags); in __test_merge_existing()
1183 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in __test_merge_existing()
1184 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, next_flags); in __test_merge_existing()
1196 ASSERT_EQ(mm.map_count, 1); in __test_merge_existing()
1201 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in __test_merge_existing()
1218 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, prev_flags); in __test_merge_existing()
1219 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags); in __test_merge_existing()
1220 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, next_flags); in __test_merge_existing()
1258 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in __test_merge_existing()
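
__test_merge_existing() (lines 1019-1258) covers the mprotect-style path: attributes change on part of an existing VMA so that the changed piece now matches a neighbour, and the merge moves the boundary between the two instead of creating a third VMA; when the whole VMA becomes compatible it is absorbed outright, which is what the alternating map_count 2 and 1 assertions reflect. A sketch of the partial, tail-into-next case, assuming 4 KiB pages:

    #include <assert.h>

    struct range { unsigned long start, end, pgoff, flags; };

    /* [from, vma->end) adopts next's flags and joins next. */
    static void merge_tail_into_next(struct range *vma, struct range *next,
                                     unsigned long from)
    {
            next->pgoff -= (next->start - from) >> 12;
            next->start = from;
            vma->end = from;
    }

    int main(void)
    {
            struct range vma  = { 0x2000, 0x6000, 2, 0 };
            struct range next = { 0x6000, 0x9000, 6, 1 };

            merge_tail_into_next(&vma, &next, 0x4000);
            assert(vma.end == 0x4000 && next.start == 0x4000 && next.pgoff == 4);
            return 0;
    }
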
1279 struct mm_struct mm = {}; in test_anon_vma_non_mergeable() local
1280 VMA_ITERATOR(vmi, &mm, 0); in test_anon_vma_non_mergeable()
1283 .mm = &mm, in test_anon_vma_non_mergeable()
1302 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_anon_vma_non_mergeable()
1303 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, vm_flags); in test_anon_vma_non_mergeable()
1304 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags); in test_anon_vma_non_mergeable()
1327 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
1340 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_anon_vma_non_mergeable()
1341 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, vm_flags); in test_anon_vma_non_mergeable()
1358 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
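
test_anon_vma_non_mergeable() (lines 1279-1358) shows that two neighbours can only be fused across the range between them when their anon_vmas are compatible: the same object, or at most one side having one at all. With distinct anon_vmas only the prev-side merge survives, leaving two VMAs, as both cleanup assertions expect. A sketch of the compatibility test:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct anon_vma_model { int id; };

    static bool anon_compatible(const struct anon_vma_model *a,
                                const struct anon_vma_model *b)
    {
            return !a || !b || a == b;
    }

    int main(void)
    {
            struct anon_vma_model av1 = { 1 }, av2 = { 2 };

            assert(!anon_compatible(&av1, &av2));   /* distinct: no 3-way merge */
            assert(anon_compatible(&av1, NULL));
            return 0;
    }
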
1366 struct mm_struct mm = {}; in test_dup_anon_vma() local
1367 VMA_ITERATOR(vmi, &mm, 0); in test_dup_anon_vma()
1369 .mm = &mm, in test_dup_anon_vma()
1386 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1387 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1401 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1412 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1413 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1414 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1434 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1445 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1446 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_dup_anon_vma()
1447 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1463 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1474 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_dup_anon_vma()
1475 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, vm_flags); in test_dup_anon_vma()
1491 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1502 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, vm_flags); in test_dup_anon_vma()
1503 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, vm_flags); in test_dup_anon_vma()
1519 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
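
test_dup_anon_vma() (lines 1366-1519) exercises the hand-over its name suggests: when a merge removes a VMA that owns an anon_vma, the surviving, expanded VMA must take that anon_vma over so existing anonymous pages stay reachable through the reverse mapping. A sketch of the hand-over; the kernel duplicates the state, this model merely moves the pointer:

    #include <assert.h>
    #include <stddef.h>

    struct anon_vma_model { int id; };
    struct vma_model { unsigned long start, end; struct anon_vma_model *anon; };

    static void absorb(struct vma_model *survivor, struct vma_model *removed)
    {
            if (!survivor->anon)
                    survivor->anon = removed->anon;
            survivor->end = removed->end;
            removed->anon = NULL;
    }

    int main(void)
    {
            struct anon_vma_model av = { 1 };
            struct vma_model prev = { 0x0000, 0x3000, NULL };
            struct vma_model next = { 0x3000, 0x5000, &av };

            absorb(&prev, &next);
            assert(prev.anon == &av && prev.end == 0x5000);
            return 0;
    }
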
1526 struct mm_struct mm = {}; in test_vmi_prealloc_fail() local
1527 VMA_ITERATOR(vmi, &mm, 0); in test_vmi_prealloc_fail()
1529 .mm = &mm, in test_vmi_prealloc_fail()
1541 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vmi_prealloc_fail()
1542 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vmi_prealloc_fail()
1561 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ in test_vmi_prealloc_fail()
1569 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, vm_flags); in test_vmi_prealloc_fail()
1570 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_vmi_prealloc_fail()
1585 cleanup_mm(&mm, &vmi); in test_vmi_prealloc_fail()
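
test_vmi_prealloc_fail() (lines 1526-1585) is fault injection: a knob makes the iterator's tree preallocation fail, the merge must return -ENOMEM without having touched anything, and, per the comment at line 1561, cleanup_mm() also resets the knob. The pattern, sketched with invented names:

    #include <assert.h>
    #include <errno.h>
    #include <stdbool.h>

    static bool fail_prealloc;

    static int prealloc(void)
    {
            return fail_prealloc ? -ENOMEM : 0;
    }

    static int merge_step(int *map_count)
    {
            int err = prealloc();

            if (err)
                    return err;     /* bail before mutating anything */
            (*map_count)--;         /* the merge removed one VMA */
            return 0;
    }

    int main(void)
    {
            int map_count = 2;

            fail_prealloc = true;
            assert(merge_step(&map_count) == -ENOMEM && map_count == 2);
            fail_prealloc = false;  /* what cleanup_mm() does for the tests */
            assert(merge_step(&map_count) == 0 && map_count == 1);
            return 0;
    }
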
1592 struct mm_struct mm = {}; in test_merge_extend() local
1593 VMA_ITERATOR(vmi, &mm, 0x1000); in test_merge_extend()
1596 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, vm_flags); in test_merge_extend()
1597 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, vm_flags); in test_merge_extend()
1613 ASSERT_EQ(mm.map_count, 1); in test_merge_extend()
1615 cleanup_mm(&mm, &vmi); in test_merge_extend()
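
test_merge_extend() (lines 1592-1615) appears to model mremap-style in-place growth: the VMA at [0, 0x1000) is extended and, once it reaches the mapping at [0x3000, 0x4000), the two collapse into one, hence the final map_count of 1. A sketch of that extend-then-merge step, names invented:

    #include <assert.h>
    #include <stddef.h>

    struct range { unsigned long start, end; };

    static void extend_and_merge(struct range *v, unsigned long delta,
                                 struct range *next, int *map_count)
    {
            v->end += delta;
            if (next && v->end == next->start) {    /* compatible neighbour */
                    v->end = next->end;
                    (*map_count)--;
            }
    }

    int main(void)
    {
            struct range v     = { 0x0000, 0x1000 };
            struct range other = { 0x3000, 0x4000 };
            int map_count = 2;

            extend_and_merge(&v, 0x2000, &other, &map_count);
            assert(v.end == 0x4000 && map_count == 1);
            return 0;
    }
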
1622 struct mm_struct mm = {}; in test_copy_vma() local
1624 VMA_ITERATOR(vmi, &mm, 0); in test_copy_vma()
1629 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_copy_vma()
1637 cleanup_mm(&mm, &vmi); in test_copy_vma()
1641 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_copy_vma()
1642 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, vm_flags); in test_copy_vma()
1648 cleanup_mm(&mm, &vmi); in test_copy_vma()
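
test_copy_vma() (lines 1622-1648) covers the move path behind mremap: the first case copies [0x3000, 0x5000) to a free target and gets a fresh VMA; the second places the copy flush against an existing mapping, and the listing suggests the copy is then folded into that neighbour instead. A sketch of the two outcomes, with invented names:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct range { unsigned long start, end, flags; };

    /* Returns true if the moved mapping merged into *neigh rather than
     * becoming the fresh VMA written to *out. */
    static bool place_copy(struct range *neigh, unsigned long start,
                           unsigned long len, unsigned long flags,
                           struct range *out)
    {
            if (neigh && neigh->flags == flags && start + len == neigh->start) {
                    neigh->start = start;
                    return true;
            }
            out->start = start;
            out->end = start + len;
            out->flags = flags;
            return false;
    }

    int main(void)
    {
            struct range next = { 0x6000, 0x8000, 0 };
            struct range fresh;

            /* A 0x2000-byte mapping moved to 0x4000 lands flush against next. */
            assert(place_copy(&next, 0x4000, 0x2000, 0, &fresh));
            assert(next.start == 0x4000);
            return 0;
    }
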
1655 struct mm_struct mm = {}; in test_expand_only_mode() local
1656 VMA_ITERATOR(vmi, &mm, 0); in test_expand_only_mode()
1658 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, vm_flags, 5); in test_expand_only_mode()
1666 alloc_and_link_vma(&mm, 0, 0x2000, 0, vm_flags); in test_expand_only_mode()
1673 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, vm_flags); in test_expand_only_mode()
1688 cleanup_mm(&mm, &vmi); in test_expand_only_mode()
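
test_expand_only_mode() (lines 1655-1688) sets up a VMG_STATE over [0x5000, 0x9000) and, as the name suggests, allows the request to be satisfied only by growing the previous VMA ([0x3000, 0x5000)) to cover it; no other merge candidate is considered. A sketch of that restriction:

    #include <assert.h>
    #include <stddef.h>

    struct range { unsigned long start, end; };

    static int expand_only(struct range *prev, unsigned long start,
                           unsigned long end)
    {
            if (!prev || prev->end != start)
                    return -1;      /* nothing to expand; caller maps afresh */
            prev->end = end;
            return 0;
    }

    int main(void)
    {
            struct range prev = { 0x3000, 0x5000 };

            assert(expand_only(&prev, 0x5000, 0x9000) == 0);
            assert(prev.end == 0x9000);
            return 0;
    }
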
1694 struct mm_struct mm = {}; in test_mmap_region_basic() local
1697 VMA_ITERATOR(vmi, &mm, 0); in test_mmap_region_basic()
1699 current->mm = &mm; in test_mmap_region_basic()
1725 ASSERT_EQ(mm.map_count, 2); in test_mmap_region_basic()
1739 cleanup_mm(&mm, &vmi); in test_mmap_region_basic()
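
test_mmap_region_basic() (lines 1694-1739) is the one test that drives a real mapping path, so it first installs the stub mm as current->mm (line 1699) before asserting that two mappings remain. When writing tests in this style, it is worth restoring the old pointer so the stub cannot leak into later tests; a sketch of that save/run/restore pattern, with invented names:

    struct mm_model { int map_count; };
    struct task_model { struct mm_model *mm; };

    static struct task_model current_model;

    static int with_stub_mm(struct mm_model *mm, int (*body)(void))
    {
            struct mm_model *saved = current_model.mm;
            int ret;

            current_model.mm = mm;  /* mirrors "current->mm = &mm" at line 1699 */
            ret = body();
            current_model.mm = saved;
            return ret;
    }

    static int body_model(void)
    {
            return 0;               /* the real test maps regions and checks
                                     * mm.map_count == 2 here */
    }

    int main(void)
    {
            struct mm_model mm = { 0 };

            return with_stub_mm(&mm, body_model);
    }
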