Lines Matching 0x3000

44 } while (0)
206 int count = 0;
211 vma_iter_set(vmi, 0);
218 mm->map_count = 0;
244 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags);
245 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags);
246 VMA_ITERATOR(vmi, &mm, 0x1000);
250 .start = 0x1000,
251 .end = 0x2000,
262 ASSERT_EQ(vma->vm_start, 0);
263 ASSERT_EQ(vma->vm_end, 0x3000);
264 ASSERT_EQ(vma->vm_pgoff, 0);
278 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
279 VMA_ITERATOR(vmi, &mm, 0x1000);
288 0x1000, 0x2000, VM_READ | VM_MAYREAD);
293 ASSERT_EQ(vma->vm_start, 0x1000);
294 ASSERT_EQ(vma->vm_end, 0x2000);
302 vma_iter_set(&vmi, 0);
305 ASSERT_EQ(vma->vm_start, 0);
306 ASSERT_EQ(vma->vm_end, 0x1000);
307 ASSERT_EQ(vma->vm_pgoff, 0);
314 ASSERT_EQ(vma->vm_start, 0x1000);
315 ASSERT_EQ(vma->vm_end, 0x2000);
323 ASSERT_EQ(vma->vm_start, 0x2000);
324 ASSERT_EQ(vma->vm_end, 0x3000);
337 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags);
338 VMA_ITERATOR(vmi, &mm, 0);
342 .start = 0,
343 .end = 0x3000,
344 .pgoff = 0,
351 ASSERT_EQ(vma->vm_start, 0);
352 ASSERT_EQ(vma->vm_end, 0x3000);
353 ASSERT_EQ(vma->vm_pgoff, 0);
365 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
366 VMA_ITERATOR(vmi, &mm, 0);
370 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
372 ASSERT_EQ(vma->vm_start, 0);
373 ASSERT_EQ(vma->vm_end, 0x1000);
374 ASSERT_EQ(vma->vm_pgoff, 0);
386 VMA_ITERATOR(vmi, &mm, 0);
414 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
420 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
425 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, flags);
436 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, flags, &merged);
451 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
455 ASSERT_EQ(vma->vm_start, 0);
456 ASSERT_EQ(vma->vm_end, 0x4000);
457 ASSERT_EQ(vma->vm_pgoff, 0);
468 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged);
472 ASSERT_EQ(vma->vm_start, 0);
473 ASSERT_EQ(vma->vm_end, 0x5000);
474 ASSERT_EQ(vma->vm_pgoff, 0);
487 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged);
491 ASSERT_EQ(vma->vm_start, 0x6000);
492 ASSERT_EQ(vma->vm_end, 0x9000);
505 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged);
509 ASSERT_EQ(vma->vm_start, 0);
510 ASSERT_EQ(vma->vm_end, 0x9000);
511 ASSERT_EQ(vma->vm_pgoff, 0);
523 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged);
527 ASSERT_EQ(vma->vm_start, 0xa000);
528 ASSERT_EQ(vma->vm_end, 0xc000);
529 ASSERT_EQ(vma->vm_pgoff, 0xa);
540 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged);
544 ASSERT_EQ(vma->vm_start, 0);
545 ASSERT_EQ(vma->vm_end, 0xc000);
546 ASSERT_EQ(vma->vm_pgoff, 0);
558 count = 0;
559 vma_iter_set(&vmi, 0);
562 ASSERT_EQ(vma->vm_start, 0);
563 ASSERT_EQ(vma->vm_end, 0xc000);
564 ASSERT_EQ(vma->vm_pgoff, 0);
582 VMA_ITERATOR(vmi, &mm, 0);
588 vm_flags_t all_special_flags = 0;
593 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
602 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
613 vmg_set_range(&vmg, 0x3000, 0x4000, 3, flags);
614 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
632 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
636 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
654 VMA_ITERATOR(vmi, &mm, 0);
732 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
733 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
736 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
739 ASSERT_EQ(vma_prev->vm_start, 0);
740 ASSERT_EQ(vma_prev->vm_end, 0x5000);
741 ASSERT_EQ(vma_prev->vm_pgoff, 0);
757 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
758 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
761 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
785 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
786 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
789 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
813 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
814 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
815 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
818 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
839 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
840 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
841 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
844 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
850 ASSERT_EQ(vma_prev->vm_start, 0);
851 ASSERT_EQ(vma_prev->vm_end, 0x5000);
852 ASSERT_EQ(vma_prev->vm_pgoff, 0);
863 VMA_ITERATOR(vmi, &mm, 0);
868 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
869 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, flags);
899 vmg_set_range(&vmg, 0x2000, 0x5000, 2, flags);
903 ASSERT_EQ(vma->vm_start, 0);
904 ASSERT_EQ(vma->vm_end, 0x5000);
905 ASSERT_EQ(vma->vm_pgoff, 0);
918 VMA_ITERATOR(vmi, &mm, 0);
938 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
940 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
942 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
948 ASSERT_EQ(vma_next->vm_start, 0x3000);
949 ASSERT_EQ(vma_next->vm_end, 0x9000);
952 ASSERT_EQ(vma->vm_start, 0x2000);
953 ASSERT_EQ(vma->vm_end, 0x3000);
972 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
973 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
975 vmg_set_range(&vmg, 0x2000, 0x6000, 2, flags);
980 ASSERT_EQ(vma_next->vm_start, 0x2000);
981 ASSERT_EQ(vma_next->vm_end, 0x9000);
1000 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1002 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1004 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
1011 ASSERT_EQ(vma_prev->vm_start, 0);
1012 ASSERT_EQ(vma_prev->vm_end, 0x6000);
1013 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1015 ASSERT_EQ(vma->vm_start, 0x6000);
1016 ASSERT_EQ(vma->vm_end, 0x7000);
1035 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1037 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1038 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1044 ASSERT_EQ(vma_prev->vm_start, 0);
1045 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1046 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1064 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1066 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1067 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1068 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1074 ASSERT_EQ(vma_prev->vm_start, 0);
1075 ASSERT_EQ(vma_prev->vm_end, 0x9000);
1076 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1099 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1100 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1101 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags);
1103 vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags);
1109 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);
1115 vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags);
1121 vmg_set_range(&vmg, 0x4000, 0x7000, 4, flags);
1127 vmg_set_range(&vmg, 0x4000, 0x6000, 4, flags);
1133 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);
1148 VMA_ITERATOR(vmi, &mm, 0);
1173 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1174 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1175 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1198 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1204 ASSERT_EQ(vma_prev->vm_start, 0);
1205 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1206 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1224 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1225 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1235 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1240 ASSERT_EQ(vma_prev->vm_start, 0);
1241 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1242 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1256 VMA_ITERATOR(vmi, &mm, 0);
1275 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1276 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1279 vmg_set_range(&vmg, 0, 0x5000, 0, flags);
1283 ASSERT_EQ(expand_existing(&vmg), 0);
1301 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1302 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1303 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1310 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1317 ASSERT_EQ(vma_prev->vm_start, 0);
1318 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1334 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1335 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1336 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1339 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1346 ASSERT_EQ(vma_prev->vm_start, 0);
1347 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1363 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1364 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1367 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1374 ASSERT_EQ(vma_prev->vm_start, 0);
1375 ASSERT_EQ(vma_prev->vm_end, 0x5000);
1391 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags);
1392 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1395 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1402 ASSERT_EQ(vma_next->vm_start, 0x3000);
1403 ASSERT_EQ(vma_next->vm_end, 0x8000);
1416 VMA_ITERATOR(vmi, &mm, 0);
1429 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1430 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1433 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1456 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1457 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1460 vmg_set_range(&vmg, 0, 0x5000, 3, flags);
1480 VMA_ITERATOR(vmi, &mm, 0x1000);
1483 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags);
1484 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
1495 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1496 ASSERT_EQ(vma->vm_start, 0);
1497 ASSERT_EQ(vma->vm_end, 0x4000);
1498 ASSERT_EQ(vma->vm_pgoff, 0);
1511 VMA_ITERATOR(vmi, &mm, 0);
1516 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1517 vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);
1520 ASSERT_EQ(vma_new->vm_start, 0);
1521 ASSERT_EQ(vma_new->vm_end, 0x2000);
1522 ASSERT_EQ(vma_new->vm_pgoff, 0);
1528 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
1529 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, flags);
1530 vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);
1542 VMA_ITERATOR(vmi, &mm, 0);
1544 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, flags, 5);
1552 alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
1556 * 0x9000.
1558 vma_iter_set(&vmi, 0x3000);
1559 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1567 ASSERT_EQ(vma->vm_start, 0x3000);
1568 ASSERT_EQ(vma->vm_end, 0x9000);
1571 ASSERT_EQ(vma_iter_addr(&vmi), 0x3000);
1582 VMA_ITERATOR(vmi, &mm, 0);
1586 /* Map at 0x300000, length 0x3000. */
1587 addr = __mmap_region(NULL, 0x300000, 0x3000,
1589 0x300, NULL);
1590 ASSERT_EQ(addr, 0x300000);
1592 /* Map at 0x250000, length 0x3000. */
1593 addr = __mmap_region(NULL, 0x250000, 0x3000,
1595 0x250, NULL);
1596 ASSERT_EQ(addr, 0x250000);
1598 /* Map at 0x303000, merging to 0x300000 of length 0x6000. */
1599 addr = __mmap_region(NULL, 0x303000, 0x3000,
1601 0x303, NULL);
1602 ASSERT_EQ(addr, 0x303000);
1604 /* Map at 0x24d000, merging to 0x250000 of length 0x6000. */
1605 addr = __mmap_region(NULL, 0x24d000, 0x3000,
1607 0x24d, NULL);
1608 ASSERT_EQ(addr, 0x24d000);
1613 if (vma->vm_start == 0x300000) {
1614 ASSERT_EQ(vma->vm_end, 0x306000);
1615 ASSERT_EQ(vma->vm_pgoff, 0x300);
1616 } else if (vma->vm_start == 0x24d000) {
1617 ASSERT_EQ(vma->vm_end, 0x253000);
1618 ASSERT_EQ(vma->vm_pgoff, 0x24d);
1630 int num_tests = 0, num_fail = 0;
1641 } while (0)
1668 return num_fail == 0 ? EXIT_SUCCESS : EXIT_FAILURE;