Lines Matching defs:vmi
18 #define vma_iter_prealloc(vmi, vma) \
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
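For context: fail_prealloc is a toggle global to this test file, so the macro above lets any test force the maple-tree preallocation step to report -ENOMEM instead of calling mas_preallocate(). Judging from the comment matched at line 1491 below, cleanup_mm() resets it between tests. A minimal sketch of the intended usage — the operation under test is a hypothetical stand-in, not a function from this file:

	/* Sketch: make the next vma_iter_prealloc() fail, then check that the
	 * caller propagates -ENOMEM. do_merge_op() is hypothetical, standing
	 * in for whatever operation a given test exercises. */
	fail_prealloc = true;
	ASSERT_EQ(do_merge_op(&vmi), -ENOMEM);
	fail_prealloc = false;	/* or rely on cleanup_mm() to reset it */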
136 vmg->next = vma_next(vmg->vmi);
137 vmg->prev = vma_prev(vmg->vmi);
138 vma_iter_next_range(vmg->vmi);
177 vma_iter_set(vmg->vmi, start);
249 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
257 vma_iter_set(vmi, 0);
258 for_each_vma(*vmi, vma) {
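The matches at lines 249-258, together with the return values asserted further down (e.g. ASSERT_EQ(cleanup_mm(&mm, &vmi), 2)), imply that cleanup_mm() rewinds the iterator, frees every VMA left in the tree, and returns how many it freed. A plausible reconstruction — hedged, since only the signature and the loop header actually match here:

static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
{
	struct vm_area_struct *vma;
	int count = 0;

	fail_prealloc = false;		/* "Resets fail_prealloc too", per line 1491 */

	vma_iter_set(vmi, 0);		/* rewind to the start of the address space */
	for_each_vma(*vmi, vma) {	/* *vmi: for_each_vma() takes the iterator by value */
		vm_area_free(vma);
		count++;
	}

	/* Assumption: the underlying maple tree is torn down here as well. */
	mtree_destroy(&mm->mm_mt);
	return count;
}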
308 VMA_ITERATOR(vmi, &mm, 0x1000);
311 .vmi = &vmi,
341 VMA_ITERATOR(vmi, &mm, 0x1000);
349 vma = vma_modify_flags(&vmi, init_vma, init_vma,
364 vma_iter_set(&vmi, 0);
365 vma = vma_iter_load(&vmi);
372 vma_iter_clear(&vmi);
374 vma = vma_next(&vmi);
381 vma_iter_clear(&vmi);
383 vma = vma_next(&vmi);
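Lines 364-383 drive the iterator primitives directly rather than going through the merge helpers. The shape of that sequence, sketched from the matched lines alone: reposition, load the VMA under the cursor, clear its range out of the tree, and confirm that vma_next() now skips it:

	vma_iter_set(&vmi, 0);		/* reposition the iterator at address 0 */
	vma = vma_iter_load(&vmi);	/* VMA covering the current position, or NULL */

	vma_iter_clear(&vmi);		/* erase the current range from the tree */
	vma = vma_next(&vmi);		/* should now yield the following VMA (or NULL) */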
400 VMA_ITERATOR(vmi, &mm, 0);
402 .vmi = &vmi,
428 VMA_ITERATOR(vmi, &mm, 0);
432 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
448 VMA_ITERATOR(vmi, &mm, 0);
451 .vmi = &vmi,
621 vma_iter_set(&vmi, 0);
622 for_each_vma(vmi, vma) {
644 VMA_ITERATOR(vmi, &mm, 0);
647 .vmi = &vmi,
708 cleanup_mm(&mm, &vmi);
716 VMA_ITERATOR(vmi, &mm, 0);
719 .vmi = &vmi,
805 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
834 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
861 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
887 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
916 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
925 VMA_ITERATOR(vmi, &mm, 0);
928 .vmi = &vmi,
972 cleanup_mm(&mm, &vmi);
980 VMA_ITERATOR(vmi, &mm, 0);
984 .vmi = &vmi,
1023 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1051 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1085 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1114 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1144 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1201 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
1210 VMA_ITERATOR(vmi, &mm, 0);
1214 .vmi = &vmi,
1257 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1288 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1297 VMA_ITERATOR(vmi, &mm, 0);
1300 .vmi = &vmi,
1331 cleanup_mm(&mm, &vmi);
1364 cleanup_mm(&mm, &vmi);
1393 cleanup_mm(&mm, &vmi);
1421 cleanup_mm(&mm, &vmi);
1449 cleanup_mm(&mm, &vmi);
1457 VMA_ITERATOR(vmi, &mm, 0);
1460 .vmi = &vmi,
1467 * will be duplicated. We cause the vmi preallocation to fail and assert
1491 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */
1515 cleanup_mm(&mm, &vmi);
1523 VMA_ITERATOR(vmi, &mm, 0x1000);
1538 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1545 cleanup_mm(&mm, &vmi);
1554 VMA_ITERATOR(vmi, &mm, 0);
1567 cleanup_mm(&mm, &vmi);
1578 cleanup_mm(&mm, &vmi);
1586 VMA_ITERATOR(vmi, &mm, 0);
1588 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, vm_flags, 5);
1602 vma_iter_set(&vmi, 0x3000);
1615 ASSERT_EQ(vma_iter_addr(&vmi), 0x3000);
1618 cleanup_mm(&mm, &vmi);
1627 VMA_ITERATOR(vmi, &mm, 0);
1657 for_each_vma(vmi, vma) {
1669 cleanup_mm(&mm, &vmi);
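Taken together, the matches trace one recurring test skeleton: declare an iterator over a stack-local mm, build and merge/split VMAs through it, rewind and walk the tree to validate the resulting layout, then let cleanup_mm() free whatever remains. A condensed sketch of that skeleton; the setup step and the expected VMA count are of course test-specific:

	struct mm_struct mm = {};
	VMA_ITERATOR(vmi, &mm, 0);	/* iterator over mm, starting at address 0 */
	struct vm_area_struct *vma;

	/* ... establish VMAs and exercise merge/split operations via &vmi ... */

	vma_iter_set(&vmi, 0);		/* rewind before validating the layout */
	for_each_vma(vmi, vma) {
		/* per-test assertions against each surviving VMA */
	}

	ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);	/* frees all VMAs; 2 is an example count */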