vma.c (old: fc21959f74bc1138b28e90a02ec224ab8626111e) | vma.c (new: cacded5e42b9609b07b22d80c10f0076d439f7d1) |
---|---|
1// SPDX-License-Identifier: GPL-2.0-or-later 2 3#include <stdbool.h> 4#include <stdio.h> 5#include <stdlib.h> 6 7#include "maple-shared.h" 8#include "vma_internal.h" --- 87 unchanged lines hidden (view full) --- 96static struct vm_area_struct *merge_new(struct vma_merge_struct *vmg) 97{ 98 /* 99 * For convenience, get prev and next VMAs. Which the new VMA operation 100 * requires. 101 */ 102 vmg->next = vma_next(vmg->vmi); 103 vmg->prev = vma_prev(vmg->vmi); | 1// SPDX-License-Identifier: GPL-2.0-or-later 2 3#include <stdbool.h> 4#include <stdio.h> 5#include <stdlib.h> 6 7#include "maple-shared.h" 8#include "vma_internal.h" --- 87 unchanged lines hidden (view full) --- 96static struct vm_area_struct *merge_new(struct vma_merge_struct *vmg) 97{ 98 /* 99 * For convenience, get prev and next VMAs. Which the new VMA operation 100 * requires. 101 */ 102 vmg->next = vma_next(vmg->vmi); 103 vmg->prev = vma_prev(vmg->vmi); |
| 104 vma_iter_next_range(vmg->vmi); |
104 | 105 |
105 vma_iter_set(vmg->vmi, vmg->start); 106 return vma_merge(vmg); | 106 return vma_merge_new_range(vmg); |
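For readability, here is the updated merge_new() helper reassembled from the right-hand column above. The left-hand version differed only in its tail: it had no vma_iter_next_range() call and instead did vma_iter_set(vmg->vmi, vmg->start) followed by return vma_merge(vmg).

```c
/* Right column (cacded5e42b9): step past prev, then call vma_merge_new_range(). */
static struct vm_area_struct *merge_new(struct vma_merge_struct *vmg)
{
	/*
	 * For convenience, get prev and next VMAs. Which the new VMA operation
	 * requires.
	 */
	vmg->next = vma_next(vmg->vmi);
	vmg->prev = vma_prev(vmg->vmi);
	vma_iter_next_range(vmg->vmi);

	return vma_merge_new_range(vmg);
}
```

The added vma_iter_next_range() call presumably undoes the step back that vma_prev() performs just above it, since vma_merge_new_range() no longer has the iterator reset to vmg->start on its behalf.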
107} 108 109/* 110 * Helper function which provides a wrapper around a merge existing VMA 111 * operation. 112 */ 113static struct vm_area_struct *merge_existing(struct vma_merge_struct *vmg) 114{ --- 42 unchanged lines hidden (view full) --- 157{ 158 struct vm_area_struct *merged; 159 160 vmg_set_range(vmg, start, end, pgoff, flags); 161 162 merged = merge_new(vmg); 163 if (merged) { 164 *was_merged = true; | 107} 108 109/* 110 * Helper function which provides a wrapper around a merge existing VMA 111 * operation. 112 */ 113static struct vm_area_struct *merge_existing(struct vma_merge_struct *vmg) 114{ --- 42 unchanged lines hidden (view full) --- 157{ 158 struct vm_area_struct *merged; 159 160 vmg_set_range(vmg, start, end, pgoff, flags); 161 162 merged = merge_new(vmg); 163 if (merged) { 164 *was_merged = true; |
| 165 ASSERT_EQ(vmg->state, VMA_MERGE_SUCCESS); |
165 return merged; 166 } 167 168 *was_merged = false; | 166 return merged; 167 } 168 169 *was_merged = false; |
| 170 171 ASSERT_EQ(vmg->state, VMA_MERGE_NOMERGE); 172 |
169 return alloc_and_link_vma(mm, start, end, pgoff, flags); 170} 171 172/* 173 * Helper function to reset the dummy anon_vma to indicate it has not been 174 * duplicated. 175 */ 176static void reset_dummy_anon_vma(void) --- 413 unchanged lines hidden (view full) --- 590 vmg_set_range(&vmg, 0x3000, 0x4000, 3, flags); 591 for (i = 0; i < ARRAY_SIZE(special_flags); i++) { 592 vm_flags_t special_flag = special_flags[i]; 593 594 vma_left->__vm_flags = flags | special_flag; 595 vmg.flags = flags | special_flag; 596 vma = merge_new(&vmg); 597 ASSERT_EQ(vma, NULL); | 173 return alloc_and_link_vma(mm, start, end, pgoff, flags); 174} 175 176/* 177 * Helper function to reset the dummy anon_vma to indicate it has not been 178 * duplicated. 179 */ 180static void reset_dummy_anon_vma(void) --- 413 unchanged lines hidden (view full) --- 594 vmg_set_range(&vmg, 0x3000, 0x4000, 3, flags); 595 for (i = 0; i < ARRAY_SIZE(special_flags); i++) { 596 vm_flags_t special_flag = special_flags[i]; 597 598 vma_left->__vm_flags = flags | special_flag; 599 vmg.flags = flags | special_flag; 600 vma = merge_new(&vmg); 601 ASSERT_EQ(vma, NULL); |
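The same pattern of state checking lands in the fallback helper used by most of the tests below: a successful merge must now leave vmg->state at VMA_MERGE_SUCCESS, and the no-merge path must report VMA_MERGE_NOMERGE before a fresh VMA is allocated instead. The sketch below reassembles the right-hand column; the enclosing prototype is hidden by this view, so the try_merge_new_vma() signature shown is assumed from the upstream test file rather than taken from this page.

```c
/*
 * Sketch of the updated helper body from the right-hand column. The function
 * signature is not visible in this diff view and is assumed here.
 */
static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
						struct vma_merge_struct *vmg,
						unsigned long start, unsigned long end,
						pgoff_t pgoff, vm_flags_t flags,
						bool *was_merged)
{
	struct vm_area_struct *merged;

	vmg_set_range(vmg, start, end, pgoff, flags);

	merged = merge_new(vmg);
	if (merged) {
		*was_merged = true;
		/* New in cacded5e42b9: a successful merge must report it. */
		ASSERT_EQ(vmg->state, VMA_MERGE_SUCCESS);
		return merged;
	}

	*was_merged = false;

	/* New in cacded5e42b9: a failed merge must report NOMERGE. */
	ASSERT_EQ(vmg->state, VMA_MERGE_NOMERGE);

	return alloc_and_link_vma(mm, start, end, pgoff, flags);
}
```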
| 602 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
598 } 599 600 /* 2. Modify VMA with special flag that would otherwise merge. */ 601 602 /* 603 * 01234 604 * AAAB 605 * --- 5 unchanged lines hidden (view full) --- 611 612 for (i = 0; i < ARRAY_SIZE(special_flags); i++) { 613 vm_flags_t special_flag = special_flags[i]; 614 615 vma_left->__vm_flags = flags | special_flag; 616 vmg.flags = flags | special_flag; 617 vma = merge_existing(&vmg); 618 ASSERT_EQ(vma, NULL); | 603 } 604 605 /* 2. Modify VMA with special flag that would otherwise merge. */ 606 607 /* 608 * 01234 609 * AAAB 610 * --- 5 unchanged lines hidden (view full) --- 616 617 for (i = 0; i < ARRAY_SIZE(special_flags); i++) { 618 vm_flags_t special_flag = special_flags[i]; 619 620 vma_left->__vm_flags = flags | special_flag; 621 vmg.flags = flags | special_flag; 622 vma = merge_existing(&vmg); 623 ASSERT_EQ(vma, NULL); |
| 624 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
619 } 620 621 cleanup_mm(&mm, &vmi); 622 return true; 623} 624 625static bool test_vma_merge_with_close(void) 626{ --- 76 unchanged lines hidden (view full) --- 703 /* Make the next VMA have a close() callback. */ 704 vma_next->vm_ops = &vm_ops; 705 706 /* Our proposed VMA has characteristics that would otherwise be merged. */ 707 vmg_set_range(&vmg, 0x1000, 0x2000, 1, flags); 708 709 /* The next VMA having a close() operator should cause the merge to fail.*/ 710 ASSERT_EQ(merge_new(&vmg), NULL); | 625 } 626 627 cleanup_mm(&mm, &vmi); 628 return true; 629} 630 631static bool test_vma_merge_with_close(void) 632{ --- 76 unchanged lines hidden (view full) --- 709 /* Make the next VMA have a close() callback. */ 710 vma_next->vm_ops = &vm_ops; 711 712 /* Our proposed VMA has characteristics that would otherwise be merged. */ 713 vmg_set_range(&vmg, 0x1000, 0x2000, 1, flags); 714 715 /* The next VMA having a close() operator should cause the merge to fail.*/ 716 ASSERT_EQ(merge_new(&vmg), NULL); |
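All of the ASSERT_EQ(vmg.state, ...) lines that this diff adds, here and below, compare against a small status enum that the same series introduces outside this file. Assuming the vma_merge_state definition that lands in mm/vma.h alongside vma_merge_new_range(), the values are roughly:

```c
/*
 * Assumed shape of the merge-state tracking added alongside
 * vma_merge_new_range(); defined in mm/vma.h, not in vma.c itself.
 */
enum vma_merge_state {
	VMA_MERGE_START,	/* vmg initialised, no merge attempted yet */
	VMA_MERGE_ERROR_NOMEM,	/* merge failed due to an allocation failure */
	VMA_MERGE_NOMERGE,	/* the ranges were simply not compatible */
	VMA_MERGE_SUCCESS,	/* the VMAs were merged */
};
```

Only the last three values appear in the assertions in this file; the first is the freshly initialised state before any merge attempt has been made.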
| 717 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
711 712 /* Now create the VMA so we can merge via modified flags */ 713 vmg_set_range(&vmg, 0x1000, 0x2000, 1, flags); 714 vma = alloc_and_link_vma(&mm, 0x1000, 0x2000, 1, flags); 715 vmg.vma = vma; 716 717 /* 718 * The VMA being modified in a way that would otherwise merge should 719 * also fail. 720 */ 721 ASSERT_EQ(merge_existing(&vmg), NULL); | 718 719 /* Now create the VMA so we can merge via modified flags */ 720 vmg_set_range(&vmg, 0x1000, 0x2000, 1, flags); 721 vma = alloc_and_link_vma(&mm, 0x1000, 0x2000, 1, flags); 722 vmg.vma = vma; 723 724 /* 725 * The VMA being modified in a way that would otherwise merge should 726 * also fail. 727 */ 728 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 729 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
722 723 /* SCENARIO B 724 * 725 * 0123 726 * P* 727 * 728 * In order for this scenario to trigger, the VMA currently being 729 * modified must also have a .close(). --- 9 unchanged lines hidden (view full) --- 739 vma_next->__vm_flags &= ~VM_MAYWRITE; 740 /* Allocate prev. */ 741 vmg.prev = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags); 742 /* Assign a vm_ops->close() function to VMA explicitly. */ 743 vma->vm_ops = &vm_ops; 744 vmg.vma = vma; 745 /* Make sure merge does not occur. */ 746 ASSERT_EQ(merge_existing(&vmg), NULL); | 730 731 /* SCENARIO B 732 * 733 * 0123 734 * P* 735 * 736 * In order for this scenario to trigger, the VMA currently being 737 * modified must also have a .close(). --- 9 unchanged lines hidden (view full) --- 747 vma_next->__vm_flags &= ~VM_MAYWRITE; 748 /* Allocate prev. */ 749 vmg.prev = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags); 750 /* Assign a vm_ops->close() function to VMA explicitly. */ 751 vma->vm_ops = &vm_ops; 752 vmg.vma = vma; 753 /* Make sure merge does not occur. */ 754 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 755 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
747 748 cleanup_mm(&mm, &vmi); 749 return true; 750} 751 752static bool test_vma_merge_new_with_close(void) 753{ 754 unsigned long flags = VM_READ | VM_WRITE | VM_MAYREAD | VM_MAYWRITE; --- 32 unchanged lines hidden (view full) --- 787 788 /* Have prev and next have a vm_ops->close() hook. */ 789 vma_prev->vm_ops = &vm_ops; 790 vma_next->vm_ops = &vm_ops; 791 792 vmg_set_range(&vmg, 0x2000, 0x5000, 2, flags); 793 vma = merge_new(&vmg); 794 ASSERT_NE(vma, NULL); | 756 757 cleanup_mm(&mm, &vmi); 758 return true; 759} 760 761static bool test_vma_merge_new_with_close(void) 762{ 763 unsigned long flags = VM_READ | VM_WRITE | VM_MAYREAD | VM_MAYWRITE; --- 32 unchanged lines hidden (view full) --- 796 797 /* Have prev and next have a vm_ops->close() hook. */ 798 vma_prev->vm_ops = &vm_ops; 799 vma_next->vm_ops = &vm_ops; 800 801 vmg_set_range(&vmg, 0x2000, 0x5000, 2, flags); 802 vma = merge_new(&vmg); 803 ASSERT_NE(vma, NULL); |
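The close() tests above rely on a file-local vm_ops whose definition falls in one of the hidden regions of this view. What matters to the merge logic is only that vma->vm_ops->close is non-NULL, so a stub along the following lines is enough to make an otherwise mergeable VMA refuse to merge; the dummy_close name here is illustrative rather than read off this page.

```c
/*
 * Illustrative sketch: the hidden part of the file presumably defines the
 * vm_ops used above as a vm_operations_struct whose only populated member
 * is a no-op close() hook, since merge eligibility keys off the hook being
 * present rather than on what it does.
 */
static void dummy_close(struct vm_area_struct *vma)
{
}

static const struct vm_operations_struct vm_ops = {
	.close = dummy_close,
};
```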
| 804 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
795 ASSERT_EQ(vma->vm_start, 0); 796 ASSERT_EQ(vma->vm_end, 0x5000); 797 ASSERT_EQ(vma->vm_pgoff, 0); 798 ASSERT_EQ(vma->vm_ops, &vm_ops); 799 ASSERT_TRUE(vma_write_started(vma)); 800 ASSERT_EQ(mm.map_count, 2); 801 802 cleanup_mm(&mm, &vmi); --- 23 unchanged lines hidden (view full) --- 826 */ 827 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); 828 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); 829 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags); 830 vmg.vma = vma; 831 vmg.prev = vma; 832 vma->anon_vma = &dummy_anon_vma; 833 ASSERT_EQ(merge_existing(&vmg), vma_next); | 805 ASSERT_EQ(vma->vm_start, 0); 806 ASSERT_EQ(vma->vm_end, 0x5000); 807 ASSERT_EQ(vma->vm_pgoff, 0); 808 ASSERT_EQ(vma->vm_ops, &vm_ops); 809 ASSERT_TRUE(vma_write_started(vma)); 810 ASSERT_EQ(mm.map_count, 2); 811 812 cleanup_mm(&mm, &vmi); --- 23 unchanged lines hidden (view full) --- 836 */ 837 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); 838 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); 839 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags); 840 vmg.vma = vma; 841 vmg.prev = vma; 842 vma->anon_vma = &dummy_anon_vma; 843 ASSERT_EQ(merge_existing(&vmg), vma_next); |
| 844 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
834 ASSERT_EQ(vma_next->vm_start, 0x3000); 835 ASSERT_EQ(vma_next->vm_end, 0x9000); 836 ASSERT_EQ(vma_next->vm_pgoff, 3); 837 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 838 ASSERT_EQ(vma->vm_start, 0x2000); 839 ASSERT_EQ(vma->vm_end, 0x3000); 840 ASSERT_EQ(vma->vm_pgoff, 2); 841 ASSERT_TRUE(vma_write_started(vma)); --- 14 unchanged lines hidden (view full) --- 856 * NNNNNNN 857 */ 858 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); 859 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); 860 vmg_set_range(&vmg, 0x2000, 0x6000, 2, flags); 861 vmg.vma = vma; 862 vma->anon_vma = &dummy_anon_vma; 863 ASSERT_EQ(merge_existing(&vmg), vma_next); | 845 ASSERT_EQ(vma_next->vm_start, 0x3000); 846 ASSERT_EQ(vma_next->vm_end, 0x9000); 847 ASSERT_EQ(vma_next->vm_pgoff, 3); 848 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 849 ASSERT_EQ(vma->vm_start, 0x2000); 850 ASSERT_EQ(vma->vm_end, 0x3000); 851 ASSERT_EQ(vma->vm_pgoff, 2); 852 ASSERT_TRUE(vma_write_started(vma)); --- 14 unchanged lines hidden (view full) --- 867 * NNNNNNN 868 */ 869 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); 870 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); 871 vmg_set_range(&vmg, 0x2000, 0x6000, 2, flags); 872 vmg.vma = vma; 873 vma->anon_vma = &dummy_anon_vma; 874 ASSERT_EQ(merge_existing(&vmg), vma_next); |
| 875 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
864 ASSERT_EQ(vma_next->vm_start, 0x2000); 865 ASSERT_EQ(vma_next->vm_end, 0x9000); 866 ASSERT_EQ(vma_next->vm_pgoff, 2); 867 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 868 ASSERT_TRUE(vma_write_started(vma_next)); 869 ASSERT_EQ(mm.map_count, 1); 870 871 /* Clear down and reset. We should have deleted vma. */ --- 12 unchanged lines hidden (view full) --- 884 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 885 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 886 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags); 887 vmg.prev = vma_prev; 888 vmg.vma = vma; 889 vma->anon_vma = &dummy_anon_vma; 890 891 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 876 ASSERT_EQ(vma_next->vm_start, 0x2000); 877 ASSERT_EQ(vma_next->vm_end, 0x9000); 878 ASSERT_EQ(vma_next->vm_pgoff, 2); 879 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 880 ASSERT_TRUE(vma_write_started(vma_next)); 881 ASSERT_EQ(mm.map_count, 1); 882 883 /* Clear down and reset. We should have deleted vma. */ --- 12 unchanged lines hidden (view full) --- 896 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 897 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 898 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags); 899 vmg.prev = vma_prev; 900 vmg.vma = vma; 901 vma->anon_vma = &dummy_anon_vma; 902 903 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 904 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
892 ASSERT_EQ(vma_prev->vm_start, 0); 893 ASSERT_EQ(vma_prev->vm_end, 0x6000); 894 ASSERT_EQ(vma_prev->vm_pgoff, 0); 895 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 896 ASSERT_EQ(vma->vm_start, 0x6000); 897 ASSERT_EQ(vma->vm_end, 0x7000); 898 ASSERT_EQ(vma->vm_pgoff, 6); 899 ASSERT_TRUE(vma_write_started(vma_prev)); --- 15 unchanged lines hidden (view full) --- 915 */ 916 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 917 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 918 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 919 vmg.prev = vma_prev; 920 vmg.vma = vma; 921 vma->anon_vma = &dummy_anon_vma; 922 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 905 ASSERT_EQ(vma_prev->vm_start, 0); 906 ASSERT_EQ(vma_prev->vm_end, 0x6000); 907 ASSERT_EQ(vma_prev->vm_pgoff, 0); 908 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 909 ASSERT_EQ(vma->vm_start, 0x6000); 910 ASSERT_EQ(vma->vm_end, 0x7000); 911 ASSERT_EQ(vma->vm_pgoff, 6); 912 ASSERT_TRUE(vma_write_started(vma_prev)); --- 15 unchanged lines hidden (view full) --- 928 */ 929 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 930 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 931 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 932 vmg.prev = vma_prev; 933 vmg.vma = vma; 934 vma->anon_vma = &dummy_anon_vma; 935 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 936 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
923 ASSERT_EQ(vma_prev->vm_start, 0); 924 ASSERT_EQ(vma_prev->vm_end, 0x7000); 925 ASSERT_EQ(vma_prev->vm_pgoff, 0); 926 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 927 ASSERT_TRUE(vma_write_started(vma_prev)); 928 ASSERT_EQ(mm.map_count, 1); 929 930 /* Clear down and reset. We should have deleted vma. */ --- 12 unchanged lines hidden (view full) --- 943 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 944 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 945 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); 946 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 947 vmg.prev = vma_prev; 948 vmg.vma = vma; 949 vma->anon_vma = &dummy_anon_vma; 950 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 937 ASSERT_EQ(vma_prev->vm_start, 0); 938 ASSERT_EQ(vma_prev->vm_end, 0x7000); 939 ASSERT_EQ(vma_prev->vm_pgoff, 0); 940 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 941 ASSERT_TRUE(vma_write_started(vma_prev)); 942 ASSERT_EQ(mm.map_count, 1); 943 944 /* Clear down and reset. We should have deleted vma. */ --- 12 unchanged lines hidden (view full) --- 957 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 958 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); 959 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); 960 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 961 vmg.prev = vma_prev; 962 vmg.vma = vma; 963 vma->anon_vma = &dummy_anon_vma; 964 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 965 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
951 ASSERT_EQ(vma_prev->vm_start, 0); 952 ASSERT_EQ(vma_prev->vm_end, 0x9000); 953 ASSERT_EQ(vma_prev->vm_pgoff, 0); 954 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 955 ASSERT_TRUE(vma_write_started(vma_prev)); 956 ASSERT_EQ(mm.map_count, 1); 957 958 /* Clear down and reset. We should have deleted prev and next. */ --- 17 unchanged lines hidden (view full) --- 976 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 977 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); 978 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags); 979 980 vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags); 981 vmg.prev = vma; 982 vmg.vma = vma; 983 ASSERT_EQ(merge_existing(&vmg), NULL); | 966 ASSERT_EQ(vma_prev->vm_start, 0); 967 ASSERT_EQ(vma_prev->vm_end, 0x9000); 968 ASSERT_EQ(vma_prev->vm_pgoff, 0); 969 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 970 ASSERT_TRUE(vma_write_started(vma_prev)); 971 ASSERT_EQ(mm.map_count, 1); 972 973 /* Clear down and reset. We should have deleted prev and next. */ --- 17 unchanged lines hidden (view full) --- 991 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); 992 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); 993 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags); 994 995 vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags); 996 vmg.prev = vma; 997 vmg.vma = vma; 998 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 999 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
984 985 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags); 986 vmg.prev = vma; 987 vmg.vma = vma; 988 ASSERT_EQ(merge_existing(&vmg), NULL); | 1000 1001 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags); 1002 vmg.prev = vma; 1003 vmg.vma = vma; 1004 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1005 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
989 990 vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags); 991 vmg.prev = vma; 992 vmg.vma = vma; 993 ASSERT_EQ(merge_existing(&vmg), NULL); | 1006 1007 vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags); 1008 vmg.prev = vma; 1009 vmg.vma = vma; 1010 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1011 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
994 995 vmg_set_range(&vmg, 0x4000, 0x7000, 4, flags); 996 vmg.prev = vma; 997 vmg.vma = vma; 998 ASSERT_EQ(merge_existing(&vmg), NULL); | 1012 1013 vmg_set_range(&vmg, 0x4000, 0x7000, 4, flags); 1014 vmg.prev = vma; 1015 vmg.vma = vma; 1016 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1017 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
999 1000 vmg_set_range(&vmg, 0x4000, 0x6000, 4, flags); 1001 vmg.prev = vma; 1002 vmg.vma = vma; 1003 ASSERT_EQ(merge_existing(&vmg), NULL); | 1018 1019 vmg_set_range(&vmg, 0x4000, 0x6000, 4, flags); 1020 vmg.prev = vma; 1021 vmg.vma = vma; 1022 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1023 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
1004 1005 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags); 1006 vmg.prev = vma; 1007 vmg.vma = vma; 1008 ASSERT_EQ(merge_existing(&vmg), NULL); | 1024 1025 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags); 1026 vmg.prev = vma; 1027 vmg.vma = vma; 1028 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1029 ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE); |
1009 1010 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); 1011 1012 return true; 1013} 1014 1015static bool test_anon_vma_non_mergeable(void) 1016{ --- 49 unchanged lines hidden (view full) --- 1066 1067 ASSERT_FALSE(is_mergeable_anon_vma(vma_prev->anon_vma, vma_next->anon_vma, NULL)); 1068 1069 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 1070 vmg.prev = vma_prev; 1071 vmg.vma = vma; 1072 1073 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 1030 1031 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); 1032 1033 return true; 1034} 1035 1036static bool test_anon_vma_non_mergeable(void) 1037{ --- 49 unchanged lines hidden (view full) --- 1087 1088 ASSERT_FALSE(is_mergeable_anon_vma(vma_prev->anon_vma, vma_next->anon_vma, NULL)); 1089 1090 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 1091 vmg.prev = vma_prev; 1092 vmg.vma = vma; 1093 1094 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 1095 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1074 ASSERT_EQ(vma_prev->vm_start, 0); 1075 ASSERT_EQ(vma_prev->vm_end, 0x7000); 1076 ASSERT_EQ(vma_prev->vm_pgoff, 0); 1077 ASSERT_TRUE(vma_write_started(vma_prev)); 1078 ASSERT_FALSE(vma_write_started(vma_next)); 1079 1080 /* Clear down and reset. */ 1081 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); --- 19 unchanged lines hidden (view full) --- 1101 INIT_LIST_HEAD(&vma_next->anon_vma_chain); 1102 list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain); 1103 vma_next->anon_vma = (struct anon_vma *)2; 1104 1105 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 1106 vmg.prev = vma_prev; 1107 1108 ASSERT_EQ(merge_new(&vmg), vma_prev); | 1096 ASSERT_EQ(vma_prev->vm_start, 0); 1097 ASSERT_EQ(vma_prev->vm_end, 0x7000); 1098 ASSERT_EQ(vma_prev->vm_pgoff, 0); 1099 ASSERT_TRUE(vma_write_started(vma_prev)); 1100 ASSERT_FALSE(vma_write_started(vma_next)); 1101 1102 /* Clear down and reset. */ 1103 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); --- 19 unchanged lines hidden (view full) --- 1123 INIT_LIST_HEAD(&vma_next->anon_vma_chain); 1124 list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain); 1125 vma_next->anon_vma = (struct anon_vma *)2; 1126 1127 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags); 1128 vmg.prev = vma_prev; 1129 1130 ASSERT_EQ(merge_new(&vmg), vma_prev); |
| 1131 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1109 ASSERT_EQ(vma_prev->vm_start, 0); 1110 ASSERT_EQ(vma_prev->vm_end, 0x7000); 1111 ASSERT_EQ(vma_prev->vm_pgoff, 0); 1112 ASSERT_TRUE(vma_write_started(vma_prev)); 1113 ASSERT_FALSE(vma_write_started(vma_next)); 1114 1115 /* Final cleanup. */ 1116 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); --- 59 unchanged lines hidden (view full) --- 1176 list_add(&dummy_anon_vma_chain.same_vma, &vma_next->anon_vma_chain); 1177 1178 vma_next->anon_vma = &dummy_anon_vma; 1179 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1180 vmg.prev = vma_prev; 1181 vmg.vma = vma; 1182 1183 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 1132 ASSERT_EQ(vma_prev->vm_start, 0); 1133 ASSERT_EQ(vma_prev->vm_end, 0x7000); 1134 ASSERT_EQ(vma_prev->vm_pgoff, 0); 1135 ASSERT_TRUE(vma_write_started(vma_prev)); 1136 ASSERT_FALSE(vma_write_started(vma_next)); 1137 1138 /* Final cleanup. */ 1139 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); --- 59 unchanged lines hidden (view full) --- 1199 list_add(&dummy_anon_vma_chain.same_vma, &vma_next->anon_vma_chain); 1200 1201 vma_next->anon_vma = &dummy_anon_vma; 1202 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1203 vmg.prev = vma_prev; 1204 vmg.vma = vma; 1205 1206 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 1207 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1184 1185 ASSERT_EQ(vma_prev->vm_start, 0); 1186 ASSERT_EQ(vma_prev->vm_end, 0x8000); 1187 1188 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1189 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1190 1191 cleanup_mm(&mm, &vmi); --- 12 unchanged lines hidden (view full) --- 1204 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); 1205 1206 vma->anon_vma = &dummy_anon_vma; 1207 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1208 vmg.prev = vma_prev; 1209 vmg.vma = vma; 1210 1211 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 1208 1209 ASSERT_EQ(vma_prev->vm_start, 0); 1210 ASSERT_EQ(vma_prev->vm_end, 0x8000); 1211 1212 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1213 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1214 1215 cleanup_mm(&mm, &vmi); --- 12 unchanged lines hidden (view full) --- 1228 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); 1229 1230 vma->anon_vma = &dummy_anon_vma; 1231 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1232 vmg.prev = vma_prev; 1233 vmg.vma = vma; 1234 1235 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 1236 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1212 1213 ASSERT_EQ(vma_prev->vm_start, 0); 1214 ASSERT_EQ(vma_prev->vm_end, 0x8000); 1215 1216 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1217 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1218 1219 cleanup_mm(&mm, &vmi); --- 11 unchanged lines hidden (view full) --- 1231 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); 1232 1233 vma->anon_vma = &dummy_anon_vma; 1234 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1235 vmg.prev = vma_prev; 1236 vmg.vma = vma; 1237 1238 ASSERT_EQ(merge_existing(&vmg), vma_prev); | 1237 1238 ASSERT_EQ(vma_prev->vm_start, 0); 1239 ASSERT_EQ(vma_prev->vm_end, 0x8000); 1240 1241 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1242 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1243 1244 cleanup_mm(&mm, &vmi); --- 11 unchanged lines hidden (view full) --- 1256 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); 1257 1258 vma->anon_vma = &dummy_anon_vma; 1259 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1260 vmg.prev = vma_prev; 1261 vmg.vma = vma; 1262 1263 ASSERT_EQ(merge_existing(&vmg), vma_prev); |
| 1264 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1239 1240 ASSERT_EQ(vma_prev->vm_start, 0); 1241 ASSERT_EQ(vma_prev->vm_end, 0x5000); 1242 1243 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1244 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1245 1246 cleanup_mm(&mm, &vmi); --- 11 unchanged lines hidden (view full) --- 1258 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); 1259 1260 vma->anon_vma = &dummy_anon_vma; 1261 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1262 vmg.prev = vma; 1263 vmg.vma = vma; 1264 1265 ASSERT_EQ(merge_existing(&vmg), vma_next); | 1265 1266 ASSERT_EQ(vma_prev->vm_start, 0); 1267 ASSERT_EQ(vma_prev->vm_end, 0x5000); 1268 1269 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1270 ASSERT_TRUE(vma_prev->anon_vma->was_cloned); 1271 1272 cleanup_mm(&mm, &vmi); --- 11 unchanged lines hidden (view full) --- 1284 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); 1285 1286 vma->anon_vma = &dummy_anon_vma; 1287 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1288 vmg.prev = vma; 1289 vmg.vma = vma; 1290 1291 ASSERT_EQ(merge_existing(&vmg), vma_next); |
| 1292 ASSERT_EQ(vmg.state, VMA_MERGE_SUCCESS); |
1266 1267 ASSERT_EQ(vma_next->vm_start, 0x3000); 1268 ASSERT_EQ(vma_next->vm_end, 0x8000); 1269 1270 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 1271 ASSERT_TRUE(vma_next->anon_vma->was_cloned); 1272 1273 cleanup_mm(&mm, &vmi); --- 24 unchanged lines hidden (view full) --- 1298 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1299 vmg.prev = vma_prev; 1300 vmg.vma = vma; 1301 1302 fail_prealloc = true; 1303 1304 /* This will cause the merge to fail. */ 1305 ASSERT_EQ(merge_existing(&vmg), NULL); | 1293 1294 ASSERT_EQ(vma_next->vm_start, 0x3000); 1295 ASSERT_EQ(vma_next->vm_end, 0x8000); 1296 1297 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma); 1298 ASSERT_TRUE(vma_next->anon_vma->was_cloned); 1299 1300 cleanup_mm(&mm, &vmi); --- 24 unchanged lines hidden (view full) --- 1325 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags); 1326 vmg.prev = vma_prev; 1327 vmg.vma = vma; 1328 1329 fail_prealloc = true; 1330 1331 /* This will cause the merge to fail. */ 1332 ASSERT_EQ(merge_existing(&vmg), NULL); |
| 1333 ASSERT_EQ(vmg.state, VMA_MERGE_ERROR_NOMEM); |
1306 /* We will already have assigned the anon_vma. */ 1307 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1308 /* And it was both cloned and unlinked. */ 1309 ASSERT_TRUE(dummy_anon_vma.was_cloned); 1310 ASSERT_TRUE(dummy_anon_vma.was_unlinked); 1311 1312 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ 1313 --- 8 unchanged lines hidden (view full) --- 1322 vma->anon_vma = &dummy_anon_vma; 1323 1324 vmg_set_range(&vmg, 0, 0x5000, 3, flags); 1325 vmg.vma = vma_prev; 1326 vmg.next = vma; 1327 1328 fail_prealloc = true; 1329 ASSERT_EQ(expand_existing(&vmg), -ENOMEM); | 1334 /* We will already have assigned the anon_vma. */ 1335 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1336 /* And it was both cloned and unlinked. */ 1337 ASSERT_TRUE(dummy_anon_vma.was_cloned); 1338 ASSERT_TRUE(dummy_anon_vma.was_unlinked); 1339 1340 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ 1341 --- 8 unchanged lines hidden (view full) --- 1350 vma->anon_vma = &dummy_anon_vma; 1351 1352 vmg_set_range(&vmg, 0, 0x5000, 3, flags); 1353 vmg.vma = vma_prev; 1354 vmg.next = vma; 1355 1356 fail_prealloc = true; 1357 ASSERT_EQ(expand_existing(&vmg), -ENOMEM); |
| 1358 ASSERT_EQ(vmg.state, VMA_MERGE_ERROR_NOMEM); |
1330 1331 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1332 ASSERT_TRUE(dummy_anon_vma.was_cloned); 1333 ASSERT_TRUE(dummy_anon_vma.was_unlinked); 1334 1335 cleanup_mm(&mm, &vmi); 1336 return true; 1337} --- 102 unchanged lines hidden --- | 1359 1360 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma); 1361 ASSERT_TRUE(dummy_anon_vma.was_cloned); 1362 ASSERT_TRUE(dummy_anon_vma.was_unlinked); 1363 1364 cleanup_mm(&mm, &vmi); 1365 return true; 1366} --- 102 unchanged lines hidden --- |
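Finally, the two -ENOMEM hunks above drive the new VMA_MERGE_ERROR_NOMEM state by flipping the file-local fail_prealloc flag before calling merge_existing() and expand_existing(); cleanup_mm() then clears it again, which is what the "Resets fail_prealloc too." comment refers to. The flag's hook into the allocator sits in a hidden region of the file; assuming the upstream definition, it overrides iterator preallocation roughly like this:

```c
/*
 * Assumed shape of the OOM injection used by the final hunks. The real
 * definition lives in a portion of vma.c hidden by this view; the idea is
 * that maple tree preallocation can be made to fail on demand, which the
 * merge/expand paths then surface as -ENOMEM and VMA_MERGE_ERROR_NOMEM.
 */
static bool fail_prealloc;

/* Override the preallocation helper so tests can force an allocation failure. */
#define vma_iter_prealloc(vmi, vma)					\
	(fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
```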