Matching lines from tools/testing/vma/vma.c, the userland unit tests for the kernel's VMA merge/modify logic; each line is prefixed with its line number in that file.
1 // SPDX-License-Identifier: GPL-2.0-or-later
7 #include "generated/bit-length.h"
9 #include "maple-shared.h"
13 #include "../../../mm/vma.h"
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
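
A minimal sketch of how a test drives this fault-injection shim (it mirrors
test_vmi_prealloc_fail() further down; the vmg, vmi and flags locals are
assumed to be set up as in the tests below):

        fail_prealloc = true;              /* next vma_iter_prealloc() returns -ENOMEM */
        vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
        ASSERT_EQ(merge_new(&vmg), NULL);  /* allocation failure aborts the merge */
        ASSERT_EQ(vmg.state, VMA_MERGE_ERROR_NOMEM);
        fail_prealloc = false;             /* cleanup_mm() resets this too */
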
29 * provides userland-equivalent functionality for everything vma.c uses.
31 #include "../../../mm/vma.c"
44 } while (0)
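
The "} while (0)" above closes one of the harness's ASSERT_*() helpers, which
report a diagnostic and fail the current test on mismatch. A sketch of the
shape (the exact message text is an assumption; returning false works because
each test_*() function returns bool):

        #define ASSERT_TRUE(_expr)                                      \
                do {                                                    \
                        if (!(_expr)) {                                 \
                                fprintf(stderr,                         \
                                        "ASSERT FAILED at %s:%d: %s\n", \
                                        __FILE__, __LINE__, #_expr);    \
                                return false;                           \
                        }                                               \
                } while (0)
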
58 return (unsigned long)-1;
62 static struct vm_area_struct *alloc_vma(struct mm_struct *mm,
68 struct vm_area_struct *ret = vm_area_alloc(mm);
73 ret->vm_start = start;
74 ret->vm_end = end;
75 ret->vm_pgoff = pgoff;
76 ret->__vm_flags = flags;
83 static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma)
87 res = vma_link(mm, vma);
94 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
100 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags);
105 if (attach_vma(mm, vma)) {
115 vma->vm_lock_seq = UINT_MAX;
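
Typical test setup using the three helpers above (values illustrative; the
flags combination follows the pattern the tests below use):

        struct mm_struct mm = {};
        VMA_ITERATOR(vmi, &mm, 0);
        unsigned long flags = VM_READ | VM_WRITE | VM_MAYREAD | VM_MAYWRITE;

        /* Allocate a VMA over [0x1000, 0x3000) at pgoff 1 and link it into
         * the maple tree; vma_link() also bumps mm.map_count. */
        struct vm_area_struct *vma = alloc_and_link_vma(&mm, 0x1000, 0x3000, 1, flags);

        ASSERT_NE(vma, NULL);
        ASSERT_EQ(mm.map_count, 1);
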
128 vmg->next = vma_next(vmg->vmi);
129 vmg->prev = vma_prev(vmg->vmi);
130 vma_iter_next_range(vmg->vmi);
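
These fragments are the core of the merge_new() helper: position prev/next
around the proposed range, then hand off to the kernel-side merge. A sketch
of the full helper under that assumption:

        static struct vm_area_struct *merge_new(struct vma_merge_struct *vmg)
        {
                /* Identify the VMAs bracketing the gap the new range fills. */
                vmg->next = vma_next(vmg->vmi);
                vmg->prev = vma_prev(vmg->vmi);
                vma_iter_next_range(vmg->vmi);

                return vma_merge_new_range(vmg);
        }
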
169 vma_iter_set(vmg->vmi, start);
171 vmg->prev = NULL;
172 vmg->middle = NULL;
173 vmg->next = NULL;
174 vmg->target = NULL;
176 vmg->start = start;
177 vmg->end = end;
178 vmg->pgoff = pgoff;
179 vmg->flags = flags;
181 vmg->just_expand = false;
182 vmg->__remove_middle = false;
183 vmg->__remove_next = false;
184 vmg->__adjust_middle_start = false;
185 vmg->__adjust_next_start = false;
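
Because vmg_set_range() clears the outputs (prev/middle/next/target plus the
__-prefixed bookkeeping flags), a single vmg can be reused across sub-cases.
A sketch following the non-merge probes below (merge_existing() is the
harness's existing-VMA counterpart to merge_new(), assumed from those tests):

        vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags);
        vmg.prev = vma;
        vmg.middle = vma;
        ASSERT_EQ(merge_existing(&vmg), NULL);
        ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE);

        /* No stale state leaks into the next probe. */
        vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags);
        vmg.prev = vma;
        vmg.middle = vma;
        ASSERT_EQ(merge_existing(&vmg), NULL);
        ASSERT_EQ(vmg.state, VMA_MERGE_NOMERGE);
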
194 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
207 ASSERT_EQ(vmg->state, VMA_MERGE_SUCCESS);
213 ASSERT_EQ(vmg->state, VMA_MERGE_NOMERGE);
215 return alloc_and_link_vma(mm, start, end, pgoff, flags);
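
Callers can treat the result uniformly whether or not a merge happened; a
sketch of typical use, mirroring test_merge_new() below:

        bool merged;
        struct vm_area_struct *vma;

        vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
        /* vma is either an expanded neighbour (merged == true) or a freshly
         * linked VMA (merged == false); the asserts above already verified
         * vmg->state in both cases. */
        ASSERT_NE(vma, NULL);
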
232 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
235 int count = 0;
240 vma_iter_set(vmi, 0);
246 mtree_destroy(&mm->mm_mt);
247 mm->map_count = 0;
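
Tests use the returned VMA count as a final over-merge/leak check; a sketch:

        /* Expect exactly three VMAs to survive this test; any other count
         * means it merged too much or leaked a mapping. */
        ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
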
254 int seq = vma->vm_lock_seq;
257 vma->vm_lock_seq = UINT_MAX;
260 return seq > -1;
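
This helper backs the write-lock assertions used throughout the tests; a
sketch of the usual pattern after a merge:

        /* A successful merge must have write-locked the VMAs it touched,
         * and only those. */
        ASSERT_TRUE(vma_write_started(vma_prev));
        ASSERT_FALSE(vma_write_started(vma_next));
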
263 /* Helper function providing a dummy vm_ops->close() method. */
272 struct mm_struct mm = {};
273 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags);
274 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags);
275 VMA_ITERATOR(vmi, &mm, 0x1000);
277 .mm = &mm,
279 .start = 0x1000,
280 .end = 0x2000,
285 ASSERT_FALSE(attach_vma(&mm, vma_left));
286 ASSERT_FALSE(attach_vma(&mm, vma_right));
291 ASSERT_EQ(vma->vm_start, 0);
292 ASSERT_EQ(vma->vm_end, 0x3000);
293 ASSERT_EQ(vma->vm_pgoff, 0);
294 ASSERT_EQ(vma->vm_flags, flags);
297 mtree_destroy(&mm.mm_mt);
306 struct mm_struct mm = {};
307 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
308 VMA_ITERATOR(vmi, &mm, 0x1000);
310 ASSERT_FALSE(attach_vma(&mm, init_vma));
317 0x1000, 0x2000, VM_READ | VM_MAYREAD);
322 ASSERT_EQ(vma->vm_start, 0x1000);
323 ASSERT_EQ(vma->vm_end, 0x2000);
324 ASSERT_EQ(vma->vm_pgoff, 1);
331 vma_iter_set(&vmi, 0);
334 ASSERT_EQ(vma->vm_start, 0);
335 ASSERT_EQ(vma->vm_end, 0x1000);
336 ASSERT_EQ(vma->vm_pgoff, 0);
343 ASSERT_EQ(vma->vm_start, 0x1000);
344 ASSERT_EQ(vma->vm_end, 0x2000);
345 ASSERT_EQ(vma->vm_pgoff, 1);
352 ASSERT_EQ(vma->vm_start, 0x2000);
353 ASSERT_EQ(vma->vm_end, 0x3000);
354 ASSERT_EQ(vma->vm_pgoff, 2);
357 mtree_destroy(&mm.mm_mt);
365 struct mm_struct mm = {};
366 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags);
367 VMA_ITERATOR(vmi, &mm, 0);
371 .start = 0,
372 .end = 0x3000,
373 .pgoff = 0,
376 ASSERT_FALSE(attach_vma(&mm, vma));
380 ASSERT_EQ(vma->vm_start, 0);
381 ASSERT_EQ(vma->vm_end, 0x3000);
382 ASSERT_EQ(vma->vm_pgoff, 0);
385 mtree_destroy(&mm.mm_mt);
393 struct mm_struct mm = {};
394 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags);
395 VMA_ITERATOR(vmi, &mm, 0);
397 ASSERT_FALSE(attach_vma(&mm, vma));
399 ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));
401 ASSERT_EQ(vma->vm_start, 0);
402 ASSERT_EQ(vma->vm_end, 0x1000);
403 ASSERT_EQ(vma->vm_pgoff, 0);
406 mtree_destroy(&mm.mm_mt);
414 struct mm_struct mm = {};
415 VMA_ITERATOR(vmi, &mm, 0);
417 .mm = &mm,
443 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
446 INIT_LIST_HEAD(&vma_a->anon_vma_chain);
447 list_add(&dummy_anon_vma_chain_a.same_vma, &vma_a->anon_vma_chain);
449 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
451 INIT_LIST_HEAD(&vma_b->anon_vma_chain);
452 list_add(&dummy_anon_vma_chain_b.same_vma, &vma_b->anon_vma_chain);
454 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, flags);
456 INIT_LIST_HEAD(&vma_c->anon_vma_chain);
457 list_add(&dummy_anon_vma_chain_c.same_vma, &vma_c->anon_vma_chain);
465 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, flags, &merged);
467 INIT_LIST_HEAD(&vma_d->anon_vma_chain);
468 list_add(&dummy_anon_vma_chain_d.same_vma, &vma_d->anon_vma_chain);
470 ASSERT_EQ(mm.map_count, 4);
478 vma_a->vm_ops = &vm_ops; /* This should have no impact. */
479 vma_b->anon_vma = &dummy_anon_vma;
480 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged);
484 ASSERT_EQ(vma->vm_start, 0);
485 ASSERT_EQ(vma->vm_end, 0x4000);
486 ASSERT_EQ(vma->vm_pgoff, 0);
487 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
489 ASSERT_EQ(mm.map_count, 3);
497 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged);
501 ASSERT_EQ(vma->vm_start, 0);
502 ASSERT_EQ(vma->vm_end, 0x5000);
503 ASSERT_EQ(vma->vm_pgoff, 0);
504 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
506 ASSERT_EQ(mm.map_count, 3);
514 vma_d->anon_vma = &dummy_anon_vma;
515 vma_d->vm_ops = &vm_ops; /* This should have no impact. */
516 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged);
520 ASSERT_EQ(vma->vm_start, 0x6000);
521 ASSERT_EQ(vma->vm_end, 0x9000);
522 ASSERT_EQ(vma->vm_pgoff, 6);
523 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
525 ASSERT_EQ(mm.map_count, 3);
533 vma_d->vm_ops = NULL; /* This would otherwise degrade the merge. */
534 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged);
538 ASSERT_EQ(vma->vm_start, 0);
539 ASSERT_EQ(vma->vm_end, 0x9000);
540 ASSERT_EQ(vma->vm_pgoff, 0);
541 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
543 ASSERT_EQ(mm.map_count, 2);
551 vma_c->anon_vma = &dummy_anon_vma;
552 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged);
556 ASSERT_EQ(vma->vm_start, 0xa000);
557 ASSERT_EQ(vma->vm_end, 0xc000);
558 ASSERT_EQ(vma->vm_pgoff, 0xa);
559 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
561 ASSERT_EQ(mm.map_count, 2);
569 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged);
573 ASSERT_EQ(vma->vm_start, 0);
574 ASSERT_EQ(vma->vm_end, 0xc000);
575 ASSERT_EQ(vma->vm_pgoff, 0);
576 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
578 ASSERT_EQ(mm.map_count, 1);
587 count = 0;
588 vma_iter_set(&vmi, 0);
591 ASSERT_EQ(vma->vm_start, 0);
592 ASSERT_EQ(vma->vm_end, 0xc000);
593 ASSERT_EQ(vma->vm_pgoff, 0);
594 ASSERT_EQ(vma->anon_vma, &dummy_anon_vma);
603 mtree_destroy(&mm.mm_mt);
610 struct mm_struct mm = {};
611 VMA_ITERATOR(vmi, &mm, 0);
613 .mm = &mm,
617 vm_flags_t all_special_flags = 0;
622 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
631 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
642 vmg_set_range(&vmg, 0x3000, 0x4000, 3, flags);
643 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
646 vma_left->__vm_flags = flags | special_flag;
661 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
665 for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
668 vma_left->__vm_flags = flags | special_flag;
675 cleanup_mm(&mm, &vmi);
682 struct mm_struct mm = {};
683 VMA_ITERATOR(vmi, &mm, 0);
685 .mm = &mm,
695 * vm_ops->close() hook.
709 * has a vm_ops->close() callback that will need to be called when
720 * vm_ops->close: - - !NULL
729 * vm_ops->close: - !NULL
738 * - !NULL NULL
741 * Cannot occur, because vma->vm_ops being the same implies the same
742 * vma->vm_file, and therefore this would mean that next->vm_ops->close
748 * is one where both the previous and next VMAs are merged - in this
756 * ->
761 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
762 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
763 vma_next->vm_ops = &vm_ops;
765 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
768 ASSERT_EQ(vma_prev->vm_start, 0);
769 ASSERT_EQ(vma_prev->vm_end, 0x5000);
770 ASSERT_EQ(vma_prev->vm_pgoff, 0);
772 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
786 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
787 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
788 vma->vm_ops = &vm_ops;
790 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
801 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
814 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
815 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
816 vma->vm_ops = &vm_ops;
818 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
828 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
842 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
843 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
844 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
845 vma->vm_ops = &vm_ops;
847 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
854 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
863 * ->
868 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
869 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
870 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags);
871 vma_next->vm_ops = &vm_ops;
873 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
879 ASSERT_EQ(vma_prev->vm_start, 0);
880 ASSERT_EQ(vma_prev->vm_end, 0x5000);
881 ASSERT_EQ(vma_prev->vm_pgoff, 0);
883 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
891 struct mm_struct mm = {};
892 VMA_ITERATOR(vmi, &mm, 0);
894 .mm = &mm,
897 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
898 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, flags);
906 * surrounding VMAs have vm_ops->close() hooks (but are otherwise
910 * A v-------v B
911 * |-----| |-----|
920 * |------------||-----|
924 /* Give both prev and next a vm_ops->close() hook. */
925 vma_prev->vm_ops = &vm_ops;
926 vma_next->vm_ops = &vm_ops;
928 vmg_set_range(&vmg, 0x2000, 0x5000, 2, flags);
932 ASSERT_EQ(vma->vm_start, 0);
933 ASSERT_EQ(vma->vm_end, 0x5000);
934 ASSERT_EQ(vma->vm_pgoff, 0);
935 ASSERT_EQ(vma->vm_ops, &vm_ops);
937 ASSERT_EQ(mm.map_count, 2);
939 cleanup_mm(&mm, &vmi);
946 struct mm_struct mm = {};
947 VMA_ITERATOR(vmi, &mm, 0);
950 .mm = &mm,
958 * Merge right case - partial span.
960 * <->
963 * ->
967 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
968 vma->vm_ops = &vm_ops; /* This should have no impact. */
969 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
970 vma_next->vm_ops = &vm_ops; /* This should have no impact. */
971 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
974 vma->anon_vma = &dummy_anon_vma;
977 ASSERT_EQ(vma_next->vm_start, 0x3000);
978 ASSERT_EQ(vma_next->vm_end, 0x9000);
979 ASSERT_EQ(vma_next->vm_pgoff, 3);
980 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
981 ASSERT_EQ(vma->vm_start, 0x2000);
982 ASSERT_EQ(vma->vm_end, 0x3000);
983 ASSERT_EQ(vma->vm_pgoff, 2);
986 ASSERT_EQ(mm.map_count, 2);
989 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
992 * Merge right case - full span.
994 * <-->
997 * ->
1001 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
1002 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);
1003 vma_next->vm_ops = &vm_ops; /* This should have no impact. */
1004 vmg_set_range(&vmg, 0x2000, 0x6000, 2, flags);
1006 vma->anon_vma = &dummy_anon_vma;
1009 ASSERT_EQ(vma_next->vm_start, 0x2000);
1010 ASSERT_EQ(vma_next->vm_end, 0x9000);
1011 ASSERT_EQ(vma_next->vm_pgoff, 2);
1012 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
1014 ASSERT_EQ(mm.map_count, 1);
1017 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1020 * Merge left case - partial span.
1022 * <->
1025 * ->
1029 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1030 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1031 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1032 vma->vm_ops = &vm_ops; /* This should have no impact. */
1033 vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);
1036 vma->anon_vma = &dummy_anon_vma;
1040 ASSERT_EQ(vma_prev->vm_start, 0);
1041 ASSERT_EQ(vma_prev->vm_end, 0x6000);
1042 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1043 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1044 ASSERT_EQ(vma->vm_start, 0x6000);
1045 ASSERT_EQ(vma->vm_end, 0x7000);
1046 ASSERT_EQ(vma->vm_pgoff, 6);
1049 ASSERT_EQ(mm.map_count, 2);
1052 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1055 * Merge left case - full span.
1057 * <-->
1060 * ->
1064 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1065 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1066 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1067 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1070 vma->anon_vma = &dummy_anon_vma;
1073 ASSERT_EQ(vma_prev->vm_start, 0);
1074 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1075 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1076 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1078 ASSERT_EQ(mm.map_count, 1);
1081 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1086 * <-->
1089 * ->
1093 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1094 vma_prev->vm_ops = &vm_ops; /* This should have no impact. */
1095 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1096 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1097 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1100 vma->anon_vma = &dummy_anon_vma;
1103 ASSERT_EQ(vma_prev->vm_start, 0);
1104 ASSERT_EQ(vma_prev->vm_end, 0x9000);
1105 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1106 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1108 ASSERT_EQ(mm.map_count, 1);
1111 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1);
1114 * Non-merge ranges. The modified VMA merge operation assumes that the
1118 * -
1119 * -
1120 * -
1121 * <->
1128 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1129 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1130 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags);
1132 vmg_set_range(&vmg, 0x4000, 0x5000, 4, flags);
1138 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);
1144 vmg_set_range(&vmg, 0x6000, 0x7000, 6, flags);
1150 vmg_set_range(&vmg, 0x4000, 0x7000, 4, flags);
1156 vmg_set_range(&vmg, 0x4000, 0x6000, 4, flags);
1162 vmg_set_range(&vmg, 0x5000, 0x6000, 5, flags);
1168 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3);
1176 struct mm_struct mm = {};
1177 VMA_ITERATOR(vmi, &mm, 0);
1180 .mm = &mm,
1195 * <-->
1198 * ->
1202 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1203 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags);
1204 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1208 * merge with the NULL vmg->anon_vma.
1213 INIT_LIST_HEAD(&vma_prev->anon_vma_chain);
1214 list_add(&dummy_anon_vma_chain1.same_vma, &vma_prev->anon_vma_chain);
1215 ASSERT_TRUE(list_is_singular(&vma_prev->anon_vma_chain));
1216 vma_prev->anon_vma = &dummy_anon_vma;
1217 ASSERT_TRUE(is_mergeable_anon_vma(NULL, vma_prev->anon_vma, vma_prev));
1219 INIT_LIST_HEAD(&vma_next->anon_vma_chain);
1220 list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain);
1221 ASSERT_TRUE(list_is_singular(&vma_next->anon_vma_chain));
1222 vma_next->anon_vma = (struct anon_vma *)2;
1223 ASSERT_TRUE(is_mergeable_anon_vma(NULL, vma_next->anon_vma, vma_next));
1225 ASSERT_FALSE(is_mergeable_anon_vma(vma_prev->anon_vma, vma_next->anon_vma, NULL));
1227 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1233 ASSERT_EQ(vma_prev->vm_start, 0);
1234 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1235 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1240 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1246 * <-->
1249 * ->
1253 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1254 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags);
1256 INIT_LIST_HEAD(&vma_prev->anon_vma_chain);
1257 list_add(&dummy_anon_vma_chain1.same_vma, &vma_prev->anon_vma_chain);
1258 vma_prev->anon_vma = (struct anon_vma *)1;
1260 INIT_LIST_HEAD(&vma_next->anon_vma_chain);
1261 list_add(&dummy_anon_vma_chain2.same_vma, &vma_next->anon_vma_chain);
1262 vma_next->anon_vma = (struct anon_vma *)2;
1264 vmg_set_range(&vmg, 0x3000, 0x7000, 3, flags);
1269 ASSERT_EQ(vma_prev->vm_start, 0);
1270 ASSERT_EQ(vma_prev->vm_end, 0x7000);
1271 ASSERT_EQ(vma_prev->vm_pgoff, 0);
1276 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);
1284 struct mm_struct mm = {};
1285 VMA_ITERATOR(vmi, &mm, 0);
1287 .mm = &mm,
1304 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1305 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1306 vma_next->anon_vma = &dummy_anon_vma;
1308 vmg_set_range(&vmg, 0, 0x5000, 0, flags);
1312 ASSERT_EQ(expand_existing(&vmg), 0);
1315 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1316 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1319 cleanup_mm(&mm, &vmi);
1324 * |<----->|
1325 * |-------*********-------|
1330 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1331 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1332 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1335 INIT_LIST_HEAD(&vma_next->anon_vma_chain);
1336 list_add(&dummy_anon_vma_chain.same_vma, &vma_next->anon_vma_chain);
1338 vma_next->anon_vma = &dummy_anon_vma;
1339 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1346 ASSERT_EQ(vma_prev->vm_start, 0);
1347 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1349 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1350 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1352 cleanup_mm(&mm, &vmi);
1357 * |<----->|
1358 * |-------*********-------|
1363 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1364 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1365 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1367 vma->anon_vma = &dummy_anon_vma;
1368 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1375 ASSERT_EQ(vma_prev->vm_start, 0);
1376 ASSERT_EQ(vma_prev->vm_end, 0x8000);
1378 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1379 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1381 cleanup_mm(&mm, &vmi);
1386 * |<----->|
1387 * |-------*************
1392 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1393 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags);
1395 vma->anon_vma = &dummy_anon_vma;
1396 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1403 ASSERT_EQ(vma_prev->vm_start, 0);
1404 ASSERT_EQ(vma_prev->vm_end, 0x5000);
1406 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1407 ASSERT_TRUE(vma_prev->anon_vma->was_cloned);
1409 cleanup_mm(&mm, &vmi);
1414 * |<----->|
1415 * *************-------|
1420 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags);
1421 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags);
1423 vma->anon_vma = &dummy_anon_vma;
1424 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1431 ASSERT_EQ(vma_next->vm_start, 0x3000);
1432 ASSERT_EQ(vma_next->vm_end, 0x8000);
1434 ASSERT_EQ(vma_next->anon_vma, &dummy_anon_vma);
1435 ASSERT_TRUE(vma_next->anon_vma->was_cloned);
1437 cleanup_mm(&mm, &vmi);
1444 struct mm_struct mm = {};
1445 VMA_ITERATOR(vmi, &mm, 0);
1447 .mm = &mm,
1458 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1459 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1460 vma->anon_vma = &dummy_anon_vma;
1462 vmg_set_range(&vmg, 0x3000, 0x5000, 3, flags);
1472 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1477 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */
1485 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags);
1486 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1487 vma->anon_vma = &dummy_anon_vma;
1489 vmg_set_range(&vmg, 0, 0x5000, 3, flags);
1494 ASSERT_EQ(expand_existing(&vmg), -ENOMEM);
1497 ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
1501 cleanup_mm(&mm, &vmi);
1508 struct mm_struct mm = {};
1509 VMA_ITERATOR(vmi, &mm, 0x1000);
1512 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags);
1513 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags);
1519 * <->
1524 ASSERT_EQ(vma_merge_extend(&vmi, vma, 0x2000), vma);
1525 ASSERT_EQ(vma->vm_start, 0);
1526 ASSERT_EQ(vma->vm_end, 0x4000);
1527 ASSERT_EQ(vma->vm_pgoff, 0);
1529 ASSERT_EQ(mm.map_count, 1);
1531 cleanup_mm(&mm, &vmi);
1538 struct mm_struct mm = {};
1540 VMA_ITERATOR(vmi, &mm, 0);
1545 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1546 vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);
1548 ASSERT_EQ(vma_new->vm_start, 0);
1549 ASSERT_EQ(vma_new->vm_end, 0x2000);
1550 ASSERT_EQ(vma_new->vm_pgoff, 0);
1553 cleanup_mm(&mm, &vmi);
1557 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
1558 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, flags);
1559 vma_new = copy_vma(&vma, 0x4000, 0x2000, 4, &need_locks);
1564 cleanup_mm(&mm, &vmi);
1571 struct mm_struct mm = {};
1572 VMA_ITERATOR(vmi, &mm, 0);
1574 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, flags, 5);
1582 alloc_and_link_vma(&mm, 0, 0x2000, 0, flags);
1586 * 0x9000.
1588 vma_iter_set(&vmi, 0x3000);
1589 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
1597 ASSERT_EQ(vma->vm_start, 0x3000);
1598 ASSERT_EQ(vma->vm_end, 0x9000);
1599 ASSERT_EQ(vma->vm_pgoff, 3);
1601 ASSERT_EQ(vma_iter_addr(&vmi), 0x3000);
1604 cleanup_mm(&mm, &vmi);
1610 struct mm_struct mm = {};
1613 VMA_ITERATOR(vmi, &mm, 0);
1615 current->mm = &mm;
1617 /* Map at 0x300000, length 0x3000. */
1618 addr = __mmap_region(NULL, 0x300000, 0x3000,
1620 0x300, NULL);
1621 ASSERT_EQ(addr, 0x300000);
1623 /* Map at 0x250000, length 0x3000. */
1624 addr = __mmap_region(NULL, 0x250000, 0x3000,
1626 0x250, NULL);
1627 ASSERT_EQ(addr, 0x250000);
1629 /* Map at 0x303000, merging to 0x300000 of length 0x6000. */
1630 addr = __mmap_region(NULL, 0x303000, 0x3000,
1632 0x303, NULL);
1633 ASSERT_EQ(addr, 0x303000);
1635 /* Map at 0x24d000, merging to 0x250000 of length 0x6000. */
1636 addr = __mmap_region(NULL, 0x24d000, 0x3000,
1638 0x24d, NULL);
1639 ASSERT_EQ(addr, 0x24d000);
1641 ASSERT_EQ(mm.map_count, 2);
1644 if (vma->vm_start == 0x300000) {
1645 ASSERT_EQ(vma->vm_end, 0x306000);
1646 ASSERT_EQ(vma->vm_pgoff, 0x300);
1647 } else if (vma->vm_start == 0x24d000) {
1648 ASSERT_EQ(vma->vm_end, 0x253000);
1649 ASSERT_EQ(vma->vm_pgoff, 0x24d);
1655 cleanup_mm(&mm, &vmi);
1661 int num_tests = 0, num_fail = 0;
1672 } while (0)
1697 num_tests, num_tests - num_fail, num_fail);
1699 return num_fail == 0 ? EXIT_SUCCESS : EXIT_FAILURE;
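
For reference, the harness builds and runs as an ordinary userland binary;
assuming the tree layout implied by the includes above:

        make -C tools/testing/vma && ./tools/testing/vma/vma

main() tallies num_tests/num_fail and exits non-zero if any test failed.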