
Searched refs:vmi (Results 1 – 25 of 49) sorted by relevance


/linux/mm/
vma.h
35 struct vma_iterator *vmi; member
71 struct vma_iterator *vmi; member
170 .vmi = vmi_, \
181 .vmi = vmi_, \
204 __must_check int vma_shrink(struct vma_iterator *vmi,
208 static inline int vma_iter_store_gfp(struct vma_iterator *vmi, in vma_iter_store_gfp() argument
212 if (vmi->mas.status != ma_start && in vma_iter_store_gfp()
213 ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start))) in vma_iter_store_gfp()
214 vma_iter_invalidate(vmi); in vma_iter_store_gfp()
216 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
[all …]
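
The vma.h hits above embed the iterator in helper state and show vma_iter_store_gfp() invalidating a stale iterator before a store. For orientation, a minimal sketch of the basic walk that VMA_ITERATOR() and for_each_vma() (both visible throughout these results) support; count_vmas() is a hypothetical name and the caller is assumed to hold mmap_read_lock():

        /* Hypothetical example: count every VMA in an mm.
         * Assumes the caller holds mmap_read_lock(mm). */
        static unsigned long count_vmas(struct mm_struct *mm)
        {
                struct vm_area_struct *vma;
                unsigned long nr = 0;
                VMA_ITERATOR(vmi, mm, 0);

                for_each_vma(vmi, vma)
                        nr++;
                return nr;
        }
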
vma.c
12 struct vma_iterator *vmi; member
43 .vmi = vmi_, \
56 .vmi = (map_)->vmi, \
333 static void vma_complete(struct vma_prepare *vp, struct vma_iterator *vmi, in vma_complete() argument
354 vma_iter_store_new(vmi, vp->insert); in vma_complete()
499 __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, in __split_vma() argument
527 vma_iter_config(vmi, new->vm_start, new->vm_end); in __split_vma()
528 if (vma_iter_prealloc(vmi, new)) in __split_vma()
568 vma_complete(&vp, vmi, vma->vm_mm); in __split_vma()
573 vma_next(vmi); in __split_vma()
[all …]
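
The __split_vma() hits illustrate the reserve-then-commit idiom: maple-tree nodes are preallocated for the new range before any VMA state is touched, so the final store in vma_complete() cannot fail. A sketch built only from the calls shown above; reserve_range() is a hypothetical wrapper:

        /* Hypothetical helper: reserve tree nodes for 'new' so the
         * later store cannot fail mid-update. Nothing is modified on
         * the error path. */
        static int reserve_range(struct vma_iterator *vmi,
                                 struct vm_area_struct *new)
        {
                vma_iter_config(vmi, new->vm_start, new->vm_end);
                if (vma_iter_prealloc(vmi, new))
                        return -ENOMEM;
                return 0;
        }
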
mseal.c
43 VMA_ITERATOR(vmi, current->mm, start); in range_contains_unmapped()
45 for_each_vma_range(vmi, vma, end) { in range_contains_unmapped()
60 VMA_ITERATOR(vmi, mm, start); in mseal_apply()
63 vma = vma_iter_load(&vmi); in mseal_apply()
64 prev = vma_prev(&vmi); in mseal_apply()
68 for_each_vma_range(vmi, vma, end) { in mseal_apply()
72 vma = vma_modify_flags(&vmi, prev, vma, in mseal_apply()
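
range_contains_unmapped() walks [start, end) looking for holes before mseal_apply() modifies anything. A sketch of that gap check, assuming mmap_read_lock() is held; contains_unmapped() is a hypothetical stand-in, not the kernel's exact implementation:

        /* Hypothetical sketch: report whether [start, end) has a hole. */
        static bool contains_unmapped(struct mm_struct *mm,
                                      unsigned long start, unsigned long end)
        {
                struct vm_area_struct *vma;
                unsigned long expect = start;
                VMA_ITERATOR(vmi, mm, start);

                for_each_vma_range(vmi, vma, end) {
                        if (vma->vm_start > expect)
                                return true;    /* gap before this VMA */
                        expect = vma->vm_end;
                }
                return expect < end;            /* gap at the tail */
        }
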
vma_exec.c
38 VMA_ITERATOR(vmi, mm, new_start); in relocate_vma_down()
39 VMG_STATE(vmg, mm, &vmi, new_start, old_end, 0, vma->vm_pgoff); in relocate_vma_down()
50 if (vma != vma_next(&vmi)) in relocate_vma_down()
53 vma_iter_prev_range(&vmi); in relocate_vma_down()
70 next = vma_next(&vmi); in relocate_vma_down()
89 vma_prev(&vmi); in relocate_vma_down()
91 return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff); in relocate_vma_down()
mmap_lock.c
272 struct vma_iterator *vmi, in lock_next_vma_under_mmap_lock() argument
283 vma_iter_set(vmi, from_addr); in lock_next_vma_under_mmap_lock()
284 vma = vma_next(vmi); in lock_next_vma_under_mmap_lock()
297 struct vma_iterator *vmi, in lock_next_vma() argument
308 vma = vma_next(vmi); in lock_next_vma()
322 vma_iter_set(vmi, from_addr); in lock_next_vma()
342 vma_iter_set(vmi, from_addr); in lock_next_vma()
343 if (vma != vma_next(vmi)) in lock_next_vma()
354 vma = lock_next_vma_under_mmap_lock(mm, vmi, from_addr); in lock_next_vma()
357 vma_iter_set(vmi, IS_ERR_OR_NULL(vma) ? from_addr : vma->vm_end); in lock_next_vma()
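
lock_next_vma() shows the revalidation idiom for lockless walks: after any point where the maple tree may have changed, vma_iter_set() repositions the iterator and the lookup is redone; a mismatch means a writer won the race. A hedged sketch of just that recheck step; find_vma_revalidate() is hypothetical:

        /* Hypothetical helper: recheck that 'vma' is still the VMA at
         * from_addr after a potential concurrent tree modification. */
        static struct vm_area_struct *
        find_vma_revalidate(struct vma_iterator *vmi,
                            struct vm_area_struct *vma,
                            unsigned long from_addr)
        {
                vma_iter_set(vmi, from_addr);
                if (vma != vma_next(vmi))
                        return NULL;    /* lost the race: caller retries */
                return vma;
        }
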
mmap.c
123 struct vma_iterator vmi; in SYSCALL_DEFINE1() local
163 vma_iter_init(&vmi, mm, newbrk); in SYSCALL_DEFINE1()
164 brkvma = vma_find(&vmi, oldbrk); in SYSCALL_DEFINE1()
173 if (do_vmi_align_munmap(&vmi, brkvma, mm, newbrk, oldbrk, &uf, in SYSCALL_DEFINE1()
187 vma_iter_init(&vmi, mm, oldbrk); in SYSCALL_DEFINE1()
188 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap); in SYSCALL_DEFINE1()
192 brkvma = vma_prev_limit(&vmi, mm->start_brk); in SYSCALL_DEFINE1()
194 if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0) in SYSCALL_DEFINE1()
931 VMA_ITERATOR(vmi, mm, addr); in find_vma_prev()
933 vma = vma_iter_load(&vmi); in find_vma_prev()
[all …]
debug.c
256 vmg->vmi, vmg->vmi ? vma_iter_addr(vmg->vmi) : 0, in dump_vmg()
257 vmg->vmi ? vma_iter_end(vmg->vmi) : 0, in dump_vmg()
301 if (vmg->vmi) { in dump_vmg()
303 vma_iter_dump_tree(vmg->vmi); in dump_vmg()
356 void vma_iter_dump_tree(const struct vma_iterator *vmi) in vma_iter_dump_tree() argument
359 mas_dump(&vmi->mas); in vma_iter_dump_tree()
360 mt_dump(vmi->mas.tree, mt_dump_hex); in vma_iter_dump_tree()
nommu.c
599 VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start); in delete_vma_from_mm()
601 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
602 if (vma_iter_prealloc(&vmi, NULL)) { in delete_vma_from_mm()
610 vma_iter_clear(&vmi); in delete_vma_from_mm()
642 VMA_ITERATOR(vmi, mm, addr); in find_vma()
644 return vma_iter_load(&vmi); in find_vma()
673 VMA_ITERATOR(vmi, mm, addr); in find_vma_exact()
675 vma = vma_iter_load(&vmi); in find_vma_exact()
1027 VMA_ITERATOR(vmi, current->mm, 0); in do_mmap()
1197 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in do_mmap()
[all …]
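
delete_vma_from_mm() removes a VMA from the tree with the same reserve-first discipline: configure the range, preallocate, then clear. A sketch using only the calls shown above; remove_vma_range() is a hypothetical name:

        /* Hypothetical helper: drop a VMA's slot from the tree. */
        static int remove_vma_range(struct vm_area_struct *vma)
        {
                VMA_ITERATOR(vmi, vma->vm_mm, vma->vm_start);

                vma_iter_config(&vmi, vma->vm_start, vma->vm_end);
                if (vma_iter_prealloc(&vmi, NULL))
                        return -ENOMEM; /* tree untouched on failure */
                vma_iter_clear(&vmi);
                return 0;
        }
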
mlock.c
466 static int mlock_fixup(struct vma_iterator *vmi, struct vm_area_struct *vma, in mlock_fixup() argument
481 vma = vma_modify_flags(vmi, *prev, vma, start, end, newflags); in mlock_fixup()
519 VMA_ITERATOR(vmi, current->mm, start); in apply_vma_lock_flags()
528 vma = vma_iter_load(&vmi); in apply_vma_lock_flags()
532 prev = vma_prev(&vmi); in apply_vma_lock_flags()
538 for_each_vma_range(vmi, vma, end) { in apply_vma_lock_flags()
551 error = mlock_fixup(&vmi, vma, &prev, nstart, tmp, newflags); in apply_vma_lock_flags()
554 tmp = vma_iter_end(&vmi); in apply_vma_lock_flags()
577 VMA_ITERATOR(vmi, mm, start); in count_mm_mlocked_page_nr()
585 for_each_vma_range(vmi, vma, end) { in count_mm_mlocked_page_nr()
[all …]
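
apply_vma_lock_flags() is one instance of the range-fixup loop that also appears in mprotect.c, mempolicy.c and userfaultfd.c below: load the first VMA, remember its predecessor for merging, then fix up each VMA clamped to [start, end), re-reading the iterator end because a fixup may merge or split. A consolidated sketch; apply_fixup() is a hypothetical stand-in for mlock_fixup(), mprotect_fixup() or mbind_range():

        /* Hypothetical consolidated sketch of the shared fixup loop. */
        static int walk_and_fixup(struct mm_struct *mm, unsigned long start,
                                  unsigned long end, vm_flags_t newflags)
        {
                struct vm_area_struct *vma, *prev;
                unsigned long nstart = start;
                int error = 0;
                VMA_ITERATOR(vmi, mm, start);

                vma = vma_iter_load(&vmi);
                if (!vma)
                        return -ENOMEM;
                prev = vma_prev(&vmi);
                if (start > vma->vm_start)
                        prev = vma;

                for_each_vma_range(vmi, vma, end) {
                        unsigned long tmp = min(vma->vm_end, end);

                        /* apply_fixup() is hypothetical */
                        error = apply_fixup(&vmi, vma, &prev, nstart, tmp,
                                            newflags);
                        if (error)
                                break;
                        nstart = vma_iter_end(&vmi); /* may have merged */
                }
                return error;
        }
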
mprotect.c
755 mprotect_fixup(struct vma_iterator *vmi, struct mmu_gather *tlb, in mprotect_fixup() argument
816 vma = vma_modify_flags(vmi, *pprev, vma, start, end, newflags); in mprotect_fixup()
871 struct vma_iterator vmi; in do_mprotect_pkey() local
903 vma_iter_init(&vmi, current->mm, start); in do_mprotect_pkey()
904 vma = vma_find(&vmi, end); in do_mprotect_pkey()
927 prev = vma_prev(&vmi); in do_mprotect_pkey()
934 for_each_vma_range(vmi, vma, end) { in do_mprotect_pkey()
990 error = mprotect_fixup(&vmi, &tlb, vma, &prev, nstart, tmp, newflags); in do_mprotect_pkey()
994 tmp = vma_iter_end(&vmi); in do_mprotect_pkey()
mremap.c
1084 VMA_ITERATOR(vmi, mm, addr); in unmap_source_vma()
1122 err = do_vmi_munmap(&vmi, mm, addr, len, vrm->uf_unmap, /* unlock= */false); in unmap_source_vma()
1160 struct vm_area_struct *prev = vma_prev(&vmi); in unmap_source_vma()
1166 struct vm_area_struct *next = vma_next(&vmi); in unmap_source_vma()
1333 VMA_ITERATOR(vmi, mm, unmap_start); in shrink_vma()
1337 res = do_vmi_munmap(&vmi, mm, unmap_start, unmap_bytes, in shrink_vma()
1456 VMA_ITERATOR(vmi, mm, vma->vm_end); in expand_vma_in_place()
1470 vma = vma_merge_extend(&vmi, vma, vrm->delta); in expand_vma_in_place()
1833 VMA_ITERATOR(vmi, current->mm, start); in remap_move()
1840 for_each_vma_range(vmi, vma, end) { in remap_move()
[all …]
userfaultfd.c
938 VMA_ITERATOR(vmi, dst_mm, start); in mwriteprotect_range()
962 for_each_vma_range(vmi, dst_vma, end) { in mwriteprotect_range()
2010 struct vm_area_struct *userfaultfd_clear_vma(struct vma_iterator *vmi, in userfaultfd_clear_vma() argument
2030 ret = vma_modify_flags_uffd(vmi, prev, vma, start, end, in userfaultfd_clear_vma()
2052 VMA_ITERATOR(vmi, ctx->mm, start); in userfaultfd_register_range()
2053 struct vm_area_struct *prev = vma_prev(&vmi); in userfaultfd_register_range()
2060 for_each_vma_range(vmi, vma, end) { in userfaultfd_register_range()
2081 vma = vma_modify_flags_uffd(&vmi, prev, vma, start, vma_end, in userfaultfd_register_range()
2110 VMA_ITERATOR(vmi, mm, 0); in userfaultfd_release_new()
2114 for_each_vma(vmi, vma) { in userfaultfd_release_new()
[all …]
mempolicy.c
558 VMA_ITERATOR(vmi, mm, 0); in mpol_rebind_mm()
561 for_each_vma(vmi, vma) { in mpol_rebind_mm()
964 static int mbind_range(struct vma_iterator *vmi, struct vm_area_struct *vma, in mbind_range() argument
983 vma = vma_modify_policy(vmi, *prev, vma, vmstart, vmend, new_pol); in mbind_range()
1417 struct vma_iterator vmi; in do_mbind() local
1482 vma_iter_init(&vmi, mm, start); in do_mbind()
1483 prev = vma_prev(&vmi); in do_mbind()
1484 for_each_vma_range(vmi, vma, end) { in do_mbind()
1485 err = mbind_range(&vmi, vma, &prev, start, end, new); in do_mbind()
1517 vma_iter_init(&vmi, mm, start); in do_mbind()
[all …]
/linux/tools/testing/vma/
vma.c
18 #define vma_iter_prealloc(vmi, vma) \ argument
19 (fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
136 vmg->next = vma_next(vmg->vmi); in merge_new()
137 vmg->prev = vma_prev(vmg->vmi); in merge_new()
138 vma_iter_next_range(vmg->vmi); in merge_new()
177 vma_iter_set(vmg->vmi, start); in vmg_set_range()
249 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
257 vma_iter_set(vmi, 0); in cleanup_mm()
258 for_each_vma(*vmi, vma) { in cleanup_mm()
308 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
[all …]
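
The userland harness compiles the kernel's vma.c against stubs; lines 18–19 above override vma_iter_prealloc() so one global flag can force the allocation-failure path through the code under test. The shim in isolation:

        /* Fault-injection shim, as in the harness above: the macro
         * shadows the kernel definition, so flipping fail_prealloc
         * exercises -ENOMEM paths that are hard to reach with real
         * allocations. */
        static bool fail_prealloc;

        #define vma_iter_prealloc(vmi, vma) \
                (fail_prealloc ? -ENOMEM : \
                 mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))

Overriding the macro rather than patching call sites keeps the code under test identical to the kernel source.
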
vma_internal.h
531 static inline void vma_iter_invalidate(struct vma_iterator *vmi) in vma_iter_invalidate() argument
533 mas_pause(&vmi->mas); in vma_iter_invalidate()
557 static inline struct vm_area_struct *vma_next(struct vma_iterator *vmi) in vma_next() argument
563 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
669 struct vm_area_struct *vma_find(struct vma_iterator *vmi, unsigned long max) in vma_find() argument
671 return mas_find(&vmi->mas, max - 1); in vma_find()
674 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
677 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
678 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
679 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
[all …]
/linux/arch/xtensa/kernel/
syscall.c
62 struct vma_iterator vmi; in arch_get_unmapped_area() local
84 vma_iter_init(&vmi, current->mm, addr); in arch_get_unmapped_area()
85 for_each_vma(vmi, vmm) { in arch_get_unmapped_area()
/linux/fs/proc/
task_nommu.c
23 VMA_ITERATOR(vmi, mm, 0); in task_mem()
29 for_each_vma(vmi, vma) { in task_mem()
83 VMA_ITERATOR(vmi, mm, 0); in task_vsize()
88 for_each_vma(vmi, vma) in task_vsize()
98 VMA_ITERATOR(vmi, mm, 0); in task_statm()
104 for_each_vma(vmi, vma) { in task_statm()
/linux/arch/powerpc/mm/book3s32/
tlb.c
84 VMA_ITERATOR(vmi, mm, 0); in hash__flush_tlb_mm()
92 for_each_vma(vmi, mp) in hash__flush_tlb_mm()
/linux/include/linux/
mm.h
920 struct vm_area_struct *vma_find(struct vma_iterator *vmi, unsigned long max) in vma_find() argument
922 return mas_find(&vmi->mas, max - 1); in vma_find()
925 static inline struct vm_area_struct *vma_next(struct vma_iterator *vmi) in vma_next() argument
931 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
935 struct vm_area_struct *vma_iter_next_range(struct vma_iterator *vmi) in vma_iter_next_range() argument
937 return mas_next_range(&vmi->mas, ULONG_MAX); in vma_iter_next_range()
941 static inline struct vm_area_struct *vma_prev(struct vma_iterator *vmi) in vma_prev() argument
943 return mas_prev(&vmi->mas, 0); in vma_prev()
946 static inline int vma_iter_clear_gfp(struct vma_iterator *vmi, in vma_iter_clear_gfp() argument
949 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
[all …]
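
The mm.h definitions above are thin wrappers over the maple-tree API, with one index subtlety: a VMA covers the half-open [vm_start, vm_end) while maple-tree ranges are inclusive, which is why vma_find() passes max - 1 and vma_iter_clear_gfp() stores up to end - 1. A lookup sketch built on them; first_vma_in_range() is a hypothetical name and the caller holds mmap_read_lock():

        /* Hypothetical example: first VMA intersecting [addr, end). */
        static struct vm_area_struct *
        first_vma_in_range(struct mm_struct *mm, unsigned long addr,
                           unsigned long end)
        {
                VMA_ITERATOR(vmi, mm, addr);

                /* vma_find() hands end - 1 to mas_find(): maple-tree
                 * ranges are inclusive, vm_end is exclusive. */
                return vma_find(&vmi, end);
        }
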
/linux/lib/vdso/
datastore.c
119 VMA_ITERATOR(vmi, mm, 0); in vdso_join_timens()
122 for_each_vma(vmi, vma) { in vdso_join_timens()
/linux/arch/s390/mm/
gmap_helpers.c
152 VMA_ITERATOR(vmi, mm, 0); in __gmap_helper_unshare_zeropages()
157 for_each_vma(vmi, vma) { in __gmap_helper_unshare_zeropages()
/linux/fs/
userfaultfd.c
1254 struct vma_iterator vmi; in userfaultfd_register() local
1299 vma_iter_init(&vmi, mm, start); in userfaultfd_register()
1300 vma = vma_find(&vmi, end); in userfaultfd_register()
1378 } for_each_vma_range(vmi, cur, end); in userfaultfd_register()
1426 struct vma_iterator vmi; in userfaultfd_unregister() local
1447 vma_iter_init(&vmi, mm, start); in userfaultfd_unregister()
1448 vma = vma_find(&vmi, end); in userfaultfd_unregister()
1493 } for_each_vma_range(vmi, cur, end); in userfaultfd_unregister()
1496 vma_iter_set(&vmi, start); in userfaultfd_unregister()
1497 prev = vma_prev(&vmi); in userfaultfd_unregister()
[all …]
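
userfaultfd_register() and userfaultfd_unregister() use a two-pass walk: validate every VMA in the range first, then rewind with vma_iter_set() plus vma_prev() and apply the change. A skeleton of that shape; vma_ok() and apply_change() are hypothetical placeholders:

        /* Hypothetical skeleton of the validate-then-apply pattern. */
        static int two_pass_update(struct mm_struct *mm, unsigned long start,
                                   unsigned long end)
        {
                struct vm_area_struct *vma, *prev, *cur;
                VMA_ITERATOR(vmi, mm, start);

                /* Pass 1: validate the whole range before changing it. */
                vma = vma_find(&vmi, end);
                if (!vma)
                        return -EINVAL;
                cur = vma;
                do {
                        if (!vma_ok(cur))       /* hypothetical predicate */
                                return -EINVAL;
                } for_each_vma_range(vmi, cur, end);

                /* Pass 2: rewind and apply the change VMA by VMA. */
                vma_iter_set(&vmi, start);
                prev = vma_prev(&vmi);
                for_each_vma_range(vmi, vma, end)
                        apply_change(&vmi, &prev, vma); /* hypothetical */

                return 0;
        }
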
/linux/arch/um/kernel/
tlb.c
218 VMA_ITERATOR(vmi, mm, 0); in flush_tlb_mm()
220 for_each_vma(vmi, vma) in flush_tlb_mm()
/linux/arch/x86/entry/vdso/
vma.c
205 VMA_ITERATOR(vmi, mm, 0); in map_vdso_once()
215 for_each_vma(vmi, vma) { in map_vdso_once()
/linux/arch/powerpc/mm/book3s64/
subpage_prot.c
155 VMA_ITERATOR(vmi, mm, addr); in subpage_mark_vma_nohuge()
161 for_each_vma_range(vmi, vma, addr + len) { in subpage_mark_vma_nohuge()
