/linux/mm/

userfaultfd.c:
      24  bool validate_dst_vma(struct vm_area_struct *dst_vma, unsigned long dst_end)  [argument]
     103  struct vm_area_struct *dst_vma;  [local in uffd_mfill_lock()]
     124  struct vm_area_struct *dst_vma;  [local in uffd_mfill_lock()]
     147  mfill_file_over_size(struct vm_area_struct *dst_vma, unsigned long dst_addr)  [argument]
     169  mfill_atomic_install_pte(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, struct page *page, bool newly_allocated, uffd_flags_t flags)  [argument]
     238  mfill_atomic_pte_copy(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, uffd_flags_t flags, struct folio **foliop)  [argument]
     314  mfill_atomic_pte_zeroed_folio(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr)  [argument]
     346  mfill_atomic_pte_zeropage(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr)  [argument]
     381  mfill_atomic_pte_continue(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, uffd_flags_t flags)  [argument]
     425  mfill_atomic_pte_poison(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, uffd_flags_t flags)  [argument]
     490  mfill_atomic_hugetlb(struct userfaultfd_ctx *ctx, struct vm_area_struct *dst_vma, unsigned long dst_start, unsigned long src_start, unsigned long len, uffd_flags_t flags)  [argument]
     654  mfill_atomic_pte(pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, uffd_flags_t flags, struct folio **foliop)  [argument]
     704  struct vm_area_struct *dst_vma;  [local in mfill_atomic()]
     900  uffd_wp_range(struct vm_area_struct *dst_vma, unsigned long start, unsigned long len, bool enable_wp)  [argument]
     935  struct vm_area_struct *dst_vma;  [local in mwriteprotect_range()]
    1030  move_present_pte(struct mm_struct *mm, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, unsigned long dst_addr, unsigned long src_addr, pte_t *dst_pte, pte_t *src_pte, pte_t orig_dst_pte, pte_t orig_src_pte, pmd_t *dst_pmd, pmd_t dst_pmdval, spinlock_t *dst_ptl, spinlock_t *src_ptl, struct folio *src_folio)  [argument]
    1081  move_swap_pte(struct mm_struct *mm, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, pte_t *dst_pte, pte_t *src_pte, pte_t orig_dst_pte, pte_t orig_src_pte, pmd_t *dst_pmd, pmd_t dst_pmdval, spinlock_t *dst_ptl, spinlock_t *src_ptl, struct folio *src_folio, struct swap_info_struct *si, swp_entry_t entry)  [argument]
    1147  move_zeropage_pte(struct mm_struct *mm, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, unsigned long dst_addr, unsigned long src_addr, pte_t *dst_pte, pte_t *src_pte, pte_t orig_dst_pte, pte_t orig_src_pte, pmd_t *dst_pmd, pmd_t dst_pmdval, spinlock_t *dst_ptl, spinlock_t *src_ptl)  [argument]
    1180  move_pages_pte(struct mm_struct *mm, pmd_t *dst_pmd, pmd_t *src_pmd, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, unsigned long dst_addr, unsigned long src_addr, __u64 mode)  [argument]
    1493  validate_move_areas(struct userfaultfd_ctx *ctx, struct vm_area_struct *src_vma, struct vm_area_struct *dst_vma)  [argument]
    1621  uffd_move_unlock(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
    1646  uffd_move_unlock(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
    1734  struct vm_area_struct *src_vma, *dst_vma;  [local in move_pages()]
    [all...]
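
The mfill_* helpers above are the kernel side of the userfaultfd fill ioctls (UFFDIO_COPY, UFFDIO_ZEROPAGE, UFFDIO_CONTINUE, UFFDIO_POISON): broadly, mfill_atomic() looks up dst_vma for the destination range and dispatches per-page work to mfill_atomic_pte() and its _copy/_zeropage/_continue/_poison variants, or to mfill_atomic_hugetlb() for hugetlb VMAs. As a rough, hedged sketch of the userspace path that ends up here, the snippet below registers an anonymous range with userfaultfd and pre-populates it with UFFDIO_COPY; error handling is trimmed, the fill pattern and sizes are illustrative, and running it may need privileges depending on the vm.unprivileged_userfaultfd sysctl.

    /*
     * Minimal sketch: register a missing-page range with userfaultfd, then
     * populate it with UFFDIO_COPY, which the kernel services via
     * mfill_atomic() -> mfill_atomic_pte_copy() for anonymous memory.
     * A real fault handler would read uffd_msg events from a second thread
     * before issuing the copy; here the ioctl is issued directly just to
     * show its shape.
     */
    #include <fcntl.h>
    #include <linux/userfaultfd.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    int main(void)
    {
        long page = sysconf(_SC_PAGESIZE);
        int uffd = (int)syscall(__NR_userfaultfd, O_CLOEXEC | O_NONBLOCK);

        struct uffdio_api api = { .api = UFFD_API };
        ioctl(uffd, UFFDIO_API, &api);

        /* Destination range we want filled on demand. */
        char *dst = mmap(NULL, page, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

        struct uffdio_register reg = {
            .range = { .start = (unsigned long)dst, .len = page },
            .mode  = UFFDIO_REGISTER_MODE_MISSING,
        };
        ioctl(uffd, UFFDIO_REGISTER, &reg);

        /* Source buffer whose contents UFFDIO_COPY installs into dst. */
        char *src = mmap(NULL, page, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        memset(src, 0xaa, page);

        struct uffdio_copy copy = {
            .dst = (unsigned long)dst,
            .src = (unsigned long)src,
            .len = page,
        };
        ioctl(uffd, UFFDIO_COPY, &copy);

        printf("dst[0] after UFFDIO_COPY: 0x%02x\n", (unsigned char)dst[0]);
        return 0;
    }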

memory.c:
     796  pte_t *dst_pte, pte_t *src_pte, struct vm_area_struct *dst_vma,  [argument in copy_nonpresent_pte()]
     917  copy_present_page(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma,  [argument]
     952  __copy_present_ptes(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, pte_t *dst_pte, pte_t *src_pte, pte_t pte, unsigned long addr, int nr)  [argument]
     983  copy_present_ptes(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, pte_t *dst_pte, pte_t *src_pte, pte_t pte, unsigned long addr, int max_nr, int *rss, struct folio **prealloc)  [argument]
    1078  copy_pte_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, pmd_t *dst_pmd, pmd_t *src_pmd, unsigned long addr, unsigned long end)  [argument]
    1233  copy_pmd_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, pud_t *dst_pud, pud_t *src_pud, unsigned long addr, unsigned long end)  [argument]
    1269  copy_pud_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, p4d_t *dst_p4d, p4d_t *src_p4d, unsigned long addr, unsigned long end)  [argument]
    1306  copy_p4d_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, pgd_t *dst_pgd, pgd_t *src_pgd, unsigned long addr, unsigned long end)  [argument]
    1335  vma_needs_copy(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
    1362  copy_page_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
    [all...]
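
The copy_*_range() chain in memory.c is the fork-time page-table copy path: copy_page_range() (reached from dup_mmap()) first asks vma_needs_copy() whether the destination VMA needs its tables copied at all, then walks down through copy_p4d/pud/pmd/pte_range() to copy_present_ptes()/copy_nonpresent_pte(), write-protecting private anonymous pages so parent and child share them copy-on-write. The small userspace sketch below only illustrates that observable behaviour; the kernel calls named in its comments are the ones listed above, and the buffer size and strings are illustrative.

    /*
     * Illustration of what the fork-time copy path sets up: after fork(),
     * the child initially shares the parent's private anonymous pages
     * (write-protected at copy time), and the first write by either side
     * triggers a copy-on-write fault rather than an eager copy at fork.
     */
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void)
    {
        char *buf = mmap(NULL, 4096, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        strcpy(buf, "parent data");

        pid_t pid = fork();     /* dup_mmap() -> copy_page_range() in the kernel */
        if (pid == 0) {
            printf("child reads:  %s\n", buf);  /* shared, write-protected page */
            buf[0] = 'C';                       /* write fault copies the page */
            _exit(0);
        }
        waitpid(pid, NULL, 0);
        printf("parent reads: %s\n", buf);      /* still "parent data" */
        return 0;
    }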

huge_memory.c:
    1668  struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument in copy_huge_pmd()]
    2538  struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma,  [argument in move_pages_huge_pmd()]
    [all...]

hugetlb.c:
    5552  struct vm_area_struct *dst_vma,  [argument in copy_hugetlb_page_range()]
    6875  hugetlb_mfill_atomic_pte(pte_t *dst_pte, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, uffd_flags_t flags, struct folio **foliop)  [argument]
    [all...]

shmem.c:
    3215  struct vm_area_struct *dst_vma,  [argument in shmem_mfill_atomic_pte()]

/linux/include/linux/

rmap.h:
     569  struct page *page, int nr_pages, struct vm_area_struct *dst_vma,  [argument in __folio_dup_file_rmap()]
     610  struct page *page, int nr_pages, struct vm_area_struct *dst_vma)  [argument in folio_dup_file_rmap_ptes()]
     616  struct page *page, struct vm_area_struct *dst_vma)  [argument in folio_dup_file_rmap_pte()]
     632  struct page *page, struct vm_area_struct *dst_vma)  [argument in folio_dup_file_rmap_pmd()]
     642  __folio_try_dup_anon_rmap(struct folio *folio, struct page *page, int nr_pages, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, enum rmap_level level)  [argument]
     729  folio_try_dup_anon_rmap_ptes(struct folio *folio, struct page *page, int nr_pages, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
     737  folio_try_dup_anon_rmap_pte(struct folio *folio, struct page *page, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
     768  folio_try_dup_anon_rmap_pmd(struct folio *folio, struct page *page, struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma)  [argument]
    [all...]

shmem_fs.h:
     207  #define shmem_mfill_atomic_pte(dst_pmd, dst_vma, dst_addr, \  [macro argument]

mm_inline.h:
     543  swp_entry_t entry, struct vm_area_struct *dst_vma)  [argument in copy_pte_marker()]

hugetlb.h:
     332  struct vm_area_struct *dst_vma,  [argument in copy_hugetlb_page_range()]
     395  struct vm_area_struct *dst_vma,  [argument in hugetlb_mfill_atomic_pte()]