Lines matching references to src_mm (mm/memory.c, fork-time page-table copy path); a hedged standalone sketch of how src_mm threads through this call chain follows the listing.
795 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,
815 &src_mm->mmlist);
821 set_pte_at(src_mm, addr, src_pte, pte);
843 set_pte_at(src_mm, addr, src_pte, pte);
877 set_pte_at(src_mm, addr, src_pte, pte);
956 struct mm_struct *src_mm = src_vma->vm_mm;
960 wrprotect_ptes(src_mm, addr, src_pte, nr);
1055 static inline struct folio *folio_prealloc(struct mm_struct *src_mm,
1068 if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) {
1083 struct mm_struct *src_mm = src_vma->vm_mm;
1119 src_pte = pte_offset_map_rw_nolock(src_mm, src_pmd, addr, &dummy_pmdval,
1150 ret = copy_nonpresent_pte(dst_mm, src_mm,
1214 prealloc = folio_prealloc(src_mm, src_vma, addr, false);
1238 struct mm_struct *src_mm = src_vma->vm_mm;
1251 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd,
1274 struct mm_struct *src_mm = src_vma->vm_mm;
1288 err = copy_huge_pud(dst_mm, src_mm,
1368 struct mm_struct *src_mm = src_vma->vm_mm;
1378 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma);
1390 0, src_mm, addr, end);
1400 raw_write_seqcount_begin(&src_mm->write_protect_seq);
1405 src_pgd = pgd_offset(src_mm, addr);
1418 raw_write_seqcount_end(&src_mm->write_protect_seq);
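The pattern visible in the listing is that every level of the copy walk, from copy_page_range down through copy_pmd_range and copy_pte_range to leaf helpers such as copy_nonpresent_pte, carries both mm_structs: src_mm is not read-only metadata, since the source page tables may be rewritten during fork (the set_pte_at(src_mm, ...) calls at 821/843/877 and wrprotect_ptes at 960), and the whole walk is bracketed by src_mm->write_protect_seq (1400/1418). The sketch below is a minimal userspace model of that shape, not kernel code: struct mm_struct is reduced to a name, the *_sketch functions are hypothetical stand-ins for the real mm/memory.c helpers, and only the way src_mm is threaded through the levels is meant to match the listing.

```c
/*
 * Standalone sketch (userspace C, NOT kernel code) of the fork-time copy
 * walk listed above. All types and *_sketch names are simplified stand-ins;
 * only the shape of the call chain and the src_mm threading is modeled.
 */
#include <stdio.h>

struct mm_struct { const char *name; };   /* stand-in for the real struct */

/*
 * Leaf helper: needs src_mm because a non-present (swap/migration) PTE may
 * have to be rewritten in the *source* page table too, e.g. to drop write
 * access so COW stays consistent -- compare set_pte_at(src_mm, ...) at
 * lines 821/843/877 in the listing.
 */
static void copy_nonpresent_pte_sketch(struct mm_struct *dst_mm,
                                       struct mm_struct *src_mm,
                                       unsigned long addr)
{
    (void)dst_mm;  /* the real helper also fills the destination entry */
    printf("  leaf: may rewrite non-present PTE in %s at %#lx\n",
           src_mm->name, addr);
}

/*
 * One level of the walk: each copy_*_range level (pgd -> p4d -> pud ->
 * pmd -> pte) receives both mm_structs and passes them down unchanged,
 * which is why src_mm shows up at lines 956, 1083, 1238, 1274, and 1368.
 */
static void copy_pte_range_sketch(struct mm_struct *dst_mm,
                                  struct mm_struct *src_mm,
                                  unsigned long addr, unsigned long end)
{
    for (; addr < end; addr += 0x1000)  /* pretend 4 KiB pages */
        copy_nonpresent_pte_sketch(dst_mm, src_mm, addr);
}

static void copy_page_range_sketch(struct mm_struct *dst_mm,
                                   struct mm_struct *src_mm,
                                   unsigned long addr, unsigned long end)
{
    /*
     * The real code brackets the walk with
     * raw_write_seqcount_begin/end(&src_mm->write_protect_seq)
     * (lines 1400/1418), signaling that source PTEs may change.
     */
    printf("begin write_protect_seq on %s\n", src_mm->name);
    copy_pte_range_sketch(dst_mm, src_mm, addr, end);
    printf("end write_protect_seq on %s\n", src_mm->name);
}

int main(void)
{
    struct mm_struct parent = { "parent" }, child = { "child" };

    /* fork() direction: copy parent's range into the child */
    copy_page_range_sketch(&child, &parent, 0x1000, 0x3000);
    return 0;
}
```

The design point the sketch illustrates is why src_mm must be a full mm_struct pointer rather than something derived from src_vma at the leaves: the swap-list manipulation at line 815 and the seqcount at lines 1400/1418 operate on the source mm itself, so every intermediate level forwards it explicitly.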