Lines matching references to src_mm in mm/memory.c:

790 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,  in copy_nonpresent_pte()  argument
810 &src_mm->mmlist); in copy_nonpresent_pte()
816 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
838 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
872 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
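These hits are the non-present-entry path of fork: copy_nonpresent_pte() handles swap, migration and device-private entries, and the set_pte_at(src_mm, ...) calls write a modified entry back into the parent's page table, for example after downgrading a writable migration entry so that both sides end up read-only. Below is a heavily simplified sketch of that shape, assuming a recent mm/memory.c; the _sketch name is made up here, and the swap/device-private cases, rss accounting and soft-dirty/uffd-wp bit handling are all omitted.

/*
 * Sketch: if the parent holds a writable migration entry, downgrade it to
 * read-only in the source page table too, then install the same entry in
 * the child.  Real copy_nonpresent_pte() handles many more entry types.
 */
static int copy_nonpresent_pte_sketch(struct mm_struct *dst_mm,
				      struct mm_struct *src_mm,
				      pte_t *dst_pte, pte_t *src_pte,
				      unsigned long addr)
{
	pte_t pte = ptep_get(src_pte);
	swp_entry_t entry = pte_to_swp_entry(pte);

	if (is_writable_migration_entry(entry)) {
		/* Parent and child must both see a read-only entry (COW). */
		entry = make_readable_migration_entry(swp_offset(entry));
		pte = swp_entry_to_pte(entry);
		set_pte_at(src_mm, addr, src_pte, pte);	/* update parent */
	}
	set_pte_at(dst_mm, addr, dst_pte, pte);		/* install in child */
	return 0;
}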
951 struct mm_struct *src_mm = src_vma->vm_mm; in __copy_present_ptes() local
955 wrprotect_ptes(src_mm, addr, src_pte, nr); in __copy_present_ptes()
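For present PTEs the only src_mm use is wrprotect_ptes(): a batch of parent PTEs is write-protected so that the first write on either side takes a copy-on-write fault. A minimal sketch of that idea follows, again with a made-up _sketch name; the real __copy_present_ptes() also clears dirty/young bits and treats VM_SHARED mappings differently.

/*
 * Sketch: for a private (COW) mapping, write-protect the parent's PTEs in
 * place and give the child write-protected copies of the same PTEs.
 */
static void copy_present_ptes_sketch(struct vm_area_struct *dst_vma,
				     struct vm_area_struct *src_vma,
				     pte_t *dst_pte, pte_t *src_pte,
				     pte_t pte, unsigned long addr, int nr)
{
	struct mm_struct *src_mm = src_vma->vm_mm;
	struct mm_struct *dst_mm = dst_vma->vm_mm;

	if (is_cow_mapping(src_vma->vm_flags) && pte_write(pte)) {
		wrprotect_ptes(src_mm, addr, src_pte, nr);	/* parent side */
		pte = pte_wrprotect(pte);			/* child side */
	}
	set_ptes(dst_mm, addr, dst_pte, pte, nr);
}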
1054 static inline struct folio *folio_prealloc(struct mm_struct *src_mm, in folio_prealloc() argument
1067 if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) { in folio_prealloc()
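folio_prealloc() allocates the folio that will back the child's private copy when a page cannot simply be shared, and the memcg charge is made against the mm passed in, which at the call site below is src_mm. The following is a sketch that only shows the zeroed-allocation flavour; the real helper chooses the allocation based on its need_zero argument, and the _sketch name is illustrative.

/*
 * Sketch: allocate a folio, charge it to the memcg of the given mm, and
 * back out if the charge fails.
 */
static struct folio *folio_prealloc_sketch(struct mm_struct *src_mm,
					   struct vm_area_struct *vma,
					   unsigned long addr)
{
	struct folio *new_folio = vma_alloc_zeroed_movable_folio(vma, addr);

	if (!new_folio)
		return NULL;

	if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) {
		folio_put(new_folio);
		return NULL;
	}
	folio_throttle_swaprate(new_folio, GFP_KERNEL);
	return new_folio;
}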
1082 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pte_range() local
1118 src_pte = pte_offset_map_rw_nolock(src_mm, src_pmd, addr, &dummy_pmdval, in copy_pte_range()
1149 ret = copy_nonpresent_pte(dst_mm, src_mm, in copy_pte_range()
1213 prealloc = folio_prealloc(src_mm, src_vma, addr, false); in copy_pte_range()
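copy_pte_range() holds most of the references: it maps the parent's PTE page with pte_offset_map_rw_nolock(src_mm, ...), sends non-present entries to copy_nonpresent_pte(), and, when a present page has to be copied immediately (for example a pinned anonymous page), preallocates a folio charged to src_mm. Below is a compressed sketch of that loop with locking, batching, -EAGAIN retries and rss bookkeeping stripped out; the _sketch name is made up.

/*
 * Compressed sketch of the copy_pte_range() loop.  Unlock/unmap and retry
 * handling are omitted; in the real code the folio preallocation happens
 * only after dropping the PTE locks, and the loop is then retried.
 */
static int copy_pte_range_sketch(struct vm_area_struct *dst_vma,
				 struct vm_area_struct *src_vma,
				 pmd_t *dst_pmd, pmd_t *src_pmd,
				 unsigned long addr, unsigned long end)
{
	struct mm_struct *dst_mm = dst_vma->vm_mm;
	struct mm_struct *src_mm = src_vma->vm_mm;
	struct folio *prealloc = NULL;
	spinlock_t *src_ptl, *dst_ptl;
	pte_t *src_pte, *dst_pte;
	pmd_t dummy_pmdval;

	dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl);
	if (!dst_pte)
		return -ENOMEM;
	/* Map the parent's PTE page; its lock is taken a bit later. */
	src_pte = pte_offset_map_rw_nolock(src_mm, src_pmd, addr,
					   &dummy_pmdval, &src_ptl);

	for (; addr != end; addr += PAGE_SIZE, dst_pte++, src_pte++) {
		pte_t ptent = ptep_get(src_pte);

		if (pte_none(ptent))
			continue;
		if (!pte_present(ptent)) {
			/* swap/migration/device entries, see sketch above */
			continue;
		}
		/*
		 * Present page: if it cannot be shared read-only (e.g. a
		 * pinned anon page), copy into a folio charged to src_mm.
		 */
		if (!prealloc)
			prealloc = folio_prealloc(src_mm, src_vma, addr, false);
		/* wrprotect the parent PTEs and install the child's copy */
	}
	return 0;
}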
1237 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pmd_range() local
1251 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd, in copy_pmd_range()
1274 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pud_range() local
1288 err = copy_huge_pud(dst_mm, src_mm, in copy_pud_range()
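copy_pmd_range() and copy_pud_range() are the intermediate levels of the page-table walk: each allocates the table at its level in the child, hands huge entries to copy_huge_pmd()/copy_huge_pud() (which need both mm pointers), and otherwise descends one level. A sketch of the PMD level only, assuming the devmap and finer error-propagation details can be elided; copy_pud_range() has the same shape one level up.

/*
 * Sketch of one intermediate level (PMD) of the fork page-table walk.
 */
static int copy_pmd_range_sketch(struct vm_area_struct *dst_vma,
				 struct vm_area_struct *src_vma,
				 pud_t *dst_pud, pud_t *src_pud,
				 unsigned long addr, unsigned long end)
{
	struct mm_struct *dst_mm = dst_vma->vm_mm;
	struct mm_struct *src_mm = src_vma->vm_mm;
	pmd_t *src_pmd, *dst_pmd;
	unsigned long next;

	dst_pmd = pmd_alloc(dst_mm, dst_pud, addr);
	if (!dst_pmd)
		return -ENOMEM;
	src_pmd = pmd_offset(src_pud, addr);
	do {
		next = pmd_addr_end(addr, end);
		if (is_swap_pmd(*src_pmd) || pmd_trans_huge(*src_pmd)) {
			/* A huge PMD is copied in one go; needs both mms. */
			int err = copy_huge_pmd(dst_mm, src_mm, dst_pmd,
						src_pmd, addr, dst_vma,
						src_vma);
			if (err == -ENOMEM)
				return -ENOMEM;
			if (!err)
				continue;
			/* err > 0: the huge PMD was split, fall through */
		}
		if (pmd_none_or_clear_bad(src_pmd))
			continue;
		if (copy_pte_range(dst_vma, src_vma, dst_pmd, src_pmd,
				   addr, next))
			return -ENOMEM;
	} while (dst_pmd++, src_pmd++, addr = next, addr != end);
	return 0;
}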
1369 struct mm_struct *src_mm = src_vma->vm_mm; in copy_page_range() local
1378 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma); in copy_page_range()
1400 0, src_mm, addr, end); in copy_page_range()
1410 raw_write_seqcount_begin(&src_mm->write_protect_seq); in copy_page_range()
1415 src_pgd = pgd_offset(src_mm, addr); in copy_page_range()
1429 raw_write_seqcount_end(&src_mm->write_protect_seq); in copy_page_range()
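copy_page_range() is the top of the chain, called from dup_mmap() at fork time. The src_mm references here are the hugetlb shortcut, the MMU-notifier range registration, the write_protect_seq sequence count that fences concurrent fast GUP while the parent's PTEs are being write-protected, and the pgd_offset() lookup that starts the walk. A trimmed sketch of that frame; the real function also skips VMAs with nothing to copy and only fires the notifiers / takes the seqcount for COW mappings.

/*
 * Trimmed sketch of copy_page_range(): hugetlb shortcut, notifier +
 * seqcount bracketing, then the PGD-level walk.
 */
static int copy_page_range_sketch(struct vm_area_struct *dst_vma,
				  struct vm_area_struct *src_vma)
{
	struct mm_struct *dst_mm = dst_vma->vm_mm;
	struct mm_struct *src_mm = src_vma->vm_mm;
	unsigned long addr = src_vma->vm_start;
	unsigned long end = src_vma->vm_end;
	struct mmu_notifier_range range;
	pgd_t *src_pgd, *dst_pgd;
	unsigned long next;
	int ret = 0;

	if (is_vm_hugetlb_page(src_vma))
		return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma);

	mmu_notifier_range_init(&range, MMU_NOTIFY_PROTECTION_PAGE,
				0, src_mm, addr, end);
	mmu_notifier_invalidate_range_start(&range);
	/* Block fast GUP while the parent's PTEs are write-protected. */
	raw_write_seqcount_begin(&src_mm->write_protect_seq);

	dst_pgd = pgd_offset(dst_mm, addr);
	src_pgd = pgd_offset(src_mm, addr);
	do {
		next = pgd_addr_end(addr, end);
		if (pgd_none_or_clear_bad(src_pgd))
			continue;
		if (copy_p4d_range(dst_vma, src_vma, dst_pgd, src_pgd,
				   addr, next)) {
			ret = -ENOMEM;
			break;
		}
	} while (dst_pgd++, src_pgd++, addr = next, addr != end);

	raw_write_seqcount_end(&src_mm->write_protect_seq);
	mmu_notifier_invalidate_range_end(&range);
	return ret;
}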