Lines matching references to src_mm in mm/memory.c (the fork-time page table copy path):

785 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,  in copy_nonpresent_pte()  argument
803 &src_mm->mmlist); in copy_nonpresent_pte()
809 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
831 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
864 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
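The set_pte_at(src_mm, ...) references above show that copy_nonpresent_pte() can rewrite the parent's own PTE while copying: for instance, a writable migration entry in a CoW mapping is downgraded to a read-only one so that neither process keeps write permission once migration finishes. A hedged sketch of that one branch follows (heavily simplified; the function name is illustrative, and soft-dirty/uffd-wp propagation and the other set_pte_at cases are omitted):

#include <linux/mm.h>
#include <linux/swapops.h>

/* Illustrative sketch only, not the kernel's code. */
static void sketch_downgrade_writable_migration_entry(struct mm_struct *src_mm,
						       unsigned long addr,
						       pte_t *src_pte,
						       unsigned long vm_flags)
{
	swp_entry_t entry = pte_to_swp_entry(*src_pte);

	if (is_writable_migration_entry(entry) && is_cow_mapping(vm_flags)) {
		/* When migration completes, the PTE will be restored
		 * read-only, so a later write still triggers CoW. */
		entry = make_readable_migration_entry(swp_offset(entry));
		set_pte_at(src_mm, addr, src_pte, swp_entry_to_pte(entry));
	}
}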
946 struct mm_struct *src_mm = src_vma->vm_mm; in copy_present_pte() local
978 ptep_set_wrprotect(src_mm, addr, src_pte); in copy_present_pte()
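The ptep_set_wrprotect(src_mm, ...) reference is the copy-on-write half of fork: for a CoW mapping, copy_present_pte() write-protects the PTE in the parent (src_mm) so that a later write by either process faults and gets a private copy. A minimal sketch of that step, assuming it runs with the source PTE lock held as in copy_pte_range() (the helper name is illustrative):

#include <linux/mm.h>
#include <linux/pgtable.h>

/* Illustrative sketch only, not the kernel's code. */
static pte_t sketch_cow_wrprotect(struct mm_struct *src_mm, unsigned long addr,
				  pte_t *src_pte, unsigned long vm_flags)
{
	pte_t pte = *src_pte;

	if (is_cow_mapping(vm_flags) && pte_write(pte)) {
		/* Write-protect the parent's mapping... */
		ptep_set_wrprotect(src_mm, addr, src_pte);
		/* ...and return a wrprotected value for the child's PTE. */
		pte = pte_wrprotect(pte);
	}
	return pte;
}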
999 page_copy_prealloc(struct mm_struct *src_mm, struct vm_area_struct *vma, in page_copy_prealloc() argument
1008 if (mem_cgroup_charge(page_folio(new_page), src_mm, GFP_KERNEL)) { in page_copy_prealloc()
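The mem_cgroup_charge() reference shows that the page pre-allocated for a copy is charged against src_mm's memory cgroup before copy_pte_range() retries. A hedged sketch of such a preallocation helper (the alloc_page_vma() call and its GFP flags are assumptions from context; only the mem_cgroup_charge() line is taken from the listing):

#include <linux/gfp.h>
#include <linux/memcontrol.h>
#include <linux/mm.h>

/* Illustrative sketch only, not the kernel's code. */
static struct page *sketch_copy_prealloc(struct mm_struct *src_mm,
					 struct vm_area_struct *vma,
					 unsigned long addr)
{
	struct page *new_page = alloc_page_vma(GFP_HIGHUSER_MOVABLE, vma, addr);

	if (!new_page)
		return NULL;

	/* Charge the future private copy to the source mm's memcg. */
	if (mem_cgroup_charge(page_folio(new_page), src_mm, GFP_KERNEL)) {
		put_page(new_page);
		return NULL;
	}
	return new_page;
}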
1023 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pte_range() local
1042 src_ptl = pte_lockptr(src_mm, src_pmd); in copy_pte_range()
1064 ret = copy_nonpresent_pte(dst_mm, src_mm, in copy_pte_range()
1123 prealloc = page_copy_prealloc(src_mm, src_vma, addr); in copy_pte_range()
1147 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pmd_range() local
1161 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd, in copy_pmd_range()
1184 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pud_range() local
1198 err = copy_huge_pud(dst_mm, src_mm, in copy_pud_range()
1279 struct mm_struct *src_mm = src_vma->vm_mm; in copy_page_range() local
1288 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma); in copy_page_range()
1310 0, src_vma, src_mm, addr, end); in copy_page_range()
1319 mmap_assert_write_locked(src_mm); in copy_page_range()
1320 raw_write_seqcount_begin(&src_mm->write_protect_seq); in copy_page_range()
1325 src_pgd = pgd_offset(src_mm, addr); in copy_page_range()
1338 raw_write_seqcount_end(&src_mm->write_protect_seq); in copy_page_range()
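Taken together, the copy_page_range() references show the top of the walk: hugetlb VMAs are handed to copy_hugetlb_page_range(), otherwise the walk starts at pgd_offset(src_mm, addr) and descends through copy_pud_range() and copy_pmd_range() (which divert huge entries to copy_huge_pud() and copy_huge_pmd()) down to copy_pte_range(). The whole copy runs with src_mm's mmap lock held for write and inside the src_mm->write_protect_seq write section, so lockless page-table walkers can detect that PTEs are being write-protected underneath them. A hedged structural sketch, not the kernel's actual function (error paths, the MMU notifier setup and the intermediate p4d level are omitted):

#include <linux/hugetlb.h>
#include <linux/mm.h>

/* Illustrative sketch only, not the kernel's code. */
static int sketch_copy_page_range(struct vm_area_struct *dst_vma,
				  struct vm_area_struct *src_vma)
{
	struct mm_struct *dst_mm = dst_vma->vm_mm;
	struct mm_struct *src_mm = src_vma->vm_mm;
	unsigned long addr = src_vma->vm_start;
	pgd_t *src_pgd;
	int ret = 0;

	if (is_vm_hugetlb_page(src_vma))
		return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma);

	/* fork() holds the parent's mmap lock for write... */
	mmap_assert_write_locked(src_mm);
	/* ...and opens a seqcount write section so lockless walkers (e.g.
	 * fast GUP) can notice the write-protecting done during the copy. */
	raw_write_seqcount_begin(&src_mm->write_protect_seq);

	src_pgd = pgd_offset(src_mm, addr);
	/* per-level descent (pgd -> pud -> pmd -> pte) omitted */
	(void)src_pgd;

	raw_write_seqcount_end(&src_mm->write_protect_seq);
	return ret;
}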