Lines matching refs:src_vma
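(All matches below are in mm/memory.c, in the fork-time page-table copy path: copy_page_range() walks the source VMA's page tables level by level, p4d -> pud -> pmd -> pte, and the leaf helpers decide per PTE whether to share, write-protect, or eagerly copy a page. The leading numbers are the kernel source line numbers reported by the indexer.)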
773 struct vm_area_struct *src_vma, unsigned long addr, int *rss) in copy_nonpresent_pte() argument
852 VM_BUG_ON(!is_cow_mapping(src_vma->vm_flags)); in copy_nonpresent_pte()
853 if (try_restore_exclusive_pte(src_pte, src_vma, addr)) in copy_nonpresent_pte()
884 copy_present_page(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_page() argument
903 if (likely(!page_needs_cow_for_dma(src_vma, page))) in copy_present_page()
915 copy_user_highpage(new_page, page, addr, src_vma); in copy_present_page()
936 copy_present_pte(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_pte() argument
940 struct mm_struct *src_mm = src_vma->vm_mm; in copy_present_pte()
941 unsigned long vm_flags = src_vma->vm_flags; in copy_present_pte()
945 page = vm_normal_page(src_vma, addr, pte); in copy_present_pte()
949 retval = copy_present_page(dst_vma, src_vma, dst_pte, src_pte, in copy_present_pte()
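Aside: the decision copy_present_pte() makes with src_vma->vm_flags can be sketched in isolation. The model below is a simplified userspace restatement, not kernel code: the VM_SHARED/VM_MAYWRITE values and the is_cow_mapping() test mirror the kernel's, but the PTE is just an unsigned int with a made-up PTE_WRITE bit.

```c
/*
 * Simplified restatement of the copy_present_pte() write-protect
 * logic. VM_SHARED/VM_MAYWRITE values mirror the kernel's; PTE_WRITE
 * is a hypothetical "writable" bit for this demo only.
 */
#include <stdbool.h>
#include <stdio.h>

#define VM_SHARED	0x00000008u
#define VM_MAYWRITE	0x00000020u
#define PTE_WRITE	0x1u

/* is_cow_mapping(): a private mapping that may become writable. */
static bool is_cow_mapping(unsigned int vm_flags)
{
	return (vm_flags & (VM_SHARED | VM_MAYWRITE)) == VM_MAYWRITE;
}

/* On a COW mapping, a writable source PTE is write-protected in the
 * parent while the child copy starts read-only, so the first write on
 * either side takes the copy-on-write fault. */
static unsigned int copy_present_pte(unsigned int *src_pte,
				     unsigned int vm_flags)
{
	unsigned int pte = *src_pte;

	if (is_cow_mapping(vm_flags) && (pte & PTE_WRITE)) {
		*src_pte &= ~PTE_WRITE;	/* wrprotect the parent */
		pte &= ~PTE_WRITE;	/* child starts read-only too */
	}
	return pte;			/* value installed in the child */
}

int main(void)
{
	unsigned int parent_pte = PTE_WRITE;
	unsigned int child_pte = copy_present_pte(&parent_pte, VM_MAYWRITE);

	printf("parent writable: %u, child writable: %u\n",
	       parent_pte & PTE_WRITE, child_pte & PTE_WRITE); /* 0 0 */
	return 0;
}
```

The point is the symmetry: the parent's PTE is write-protected at the same moment the child's copy is made read-only, which is why the real copy_present_pte() needs src_pte as well as dst_pte.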
1003 copy_pte_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pte_range() argument
1008 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pte_range()
1051 dst_vma, src_vma, in copy_pte_range()
1070 ret = copy_present_pte(dst_vma, src_vma, dst_pte, src_pte, in copy_pte_range()
1108 prealloc = page_copy_prealloc(src_mm, src_vma, addr); in copy_pte_range()
1127 copy_pmd_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pmd_range() argument
1132 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pmd_range()
1145 VM_BUG_ON_VMA(next-addr != HPAGE_PMD_SIZE, src_vma); in copy_pmd_range()
1147 addr, dst_vma, src_vma); in copy_pmd_range()
1156 if (copy_pte_range(dst_vma, src_vma, dst_pmd, src_pmd, in copy_pmd_range()
1164 copy_pud_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pud_range() argument
1169 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pud_range()
1182 VM_BUG_ON_VMA(next-addr != HPAGE_PUD_SIZE, src_vma); in copy_pud_range()
1184 dst_pud, src_pud, addr, src_vma); in copy_pud_range()
1193 if (copy_pmd_range(dst_vma, src_vma, dst_pud, src_pud, in copy_pud_range()
1201 copy_p4d_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_p4d_range() argument
1217 if (copy_pud_range(dst_vma, src_vma, dst_p4d, src_p4d, in copy_p4d_range()
1225 copy_page_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma) in copy_page_range() argument
1229 unsigned long addr = src_vma->vm_start; in copy_page_range()
1230 unsigned long end = src_vma->vm_end; in copy_page_range()
1232 struct mm_struct *src_mm = src_vma->vm_mm; in copy_page_range()
1243 if (!(src_vma->vm_flags & (VM_HUGETLB | VM_PFNMAP | VM_MIXEDMAP)) && in copy_page_range()
1244 !src_vma->anon_vma) in copy_page_range()
1247 if (is_vm_hugetlb_page(src_vma)) in copy_page_range()
1248 return copy_hugetlb_page_range(dst_mm, src_mm, src_vma); in copy_page_range()
1250 if (unlikely(src_vma->vm_flags & VM_PFNMAP)) { in copy_page_range()
1255 ret = track_pfn_copy(src_vma); in copy_page_range()
1266 is_cow = is_cow_mapping(src_vma->vm_flags); in copy_page_range()
1270 0, src_vma, src_mm, addr, end); in copy_page_range()
1290 if (unlikely(copy_p4d_range(dst_vma, src_vma, dst_pgd, src_pgd, in copy_page_range()
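Taken together, the matches trace a uniform recursive descent: each copy_pXd_range() iterates one level of the source VMA's page tables, skips empty entries, and recurses a level down until copy_pte_range() copies the leaves. Below is a toy userspace model of that walk, with a two-level table standing in for the five-level kernel hierarchy; all names are analogues of the kernel helpers, not the real kernel API.

```c
/*
 * Toy userspace model of the copy_page_range() descent. Names mirror
 * the kernel helpers, but the two-level "page table" is a plain array
 * of arrays: this illustrates the walk pattern (iterate a level, skip
 * empty entries, recurse into the leaf copier), not the kernel API.
 */
#include <stdio.h>
#include <stdlib.h>

#define ENTRIES 4	/* entries per level (tiny, for the demo) */

struct pte_table { int pte[ENTRIES]; };			/* leaf level  */
struct pmd_table { struct pte_table *pmd[ENTRIES]; };	/* upper level */

/* Leaf copy: stands in for copy_pte_range() copying present PTEs. */
static int copy_pte_range(struct pte_table *dst, const struct pte_table *src)
{
	for (int i = 0; i < ENTRIES; i++)
		dst->pte[i] = src->pte[i];
	return 0;
}

/* Upper-level walk: stands in for copy_pmd_range() -- skip empty
 * slots (the pmd_none_or_clear_bad() analogue), allocate the
 * destination table, then recurse one level down. */
static int copy_pmd_range(struct pmd_table *dst, const struct pmd_table *src)
{
	for (int i = 0; i < ENTRIES; i++) {
		if (!src->pmd[i])
			continue;
		dst->pmd[i] = calloc(1, sizeof(*dst->pmd[i]));
		if (!dst->pmd[i])
			return -1;	/* the -ENOMEM case: caller unwinds */
		if (copy_pte_range(dst->pmd[i], src->pmd[i]))
			return -1;
	}
	return 0;
}

int main(void)
{
	struct pmd_table src = { 0 }, dst = { 0 };
	struct pte_table leaf = { .pte = { 1, 2, 3, 4 } };

	src.pmd[2] = &leaf;	/* one populated entry, three empty */
	if (copy_pmd_range(&dst, &src))
		return 1;
	printf("copied pte[1] = %d\n", dst.pmd[2]->pte[1]); /* prints 2 */
	free(dst.pmd[2]);
	return 0;
}
```

The error handling follows the same shape as the kernel's: any level returning nonzero (the -ENOMEM analogue) aborts the walk immediately and leaves the caller to unwind, which matches how copy_page_range() propagates failures up from copy_p4d_range().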