Searched refs: dst_vma (Results 1 – 7 of 7) sorted by relevance

/Linux-v5.10/mm/
userfaultfd.c
30 struct vm_area_struct *dst_vma; in find_dst_vma() local
32 dst_vma = find_vma(dst_mm, dst_start); in find_dst_vma()
33 if (!dst_vma) in find_dst_vma()
36 if (dst_start < dst_vma->vm_start || in find_dst_vma()
37 dst_start + len > dst_vma->vm_end) in find_dst_vma()
45 if (!dst_vma->vm_userfaultfd_ctx.ctx) in find_dst_vma()
48 return dst_vma; in find_dst_vma()
53 struct vm_area_struct *dst_vma, in mcopy_atomic_pte() argument
69 page = alloc_page_vma(GFP_HIGHUSER_MOVABLE, dst_vma, dst_addr); in mcopy_atomic_pte()
102 _dst_pte = pte_mkdirty(mk_pte(page, dst_vma->vm_page_prot)); in mcopy_atomic_pte()
[all …]
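
A minimal sketch stitching the userfaultfd.c hits above (lines 30-48) into one routine. The control flow between the quoted lines, and returning NULL on each failed check, are assumptions rather than the file's verbatim code:

static struct vm_area_struct *find_dst_vma(struct mm_struct *dst_mm,
					   unsigned long dst_start,
					   unsigned long len)
{
	struct vm_area_struct *dst_vma;

	/* Find the VMA covering (or following) dst_start. */
	dst_vma = find_vma(dst_mm, dst_start);
	if (!dst_vma)
		return NULL;

	/* The whole [dst_start, dst_start + len) range must fit inside it. */
	if (dst_start < dst_vma->vm_start ||
	    dst_start + len > dst_vma->vm_end)
		return NULL;

	/* Only a VMA registered with a userfaultfd context qualifies. */
	if (!dst_vma->vm_userfaultfd_ctx.ctx)
		return NULL;

	return dst_vma;
}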
memory.c
796 copy_present_page(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_page() argument
835 page_add_new_anon_rmap(new_page, dst_vma, addr, false); in copy_present_page()
836 lru_cache_add_inactive_or_unevictable(new_page, dst_vma); in copy_present_page()
840 pte = mk_pte(new_page, dst_vma->vm_page_prot); in copy_present_page()
841 pte = maybe_mkwrite(pte_mkdirty(pte), dst_vma); in copy_present_page()
842 set_pte_at(dst_vma->vm_mm, addr, dst_pte, pte); in copy_present_page()
851 copy_present_pte(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_pte() argument
864 retval = copy_present_page(dst_vma, src_vma, dst_pte, src_pte, in copy_present_pte()
899 set_pte_at(dst_vma->vm_mm, addr, dst_pte, pte); in copy_present_pte()
923 copy_pte_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pte_range() argument
[all …]
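
The memory.c hits (lines 796-899) come from the fork path: copy_pte_range() walks the parent's page tables and copy_present_page() installs a private copy of a page in the child. A sketch of the installation sequence quoted above; the wrapper name install_copied_page and the elided allocation/copy steps are hypothetical:

static void install_copied_page(struct vm_area_struct *dst_vma,
				unsigned long addr, pte_t *dst_pte,
				struct page *new_page)
{
	pte_t pte;

	/* Register the new anonymous page in the rmap and on the LRU. */
	page_add_new_anon_rmap(new_page, dst_vma, addr, false);
	lru_cache_add_inactive_or_unevictable(new_page, dst_vma);

	/* Build a dirty PTE, writable only if the VMA permits writes. */
	pte = mk_pte(new_page, dst_vma->vm_page_prot);
	pte = maybe_mkwrite(pte_mkdirty(pte), dst_vma);
	set_pte_at(dst_vma->vm_mm, addr, dst_pte, pte);
}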
hugetlb.c
4605 struct vm_area_struct *dst_vma, in hugetlb_mcopy_atomic_pte() argument
4613 int vm_shared = dst_vma->vm_flags & VM_SHARED; in hugetlb_mcopy_atomic_pte()
4614 struct hstate *h = hstate_vma(dst_vma); in hugetlb_mcopy_atomic_pte()
4622 page = alloc_huge_page(dst_vma, dst_addr, 0); in hugetlb_mcopy_atomic_pte()
4649 mapping = dst_vma->vm_file->f_mapping; in hugetlb_mcopy_atomic_pte()
4650 idx = vma_hugecache_offset(h, dst_vma, dst_addr); in hugetlb_mcopy_atomic_pte()
4697 hugepage_add_new_anon_rmap(page, dst_vma, dst_addr); in hugetlb_mcopy_atomic_pte()
4700 _dst_pte = make_huge_pte(dst_vma, page, dst_vma->vm_flags & VM_WRITE); in hugetlb_mcopy_atomic_pte()
4701 if (dst_vma->vm_flags & VM_WRITE) in hugetlb_mcopy_atomic_pte()
4707 (void)huge_ptep_set_access_flags(dst_vma, dst_addr, dst_pte, _dst_pte, in hugetlb_mcopy_atomic_pte()
[all …]
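
A sketch of the huge-PTE construction in the hugetlb.c hits (lines 4697-4707). The wrapper name install_huge_pte, the huge_pte_mkdirty() branch body, the set_huge_pte_at() call, and the final argument to huge_ptep_set_access_flags() do not appear in the hits above and are assumptions:

static void install_huge_pte(struct mm_struct *dst_mm,
			     struct vm_area_struct *dst_vma,
			     unsigned long dst_addr, pte_t *dst_pte,
			     struct page *page)
{
	pte_t _dst_pte;

	/* Anonymous destination: give the new huge page an rmap entry. */
	hugepage_add_new_anon_rmap(page, dst_vma, dst_addr);

	/* Writable iff the VMA allows writes; mark dirty in that case. */
	_dst_pte = make_huge_pte(dst_vma, page, dst_vma->vm_flags & VM_WRITE);
	if (dst_vma->vm_flags & VM_WRITE)
		_dst_pte = huge_pte_mkdirty(_dst_pte);	/* assumed branch body */

	set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);	/* assumed */
	(void)huge_ptep_set_access_flags(dst_vma, dst_addr, dst_pte, _dst_pte,
					 dst_vma->vm_flags & VM_WRITE);
}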
shmem.c
2362 struct vm_area_struct *dst_vma, in shmem_mfill_atomic_pte() argument
2368 struct inode *inode = file_inode(dst_vma->vm_file); in shmem_mfill_atomic_pte()
2372 pgoff_t pgoff = linear_page_index(dst_vma, dst_addr); in shmem_mfill_atomic_pte()
2417 offset = linear_page_index(dst_vma, dst_addr); in shmem_mfill_atomic_pte()
2427 _dst_pte = mk_pte(page, dst_vma->vm_page_prot); in shmem_mfill_atomic_pte()
2428 if (dst_vma->vm_flags & VM_WRITE) in shmem_mfill_atomic_pte()
2465 update_mmu_cache(dst_vma, dst_addr, dst_pte); in shmem_mfill_atomic_pte()
2485 struct vm_area_struct *dst_vma, in shmem_mcopy_atomic_pte() argument
2490 return shmem_mfill_atomic_pte(dst_mm, dst_pmd, dst_vma, in shmem_mcopy_atomic_pte()
2496 struct vm_area_struct *dst_vma, in shmem_mfill_zeropage_pte() argument
[all …]
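
A sketch of the shmem PTE path in the shmem.c hits (lines 2427-2465), used when userfaultfd fills a tmpfs-backed range. The wrapper name install_shmem_pte, the pte_mkwrite(pte_mkdirty()) branch body, and the set_pte_at() call sit between the quoted lines and are assumptions:

static void install_shmem_pte(struct mm_struct *dst_mm,
			      struct vm_area_struct *dst_vma,
			      unsigned long dst_addr, pte_t *dst_pte,
			      struct page *page)
{
	pte_t _dst_pte;

	_dst_pte = mk_pte(page, dst_vma->vm_page_prot);
	if (dst_vma->vm_flags & VM_WRITE)
		_dst_pte = pte_mkwrite(pte_mkdirty(_dst_pte));	/* assumed */

	set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);	/* assumed */

	/* Let the architecture update its MMU caches for the new PTE. */
	update_mmu_cache(dst_vma, dst_addr, dst_pte);
}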
/Linux-v5.10/include/linux/
shmem_fs.h
123 struct vm_area_struct *dst_vma,
129 struct vm_area_struct *dst_vma,
132 #define shmem_mcopy_atomic_pte(dst_mm, dst_pte, dst_vma, dst_addr, \ argument
134 #define shmem_mfill_zeropage_pte(dst_mm, dst_pmd, dst_vma, \ argument
hugetlb.h
138 struct vm_area_struct *dst_vma,
313 struct vm_area_struct *dst_vma, in hugetlb_mcopy_atomic_pte() argument
mm.h
1657 copy_page_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma);
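
The mm.h hit shows the v5.10 signature of copy_page_range(), the entry point for the copy_pte_range()/copy_present_pte() chain seen in memory.c above. A hedged sketch of its fork-time call site; the dup_mmap() context, with tmp as the child's VMA and mpnt as the parent's, is an assumption based on kernel/fork.c:

	/* Skip copying page tables for VMAs marked wipe-on-fork. */
	if (!(tmp->vm_flags & VM_WIPEONFORK))
		retval = copy_page_range(tmp, mpnt);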