Lines matching refs:vm_area_struct

80 void reset_vma_resv_huge_pages(struct vm_area_struct *vma);
90 int copy_hugetlb_page_range(struct mm_struct *, struct mm_struct *, struct vm_area_struct *);
91 long follow_hugetlb_page(struct mm_struct *, struct vm_area_struct *,
92 struct page **, struct vm_area_struct **,
95 void unmap_hugepage_range(struct vm_area_struct *,
98 struct vm_area_struct *vma,
101 void __unmap_hugepage_range(struct mmu_gather *tlb, struct vm_area_struct *vma,
108 vm_fault_t hugetlb_fault(struct mm_struct *mm, struct vm_area_struct *vma,
111 struct vm_area_struct *dst_vma,
116 struct vm_area_struct *vma,
127 struct vm_area_struct *vma,
143 void adjust_range_if_pmd_sharing_possible(struct vm_area_struct *vma,
147 struct page *follow_huge_pd(struct vm_area_struct *vma,
159 unsigned long hugetlb_change_protection(struct vm_area_struct *vma,
166 static inline void reset_vma_resv_huge_pages(struct vm_area_struct *vma) in reset_vma_resv_huge_pages()
182 struct vm_area_struct *vma, in adjust_range_if_pmd_sharing_possible()
218 static inline unsigned long hugetlb_change_protection(struct vm_area_struct *vma, in hugetlb_change_protection()
225 struct vm_area_struct *vma, unsigned long start, in __unmap_hugepage_range_final()
232 struct vm_area_struct *vma, unsigned long start, in __unmap_hugepage_range()
367 struct page *alloc_huge_page(struct vm_area_struct *vma,
372 struct page *alloc_huge_page_vma(struct hstate *h, struct vm_area_struct *vma,
412 static inline struct hstate *hstate_vma(struct vm_area_struct *vma) in hstate_vma()
422 extern unsigned long vma_kernel_pagesize(struct vm_area_struct *vma);
424 extern unsigned long vma_mmu_pagesize(struct vm_area_struct *vma);
459 static inline pte_t arch_make_huge_pte(pte_t entry, struct vm_area_struct *vma, in arch_make_huge_pte()
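The matches above are the vm_area_struct touchpoints of the hugetlb API: fault handling (hugetlb_fault, line 108), teardown (unmap_hugepage_range and the __unmap_hugepage_range variants, lines 95-101 and 225-232), allocation (alloc_huge_page and alloc_huge_page_vma, lines 367-372), and per-VMA geometry (hstate_vma, vma_kernel_pagesize, arch_make_huge_pte, lines 412-459). As a rough illustration of how two of these entry points are reached, the sketch below mirrors the dispatch pattern used on the kernel's fault path, where hugetlb VMAs are tested with is_vm_hugetlb_page() and diverted before the normal page-table walk. It is a simplified sketch, not the kernel's actual implementation, and demo_fault_dispatch() and demo_vma_pagesize() are hypothetical helper names.

/*
 * Sketch only: shows how the declarations listed above fit together.
 * demo_fault_dispatch() and demo_vma_pagesize() are illustrative
 * names, not kernel functions.
 */
#include <linux/mm.h>
#include <linux/hugetlb.h>

static vm_fault_t demo_fault_dispatch(struct vm_area_struct *vma,
				      unsigned long address,
				      unsigned int flags)
{
	/*
	 * Hugetlb-backed VMAs never take the normal page-table walk:
	 * the fault is handled at huge-page granularity by
	 * hugetlb_fault() (declared at line 108 above).
	 */
	if (is_vm_hugetlb_page(vma))
		return hugetlb_fault(vma->vm_mm, vma, address, flags);

	return VM_FAULT_SIGBUS;	/* placeholder for the normal path */
}

static unsigned long demo_vma_pagesize(struct vm_area_struct *vma)
{
	/*
	 * hstate_vma() (line 412) maps a VMA to its hstate; for a
	 * hugetlb VMA, vma_kernel_pagesize() (line 422) reports the
	 * same value as huge_page_size(hstate_vma(vma)).
	 */
	if (is_vm_hugetlb_page(vma))
		return huge_page_size(hstate_vma(vma));

	return PAGE_SIZE;
}

The recurring struct vm_area_struct parameter is the design point the listing reflects: hugetlb pages are managed outside the generic mm paths, so every entry point takes the VMA both to identify the backing hstate and to let callers segregate hugetlb mappings early with is_vm_hugetlb_page().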