Searched refs:hstate_vma (Results 1 – 15 of 15) sorted by relevance
70 flush_tlb_page(vma, addr & huge_page_mask(hstate_vma(vma))); in huge_ptep_clear_flush()
630 return vma_hugecache_offset(hstate_vma(vma), vma, address); in linear_hugepage_index()
1994 struct hstate *h = hstate_vma(vma); in alloc_huge_page()
3111 struct hstate *h = hstate_vma(vma); in hugetlb_vm_op_close()
3139 if (addr & ~(huge_page_mask(hstate_vma(vma)))) in hugetlb_vm_op_split()
3146 struct hstate *hstate = hstate_vma(vma); in hugetlb_vm_op_pagesize()
3240 struct hstate *h = hstate_vma(vma); in copy_hugetlb_page_range()
3327 struct hstate *h = hstate_vma(vma); in __unmap_hugepage_range()
3471 struct hstate *h = hstate_vma(vma); in unmap_ref_private()
3529 struct hstate *h = hstate_vma(vma); in hugetlb_cow()
3704 struct hstate *h = hstate_vma(vma); in hugetlb_no_page()
[all …]
181 struct hstate *h = hstate_vma(vma); in walk_hugetlb_range()
238 h = hstate_vma(dst_vma); in __mcopy_atomic_hugetlb()
523 ptl = huge_pte_lock(hstate_vma(walk->vma), walk->mm, pte); in queue_pages_hugetlb()
1864 huge_page_shift(hstate_vma(vma))); in huge_node()
356 spinlock_t *ptl = huge_pte_lockptr(hstate_vma(vma), mm, pte); in migration_entry_wait_huge()
252 struct hstate *h = hstate_vma(vma); in huge_ptep_set_access_flags()
412 static inline struct hstate *hstate_vma(struct vm_area_struct *vma) in hstate_vma() function
555 #define hstate_vma(v) NULL macro
286 size_t pagesize = huge_page_size(hstate_vma(vma)); in arch_make_huge_pte()
184 unsigned int shift = huge_page_shift(hstate_vma(vma)); in arch_make_huge_pte()
445 hugepage_size = huge_page_size(hstate_vma(vma)); in update_mmu_cache()
361 h = hstate_vma(vma); in ib_umem_odp_get()
694 mss->shared_hugetlb += huge_page_size(hstate_vma(vma)); in smaps_hugetlb_range()
696 mss->private_hugetlb += huge_page_size(hstate_vma(vma)); in smaps_hugetlb_range()
1462 info.si_addr_lsb = huge_page_shift(hstate_vma(vma)); in kvm_send_hwpoison_signal()
1416 h = hstate_vma(vma); in i40iw_set_hugetlb_values()