Lines Matching refs:kvm_pte_t

115 static bool kvm_pte_valid(kvm_pte_t pte)  in kvm_pte_valid()
120 static bool kvm_pte_table(kvm_pte_t pte, u32 level) in kvm_pte_table()
131 static u64 kvm_pte_to_phys(kvm_pte_t pte) in kvm_pte_to_phys()
141 static kvm_pte_t kvm_phys_to_pte(u64 pa) in kvm_phys_to_pte()
143 kvm_pte_t pte = pa & KVM_PTE_ADDR_MASK; in kvm_phys_to_pte()
151 static kvm_pte_t *kvm_pte_follow(kvm_pte_t pte) in kvm_pte_follow()
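
The entries above (lines 115-151) are the read-side accessors of the arm64 KVM page-table code (pgtable.c): they test the valid bit, distinguish table from leaf descriptors, and convert between a descriptor and the physical address it encodes. A minimal sketch of their shape, following the v5.10-era code; KVM_PTE_VALID, KVM_PTE_TYPE, KVM_PTE_ADDR_MASK, KVM_PGTABLE_MAX_LEVELS, FIELD_GET()/FIELD_PREP() and __va() are the kernel's own definitions, and details may differ in other kernel versions:

static bool kvm_pte_valid(kvm_pte_t pte)
{
	return pte & KVM_PTE_VALID;		/* bit 0 of the descriptor */
}

static bool kvm_pte_table(kvm_pte_t pte, u32 level)
{
	/* The last level cannot point to a further table. */
	if (level == KVM_PGTABLE_MAX_LEVELS - 1)
		return false;

	if (!kvm_pte_valid(pte))
		return false;

	return FIELD_GET(KVM_PTE_TYPE, pte) == KVM_PTE_TYPE_TABLE;
}

static u64 kvm_pte_to_phys(kvm_pte_t pte)
{
	u64 pa = pte & KVM_PTE_ADDR_MASK;

	/* With 64K pages, PA[51:48] live in a separate field. */
	if (PAGE_SHIFT == 16)
		pa |= FIELD_GET(KVM_PTE_ADDR_51_48, pte) << 48;

	return pa;
}

static kvm_pte_t kvm_phys_to_pte(u64 pa)
{
	kvm_pte_t pte = pa & KVM_PTE_ADDR_MASK;

	if (PAGE_SHIFT == 16)
		pte |= FIELD_PREP(KVM_PTE_ADDR_51_48, pa >> 48);

	return pte;
}

static kvm_pte_t *kvm_pte_follow(kvm_pte_t pte)
{
	/* The next-level table lives in the kernel's linear map. */
	return __va(kvm_pte_to_phys(pte));
}
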
156 static void kvm_set_invalid_pte(kvm_pte_t *ptep) in kvm_set_invalid_pte()
158 kvm_pte_t pte = *ptep; in kvm_set_invalid_pte()
162 static void kvm_set_table_pte(kvm_pte_t *ptep, kvm_pte_t *childp) in kvm_set_table_pte()
164 kvm_pte_t old = *ptep, pte = kvm_phys_to_pte(__pa(childp)); in kvm_set_table_pte()
173 static bool kvm_set_valid_leaf_pte(kvm_pte_t *ptep, u64 pa, kvm_pte_t attr, in kvm_set_valid_leaf_pte()
176 kvm_pte_t old = *ptep, pte = kvm_phys_to_pte(pa); in kvm_set_valid_leaf_pte()
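
Lines 156-176 are the corresponding writers. Invalidation only needs WRITE_ONCE(), while installing a table or leaf uses smp_store_release() so that the initialisation of the new descriptor (and of a freshly zeroed child table) is ordered before it becomes reachable by the hardware walker. A sketch along the lines of the v5.10 code:

static void kvm_set_invalid_pte(kvm_pte_t *ptep)
{
	kvm_pte_t pte = *ptep;

	WRITE_ONCE(*ptep, pte & ~KVM_PTE_VALID);
}

static void kvm_set_table_pte(kvm_pte_t *ptep, kvm_pte_t *childp)
{
	kvm_pte_t old = *ptep, pte = kvm_phys_to_pte(__pa(childp));

	pte |= FIELD_PREP(KVM_PTE_TYPE, KVM_PTE_TYPE_TABLE);
	pte |= KVM_PTE_VALID;

	WARN_ON(kvm_pte_valid(old));		/* never overwrite a live entry */
	smp_store_release(ptep, pte);
}

static bool kvm_set_valid_leaf_pte(kvm_pte_t *ptep, u64 pa, kvm_pte_t attr,
				   u32 level)
{
	kvm_pte_t old = *ptep, pte = kvm_phys_to_pte(pa);
	u64 type = (level == KVM_PGTABLE_MAX_LEVELS - 1) ? KVM_PTE_TYPE_PAGE :
							   KVM_PTE_TYPE_BLOCK;

	pte |= attr & (KVM_PTE_LEAF_ATTR_LO | KVM_PTE_LEAF_ATTR_HI);
	pte |= FIELD_PREP(KVM_PTE_TYPE, type);
	pte |= KVM_PTE_VALID;

	/* Tolerate re-installing the exact same mapping. */
	if (kvm_pte_valid(old))
		return old == pte;

	smp_store_release(ptep, pte);
	return true;
}
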
193 u32 level, kvm_pte_t *ptep, in kvm_pgtable_visitor_cb()
201 kvm_pte_t *pgtable, u32 level);
204 kvm_pte_t *ptep, u32 level) in __kvm_pgtable_visit()
208 kvm_pte_t *childp, pte = *ptep; in __kvm_pgtable_visit()
247 kvm_pte_t *pgtable, u32 level) in __kvm_pgtable_walk()
256 kvm_pte_t *ptep = &pgtable[idx]; in __kvm_pgtable_walk()
283 kvm_pte_t *ptep = &pgt->pgd[idx * PTRS_PER_PTE]; in _kvm_pgtable_walk()
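
Lines 193-283 form the generic walker: _kvm_pgtable_walk() iterates over the (possibly concatenated) PGD pages, __kvm_pgtable_walk() over the entries of one table, and __kvm_pgtable_visit() fires the visitor callback and recurses into child tables. A condensed sketch of the recursion; kvm_granule_size() and kvm_pgtable_idx() are the file's own helpers (they do not reference kvm_pte_t, so they are absent from this listing), and the real code additionally dispatches the KVM_PGTABLE_WALK_TABLE_PRE/LEAF/TABLE_POST flags and re-reads the entry after the callback may have changed it:

static int __kvm_pgtable_walk(struct kvm_pgtable_walk_data *data,
			      kvm_pte_t *pgtable, u32 level);

static int __kvm_pgtable_visit(struct kvm_pgtable_walk_data *data,
			       kvm_pte_t *ptep, u32 level)
{
	kvm_pte_t pte = *ptep;

	if (!kvm_pte_table(pte, level)) {
		/* Leaf (or invalid) entry: visit it, then step over its range. */
		int ret = kvm_pgtable_visitor_cb(data, data->addr, level, ptep,
						 KVM_PGTABLE_WALK_LEAF);

		data->addr += kvm_granule_size(level);
		return ret;
	}

	/* Table entry: descend into the next level. */
	return __kvm_pgtable_walk(data, kvm_pte_follow(pte), level + 1);
}

static int __kvm_pgtable_walk(struct kvm_pgtable_walk_data *data,
			      kvm_pte_t *pgtable, u32 level)
{
	u32 idx;
	int ret = 0;

	for (idx = kvm_pgtable_idx(data, level); idx < PTRS_PER_PTE; ++idx) {
		kvm_pte_t *ptep = &pgtable[idx];

		if (data->addr >= data->end)
			break;

		ret = __kvm_pgtable_visit(data, ptep, level);
		if (ret)
			break;
	}

	return ret;
}
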
308 kvm_pte_t attr;
316 kvm_pte_t attr = FIELD_PREP(KVM_PTE_LEAF_ATTR_LO_S1_ATTRIDX, mtype); in hyp_map_set_prot_attr()
342 kvm_pte_t *ptep, struct hyp_map_data *data) in hyp_map_walker_try_leaf()
354 static int hyp_map_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in hyp_map_walker()
357 kvm_pte_t *childp; in hyp_map_walker()
365 childp = (kvm_pte_t *)get_zeroed_page(GFP_KERNEL); in hyp_map_walker()
400 pgt->pgd = (kvm_pte_t *)get_zeroed_page(GFP_KERNEL); in kvm_pgtable_hyp_init()
410 static int hyp_free_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in hyp_free_walker()
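
Lines 308-410 are the hypervisor stage-1 mapping path: hyp_map_set_prot_attr() folds the requested protection into an attribute template held in hyp_map_data, hyp_map_walker_try_leaf() installs a leaf when the remaining range covers a whole, suitably aligned granule, and hyp_map_walker() otherwise allocates a zeroed child table and plugs it in with kvm_set_table_pte(). A sketch of the walker body, following the v5.10 code:

static int hyp_map_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep,
			  enum kvm_pgtable_walk_flags flag, void * const arg)
{
	kvm_pte_t *childp;

	/* Put down a leaf mapping if alignment and size allow it. */
	if (hyp_map_walker_try_leaf(addr, end, level, ptep, arg))
		return 0;

	if (WARN_ON(level == KVM_PGTABLE_MAX_LEVELS - 1))
		return -EINVAL;

	childp = (kvm_pte_t *)get_zeroed_page(GFP_KERNEL);
	if (!childp)
		return -ENOMEM;

	kvm_set_table_pte(ptep, childp);
	return 0;
}
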
431 kvm_pte_t attr;
433 kvm_pte_t *anchor;
443 kvm_pte_t attr = device ? PAGE_S2_MEMATTR(DEVICE_nGnRE) : in stage2_map_set_prot_attr()
465 kvm_pte_t *ptep, in stage2_map_walker_try_leaf()
495 kvm_pte_t *ptep, in stage2_map_walk_table_pre()
516 static int stage2_map_walk_leaf(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_map_walk_leaf()
519 kvm_pte_t *childp, pte = *ptep; in stage2_map_walk_leaf()
561 kvm_pte_t *ptep, in stage2_map_walk_table_post()
599 static int stage2_map_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_map_walker()
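
Lines 431-599 are the stage-2 mapping path. Unlike the hyp walker, it registers for all three visit points so an existing table can be replaced by a block mapping: stage2_map_walk_table_pre() invalidates the table entry and records an anchor when a block would cover the range, stage2_map_walk_leaf() installs leaves or allocates child tables from the memcache, and stage2_map_walk_table_post() tears down the stale subtree and installs the new mapping at the anchor. stage2_map_walker() itself is just the dispatcher; a sketch per the v5.10 code:

static int stage2_map_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep,
			     enum kvm_pgtable_walk_flags flag, void * const arg)
{
	struct stage2_map_data *data = arg;

	switch (flag) {
	case KVM_PGTABLE_WALK_TABLE_PRE:
		return stage2_map_walk_table_pre(addr, end, level, ptep, data);
	case KVM_PGTABLE_WALK_LEAF:
		return stage2_map_walk_leaf(addr, end, level, ptep, data);
	case KVM_PGTABLE_WALK_TABLE_POST:
		return stage2_map_walk_table_post(addr, end, level, ptep, data);
	}

	return -EINVAL;
}
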
651 static bool stage2_pte_cacheable(kvm_pte_t pte) in stage2_pte_cacheable()
657 static int stage2_unmap_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_unmap_walker()
662 kvm_pte_t pte = *ptep, *childp = NULL; in stage2_unmap_walker()
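
Lines 651-662 belong to the unmap path: stage2_pte_cacheable() checks the MemAttr field so that only cacheable mappings receive a data-cache maintenance operation on teardown, and stage2_unmap_walker() invalidates the entry, performs a TLBI by IPA and drops the reference held on the containing table page. A sketch following the v5.10 code; page_count()/put_page() are used there to track how many entries of a table page are live, and stage2_flush_dcache() is the file's own CMO helper:

static int stage2_unmap_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep,
			       enum kvm_pgtable_walk_flags flag,
			       void * const arg)
{
	struct kvm_s2_mmu *mmu = arg;
	kvm_pte_t pte = *ptep, *childp = NULL;
	bool need_flush = false;

	if (!kvm_pte_valid(pte))
		return 0;

	if (kvm_pte_table(pte, level)) {
		childp = kvm_pte_follow(pte);
		/* Only tear down a child table once it has been emptied. */
		if (page_count(virt_to_page(childp)) != 1)
			return 0;
	} else if (stage2_pte_cacheable(pte)) {
		need_flush = true;
	}

	kvm_set_invalid_pte(ptep);
	kvm_call_hyp(__kvm_tlb_flush_vmid_ipa, mmu, addr, level);
	put_page(virt_to_page(ptep));

	if (need_flush)
		stage2_flush_dcache(kvm_pte_follow(pte),
				    kvm_granule_size(level));

	if (childp)
		free_page((unsigned long)childp);

	return 0;
}
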
709 kvm_pte_t attr_set;
710 kvm_pte_t attr_clr;
711 kvm_pte_t pte;
715 static int stage2_attr_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_attr_walker()
719 kvm_pte_t pte = *ptep; in stage2_attr_walker()
742 u64 size, kvm_pte_t attr_set, in stage2_update_leaf_attrs()
743 kvm_pte_t attr_clr, kvm_pte_t *orig_pte, in stage2_update_leaf_attrs()
747 kvm_pte_t attr_mask = KVM_PTE_LEAF_ATTR_LO | KVM_PTE_LEAF_ATTR_HI; in stage2_update_leaf_attrs()
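
Lines 709-747 implement attribute updates over existing leaf mappings: stage2_attr_walker() rewrites each valid leaf as (pte & ~attr_clr) | attr_set and records the original descriptor, and stage2_update_leaf_attrs() wraps it in a KVM_PGTABLE_WALK_LEAF walk after masking the requested bits to the software-controllable attribute fields. A condensed sketch in the style of the v5.10 code; the level field of stage2_attr_data does not show up in this kvm_pte_t listing but is assumed here so callers can size their TLBI:

static int stage2_attr_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep,
			      enum kvm_pgtable_walk_flags flag,
			      void * const arg)
{
	kvm_pte_t pte = *ptep;
	struct stage2_attr_data *data = arg;

	if (!kvm_pte_valid(pte))
		return 0;

	data->level = level;
	data->pte = pte;			/* report the old descriptor */
	pte &= ~data->attr_clr;
	pte |= data->attr_set;

	if (data->pte != pte)
		WRITE_ONCE(*ptep, pte);

	return 0;
}

static int stage2_update_leaf_attrs(struct kvm_pgtable *pgt, u64 addr,
				    u64 size, kvm_pte_t attr_set,
				    kvm_pte_t attr_clr, kvm_pte_t *orig_pte,
				    u32 *level)
{
	kvm_pte_t attr_mask = KVM_PTE_LEAF_ATTR_LO | KVM_PTE_LEAF_ATTR_HI;
	struct stage2_attr_data data = {
		.attr_set	= attr_set & attr_mask,
		.attr_clr	= attr_clr & attr_mask,
	};
	struct kvm_pgtable_walker walker = {
		.cb	= stage2_attr_walker,
		.arg	= &data,
		.flags	= KVM_PGTABLE_WALK_LEAF,
	};
	int ret;

	ret = kvm_pgtable_walk(pgt, addr, size, &walker);
	if (ret)
		return ret;

	if (orig_pte)
		*orig_pte = data.pte;
	if (level)
		*level = data.level;
	return 0;
}
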
777 kvm_pte_t kvm_pgtable_stage2_mkyoung(struct kvm_pgtable *pgt, u64 addr) in kvm_pgtable_stage2_mkyoung()
779 kvm_pte_t pte = 0; in kvm_pgtable_stage2_mkyoung()
786 kvm_pte_t kvm_pgtable_stage2_mkold(struct kvm_pgtable *pgt, u64 addr) in kvm_pgtable_stage2_mkold()
788 kvm_pte_t pte = 0; in kvm_pgtable_stage2_mkold()
802 kvm_pte_t pte = 0; in kvm_pgtable_stage2_is_young()
812 kvm_pte_t set = 0, clr = 0; in kvm_pgtable_stage2_relax_perms()
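
Lines 777-812 are the access-flag and permission entry points, all thin wrappers around stage2_update_leaf_attrs(): mkyoung sets the stage-2 access flag, mkold clears it (the TLBI is left to the MMU-notifier caller), is_young passes empty masks purely to read the descriptor back, and relax_perms translates KVM_PGTABLE_PROT_* bits into set/clear masks. For example, in the v5.10 code:

kvm_pte_t kvm_pgtable_stage2_mkyoung(struct kvm_pgtable *pgt, u64 addr)
{
	kvm_pte_t pte = 0;

	stage2_update_leaf_attrs(pgt, addr, 1, KVM_PTE_LEAF_ATTR_LO_S2_AF, 0,
				 &pte, NULL);
	dsb(ishst);
	return pte;
}

kvm_pte_t kvm_pgtable_stage2_mkold(struct kvm_pgtable *pgt, u64 addr)
{
	kvm_pte_t pte = 0;

	stage2_update_leaf_attrs(pgt, addr, 1, 0, KVM_PTE_LEAF_ATTR_LO_S2_AF,
				 &pte, NULL);
	/* The TLB invalidation happens in the caller (MMU notifier path). */
	return pte;
}

bool kvm_pgtable_stage2_is_young(struct kvm_pgtable *pgt, u64 addr)
{
	kvm_pte_t pte = 0;

	stage2_update_leaf_attrs(pgt, addr, 1, 0, 0, &pte, NULL);
	return pte & KVM_PTE_LEAF_ATTR_LO_S2_AF;
}
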
829 static int stage2_flush_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_flush_walker()
833 kvm_pte_t pte = *ptep; in stage2_flush_walker()
877 static int stage2_free_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep, in stage2_free_walker()
881 kvm_pte_t pte = *ptep; in stage2_free_walker()
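
Lines 829-881 are read-only maintenance and teardown: stage2_flush_walker() applies a data-cache maintenance operation to every valid, cacheable leaf (a no-op when FWB is available), and stage2_free_walker() runs as a post-order walk that drops the per-entry reference and frees each child table page. A sketch of the free walker per the v5.10 code:

static int stage2_free_walker(u64 addr, u64 end, u32 level, kvm_pte_t *ptep,
			      enum kvm_pgtable_walk_flags flag,
			      void * const arg)
{
	kvm_pte_t pte = *ptep;

	if (!kvm_pte_valid(pte))
		return 0;

	put_page(virt_to_page(ptep));		/* drop this entry's reference */

	if (kvm_pte_table(pte, level))
		free_page((unsigned long)kvm_pte_follow(pte));

	return 0;
}
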