Home
last modified time | relevance | path

Searched refs: kvm_mmu_page (Results 1 – 10 of 10) sorted by relevance

/Linux-v5.15/arch/x86/kvm/mmu/
mmu_internal.h:33 struct kvm_mmu_page { struct
85 static inline struct kvm_mmu_page *to_shadow_page(hpa_t shadow_page) in to_shadow_page() argument
89 return (struct kvm_mmu_page *)page_private(page); in to_shadow_page()
92 static inline struct kvm_mmu_page *sptep_to_sp(u64 *sptep) in sptep_to_sp()
102 static inline int kvm_mmu_page_as_id(struct kvm_mmu_page *sp) in kvm_mmu_page_as_id()
174 void account_huge_nx_page(struct kvm *kvm, struct kvm_mmu_page *sp);
175 void unaccount_huge_nx_page(struct kvm *kvm, struct kvm_mmu_page *sp);
tdp_mmu.c:56 static bool zap_gfn_range(struct kvm *kvm, struct kvm_mmu_page *root,
60 static void tdp_mmu_free_sp(struct kvm_mmu_page *sp) in tdp_mmu_free_sp()
76 struct kvm_mmu_page *sp = container_of(head, struct kvm_mmu_page, in tdp_mmu_free_sp_rcu_callback()
82 void kvm_tdp_mmu_put_root(struct kvm *kvm, struct kvm_mmu_page *root, in kvm_tdp_mmu_put_root()
108 static struct kvm_mmu_page *tdp_mmu_next_root(struct kvm *kvm, in tdp_mmu_next_root()
109 struct kvm_mmu_page *prev_root, in tdp_mmu_next_root()
112 struct kvm_mmu_page *next_root; in tdp_mmu_next_root()
174 static struct kvm_mmu_page *alloc_tdp_mmu_page(struct kvm_vcpu *vcpu, gfn_t gfn, in alloc_tdp_mmu_page()
177 struct kvm_mmu_page *sp; in alloc_tdp_mmu_page()
196 struct kvm_mmu_page *root; in kvm_tdp_mmu_get_vcpu_root_hpa()
[all …]
mmu_audit.c:34 static void __mmu_spte_walk(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, in __mmu_spte_walk()
46 struct kvm_mmu_page *child; in __mmu_spte_walk()
57 struct kvm_mmu_page *sp; in mmu_spte_walk()
83 typedef void (*sp_handler) (struct kvm *kvm, struct kvm_mmu_page *sp);
87 struct kvm_mmu_page *sp; in walk_all_active_sps()
95 struct kvm_mmu_page *sp; in audit_mappings()
130 struct kvm_mmu_page *rev_sp; in inspect_spte_has_rmap()
168 struct kvm_mmu_page *sp = sptep_to_sp(sptep); in audit_spte_after_sync()
175 static void check_mappings_rmap(struct kvm *kvm, struct kvm_mmu_page *sp) in check_mappings_rmap()
190 static void audit_write_protection(struct kvm *kvm, struct kvm_mmu_page *sp) in audit_write_protection()
[all …]
tdp_mmu.h:11 struct kvm_mmu_page *root) in kvm_tdp_mmu_get_root()
19 void kvm_tdp_mmu_put_root(struct kvm *kvm, struct kvm_mmu_page *root,
29 static inline bool kvm_tdp_mmu_zap_sp(struct kvm *kvm, struct kvm_mmu_page *sp) in kvm_tdp_mmu_zap_sp()
96 static inline bool is_tdp_mmu_page(struct kvm_mmu_page *sp) { return sp->tdp_mmu_page; } in is_tdp_mmu_page()
100 struct kvm_mmu_page *sp; in is_tdp_mmu()
118 static inline bool is_tdp_mmu_page(struct kvm_mmu_page *sp) { return false; } in is_tdp_mmu_page()
mmu.c:383 struct kvm_mmu_page *sp = sptep_to_sp(sptep); in count_spte_clear()
467 struct kvm_mmu_page *sp = sptep_to_sp(sptep); in __get_spte_lockless()
770 static gfn_t kvm_mmu_page_get_gfn(struct kvm_mmu_page *sp, int index) in kvm_mmu_page_get_gfn()
778 static void kvm_mmu_page_set_gfn(struct kvm_mmu_page *sp, int index, gfn_t gfn) in kvm_mmu_page_set_gfn()
828 static void account_shadowed(struct kvm *kvm, struct kvm_mmu_page *sp) in account_shadowed()
847 void account_huge_nx_page(struct kvm *kvm, struct kvm_mmu_page *sp) in account_huge_nx_page()
858 static void unaccount_shadowed(struct kvm *kvm, struct kvm_mmu_page *sp) in unaccount_shadowed()
875 void unaccount_huge_nx_page(struct kvm *kvm, struct kvm_mmu_page *sp) in unaccount_huge_nx_page()
1077 struct kvm_mmu_page *sp; in rmap_add()
1092 struct kvm_mmu_page *sp; in rmap_remove()
[all …]
mmutrace.h:158 TP_PROTO(struct kvm_mmu_page *sp, bool created),
177 TP_PROTO(struct kvm_mmu_page *sp),
192 TP_PROTO(struct kvm_mmu_page *sp),
198 TP_PROTO(struct kvm_mmu_page *sp),
204 TP_PROTO(struct kvm_mmu_page *sp),
paging_tmpl.h:190 struct kvm_mmu_page *sp, u64 *spte, in FNAME()
561 FNAME(prefetch_gpte)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, in FNAME()
592 static void FNAME(update_pte)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, in FNAME()
626 struct kvm_mmu_page *sp; in FNAME()
675 struct kvm_mmu_page *sp = NULL; in FNAME()
948 static gpa_t FNAME(get_level1_sp_gpa)(struct kvm_mmu_page *sp) in FNAME()
963 struct kvm_mmu_page *sp; in FNAME()
1070 static int FNAME(sync_page)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp) in FNAME()
spte.h:216 static inline bool sp_ad_disabled(struct kvm_mmu_page *sp) in sp_ad_disabled()
/Linux-v5.15/arch/x86/include/asm/
kvm_host.h:409 struct kvm_mmu_page;
428 struct kvm_mmu_page *sp);
/Linux-v5.15/Documentation/virt/kvm/
mmu.rst:121 The principal data structure is the shadow page, 'struct kvm_mmu_page'. A
198 shadow page; it is also used to go back from a struct kvm_mmu_page