Lines matching refs: slb_v
1061 unsigned long eaddr, unsigned long slb_v, long mmio_update) in mmio_cache_search() argument
1072 entry->slb_v == slb_v) in mmio_cache_search()
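
The two hits at 1061/1072 are the MMIO translation cache lookup: an entry is only reused if it was filled under the current mmio_update generation, covers the same page as the faulting effective address, and was created under the same slb_v. Below is a compilable sketch of that match; the entry layout and cache size are illustrative assumptions, not the kernel's definitions.

#include <stddef.h>

#define CACHE_ENTRIES 4                  /* assumed size for illustration */

struct mmio_cache_entry {                /* assumed layout, not the kernel's */
	unsigned long eaddr;             /* effective address of the access */
	unsigned long slb_v;             /* SLB VSID word it was translated under */
	unsigned int  pshift;            /* page shift of the backing page */
	long          mmio_update;       /* generation counter when filled */
};

static struct mmio_cache_entry *
cache_search(struct mmio_cache_entry *cache, unsigned long eaddr,
	     unsigned long slb_v, long mmio_update)
{
	for (size_t i = 0; i < CACHE_ENTRIES; i++) {
		struct mmio_cache_entry *e = &cache[i];

		/* Entry must be from the current generation, cover the same
		 * page, and have been made under the same slb_v (line 1072). */
		if (e->mmio_update == mmio_update &&
		    (e->eaddr >> e->pshift) == (eaddr >> e->pshift) &&
		    e->slb_v == slb_v)
			return e;
	}
	return NULL;
}

kvmppc_hpte_hv_fault() passes the current slb_v together with the vcpu's mmio_update counter (line 1206), which is presumably how entries made before an HPT change are ignored.
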
1095 long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr, unsigned long slb_v, in kvmppc_hv_find_lock_hpte() argument
1111 if (slb_v & SLB_VSID_L) { in kvmppc_hv_find_lock_hpte()
1114 pshift = slb_base_page_shift[(slb_v & SLB_VSID_LP) >> 4]; in kvmppc_hv_find_lock_hpte()
1116 if (slb_v & SLB_VSID_B_1T) { in kvmppc_hv_find_lock_hpte()
1118 vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT_1T; in kvmppc_hv_find_lock_hpte()
1122 vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT; in kvmppc_hv_find_lock_hpte()
1125 avpn = slb_v & ~(somask >> 16); /* also includes B */ in kvmppc_hv_find_lock_hpte()
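
The hits at 1111-1125 show how kvmppc_hv_find_lock_hpte() decodes slb_v before searching the hashed page table: the L/LP bits select the base page shift, the B field distinguishes 256M from 1T segments (different VSID shifts and segment-offset masks), and the remaining high bits form the AVPN used to match HPTEs. A standalone sketch of that decode follows; the SLB_VSID_* values and the page-shift table reflect my reading of the book3s MMU definitions and should be treated as assumptions.

#include <stdio.h>

/* SLB_VSID_* values as assumed from the book3s MMU headers. */
#define SLB_VSID_B        0xc000000000000000UL  /* segment-size field */
#define SLB_VSID_B_1T     0x4000000000000000UL  /* B = 1T segment */
#define SLB_VSID_L        0x0000000000000100UL  /* large base page */
#define SLB_VSID_LP       0x0000000000000030UL  /* large-page size selector */
#define SLB_VSID_SHIFT    12                    /* VSID position, 256M segment */
#define SLB_VSID_SHIFT_1T 24                    /* VSID position, 1T segment */

/* Assumed to mirror slb_base_page_shift[]: 16M, 64K, 16G, 1M. */
static const unsigned int slb_base_page_shift[4] = { 24, 16, 34, 20 };

static void decode_slb_v(unsigned long eaddr, unsigned long slb_v)
{
	unsigned long somask, vsid, avpn;
	unsigned int pshift = 12;               /* 4K unless L/LP say otherwise */

	/* L/LP select the base page size (lines 1111-1114). */
	if (slb_v & SLB_VSID_L)
		pshift = slb_base_page_shift[(slb_v & SLB_VSID_LP) >> 4];

	/* B selects 256M vs 1T segments, which changes both the VSID shift
	 * and the segment-offset mask (lines 1116-1122). */
	if (slb_v & SLB_VSID_B_1T) {
		somask = (1UL << 40) - 1;
		vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT_1T;
	} else {
		somask = (1UL << 28) - 1;
		vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT;
	}

	/* AVPN: the high bits of slb_v (B included) plus the segment offset,
	 * positioned for comparison against HPT entries (line 1125). */
	avpn = slb_v & ~(somask >> 16);
	avpn |= (eaddr & somask) >> 16;

	printf("pshift=%u vsid=%#lx avpn=%#lx\n", pshift, vsid, avpn);
}

int main(void)
{
	/* Example: a 1T segment with 64K base pages (LP = 01). */
	decode_slb_v(0x123456789aUL,
		     SLB_VSID_B_1T | SLB_VSID_L | 0x10UL |
		     (0x42UL << SLB_VSID_SHIFT_1T));
	return 0;
}
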
1189 unsigned long slb_v, unsigned int status, bool data) in kvmppc_hpte_hv_fault() argument
1206 cache_entry = mmio_cache_search(vcpu, addr, slb_v, mmio_update); in kvmppc_hpte_hv_fault()
1214 index = kvmppc_hv_find_lock_hpte(kvm, addr, slb_v, valid); in kvmppc_hpte_hv_fault()
1244 if (!hpte_read_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1248 if (!hpte_write_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1251 if (!hpte_read_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1278 if (slb_v & SLB_VSID_L) { in kvmppc_hpte_hv_fault()
1279 pshift_index = ((slb_v & SLB_VSID_LP) >> 4); in kvmppc_hpte_hv_fault()
1289 cache_entry->slb_v = slb_v; in kvmppc_hpte_hv_fault()
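
In kvmppc_hpte_hv_fault(), slb_v is used twice more: its Ks/Kp key bit (selected by the privilege level elsewhere in the function) is combined with the HPTE pp bits for the permission checks at 1244-1251, and at 1278-1289 the page shift is re-derived from L/LP so the faulting translation, including slb_v itself, can be stored in the MMIO cache entry. A hedged sketch of that shape follows; the permission rule and struct layout are simplified stand-ins, not the kernel's helpers.

#include <stdbool.h>

#define SLB_VSID_KS 0x0000000000000800UL        /* assumed: supervisor-state key */
#define SLB_VSID_KP 0x0000000000000400UL        /* assumed: problem-state key */
#define SLB_VSID_L  0x0000000000000100UL
#define SLB_VSID_LP 0x0000000000000030UL

struct mmio_cache_entry {                        /* assumed layout */
	unsigned long eaddr;
	unsigned long slb_v;
	unsigned int  pshift;
};

/* Toy permission rule, NOT the kernel's hpte_read_permission(): a zero key
 * (the relevant key bit is clear in slb_v) always permits reads, otherwise
 * only the fully protected pp encoding is refused. */
static bool read_permitted(unsigned long pp, unsigned long key)
{
	return !key || pp != 3;
}

static bool fault_read_permitted(unsigned long pp, unsigned long slb_v,
				 bool user_mode)
{
	/* Which key bit applies depends on the privilege level; the pp bits
	 * are then interpreted under slb_v & key, as at lines 1244-1251. */
	unsigned long key = user_mode ? SLB_VSID_KP : SLB_VSID_KS;

	return read_permitted(pp, slb_v & key);
}

static void cache_mmio_translation(struct mmio_cache_entry *e,
				   unsigned long eaddr, unsigned long slb_v)
{
	static const unsigned int base_shift[4] = { 24, 16, 34, 20 }; /* assumed */
	unsigned int pshift = 12;                /* 4K unless L/LP say otherwise */

	/* Re-derive the page shift from slb_v (lines 1278-1279) and record
	 * slb_v itself so a later fault can be matched against it (line 1289). */
	if (slb_v & SLB_VSID_L)
		pshift = base_shift[(slb_v & SLB_VSID_LP) >> 4];

	e->eaddr = eaddr;
	e->slb_v = slb_v;
	e->pshift = pshift;
}

The slb_v recorded here is what the lookup at line 1072 compares against on the next fault, so a cached MMIO translation is only reused when the guest faults through the same segment mapping.
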