Lines Matching refs:vcpu_e500
124 static inline void write_host_tlbe(struct kvmppc_vcpu_e500 *vcpu_e500, in write_host_tlbe() argument
131 __write_host_tlbe(stlbe, mas0, vcpu_e500->vcpu.kvm->arch.lpid); in write_host_tlbe()
136 vcpu_e500->vcpu.kvm->arch.lpid); in write_host_tlbe()
141 static void write_stlbe(struct kvmppc_vcpu_e500 *vcpu_e500, in write_stlbe() argument
149 stid = kvmppc_e500_get_tlb_stid(&vcpu_e500->vcpu, gtlbe); in write_stlbe()
152 write_host_tlbe(vcpu_e500, stlbsel, sesel, stlbe); in write_stlbe()
160 struct kvmppc_vcpu_e500 *vcpu_e500 = to_e500(vcpu); in kvmppc_map_magic() local
170 stid = kvmppc_e500_get_sid(vcpu_e500, 0, 0, 0, 0); in kvmppc_map_magic()
184 void inval_gtlbe_on_host(struct kvmppc_vcpu_e500 *vcpu_e500, int tlbsel, in inval_gtlbe_on_host() argument
188 get_entry(vcpu_e500, tlbsel, esel); in inval_gtlbe_on_host()
189 struct tlbe_ref *ref = &vcpu_e500->gtlb_priv[tlbsel][esel].ref; in inval_gtlbe_on_host()
195 WARN_ON(tlbsel == 1 && vcpu_e500->g2h_tlb1_map[esel]); in inval_gtlbe_on_host()
199 u64 tmp = vcpu_e500->g2h_tlb1_map[esel]; in inval_gtlbe_on_host()
211 vcpu_e500->h2g_tlb1_rmap[hw_tlb_indx] = 0; in inval_gtlbe_on_host()
215 vcpu_e500->g2h_tlb1_map[esel] = 0; in inval_gtlbe_on_host()
225 kvmppc_e500_tlbil_all(vcpu_e500); in inval_gtlbe_on_host()
234 kvmppc_e500_tlbil_one(vcpu_e500, gtlbe); in inval_gtlbe_on_host()
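The inval_gtlbe_on_host() references above (lines 184-234) show a guest TLB1 entry being unhooked from the host: the g2h_tlb1_map bitmap is walked, each backing host TLB1 slot is invalidated and its h2g_tlb1_rmap slot cleared, and the bitmap entry itself is zeroed. Below is a minimal standalone sketch of that bookkeeping; the types, array sizes and the local_inval_tlb1() helper are simplified stand-ins, not the kernel definitions.

/* Standalone model of the teardown at lines 184-234: for one guest TLB1
 * entry, walk the g2h bitmap, drop each host TLB1 slot that shadows it,
 * and clear the reverse map. */
#include <stdint.h>

#define HOST_TLB1_SLOTS 64

struct tlb1_teardown_model {
	uint64_t g2h_tlb1_map[64];                   /* guest esel -> host slot bitmap */
	unsigned int h2g_tlb1_rmap[HOST_TLB1_SLOTS]; /* host slot -> guest esel + 1 */
};

/* Hypothetical stand-in for invalidating one host TLB1 slot in hardware. */
static void local_inval_tlb1(unsigned int hw_tlb_indx)
{
	(void)hw_tlb_indx;
}

static void inval_gtlbe_model(struct tlb1_teardown_model *m, int esel)
{
	uint64_t tmp = m->g2h_tlb1_map[esel];

	while (tmp) {
		unsigned int hw_tlb_indx = __builtin_ctzll(tmp);

		local_inval_tlb1(hw_tlb_indx);
		m->h2g_tlb1_rmap[hw_tlb_indx] = 0;   /* slot is free again (line 211) */
		tmp &= tmp - 1;                      /* clear lowest set bit */
	}
	m->g2h_tlb1_map[esel] = 0;                   /* forget all links (line 215) */
}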
271 static void clear_tlb1_bitmap(struct kvmppc_vcpu_e500 *vcpu_e500) in clear_tlb1_bitmap() argument
273 if (vcpu_e500->g2h_tlb1_map) in clear_tlb1_bitmap()
274 memset(vcpu_e500->g2h_tlb1_map, 0, in clear_tlb1_bitmap()
275 sizeof(u64) * vcpu_e500->gtlb_params[1].entries); in clear_tlb1_bitmap()
276 if (vcpu_e500->h2g_tlb1_rmap) in clear_tlb1_bitmap()
277 memset(vcpu_e500->h2g_tlb1_rmap, 0, in clear_tlb1_bitmap()
281 static void clear_tlb_privs(struct kvmppc_vcpu_e500 *vcpu_e500) in clear_tlb_privs() argument
287 for (i = 0; i < vcpu_e500->gtlb_params[tlbsel].entries; i++) { in clear_tlb_privs()
289 &vcpu_e500->gtlb_priv[tlbsel][i].ref; in clear_tlb_privs()
297 struct kvmppc_vcpu_e500 *vcpu_e500 = to_e500(vcpu); in kvmppc_core_flush_tlb() local
298 kvmppc_e500_tlbil_all(vcpu_e500); in kvmppc_core_flush_tlb()
299 clear_tlb_privs(vcpu_e500); in kvmppc_core_flush_tlb()
300 clear_tlb1_bitmap(vcpu_e500); in kvmppc_core_flush_tlb()
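kvmppc_core_flush_tlb() (lines 297-300) pairs a full shadow-TLB invalidation with clear_tlb_privs() and clear_tlb1_bitmap() (lines 271-289). A standalone sketch of that state reset, under the assumption of a simplified tlbe_ref layout and with the page-reference release omitted:

/* Standalone sketch of the flush bookkeeping hinted at by lines 271-300:
 * wipe the guest->host TLB1 bitmap, the host->guest reverse map, and
 * every cached per-entry ref. */
#include <stdint.h>
#include <string.h>

struct tlbe_ref_model { uint32_t flags; uint64_t pfn; };

struct flush_model {
	uint64_t *g2h_tlb1_map;
	unsigned int *h2g_tlb1_rmap;
	unsigned int host_tlb1_entries;
	struct tlbe_ref_model *gtlb_priv[2];
	unsigned int gtlb_entries[2];
};

static void flush_shadow_state(struct flush_model *m)
{
	unsigned int tlbsel, i;

	/* clear_tlb1_bitmap(): drop every guest->host TLB1 link (lines 273-277) */
	if (m->g2h_tlb1_map)
		memset(m->g2h_tlb1_map, 0,
		       sizeof(uint64_t) * m->gtlb_entries[1]);
	if (m->h2g_tlb1_rmap)
		memset(m->h2g_tlb1_rmap, 0,
		       sizeof(unsigned int) * m->host_tlb1_entries);

	/* clear_tlb_privs(): invalidate the cached ref of every guest entry
	 * (lines 281-289); the real code also releases the page reference. */
	for (tlbsel = 0; tlbsel < 2; tlbsel++)
		for (i = 0; i < m->gtlb_entries[tlbsel]; i++)
			m->gtlb_priv[tlbsel][i].flags = 0;
}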
322 static inline int kvmppc_e500_shadow_map(struct kvmppc_vcpu_e500 *vcpu_e500, in kvmppc_e500_shadow_map() argument
334 struct kvm *kvm = vcpu_e500->vcpu.kvm; in kvmppc_e500_shadow_map()
353 slot = gfn_to_memslot(vcpu_e500->vcpu.kvm, gfn); in kvmppc_e500_shadow_map()
469 pgdir = vcpu_e500->vcpu.arch.pgdir; in kvmppc_e500_shadow_map()
495 kvmppc_e500_setup_stlbe(&vcpu_e500->vcpu, gtlbe, tsize, in kvmppc_e500_shadow_map()
511 static int kvmppc_e500_tlb0_map(struct kvmppc_vcpu_e500 *vcpu_e500, int esel, in kvmppc_e500_tlb0_map() argument
520 gtlbe = get_entry(vcpu_e500, 0, esel); in kvmppc_e500_tlb0_map()
521 ref = &vcpu_e500->gtlb_priv[0][esel].ref; in kvmppc_e500_tlb0_map()
523 r = kvmppc_e500_shadow_map(vcpu_e500, get_tlb_eaddr(gtlbe), in kvmppc_e500_tlb0_map()
529 write_stlbe(vcpu_e500, gtlbe, stlbe, stlbsel, sesel); in kvmppc_e500_tlb0_map()
534 static int kvmppc_e500_tlb1_map_tlb1(struct kvmppc_vcpu_e500 *vcpu_e500, in kvmppc_e500_tlb1_map_tlb1() argument
538 unsigned int sesel = vcpu_e500->host_tlb1_nv++; in kvmppc_e500_tlb1_map_tlb1()
540 if (unlikely(vcpu_e500->host_tlb1_nv >= tlb1_max_shadow_size())) in kvmppc_e500_tlb1_map_tlb1()
541 vcpu_e500->host_tlb1_nv = 0; in kvmppc_e500_tlb1_map_tlb1()
543 if (vcpu_e500->h2g_tlb1_rmap[sesel]) { in kvmppc_e500_tlb1_map_tlb1()
544 unsigned int idx = vcpu_e500->h2g_tlb1_rmap[sesel] - 1; in kvmppc_e500_tlb1_map_tlb1()
545 vcpu_e500->g2h_tlb1_map[idx] &= ~(1ULL << sesel); in kvmppc_e500_tlb1_map_tlb1()
548 vcpu_e500->gtlb_priv[1][esel].ref.flags |= E500_TLB_BITMAP; in kvmppc_e500_tlb1_map_tlb1()
549 vcpu_e500->g2h_tlb1_map[esel] |= (u64)1 << sesel; in kvmppc_e500_tlb1_map_tlb1()
550 vcpu_e500->h2g_tlb1_rmap[sesel] = esel + 1; in kvmppc_e500_tlb1_map_tlb1()
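kvmppc_e500_tlb1_map_tlb1() (lines 534-550) allocates a host TLB1 slot round robin, evicts that slot's previous guest owner through the h2g_tlb1_rmap reverse map, and records the new pairing in both directions. A self-contained model of that bookkeeping, with simplified sizes and an E500_TLB_BITMAP stand-in flag:

/* Standalone model of the host TLB1 slot allocation shown at lines
 * 538-550: round-robin pick of a victim slot, unhooking the previous
 * guest owner via the reverse map, then recording the new pairing. */
#include <stdint.h>

#define TLB1_MAX_SHADOW 64
#define E500_TLB_BITMAP_MODEL 0x1u

struct tlb1_alloc_model {
	unsigned int host_tlb1_nv;                   /* next victim slot, round robin */
	uint64_t g2h_tlb1_map[64];                   /* guest esel -> host slot bitmap */
	unsigned int h2g_tlb1_rmap[TLB1_MAX_SHADOW]; /* host slot -> guest esel + 1 */
	uint32_t ref_flags[64];                      /* per guest TLB1 entry */
};

static unsigned int tlb1_map_slot(struct tlb1_alloc_model *m, int esel)
{
	unsigned int sesel = m->host_tlb1_nv++;

	if (m->host_tlb1_nv >= TLB1_MAX_SHADOW)
		m->host_tlb1_nv = 0;

	/* Evict the previous owner of this host slot, if any (lines 543-545). */
	if (m->h2g_tlb1_rmap[sesel]) {
		unsigned int idx = m->h2g_tlb1_rmap[sesel] - 1;
		m->g2h_tlb1_map[idx] &= ~(1ULL << sesel);
	}

	/* Record the new guest<->host pairing in both directions (lines 548-550). */
	m->ref_flags[esel] |= E500_TLB_BITMAP_MODEL;
	m->g2h_tlb1_map[esel] |= (uint64_t)1 << sesel;
	m->h2g_tlb1_rmap[sesel] = esel + 1;

	return sesel;
}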
559 static int kvmppc_e500_tlb1_map(struct kvmppc_vcpu_e500 *vcpu_e500, in kvmppc_e500_tlb1_map() argument
563 struct tlbe_ref *ref = &vcpu_e500->gtlb_priv[1][esel].ref; in kvmppc_e500_tlb1_map()
567 r = kvmppc_e500_shadow_map(vcpu_e500, gvaddr, gfn, gtlbe, 1, stlbe, in kvmppc_e500_tlb1_map()
574 vcpu_e500->gtlb_priv[1][esel].ref.flags |= E500_TLB_TLB0; in kvmppc_e500_tlb1_map()
575 write_stlbe(vcpu_e500, gtlbe, stlbe, 0, 0); in kvmppc_e500_tlb1_map()
580 sesel = kvmppc_e500_tlb1_map_tlb1(vcpu_e500, ref, esel); in kvmppc_e500_tlb1_map()
581 write_stlbe(vcpu_e500, gtlbe, stlbe, 1, sesel); in kvmppc_e500_tlb1_map()
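kvmppc_e500_tlb1_map() (lines 559-581) ends in one of two ways: the entry is flagged E500_TLB_TLB0 and written into host TLB0, or a host TLB1 slot is allocated and written. The condition choosing between them is not visible in this listing, so the sketch below treats the shadow-map result (stlbsel) as an input; the helpers are hypothetical stubs.

/* Standalone sketch of the two outcomes visible at lines 567-581. */
#include <stdint.h>

#define E500_TLB_TLB0_MODEL 0x2u

struct tlb1_backing_model {
	uint32_t ref_flags[64];                      /* per guest TLB1 entry */
};

/* Hypothetical stubs for the two write paths and the TLB1 slot allocator. */
static void write_stlbe_model(int stlbsel, unsigned int sesel)
{ (void)stlbsel; (void)sesel; }
static unsigned int tlb1_alloc_slot_model(int esel) { (void)esel; return 0; }

static void tlb1_map_model(struct tlb1_backing_model *m, int esel, int stlbsel)
{
	if (stlbsel == 0) {
		/* Backed by host TLB0: mark the entry as TLB0-backed (line 574)
		 * and write it at set 0 (line 575). */
		m->ref_flags[esel] |= E500_TLB_TLB0_MODEL;
		write_stlbe_model(0, 0);
	} else {
		/* Backed by a dedicated host TLB1 slot allocated round robin
		 * (lines 580-581). */
		unsigned int sesel = tlb1_alloc_slot_model(esel);
		write_stlbe_model(1, sesel);
	}
}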
589 struct kvmppc_vcpu_e500 *vcpu_e500 = to_e500(vcpu); in kvmppc_mmu_map() local
595 gtlbe = get_entry(vcpu_e500, tlbsel, esel); in kvmppc_mmu_map()
599 priv = &vcpu_e500->gtlb_priv[tlbsel][esel]; in kvmppc_mmu_map()
603 kvmppc_e500_tlb0_map(vcpu_e500, esel, &stlbe); in kvmppc_mmu_map()
607 write_stlbe(vcpu_e500, gtlbe, &stlbe, 0, 0); in kvmppc_mmu_map()
613 kvmppc_e500_tlb1_map(vcpu_e500, eaddr, gfn, gtlbe, &stlbe, in kvmppc_mmu_map()
767 int e500_mmu_host_init(struct kvmppc_vcpu_e500 *vcpu_e500) in e500_mmu_host_init() argument
800 vcpu_e500->h2g_tlb1_rmap = kcalloc(host_tlb_params[1].entries, in e500_mmu_host_init()
801 sizeof(*vcpu_e500->h2g_tlb1_rmap), in e500_mmu_host_init()
803 if (!vcpu_e500->h2g_tlb1_rmap) in e500_mmu_host_init()
809 void e500_mmu_host_uninit(struct kvmppc_vcpu_e500 *vcpu_e500) in e500_mmu_host_uninit() argument
811 kfree(vcpu_e500->h2g_tlb1_rmap); in e500_mmu_host_uninit()
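e500_mmu_host_init()/e500_mmu_host_uninit() (lines 767-811) bound the lifetime of h2g_tlb1_rmap: one slot per host TLB1 entry, zero-allocated at init and freed at teardown. A userspace model of that lifetime, assuming the host TLB1 entry count is probed elsewhere at init time:

/* Userspace model of the reverse-map lifetime shown at lines 767-811. */
#include <stdlib.h>

struct host_state_model {
	unsigned int *h2g_tlb1_rmap;
	unsigned int host_tlb1_entries;
};

static int host_init_model(struct host_state_model *m, unsigned int entries)
{
	m->host_tlb1_entries = entries;
	m->h2g_tlb1_rmap = calloc(entries, sizeof(*m->h2g_tlb1_rmap));
	return m->h2g_tlb1_rmap ? 0 : -1;   /* -ENOMEM in the kernel case */
}

static void host_uninit_model(struct host_state_model *m)
{
	free(m->h2g_tlb1_rmap);
	m->h2g_tlb1_rmap = NULL;
}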