Lines Matching refs:arch

46 	struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_compute_return_epc()  local
66 arch->gprs[insn.r_format.rd] = epc + 8; in kvm_compute_return_epc()
69 nextpc = arch->gprs[insn.r_format.rs]; in kvm_compute_return_epc()
85 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
94 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
103 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
104 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
113 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
114 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
142 arch->gprs[31] = instpc + 8; in kvm_compute_return_epc()
155 if (arch->gprs[insn.i_format.rs] == in kvm_compute_return_epc()
156 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
165 if (arch->gprs[insn.i_format.rs] != in kvm_compute_return_epc()
166 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
179 if ((long)arch->gprs[insn.i_format.rs] <= 0) in kvm_compute_return_epc()
192 if ((long)arch->gprs[insn.i_format.rs] > 0) in kvm_compute_return_epc()
249 err = kvm_compute_return_epc(vcpu, vcpu->arch.pc, in update_pc()
250 &vcpu->arch.pc); in update_pc()
254 vcpu->arch.pc += 4; in update_pc()
257 kvm_debug("update_pc(): New PC: %#lx\n", vcpu->arch.pc); in update_pc()
276 *out = vcpu->arch.host_cp0_badinstr; in kvm_get_badinstr()
297 *out = vcpu->arch.host_cp0_badinstrp; in kvm_get_badinstrp()
314 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disabled()
316 return (vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC) || in kvm_mips_count_disabled()
333 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
335 if (delta >= vcpu->arch.count_period) { in kvm_mips_ktime_to_count()
337 periods = div64_s64(now_ns, vcpu->arch.count_period); in kvm_mips_ktime_to_count()
338 vcpu->arch.count_dyn_bias = -periods * vcpu->arch.count_period; in kvm_mips_ktime_to_count()
340 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
353 return div_u64(delta * vcpu->arch.count_hz, NSEC_PER_SEC); in kvm_mips_ktime_to_count()
368 if (unlikely(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_time()
369 return vcpu->arch.count_resume; in kvm_mips_count_time()
386 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count_running()
392 count = vcpu->arch.count_bias + kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_read_count_running()
408 expires = hrtimer_get_expires(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
409 threshold = ktime_add_ns(now, vcpu->arch.count_period / 4); in kvm_mips_read_count_running()
415 running = hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
426 vcpu->arch.count_period); in kvm_mips_read_count_running()
427 hrtimer_start(&vcpu->arch.comparecount_timer, expires, in kvm_mips_read_count_running()
446 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count()
476 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_freeze_hrtimer()
504 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_resume_hrtimer()
512 delta = div_u64(delta * NSEC_PER_SEC, vcpu->arch.count_hz); in kvm_mips_resume_hrtimer()
516 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_resume_hrtimer()
517 hrtimer_start(&vcpu->arch.comparecount_timer, expire, HRTIMER_MODE_ABS); in kvm_mips_resume_hrtimer()
551 before_count = vcpu->arch.count_bias + in kvm_mips_restore_hrtimer()
564 vcpu->arch.count_bias += drift; in kvm_mips_restore_hrtimer()
571 now_count = vcpu->arch.count_bias + kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_restore_hrtimer()
580 vcpu->arch.count_bias += drift; in kvm_mips_restore_hrtimer()
587 delta = div_u64(delta * NSEC_PER_SEC, vcpu->arch.count_hz); in kvm_mips_restore_hrtimer()
605 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_count()
610 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_write_count()
630 vcpu->arch.count_hz = count_hz; in kvm_mips_init_count()
631 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, count_hz); in kvm_mips_init_count()
632 vcpu->arch.count_dyn_bias = 0; in kvm_mips_init_count()
651 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_hz()
660 if (vcpu->arch.count_hz == count_hz) in kvm_mips_set_count_hz()
673 vcpu->arch.count_hz = count_hz; in kvm_mips_set_count_hz()
674 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, count_hz); in kvm_mips_set_count_hz()
675 vcpu->arch.count_dyn_bias = 0; in kvm_mips_set_count_hz()
678 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_set_count_hz()
698 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_compare()
783 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable()
788 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_count_disable()
810 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable_cause()
813 if (!(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_disable_cause()
830 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_enable_cause()
856 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_ctl()
857 s64 changed = count_ctl ^ vcpu->arch.count_ctl; in kvm_mips_set_count_ctl()
867 vcpu->arch.count_ctl = count_ctl; in kvm_mips_set_count_ctl()
875 vcpu->arch.count_resume = ktime_get(); in kvm_mips_set_count_ctl()
878 vcpu->arch.count_resume = kvm_mips_count_disable(vcpu); in kvm_mips_set_count_ctl()
888 vcpu->arch.count_hz); in kvm_mips_set_count_ctl()
889 expire = ktime_add_ns(vcpu->arch.count_resume, delta); in kvm_mips_set_count_ctl()
926 vcpu->arch.count_resume = ns_to_ktime(count_resume); in kvm_mips_set_count_resume()
941 hrtimer_add_expires_ns(&vcpu->arch.comparecount_timer, in kvm_mips_count_timeout()
942 vcpu->arch.count_period); in kvm_mips_count_timeout()
948 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_eret()
953 vcpu->arch.pc = kvm_read_c0_guest_errorepc(cop0); in kvm_mips_emul_eret()
955 kvm_debug("[%#lx] ERET to %#lx\n", vcpu->arch.pc, in kvm_mips_emul_eret()
958 vcpu->arch.pc = kvm_read_c0_guest_epc(cop0); in kvm_mips_emul_eret()
962 vcpu->arch.pc); in kvm_mips_emul_eret()
971 kvm_debug("[%#lx] !!!WAIT!!! (%#lx)\n", vcpu->arch.pc, in kvm_mips_emul_wait()
972 vcpu->arch.pending_exceptions); in kvm_mips_emul_wait()
976 if (!vcpu->arch.pending_exceptions) { in kvm_mips_emul_wait()
978 vcpu->arch.wait = 1; in kvm_mips_emul_wait()
997 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_change_entryhi()
998 struct mm_struct *kern_mm = &vcpu->arch.guest_kernel_mm; in kvm_mips_change_entryhi()
1031 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbr()
1033 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbr()
1043 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbr()
1063 struct mm_struct *kern_mm = &vcpu->arch.guest_kernel_mm; in kvm_mips_invalidate_guest_tlb()
1064 struct mm_struct *user_mm = &vcpu->arch.guest_user_mm; in kvm_mips_invalidate_guest_tlb()
1104 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwi()
1107 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbwi()
1119 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwi()
1140 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwr()
1142 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbwr()
1146 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwr()
1165 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbp()
1167 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbp()
1192 if (kvm_mips_guest_can_have_fpu(&vcpu->arch)) in kvm_mips_config1_wrmask()
1211 if (kvm_mips_guest_can_have_msa(&vcpu->arch)) in kvm_mips_config3_wrmask()
1247 if (kvm_mips_guest_has_msa(&vcpu->arch)) in kvm_mips_config5_wrmask()
1254 if (kvm_mips_guest_has_fpu(&vcpu->arch)) { in kvm_mips_config5_wrmask()
1268 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_CP0()
1277 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_CP0()
1321 vcpu->arch.gprs[rt] = in kvm_mips_emulate_CP0()
1324 vcpu->arch.gprs[rt] = 0x0; in kvm_mips_emulate_CP0()
1329 vcpu->arch.gprs[rt] = (s32)cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1338 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1342 vcpu->arch.gprs[rt] = cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1346 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1355 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1358 && (vcpu->arch.gprs[rt] >= in kvm_mips_emulate_CP0()
1361 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1371 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1374 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1378 kvm_mips_write_count(vcpu, vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1384 vcpu->arch.gprs[rt], in kvm_mips_emulate_CP0()
1390 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1401 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1432 vcpu->arch.aux_inuse & KVM_MIPS_AUX_MSA) in kvm_mips_emulate_CP0()
1443 vcpu->arch.aux_inuse & KVM_MIPS_AUX_FPU) in kvm_mips_emulate_CP0()
1455 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1462 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1478 vcpu->arch.aux_inuse & KVM_MIPS_AUX_FPU) in kvm_mips_emulate_CP0()
1488 vcpu->arch.aux_inuse & KVM_MIPS_AUX_MSA) in kvm_mips_emulate_CP0()
1499 new_cause = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1519 cop0->reg[rd][sel] = vcpu->arch.gprs[rt] & mask; in kvm_mips_emulate_CP0()
1521 cop0->reg[rd][sel] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1530 vcpu->arch.pc, rt, rd, sel); in kvm_mips_emulate_CP0()
1533 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1542 vcpu->arch.gprs[rt] = in kvm_mips_emulate_CP0()
1547 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1551 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1571 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1572 vcpu->arch.gprs[rd] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1577 vcpu->arch.pc, inst.c0r_format.rs); in kvm_mips_emulate_CP0()
1586 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_CP0()
1612 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_store()
1620 vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_store()
1628 *(u64 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1631 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1632 vcpu->arch.gprs[rt], *(u64 *)data); in kvm_mips_emulate_store()
1638 *(u32 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1641 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1642 vcpu->arch.gprs[rt], *(u32 *)data); in kvm_mips_emulate_store()
1647 *(u16 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1650 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1651 vcpu->arch.gprs[rt], *(u16 *)data); in kvm_mips_emulate_store()
1656 *(u8 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1659 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1660 vcpu->arch.gprs[rt], *(u8 *)data); in kvm_mips_emulate_store()
1676 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_store()
1696 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_load()
1700 vcpu->arch.io_pc = vcpu->arch.pc; in kvm_mips_emulate_load()
1701 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_load()
1703 vcpu->arch.io_gpr = rt; in kvm_mips_emulate_load()
1706 vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_load()
1781 vcpu->arch.host_cp0_badvaddr = addr; in kvm_mips_guest_cache_op()
1782 vcpu->arch.pc = curr_pc; in kvm_mips_guest_cache_op()
1787 vcpu->arch.host_cp0_badvaddr = addr; in kvm_mips_guest_cache_op()
1788 vcpu->arch.pc = curr_pc; in kvm_mips_guest_cache_op()
1805 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_cache() local
1813 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_cache()
1827 va = arch->gprs[base] + offset; in kvm_mips_emulate_cache()
1830 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
1839 vcpu->arch.pc, vcpu->arch.gprs[31], cache, op, base, in kvm_mips_emulate_cache()
1840 arch->gprs[base], offset); in kvm_mips_emulate_cache()
1916 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
1923 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_cache()
1993 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_guest_exception_base()
2006 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_syscall()
2007 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_syscall() local
2012 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_syscall()
2020 kvm_debug("Delivering SYSCALL @ pc %#lx\n", arch->pc); in kvm_mips_emulate_syscall()
2026 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_syscall()
2041 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_ld()
2042 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_ld() local
2043 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_ld()
2048 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2057 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2060 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x0; in kvm_mips_emulate_tlbmiss_ld()
2064 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2066 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmiss_ld()
2073 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_ld()
2085 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_ld()
2086 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_ld() local
2088 (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_ld()
2093 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_ld()
2102 arch->pc); in kvm_mips_emulate_tlbinv_ld()
2105 arch->pc); in kvm_mips_emulate_tlbinv_ld()
2109 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbinv_ld()
2115 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_ld()
2127 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_st()
2128 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_st() local
2129 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_st()
2134 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_st()
2143 arch->pc); in kvm_mips_emulate_tlbmiss_st()
2146 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x0; in kvm_mips_emulate_tlbmiss_st()
2149 arch->pc); in kvm_mips_emulate_tlbmiss_st()
2150 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmiss_st()
2157 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_st()
2169 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_st()
2170 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_st() local
2171 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_st()
2176 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_st()
2185 arch->pc); in kvm_mips_emulate_tlbinv_st()
2188 arch->pc); in kvm_mips_emulate_tlbinv_st()
2192 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbinv_st()
2198 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_st()
2210 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmod()
2211 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmod()
2213 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmod() local
2217 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmod()
2226 arch->pc); in kvm_mips_emulate_tlbmod()
2229 arch->pc); in kvm_mips_emulate_tlbmod()
2232 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmod()
2238 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmod()
2250 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpu_exc()
2251 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpu_exc() local
2255 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpu_exc()
2265 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_fpu_exc()
2279 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_ri_exc()
2280 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_ri_exc() local
2285 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_ri_exc()
2293 kvm_debug("Delivering RI @ pc %#lx\n", arch->pc); in kvm_mips_emulate_ri_exc()
2299 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_ri_exc()
2314 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_bp_exc()
2315 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_bp_exc() local
2320 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_bp_exc()
2328 kvm_debug("Delivering BP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_bp_exc()
2334 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_bp_exc()
2349 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_trap_exc()
2350 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_trap_exc() local
2355 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_trap_exc()
2363 kvm_debug("Delivering TRAP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_trap_exc()
2369 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_trap_exc()
2384 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msafpe_exc()
2385 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msafpe_exc() local
2390 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msafpe_exc()
2398 kvm_debug("Delivering MSAFPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msafpe_exc()
2404 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_msafpe_exc()
2419 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpe_exc()
2420 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpe_exc() local
2425 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpe_exc()
2433 kvm_debug("Delivering FPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_fpe_exc()
2439 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_fpe_exc()
2454 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msadis_exc()
2455 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msadis_exc() local
2460 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msadis_exc()
2468 kvm_debug("Delivering MSADIS @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msadis_exc()
2474 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_msadis_exc()
2488 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_handle_ri()
2489 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_handle_ri() local
2499 curr_pc = vcpu->arch.pc; in kvm_mips_handle_ri()
2530 arch->gprs[rt] = vcpu->vcpu_id; in kvm_mips_handle_ri()
2533 arch->gprs[rt] = min(current_cpu_data.dcache.linesz, in kvm_mips_handle_ri()
2537 arch->gprs[rt] = (s32)kvm_mips_read_count(vcpu); in kvm_mips_handle_ri()
2543 arch->gprs[rt] = 1; in kvm_mips_handle_ri()
2546 arch->gprs[rt] = 2; in kvm_mips_handle_ri()
2550 arch->gprs[rt] = kvm_read_c0_guest_userlocal(cop0); in kvm_mips_handle_ri()
2559 vcpu->arch.gprs[rt]); in kvm_mips_handle_ri()
2573 vcpu->arch.pc = curr_pc; in kvm_mips_handle_ri()
2580 unsigned long *gpr = &vcpu->arch.gprs[vcpu->arch.io_gpr]; in kvm_mips_complete_mmio_load()
2590 vcpu->arch.pc = vcpu->arch.io_pc; in kvm_mips_complete_mmio_load()
2629 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_exc()
2630 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_exc() local
2635 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_exc()
2647 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_exc()
2648 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_exc()
2668 unsigned long badvaddr = vcpu->arch.host_cp0_badvaddr; in kvm_mips_check_privilege()
2765 unsigned long va = vcpu->arch.host_cp0_badvaddr; in kvm_mips_handle_tlbmiss()
2769 vcpu->arch.host_cp0_badvaddr); in kvm_mips_handle_tlbmiss()
2779 (kvm_read_c0_guest_entryhi(vcpu->arch.cop0) & in kvm_mips_handle_tlbmiss()
2792 struct kvm_mips_tlb *tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_handle_tlbmiss()