Lines Matching full:csr

21 #include <asm/csr.h>
111 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_reset_vcpu() local
129 memcpy(csr, reset_csr, sizeof(*csr)); in kvm_riscv_reset_vcpu()
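The two hits at 111 and 129 are from the vCPU reset path: the live guest CSR image is overwritten in one go from a separate reset snapshot. A minimal user-space sketch of that pattern, using a hypothetical trimmed-down stand-in for the real struct kvm_vcpu_csr and kvm_vcpu_arch layout:

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical, trimmed stand-in for the kernel's struct kvm_vcpu_csr. */
    struct kvm_vcpu_csr {
        unsigned long vsstatus, vsie, vstvec, vsscratch;
        unsigned long vsepc, vscause, vstval, hvip, vsatp;
    };

    struct vcpu_arch {
        struct kvm_vcpu_csr guest_csr;       /* live guest CSR copy */
        struct kvm_vcpu_csr guest_reset_csr; /* snapshot used for reset */
    };

    /* Mirrors line 129: the whole CSR block is reset with one memcpy(). */
    static void reset_vcpu_csrs(struct vcpu_arch *arch)
    {
        struct kvm_vcpu_csr *csr = &arch->guest_csr;
        struct kvm_vcpu_csr *reset_csr = &arch->guest_reset_csr;

        memcpy(csr, reset_csr, sizeof(*csr));
    }

    int main(void)
    {
        struct vcpu_arch arch = { .guest_csr = { .vsepc = 0x1234 } };

        reset_vcpu_csrs(&arch);
        printf("vsepc after reset: %#lx\n", arch.guest_csr.vsepc); /* 0 */
        return 0;
    }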
410 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_get_reg_csr() local
425 reg_val = (csr->hvip >> VSIP_TO_HVIP_SHIFT) & VSIP_VALID_MASK; in kvm_riscv_vcpu_get_reg_csr()
427 reg_val = ((unsigned long *)csr)[reg_num]; in kvm_riscv_vcpu_get_reg_csr()
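Lines 425 and 427 are the ONE_REG read path. For the sip register, the value handed to user space is derived from the hvip shadow by shifting VS-level bits down to their S-level positions and masking off everything but the supervisor software, timer, and external interrupts; any other CSR is simply read out of the struct by index. A runnable sketch of the shift-and-mask step, with the constants filled in from the RISC-V privileged-spec interrupt numbering and assumed to match the kernel's asm/kvm_host.h definitions:

    #include <stdio.h>

    /* Interrupt numbers from the RISC-V privileged spec (hvip/vsip bit layout). */
    #define IRQ_S_SOFT    1
    #define IRQ_VS_SOFT   2
    #define IRQ_S_TIMER   5
    #define IRQ_VS_TIMER  6
    #define IRQ_S_EXT     9

    /* Assumed to match the kernel's definitions. */
    #define VSIP_TO_HVIP_SHIFT  (IRQ_VS_SOFT - IRQ_S_SOFT)
    #define VSIP_VALID_MASK     ((1UL << IRQ_S_SOFT) | \
                                 (1UL << IRQ_S_TIMER) | \
                                 (1UL << IRQ_S_EXT))

    int main(void)
    {
        /* Pretend VS-level software and timer interrupts are pending in hvip. */
        unsigned long hvip = (1UL << IRQ_VS_SOFT) | (1UL << IRQ_VS_TIMER);

        /* Line 425: shift VS-level bits down to S-level positions and mask,
         * so user space sees a normal sip/vsip-style value. */
        unsigned long reg_val = (hvip >> VSIP_TO_HVIP_SHIFT) & VSIP_VALID_MASK;

        printf("hvip=%#lx -> sip view=%#lx\n", hvip, reg_val);
        return 0;
    }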
438 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_set_reg_csr() local
459 ((unsigned long *)csr)[reg_num] = reg_val; in kvm_riscv_vcpu_set_reg_csr()
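Lines 438 and 459 are the matching ONE_REG write path; the sip register presumably takes the reverse shift before landing in hvip, while every other CSR is stored by treating struct kvm_vcpu_csr as a flat array of unsigned long. A sketch of that generic store, with the bounds check included so the example stands on its own:

    #include <stdio.h>
    #include <stddef.h>

    struct kvm_vcpu_csr {      /* trimmed stand-in, all fields unsigned long */
        unsigned long vsstatus, vsie, vstvec, vsscratch;
        unsigned long vsepc, vscause, vstval, hvip, vsatp;
    };

    /* Sketch of the generic store at line 459: the struct is addressed as an
     * array of unsigned long, so reg_num selects the field. */
    static int set_reg_csr(struct kvm_vcpu_csr *csr,
                           unsigned long reg_num, unsigned long reg_val)
    {
        if (reg_num >= sizeof(*csr) / sizeof(unsigned long))
            return -1;

        ((unsigned long *)csr)[reg_num] = reg_val;
        return 0;
    }

    int main(void)
    {
        struct kvm_vcpu_csr csr = { 0 };
        unsigned long vsepc_idx = offsetof(struct kvm_vcpu_csr, vsepc) /
                                  sizeof(unsigned long);

        set_reg_csr(&csr, vsepc_idx, 0x80200000UL);
        printf("vsepc=%#lx\n", csr.vsepc);
        return 0;
    }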
677 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_flush_interrupts() local
684 csr->hvip &= ~mask; in kvm_riscv_vcpu_flush_interrupts()
685 csr->hvip |= val; in kvm_riscv_vcpu_flush_interrupts()
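Lines 677, 684 and 685 show kvm_riscv_vcpu_flush_interrupts() folding queued interrupt changes into the hvip shadow: the bits named in mask are cleared first, then the new values from val are ORed in. In the kernel the mask/val pair is snapshotted atomically from the vCPU's pending-interrupt bookkeeping, which is not part of this listing; the sketch below replaces that bookkeeping with plain variables:

    #include <stdio.h>

    #define IRQ_VS_SOFT   2
    #define IRQ_VS_TIMER  6

    int main(void)
    {
        unsigned long hvip = 1UL << IRQ_VS_TIMER;  /* timer currently injected */

        /* mask/val stand in for the snapshotted bookkeeping: raise the
         * software interrupt, retire the timer interrupt. */
        unsigned long mask = (1UL << IRQ_VS_SOFT) | (1UL << IRQ_VS_TIMER);
        unsigned long val  = (1UL << IRQ_VS_SOFT);

        /* Lines 684-685: clear every touched bit, then set the new values. */
        hvip &= ~mask;
        hvip |= val;

        printf("hvip=%#lx\n", hvip);  /* only the VS software bit is left */
        return 0;
    }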
693 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_sync_interrupts() local
696 csr->vsie = csr_read(CSR_VSIE); in kvm_riscv_vcpu_sync_interrupts()
700 if ((csr->hvip ^ hvip) & (1UL << IRQ_VS_SOFT)) { in kvm_riscv_vcpu_sync_interrupts()
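Lines 693, 696 and 700 are from kvm_riscv_vcpu_sync_interrupts(), which runs after a guest exit: VSIE is re-read because the guest may have masked interrupts, and the live HVIP is compared against the software shadow. The VS software-interrupt bit is writable by the guest through vsip, so a mismatch in that one bit means the guest raised or cleared its own software interrupt behind KVM's back. A small sketch of the XOR check:

    #include <stdio.h>
    #include <stdbool.h>

    #define IRQ_VS_SOFT 2

    int main(void)
    {
        unsigned long shadow_hvip = 1UL << IRQ_VS_SOFT; /* what KVM last wrote */
        unsigned long live_hvip   = 0;                  /* CSR_HVIP read on exit */

        /* Line 700: a difference in the VS software-interrupt bit means the
         * guest toggled it, so the pending-interrupt bookkeeping must follow. */
        if ((shadow_hvip ^ live_hvip) & (1UL << IRQ_VS_SOFT)) {
            bool pending = live_hvip & (1UL << IRQ_VS_SOFT);
            printf("guest %s its software interrupt\n",
                   pending ? "raised" : "cleared");
        }
        return 0;
    }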
825 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_arch_vcpu_load() local
827 csr_write(CSR_VSSTATUS, csr->vsstatus); in kvm_arch_vcpu_load()
828 csr_write(CSR_VSIE, csr->vsie); in kvm_arch_vcpu_load()
829 csr_write(CSR_VSTVEC, csr->vstvec); in kvm_arch_vcpu_load()
830 csr_write(CSR_VSSCRATCH, csr->vsscratch); in kvm_arch_vcpu_load()
831 csr_write(CSR_VSEPC, csr->vsepc); in kvm_arch_vcpu_load()
832 csr_write(CSR_VSCAUSE, csr->vscause); in kvm_arch_vcpu_load()
833 csr_write(CSR_VSTVAL, csr->vstval); in kvm_arch_vcpu_load()
834 csr_write(CSR_HVIP, csr->hvip); in kvm_arch_vcpu_load()
835 csr_write(CSR_VSATP, csr->vsatp); in kvm_arch_vcpu_load()
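Lines 825 to 835 are kvm_arch_vcpu_load(): when the vCPU is scheduled onto a host CPU, its saved VS-level CSR image is written into the hardware CSRs. The sketch below mirrors that write-out with csr_write() replaced by a stub over an array and the CSR numbers replaced by small hypothetical indices, so it runs in user space:

    #include <stdio.h>

    /* Stand-ins for the kernel's CSR accessor and CSR numbers; in the kernel,
     * csr_write() is an inline-asm wrapper from asm/csr.h. */
    enum { CSR_VSSTATUS, CSR_VSIE, CSR_VSTVEC, CSR_VSSCRATCH,
           CSR_VSEPC, CSR_VSCAUSE, CSR_VSTVAL, CSR_HVIP, CSR_VSATP, NR_CSRS };
    static unsigned long hw_csr[NR_CSRS];
    #define csr_write(nr, val) (hw_csr[nr] = (val))

    struct kvm_vcpu_csr {
        unsigned long vsstatus, vsie, vstvec, vsscratch;
        unsigned long vsepc, vscause, vstval, hvip, vsatp;
    };

    /* Mirrors lines 827-835: push the saved image into the hardware CSRs. */
    static void vcpu_load_csrs(const struct kvm_vcpu_csr *csr)
    {
        csr_write(CSR_VSSTATUS, csr->vsstatus);
        csr_write(CSR_VSIE, csr->vsie);
        csr_write(CSR_VSTVEC, csr->vstvec);
        csr_write(CSR_VSSCRATCH, csr->vsscratch);
        csr_write(CSR_VSEPC, csr->vsepc);
        csr_write(CSR_VSCAUSE, csr->vscause);
        csr_write(CSR_VSTVAL, csr->vstval);
        csr_write(CSR_HVIP, csr->hvip);
        csr_write(CSR_VSATP, csr->vsatp);
    }

    int main(void)
    {
        struct kvm_vcpu_csr csr = { .vsepc = 0x80200000UL };

        vcpu_load_csrs(&csr);
        printf("hardware VSEPC now %#lx\n", hw_csr[CSR_VSEPC]);
        return 0;
    }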
852 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_arch_vcpu_put() local
862 csr->vsstatus = csr_read(CSR_VSSTATUS); in kvm_arch_vcpu_put()
863 csr->vsie = csr_read(CSR_VSIE); in kvm_arch_vcpu_put()
864 csr->vstvec = csr_read(CSR_VSTVEC); in kvm_arch_vcpu_put()
865 csr->vsscratch = csr_read(CSR_VSSCRATCH); in kvm_arch_vcpu_put()
866 csr->vsepc = csr_read(CSR_VSEPC); in kvm_arch_vcpu_put()
867 csr->vscause = csr_read(CSR_VSCAUSE); in kvm_arch_vcpu_put()
868 csr->vstval = csr_read(CSR_VSTVAL); in kvm_arch_vcpu_put()
869 csr->hvip = csr_read(CSR_HVIP); in kvm_arch_vcpu_put()
870 csr->vsatp = csr_read(CSR_VSATP); in kvm_arch_vcpu_put()
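Lines 852 to 870 are the symmetric kvm_arch_vcpu_put() side: before the vCPU is scheduled out, the hardware CSRs are read back into the image, because the guest will have changed them while running (a new vsepc/vscause after a trap, a toggled software-interrupt bit in hvip, and so on). A compact round-trip sketch, again with hypothetical stubs in place of csr_read()/csr_write() and only two CSRs kept:

    #include <stdio.h>

    /* Stand-ins: two "hardware" CSRs and stubbed accessors. */
    enum { CSR_VSEPC, CSR_HVIP, NR_CSRS };
    static unsigned long hw_csr[NR_CSRS];
    #define csr_read(nr)        (hw_csr[nr])
    #define csr_write(nr, val)  (hw_csr[nr] = (val))

    struct guest_csrs { unsigned long vsepc, hvip; };

    int main(void)
    {
        struct guest_csrs csr = { .vsepc = 0x80200000UL };

        /* vcpu_load: push the saved image into hardware (lines 827-835). */
        csr_write(CSR_VSEPC, csr.vsepc);
        csr_write(CSR_HVIP, csr.hvip);

        /* Guest runs and changes state: it traps at a new VSEPC; only the
         * hardware CSRs see this, not the in-memory image. */
        hw_csr[CSR_VSEPC] = 0x80204000UL;

        /* vcpu_put: read the hardware back into the image (lines 862-870),
         * so the next vcpu_load, possibly on another host CPU, restores the
         * state the guest actually left behind. */
        csr.vsepc = csr_read(CSR_VSEPC);
        csr.hvip  = csr_read(CSR_HVIP);

        printf("saved vsepc=%#lx\n", csr.vsepc);
        return 0;
    }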
920 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_update_hvip() local
922 csr_write(CSR_HVIP, csr->hvip); in kvm_riscv_update_hvip()
961 /* Process CSR value returned from user-space */ in kvm_arch_vcpu_ioctl_run()
1013 /* Update HVIP CSR for current CPU */ in kvm_arch_vcpu_ioctl_run()
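The remaining hits tie the pieces together inside kvm_arch_vcpu_ioctl_run(): pending interrupts are flushed into the hvip shadow, kvm_riscv_update_hvip() (lines 920 and 922) pushes that shadow into the real HVIP CSR just before guest entry (the comment at line 1013), and kvm_riscv_vcpu_sync_interrupts() picks up guest-side changes after exit. A rough, stubbed sketch of that ordering; the helper names here are hypothetical stand-ins, only the sequence is taken from the listing:

    #include <stdio.h>

    /* Hypothetical stubs standing in for the KVM helpers named above;
     * only the ordering is the point of this sketch. */
    static void flush_interrupts(void) { puts("fold pending IRQs into hvip shadow"); }
    static void update_hvip(void)      { puts("csr_write(CSR_HVIP, csr->hvip)  (line 922)"); }
    static void enter_guest(void)      { puts("run guest until it traps"); }
    static void sync_interrupts(void)  { puts("re-read VSIE/HVIP after exit"); }

    int main(void)
    {
        /* Rough shape of one iteration of the run loop: flush queued
         * interrupts into the shadow, push the shadow into hardware just
         * before entry, then sync guest-side changes back after exit. */
        flush_interrupts();
        update_hvip();
        enter_guest();
        sync_interrupts();
        return 0;
    }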