Lines matching refs: insn
72 #define INSN_IS_16BIT(insn) (((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK) argument
74 #define INSN_LEN(insn) (INSN_IS_16BIT(insn) ? 2 : 4) argument
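For reference, a minimal user-space sketch of how these two length macros classify an instruction. The INSN_16BIT_MASK value (0x3) is an assumption here, based on the RISC-V rule that every 32-bit encoding has both low opcode bits set; the example encodings are illustrative.

#include <assert.h>
#include <stdio.h>

/* Assumed value: 32-bit RISC-V encodings have insn[1:0] == 0b11. */
#define INSN_16BIT_MASK 0x3UL
#define INSN_IS_16BIT(insn) (((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK)
#define INSN_LEN(insn) (INSN_IS_16BIT(insn) ? 2 : 4)

int main(void)
{
	unsigned long c_ldsp = 0x6502;		/* c.ldsp a0, 0(sp): low bits 0b10 -> 16-bit */
	unsigned long csrrs  = 0xc0102573;	/* csrrs a0, time, zero: low bits 0b11 -> 32-bit */

	assert(INSN_LEN(c_ldsp) == 2);
	assert(INSN_LEN(csrrs) == 4);
	printf("c.ldsp len=%d, csrrs len=%d\n", INSN_LEN(c_ldsp), INSN_LEN(csrrs));
	return 0;
}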
105 #define RVC_RS1S(insn) (8 + RV_X(insn, SH_RD, 3)) argument
106 #define RVC_RS2S(insn) (8 + RV_X(insn, SH_RS2C, 3)) argument
107 #define RVC_RS2(insn) RV_X(insn, SH_RS2C, 5) argument
115 #define REG_OFFSET(insn, pos) \ argument
116 (SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)
118 #define REG_PTR(insn, pos, regs) \ argument
119 ((ulong *)((ulong)(regs) + REG_OFFSET(insn, pos)))
121 #define GET_FUNCT3(insn) (((insn) >> 12) & 7) argument
123 #define GET_RS1(insn, regs) (*REG_PTR(insn, SH_RS1, regs)) argument
124 #define GET_RS2(insn, regs) (*REG_PTR(insn, SH_RS2, regs)) argument
125 #define GET_RS1S(insn, regs) (*REG_PTR(RVC_RS1S(insn), 0, regs)) argument
126 #define GET_RS2S(insn, regs) (*REG_PTR(RVC_RS2S(insn), 0, regs)) argument
127 #define GET_RS2C(insn, regs) (*REG_PTR(insn, SH_RS2C, regs)) argument
129 #define SET_RD(insn, regs, val) (*REG_PTR(insn, SH_RD, regs) = (val)) argument
130 #define IMM_I(insn) ((s32)(insn) >> 20) argument
131 #define IMM_S(insn) (((s32)(insn) >> 25 << 5) | \ argument
132 (s32)(((insn) >> 7) & 0x1f))
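The REG_* macros above turn the 5-bit register field of an instruction directly into a byte offset into the saved guest register file, which works because the guest context keeps the GPRs in architectural order. A minimal sketch of that arithmetic on a 64-bit host, assuming reconstructed SHIFT_RIGHT/REG_MASK definitions and SH_RD/SH_RS1/LOG_REGBYTES values that match the standard encoding; a plain array stands in for struct kvm_cpu_context.

#include <assert.h>
#include <stdio.h>

typedef unsigned long ulong;
typedef int s32;

/* Assumed helper values (not part of the listing above); REG_OFFSET,
 * REG_PTR, GET_RS1, SET_RD and IMM_I are reproduced as listed. */
#define LOG_REGBYTES	3		/* 8-byte registers on RV64 */
#define SH_RD		7
#define SH_RS1		15
#define SHIFT_RIGHT(x, y)	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))
#define REG_MASK	((1 << (5 + LOG_REGBYTES)) - (1 << LOG_REGBYTES))

#define REG_OFFSET(insn, pos) \
	(SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)
#define REG_PTR(insn, pos, regs) \
	((ulong *)((ulong)(regs) + REG_OFFSET(insn, pos)))
#define GET_RS1(insn, regs)	(*REG_PTR(insn, SH_RS1, regs))
#define SET_RD(insn, regs, val)	(*REG_PTR(insn, SH_RD, regs) = (val))
#define IMM_I(insn)		((s32)(insn) >> 20)

int main(void)
{
	/* Stand-in for struct kvm_cpu_context: the guest GPRs saved in
	 * architectural order, so byte offset == regnum * sizeof(ulong). */
	ulong regs[32] = { 0 };
	ulong insn = 0xff858513;	/* addi a0, a1, -8 */

	regs[11] = 100;			/* guest a1 */
	SET_RD(insn, regs, GET_RS1(insn, regs) + IMM_I(insn));

	assert(REG_OFFSET(insn, SH_RD) == 10 * sizeof(ulong));
	assert(regs[10] == 92);		/* guest a0 = 100 + (-8) */
	printf("guest a0 = %lu\n", regs[10]);
	return 0;
}

This also explains the GET_RS1S/GET_RS2S forms listed at lines 125-126: they pass the already-decoded compressed register number (8..15) as the "instruction" with pos 0, so SHIFT_RIGHT(regnum, -LOG_REGBYTES) left-shifts it into the same byte offset that REG_PTR expects.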
148 int (*func)(struct kvm_vcpu *vcpu, struct kvm_run *run, ulong insn);
152 ulong insn) in truly_illegal_insn() argument
159 utrap.stval = insn; in truly_illegal_insn()
168 ulong insn) in truly_virtual_insn() argument
175 utrap.stval = insn; in truly_virtual_insn()
197 static int wfi_insn(struct kvm_vcpu *vcpu, struct kvm_run *run, ulong insn) in wfi_insn() argument
232 ulong insn; in kvm_riscv_vcpu_csr_return() local
239 insn = vcpu->arch.csr_decode.insn; in kvm_riscv_vcpu_csr_return()
240 if ((insn >> SH_RD) & MASK_RX) in kvm_riscv_vcpu_csr_return()
241 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_csr_return()
245 vcpu->arch.guest_context.sepc += INSN_LEN(insn); in kvm_riscv_vcpu_csr_return()
250 static int csr_insn(struct kvm_vcpu *vcpu, struct kvm_run *run, ulong insn) in csr_insn() argument
253 unsigned int csr_num = insn >> SH_RS2; in csr_insn()
254 unsigned int rs1_num = (insn >> SH_RS1) & MASK_RX; in csr_insn()
255 ulong rs1_val = GET_RS1(insn, &vcpu->arch.guest_context); in csr_insn()
260 switch (GET_FUNCT3(insn)) { in csr_insn()
290 vcpu->arch.csr_decode.insn = insn; in csr_insn()
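A sketch of the field split visible in csr_insn() above: the CSR number occupies the immediate/rs2 position (bits 31:20), rs1 and rd are the usual 5-bit fields, and funct3 selects the CSR operation before the instruction is recorded in csr_decode.insn for later write-back. The SH_* shift amounts and MASK_RX (0x1f) are assumed values matching the standard encoding, and the example encoding is illustrative.

#include <stdio.h>

#define SH_RD		7
#define SH_RS1		15
#define SH_RS2		20
#define MASK_RX		0x1f
#define GET_FUNCT3(insn)	(((insn) >> 12) & 7)

int main(void)
{
	/* csrrw zero, sstatus, a0  ->  csr=0x100, rs1=x10, funct3=001, rd=x0 */
	unsigned long insn = 0x10051073;

	unsigned int csr_num = insn >> SH_RS2;
	unsigned int rs1_num = (insn >> SH_RS1) & MASK_RX;
	unsigned int rd_num  = (insn >> SH_RD) & MASK_RX;

	/* funct3 distinguishes CSRRW/CSRRS/CSRRC and their immediate forms. */
	printf("csr=0x%x rs1=x%u rd=x%u funct3=%lu\n",
	       csr_num, rs1_num, rd_num, GET_FUNCT3(insn));
	return 0;
}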
371 ulong insn) in system_opcode_insn() argument
378 if ((insn & ifn->mask) == ifn->match) { in system_opcode_insn()
379 rc = ifn->func(vcpu, run, insn); in system_opcode_insn()
386 return truly_illegal_insn(vcpu, run, insn); in system_opcode_insn()
388 return truly_virtual_insn(vcpu, run, insn); in system_opcode_insn()
390 vcpu->arch.guest_context.sepc += INSN_LEN(insn); in system_opcode_insn()
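system_opcode_insn() walks a table of {mask, match, handler} entries (the func member listed at line 148), forwards the instruction as truly illegal or truly virtual when no handler accepts it, and advances sepc by INSN_LEN(insn) on success (line 390). A simplified user-space sketch of that dispatch pattern: the WFI mask/match values are assumptions, the vcpu/run arguments are dropped, and the boolean return convention here stands in for the kernel's actual return codes.

#include <stdio.h>

typedef unsigned long ulong;

/* Assumed encodings: WFI is the exact word 0x10500073, so it is matched
 * with an all-ones mask; other SYSTEM instructions would be narrowed
 * further inside their handlers. */
#define INSN_MASK_WFI	0xffffffffUL
#define INSN_MATCH_WFI	0x10500073UL

struct insn_func {
	ulong mask;
	ulong match;
	int (*func)(ulong insn);
};

static int wfi_insn(ulong insn)
{
	printf("emulating wfi (0x%lx)\n", insn);
	return 1;	/* handled; the caller would advance sepc by INSN_LEN(insn) */
}

static const struct insn_func table[] = {
	{ .mask = INSN_MASK_WFI, .match = INSN_MATCH_WFI, .func = wfi_insn },
};

int main(void)
{
	ulong insn = 0x10500073;	/* wfi */

	for (unsigned int i = 0; i < sizeof(table) / sizeof(table[0]); i++) {
		if ((insn & table[i].mask) == table[i].match)
			return !table[i].func(insn);
	}
	printf("no match: would be forwarded as an illegal instruction\n");
	return 1;
}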
413 unsigned long insn = trap->stval; in kvm_riscv_vcpu_virtual_insn() local
417 if (unlikely(INSN_IS_16BIT(insn))) { in kvm_riscv_vcpu_virtual_insn()
418 if (insn == 0) { in kvm_riscv_vcpu_virtual_insn()
420 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, in kvm_riscv_vcpu_virtual_insn()
429 if (INSN_IS_16BIT(insn)) in kvm_riscv_vcpu_virtual_insn()
430 return truly_illegal_insn(vcpu, run, insn); in kvm_riscv_vcpu_virtual_insn()
433 switch ((insn & INSN_OPCODE_MASK) >> INSN_OPCODE_SHIFT) { in kvm_riscv_vcpu_virtual_insn()
435 return system_opcode_insn(vcpu, run, insn); in kvm_riscv_vcpu_virtual_insn()
437 return truly_illegal_insn(vcpu, run, insn); in kvm_riscv_vcpu_virtual_insn()
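kvm_riscv_vcpu_virtual_insn() first recovers the trapping instruction (re-reading it from guest memory when stval is zero), forwards any 16-bit instruction as truly illegal, and then dispatches on the major opcode, with only the SYSTEM case handled here. A sketch of the opcode extraction; the INSN_OPCODE_MASK/SHIFT values (bits 6:2) and the SYSTEM case value 28 (0x73 >> 2) are assumptions, not taken from the listing.

#include <stdio.h>

/* Assumed values: the major opcode lives in bits 6:2 (bits 1:0 are the
 * 16/32-bit length marker); SYSTEM instructions use base opcode 0x73. */
#define INSN_16BIT_MASK		0x3UL
#define INSN_IS_16BIT(insn)	(((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK)
#define INSN_OPCODE_MASK	0x7c
#define INSN_OPCODE_SHIFT	2
#define INSN_OPCODE_SYSTEM	28

int main(void)
{
	unsigned long insn = 0x10500073;	/* wfi: a SYSTEM-opcode instruction */

	if (INSN_IS_16BIT(insn)) {
		printf("16-bit: forwarded as a truly illegal instruction\n");
		return 1;
	}

	switch ((insn & INSN_OPCODE_MASK) >> INSN_OPCODE_SHIFT) {
	case INSN_OPCODE_SYSTEM:
		printf("SYSTEM opcode: handled by system_opcode_insn()\n");
		break;
	default:
		printf("other opcode: forwarded as a truly illegal instruction\n");
		break;
	}
	return 0;
}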
458 unsigned long insn; in kvm_riscv_vcpu_mmio_load() local
469 insn = htinst | INSN_16BIT_MASK; in kvm_riscv_vcpu_mmio_load()
470 insn_len = (htinst & BIT(1)) ? INSN_LEN(insn) : 2; in kvm_riscv_vcpu_mmio_load()
476 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, ct->sepc, in kvm_riscv_vcpu_mmio_load()
484 insn_len = INSN_LEN(insn); in kvm_riscv_vcpu_mmio_load()
488 if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) { in kvm_riscv_vcpu_mmio_load()
491 } else if ((insn & INSN_MASK_LB) == INSN_MATCH_LB) { in kvm_riscv_vcpu_mmio_load()
494 } else if ((insn & INSN_MASK_LBU) == INSN_MATCH_LBU) { in kvm_riscv_vcpu_mmio_load()
498 } else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) { in kvm_riscv_vcpu_mmio_load()
501 } else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) { in kvm_riscv_vcpu_mmio_load()
504 } else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) { in kvm_riscv_vcpu_mmio_load()
507 } else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) { in kvm_riscv_vcpu_mmio_load()
510 } else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) { in kvm_riscv_vcpu_mmio_load()
513 insn = RVC_RS2S(insn) << SH_RD; in kvm_riscv_vcpu_mmio_load()
514 } else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP && in kvm_riscv_vcpu_mmio_load()
515 ((insn >> SH_RD) & 0x1f)) { in kvm_riscv_vcpu_mmio_load()
519 } else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) { in kvm_riscv_vcpu_mmio_load()
522 insn = RVC_RS2S(insn) << SH_RD; in kvm_riscv_vcpu_mmio_load()
523 } else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP && in kvm_riscv_vcpu_mmio_load()
524 ((insn >> SH_RD) & 0x1f)) { in kvm_riscv_vcpu_mmio_load()
536 vcpu->arch.mmio_decode.insn = insn; in kvm_riscv_vcpu_mmio_load()
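In the compressed load cases above (c.ld, c.lw), the handler rewrites the decoded instruction as RVC_RS2S(insn) << SH_RD so that the destination register lands in the normal rd position; kvm_riscv_vcpu_mmio_return() can then use the same SET_RD write-back for both 16-bit and 32-bit loads. A small sketch of that normalization, with assumed SH_RD/SH_RS2C/MASK_RX values, an assumed RV_X definition, and an example c.lw encoding.

#include <assert.h>
#include <stdio.h>

/* Assumed helper values; RVC_RS2S matches the definition listed above. */
#define SH_RD		7
#define SH_RS2C		2
#define MASK_RX		0x1f
#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_RS2S(insn)	(8 + RV_X(insn, SH_RS2C, 3))

int main(void)
{
	unsigned long insn = 0x4188;	/* c.lw a0, 0(a1) */

	/* The compressed rd' field names x8..x15; rewriting the decoded
	 * instruction as "register number << SH_RD" lets the later MMIO
	 * return path reuse the ordinary SET_RD(insn, ...) write-back. */
	assert(RVC_RS2S(insn) == 10);		/* rd' = a0 */
	insn = RVC_RS2S(insn) << SH_RD;
	assert(((insn >> SH_RD) & MASK_RX) == 10);

	printf("normalized rd = x%lu\n", (insn >> SH_RD) & MASK_RX);
	return 0;
}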
584 unsigned long insn; in kvm_riscv_vcpu_mmio_store() local
595 insn = htinst | INSN_16BIT_MASK; in kvm_riscv_vcpu_mmio_store()
596 insn_len = (htinst & BIT(1)) ? INSN_LEN(insn) : 2; in kvm_riscv_vcpu_mmio_store()
602 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, ct->sepc, in kvm_riscv_vcpu_mmio_store()
610 insn_len = INSN_LEN(insn); in kvm_riscv_vcpu_mmio_store()
613 data = GET_RS2(insn, &vcpu->arch.guest_context); in kvm_riscv_vcpu_mmio_store()
616 if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) { in kvm_riscv_vcpu_mmio_store()
618 } else if ((insn & INSN_MASK_SB) == INSN_MATCH_SB) { in kvm_riscv_vcpu_mmio_store()
621 } else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) { in kvm_riscv_vcpu_mmio_store()
624 } else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) { in kvm_riscv_vcpu_mmio_store()
627 } else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) { in kvm_riscv_vcpu_mmio_store()
629 data64 = GET_RS2S(insn, &vcpu->arch.guest_context); in kvm_riscv_vcpu_mmio_store()
630 } else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP && in kvm_riscv_vcpu_mmio_store()
631 ((insn >> SH_RD) & 0x1f)) { in kvm_riscv_vcpu_mmio_store()
633 data64 = GET_RS2C(insn, &vcpu->arch.guest_context); in kvm_riscv_vcpu_mmio_store()
635 } else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) { in kvm_riscv_vcpu_mmio_store()
637 data32 = GET_RS2S(insn, &vcpu->arch.guest_context); in kvm_riscv_vcpu_mmio_store()
638 } else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP && in kvm_riscv_vcpu_mmio_store()
639 ((insn >> SH_RD) & 0x1f)) { in kvm_riscv_vcpu_mmio_store()
641 data32 = GET_RS2C(insn, &vcpu->arch.guest_context); in kvm_riscv_vcpu_mmio_store()
651 vcpu->arch.mmio_decode.insn = insn; in kvm_riscv_vcpu_mmio_store()
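The store path above pulls the data to be written from one of three source-register fields: GET_RS2 for regular S-type stores (bits 24:20), GET_RS2S for c.sd/c.sw (the 3-bit rs2' field naming x8..x15), and GET_RS2C for the stack-pointer-relative c.sdsp/c.swsp forms (the full 5-bit field at bit 2). A sketch extracting just the register numbers; the SH_* shifts, the RV_X definition, and the example encodings are assumptions for illustration.

#include <stdio.h>

/* Assumed shift values; RVC_RS2S/RVC_RS2 follow the listed definitions. */
#define SH_RS2		20
#define SH_RS2C		2
#define MASK_RX		0x1f
#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_RS2S(insn)	(8 + RV_X(insn, SH_RS2C, 3))
#define RVC_RS2(insn)	RV_X(insn, SH_RS2C, 5)

int main(void)
{
	unsigned long sd     = 0x00a5b023;	/* sd a0, 0(a1)     -> rs2 in bits 24:20 */
	unsigned long c_sd   = 0xe188;		/* c.sd a0, 0(a1)   -> rs2' in bits 4:2 */
	unsigned long c_sdsp = 0xe02a;		/* c.sdsp a0, 0(sp) -> rs2 in bits 6:2 */

	printf("sd:     rs2 = x%lu\n", (sd >> SH_RS2) & MASK_RX);	/* GET_RS2 path */
	printf("c.sd:   rs2 = x%lu\n", (unsigned long)RVC_RS2S(c_sd));	/* GET_RS2S path */
	printf("c.sdsp: rs2 = x%lu\n", (unsigned long)RVC_RS2(c_sdsp));	/* GET_RS2C path */
	return 0;
}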
709 ulong insn; in kvm_riscv_vcpu_mmio_return() local
716 insn = vcpu->arch.mmio_decode.insn; in kvm_riscv_vcpu_mmio_return()
727 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
732 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
737 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
742 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()