Lines Matching +full:3 +full:rd
87 /* 3 bits per cache level, as per CLIDR, but non-existent caches always 0 */
297 p->regval = (1 << 3); in trap_oslsr_el1()
368 const struct sys_reg_desc *rd, in reg_to_dbg() argument
373 get_access_mask(rd, &mask, &shift); in reg_to_dbg()
385 const struct sys_reg_desc *rd, in dbg_to_reg() argument
390 get_access_mask(rd, &mask, &shift); in dbg_to_reg()
396 const struct sys_reg_desc *rd) in trap_bvr() argument
398 u64 *dbg_reg = &vcpu->arch.vcpu_debug_state.dbg_bvr[rd->CRm]; in trap_bvr()
401 reg_to_dbg(vcpu, p, rd, dbg_reg); in trap_bvr()
403 dbg_to_reg(vcpu, p, rd, dbg_reg); in trap_bvr()
405 trace_trap_reg(__func__, rd->CRm, p->is_write, *dbg_reg); in trap_bvr()
410 static int set_bvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_bvr() argument
413 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_bvr[rd->CRm]; in set_bvr()
420 static int get_bvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_bvr() argument
423 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_bvr[rd->CRm]; in get_bvr()
431 const struct sys_reg_desc *rd) in reset_bvr() argument
433 vcpu->arch.vcpu_debug_state.dbg_bvr[rd->CRm] = rd->val; in reset_bvr()
438 const struct sys_reg_desc *rd) in trap_bcr() argument
440 u64 *dbg_reg = &vcpu->arch.vcpu_debug_state.dbg_bcr[rd->CRm]; in trap_bcr()
443 reg_to_dbg(vcpu, p, rd, dbg_reg); in trap_bcr()
445 dbg_to_reg(vcpu, p, rd, dbg_reg); in trap_bcr()
447 trace_trap_reg(__func__, rd->CRm, p->is_write, *dbg_reg); in trap_bcr()
452 static int set_bcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_bcr() argument
455 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_bcr[rd->CRm]; in set_bcr()
463 static int get_bcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_bcr() argument
466 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_bcr[rd->CRm]; in get_bcr()
474 const struct sys_reg_desc *rd) in reset_bcr() argument
476 vcpu->arch.vcpu_debug_state.dbg_bcr[rd->CRm] = rd->val; in reset_bcr()
481 const struct sys_reg_desc *rd) in trap_wvr() argument
483 u64 *dbg_reg = &vcpu->arch.vcpu_debug_state.dbg_wvr[rd->CRm]; in trap_wvr()
486 reg_to_dbg(vcpu, p, rd, dbg_reg); in trap_wvr()
488 dbg_to_reg(vcpu, p, rd, dbg_reg); in trap_wvr()
490 trace_trap_reg(__func__, rd->CRm, p->is_write, in trap_wvr()
491 vcpu->arch.vcpu_debug_state.dbg_wvr[rd->CRm]); in trap_wvr()
496 static int set_wvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_wvr() argument
499 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_wvr[rd->CRm]; in set_wvr()
506 static int get_wvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_wvr() argument
509 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_wvr[rd->CRm]; in get_wvr()
517 const struct sys_reg_desc *rd) in reset_wvr() argument
519 vcpu->arch.vcpu_debug_state.dbg_wvr[rd->CRm] = rd->val; in reset_wvr()
524 const struct sys_reg_desc *rd) in trap_wcr() argument
526 u64 *dbg_reg = &vcpu->arch.vcpu_debug_state.dbg_wcr[rd->CRm]; in trap_wcr()
529 reg_to_dbg(vcpu, p, rd, dbg_reg); in trap_wcr()
531 dbg_to_reg(vcpu, p, rd, dbg_reg); in trap_wcr()
533 trace_trap_reg(__func__, rd->CRm, p->is_write, *dbg_reg); in trap_wcr()
538 static int set_wcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_wcr() argument
541 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_wcr[rd->CRm]; in set_wcr()
548 static int get_wcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_wcr() argument
551 __u64 *r = &vcpu->arch.vcpu_debug_state.dbg_wcr[rd->CRm]; in get_wcr()
559 const struct sys_reg_desc *rd) in reset_wcr() argument
561 vcpu->arch.vcpu_debug_state.dbg_wcr[rd->CRm] = rd->val; in reset_wcr()
799 idx = ((r->CRm & 3) << 3) | (r->Op2 & 7); in access_pmu_evcntr()
833 idx = ((r->CRm & 3) << 3) | (r->Op2 & 7); in access_pmu_evtyper()
1007 const struct sys_reg_desc *rd) in ptrauth_visibility() argument
1169 const struct sys_reg_desc *rd) in sve_visibility() argument
1178 const struct sys_reg_desc *rd, in set_id_aa64pfr0_el1() argument
1181 const u64 id = sys_reg_to_index(rd); in set_id_aa64pfr0_el1()
1207 val ^= read_id_reg(vcpu, rd, false); in set_id_aa64pfr0_el1()
1227 const struct sys_reg_desc *rd, void __user *uaddr, in __get_id_reg() argument
1230 const u64 id = sys_reg_to_index(rd); in __get_id_reg()
1231 const u64 val = read_id_reg(vcpu, rd, raz); in __get_id_reg()
1237 const struct sys_reg_desc *rd, void __user *uaddr, in __set_id_reg() argument
1240 const u64 id = sys_reg_to_index(rd); in __set_id_reg()
1249 if (val != read_id_reg(vcpu, rd, raz)) in __set_id_reg()
1255 static int get_id_reg(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_id_reg() argument
1258 bool raz = sysreg_visible_as_raz(vcpu, rd); in get_id_reg()
1260 return __get_id_reg(vcpu, rd, uaddr, raz); in get_id_reg()
1263 static int set_id_reg(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_id_reg() argument
1266 bool raz = sysreg_visible_as_raz(vcpu, rd); in set_id_reg()
1268 return __set_id_reg(vcpu, rd, uaddr, raz); in set_id_reg()
1271 static int get_raz_id_reg(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in get_raz_id_reg() argument
1274 return __get_id_reg(vcpu, rd, uaddr, true); in get_raz_id_reg()
1277 static int set_raz_id_reg(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_raz_id_reg() argument
1280 return __set_id_reg(vcpu, rd, uaddr, true); in set_raz_id_reg()
1283 static int set_wi_reg(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, in set_wi_reg() argument
1290 err = reg_from_user(&val, uaddr, sys_reg_to_index(rd)); in set_wi_reg()
1353 p->regval &= ~GENMASK(27, 3); in access_ccsidr()
1358 const struct sys_reg_desc *rd) in mte_visibility() argument
1385 * register with encoding Op0=3, Op1=0, CRn=0, CRm=crm, Op2=op2
1389 Op0(3), Op1(0), CRn(0), CRm(crm), Op2(op2), \
1428 DBG_BCR_BVR_WCR_WVR_EL1(3),
1486 /* CRm=3 */
1490 ID_UNALLOCATED(3,3),
1494 ID_UNALLOCATED(3,7),
1502 ID_UNALLOCATED(4,3),
1512 ID_UNALLOCATED(5,3),
1522 ID_UNALLOCATED(6,3),
1532 ID_UNALLOCATED(7,3),
1684 AMU_AMEVCNTR0_EL0(3),
1700 AMU_AMEVTYPER0_EL0(3),
1716 AMU_AMEVCNTR1_EL0(3),
1732 AMU_AMEVTYPER1_EL0(3),
1754 PMU_PMEVCNTR_EL0(3),
1786 PMU_PMEVTYPER_EL0(3),
1888 { Op1( 0), CRn( 0), CRm( 3), Op2( 0), trap_raz_wi },
1890 { Op1( 0), CRn( 0), CRm( 3), Op2( 2), trap_raz_wi },
1891 DBG_BCR_BVR_WCR_WVR(3),
1921 DBGBXVR(3),
1923 { Op1( 0), CRn( 1), CRm( 3), Op2( 4), trap_raz_wi },
1969 CRm((0b1000 | (((n) >> 3) & 0x3))), Op2(((n) & 0x7)), \
1976 CRm((0b1100 | (((n) >> 3) & 0x3))), Op2(((n) & 0x7)), \
1990 { AA32(HI), Op1( 0), CRn( 1), CRm( 0), Op2( 3), access_actlr, NULL, ACTLR_EL1 },
1996 { AA32(HI), Op1( 0), CRn( 2), CRm( 0), Op2( 3), access_vm_reg, NULL, TCR_EL1 },
1997 { Op1( 0), CRn( 3), CRm( 0), Op2( 0), access_vm_reg, NULL, DACR32_EL2 },
2021 { Op1( 0), CRn( 9), CRm(12), Op2( 3), access_pmovs },
2032 { Op1( 0), CRn( 9), CRm(14), Op2( 3), access_pmovs },
2043 { AA32(LO), Op1( 0), CRn(10), CRm( 3), Op2( 0), access_vm_reg, NULL, AMAIR_EL1 },
2045 { AA32(HI), Op1( 0), CRn(10), CRm( 3), Op2( 1), access_vm_reg, NULL, AMAIR_EL1 },
2060 PMU_PMEVCNTR(3),
2092 PMU_PMEVTYPER(3),
2349 return params->Op0 == 3 && (params->CRn & 0b1011) == 0b1011; in is_imp_def_sys_reg()
2569 /* Bottom bit is Instruction or Data bit. Next 3 bits are level. */ in is_valid_cache()
2571 ctype = (cache_levels >> (level * 3)) & 7; in is_valid_cache()
2581 case 3: /* Separate instruction and data caches */ in is_valid_cache()
2745 const struct sys_reg_desc *rd, in walk_one_sys_reg() argument
2753 if (!(rd->reg || rd->get_user)) in walk_one_sys_reg()
2756 if (sysreg_hidden(vcpu, rd)) in walk_one_sys_reg()
2759 if (!copy_reg_to_user(rd, uind)) in walk_one_sys_reg()
2841 if (((cache_levels >> (i*3)) & 7) == 0) in kvm_sys_reg_table_init()
2844 cache_levels &= (1 << (i*3))-1; in kvm_sys_reg_table_init()
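
A few of the encodings that recur in the matches above can be illustrated outside the kernel. First, the trap_bvr()/trap_bcr()/trap_wvr()/trap_wcr() hits (lines 396-561) all share one shape: the descriptor's CRm value is the breakpoint or watchpoint number, and it indexes the per-vCPU shadow array directly. The sketch below is a minimal user-space rendition of that shape; the demo_* names and simplified structures are assumptions, and it omits the reg_to_dbg()/dbg_to_reg() mask-and-shift step visible at lines 368-390.

/*
 * Illustrative sketch only, not kernel code: mirrors the trap_bvr() shape
 * seen in the matches above.  demo_* names and types are made up.
 */
#include <stdbool.h>
#include <stdint.h>

#define DEMO_NUM_BRP 16

struct demo_debug_state {
        uint64_t dbg_bvr[DEMO_NUM_BRP];         /* shadow DBGBVR<n>_EL1 */
};

struct demo_params {
        bool is_write;
        uint64_t regval;
};

struct demo_reg_desc {
        uint8_t CRm;            /* for DBGBVR<n>_EL1, CRm encodes n */
};

/* Guest write: regval -> shadow slot; guest read: shadow slot -> regval */
static bool demo_trap_bvr(struct demo_debug_state *dbg,
                          struct demo_params *p,
                          const struct demo_reg_desc *rd)
{
        uint64_t *dbg_reg = &dbg->dbg_bvr[rd->CRm];

        if (p->is_write)
                *dbg_reg = p->regval;
        else
                p->regval = *dbg_reg;

        return true;
}

int main(void)
{
        struct demo_debug_state dbg = { 0 };
        struct demo_reg_desc bvr2 = { .CRm = 2 };       /* DBGBVR2_EL1 */
        struct demo_params wr = { .is_write = true, .regval = 0xffff0000 };
        struct demo_params rdback = { .is_write = false };

        demo_trap_bvr(&dbg, &wr, &bvr2);        /* guest writes slot 2 */
        demo_trap_bvr(&dbg, &rdback, &bvr2);    /* guest reads it back */
        return rdback.regval == 0xffff0000 ? 0 : 1;
}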
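
Second, the index recovery at lines 799 and 833, idx = ((r->CRm & 3) << 3) | (r->Op2 & 7), is the inverse of the packing done by the event counter/type register macros matched at lines 1969 and 1976, where CRm carries n[4:3] on top of a fixed base and Op2 carries n[2:0]. A small round-trip check in plain C (the demo_* names are illustrative, not kernel API):

/*
 * Round-trip check of the event-register index packing: encode as the
 * macros at lines 1969/1976 do, decode as lines 799/833 do.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* CRm = base | n[4:3], Op2 = n[2:0]; base is 0b1000 for the counter regs */
static void demo_encode(unsigned int n, uint8_t *crm, uint8_t *op2)
{
        *crm = 0x8 | ((n >> 3) & 0x3);
        *op2 = n & 0x7;
}

/* idx = ((CRm & 3) << 3) | (Op2 & 7), as in access_pmu_evcntr() */
static unsigned int demo_decode(uint8_t crm, uint8_t op2)
{
        return ((crm & 3) << 3) | (op2 & 7);
}

int main(void)
{
        for (unsigned int n = 0; n < 31; n++) {
                uint8_t crm, op2;

                demo_encode(n, &crm, &op2);
                assert(demo_decode(crm, op2) == n);
        }
        printf("index packing round-trips for event registers 0..30\n");
        return 0;
}

Event register 30, for example, encodes to CRm = 0b1011, Op2 = 0b110 and decodes back to 30.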
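
Finally, the comment at line 87 and the hits at lines 2569-2581 and 2841-2844 all revolve around the same layout: cache_levels holds one 3-bit CLIDR-style Ctype field per cache level (0 = no cache, 1 = instruction only, 2 = data only, 3 = separate instruction and data, 4 = unified). The sketch below decodes a CSSELR-style selector against such a value; the switch arms are reconstructed from that architectural encoding plus the fragments visible above, so treat it as illustrative rather than a copy of is_valid_cache(), and note it omits bounds checking.

/*
 * User-space sketch of the 3-bits-per-level decode; demo_* names are
 * made up and the selector is not range-checked.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* selector: bottom bit is Instruction or Data bit, next 3 bits are level */
static bool demo_is_valid_cache(uint64_t cache_levels, uint32_t csselr)
{
        uint32_t level = csselr >> 1;
        uint32_t ctype = (cache_levels >> (level * 3)) & 7;

        switch (ctype) {
        case 0:                         /* no cache at this level */
                return false;
        case 1:                         /* instruction cache only */
                return csselr & 1;
        case 2:                         /* data cache only */
        case 4:                         /* unified cache */
                return !(csselr & 1);
        case 3:                         /* separate instruction and data */
                return true;
        default:                        /* reserved encodings */
                return false;
        }
}

int main(void)
{
        /* L1 separate I/D, L2 unified, nothing above */
        uint64_t cache_levels = 3 | (4 << 3);

        printf("L1 data:        %d\n", demo_is_valid_cache(cache_levels, 0));
        printf("L1 instruction: %d\n", demo_is_valid_cache(cache_levels, 1));
        printf("L3 data:        %d\n", demo_is_valid_cache(cache_levels, 4));
        return 0;
}

The loop matched at lines 2841-2844 walks those same 3-bit fields from the bottom level and masks cache_levels down at the first level whose Ctype is zero, so everything above a hole reads as absent.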