Lines matching full:intid (grouped by function)
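All of the matches below land in KVM's vGIC distributor emulation (arch/arm64/kvm/vgic/vgic-mmio.c in recent kernels, virt/kvm/arm/vgic/vgic-mmio.c in older ones). The recurring shape is: decode the MMIO offset into a first interrupt ID with VGIC_ADDR_TO_INTID(addr, bits-per-IRQ), then loop over every IRQ covered by the access, looking each one up with vgic_get_irq(). As a standalone illustration of the decode step (the arithmetic below mirrors the intent of the in-tree macro, which uses a shift by ilog2(bits); treat it as a sketch, not the kernel definition):

    #include <stdio.h>

    /*
     * A register file packs one IRQ into <bits> bits, so byte offset
     * `addr` covers (addr * 8) bits and the first IRQ touched is
     * (addr * 8) / bits.
     */
    static unsigned int vgic_addr_to_intid(unsigned long addr, unsigned int bits)
    {
            return (unsigned int)((addr * 8) / bits);
    }

    int main(void)
    {
            /* enable registers: 1 bit per IRQ -> offset 0x4 starts at INTID 32 */
            printf("%u\n", vgic_addr_to_intid(0x4, 1));
            /* priority registers: 8 bits per IRQ -> offset 0x20 starts at INTID 32 */
            printf("%u\n", vgic_addr_to_intid(0x20, 8));
            /* config registers: 2 bits per IRQ -> offset 0x8 starts at INTID 32 */
            printf("%u\n", vgic_addr_to_intid(0x8, 2));
            return 0;
    }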

vgic_mmio_read_group():
    47:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
    53:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_group():
    72:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
    77:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
    81:  if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
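The group accessors show the canonical read loop in its simplest form: one bit of state per IRQ, one reference taken and dropped per iteration. A reconstruction of the pattern (not verbatim kernel source; kernel context such as struct vgic_irq and vgic_get_irq()/vgic_put_irq(), which manage a reference on the IRQ, is assumed):

    unsigned long vgic_mmio_read_group(struct kvm_vcpu *vcpu,
                                       gpa_t addr, unsigned int len)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   /* 1 bit per IRQ */
            u32 value = 0;
            int i;

            /* One bit of the result per IRQ covered by this access. */
            for (i = 0; i < len * 8; i++) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    if (irq->group)
                            value |= BIT(i);
                    vgic_put_irq(vcpu->kvm, irq);
            }

            return value;
    }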
vgic_mmio_read_enable():
    99:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   105:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_senable():
   120:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   125:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   128:  if (irq->hw && vgic_irq_is_sgi(irq->intid)) {

vgic_mmio_write_cenable():
   169:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   174:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   177:  if (irq->hw && vgic_irq_is_sgi(irq->intid) && irq->enabled)

vgic_uaccess_write_senable():
   191:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   196:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_uaccess_write_cenable():
   212:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   217:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
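The ISENABLER/ICENABLER handlers use the same decode-and-loop shape, but the lines 128 and 177 matches show a side effect: for hardware-mapped SGIs (the GICv4.1 vSGI case) the enable change must reach the host interrupt as well. The vgic_uaccess_* variants at lines 191 and 212 serve userspace save/restore accesses and only flip the emulated state. A hedged sketch of the set-enable path, with the vSGI forwarding elided:

    void vgic_mmio_write_senable(struct kvm_vcpu *vcpu,
                                 gpa_t addr, unsigned int len,
                                 unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
            unsigned long flags;
            int i;

            /* Only bits set in the written value take effect (write-1-to-enable). */
            for_each_set_bit(i, &val, len * 8) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
                            /* hardware-mapped vSGI: host-side enable elided here */
                    }
                    irq->enabled = true;
                    /* The IRQ may now be deliverable; queue it if so. */
                    vgic_queue_irq_unlock(vcpu->kvm, irq, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }
    }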
vgic_mmio_read_pending():
   232:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   238:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   243:  if (irq->hw && vgic_irq_is_sgi(irq->intid)) {

is_vgic_v2_sgi():
   266:  return (vgic_irq_is_sgi(irq->intid) &&
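is_vgic_v2_sgi() at line 266 is a small predicate: on an emulated GICv2, SGIs carry per-source pending state, so several pending paths special-case them. Reconstructed from the visible fragment (the second half of the condition, checking the emulated GIC model, is the usual completion found in-tree):

    static bool is_vgic_v2_sgi(struct kvm_vcpu *vcpu, struct vgic_irq *irq)
    {
            return (vgic_irq_is_sgi(irq->intid) &&
                    vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V2);
    }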
vgic_mmio_write_spending():
   274:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   279:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   289:  if (irq->hw && vgic_irq_is_sgi(irq->intid)) {

vgic_uaccess_write_spending():
   316:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   321:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_cpending():
   367:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   372:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   382:  if (irq->hw && vgic_irq_is_sgi(irq->intid)) {

vgic_uaccess_write_cpending():
   410:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   415:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
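The ISPENDR/ICPENDR handlers mirror the enable pair: set-pending latches irq->pending_latch and may immediately queue the interrupt to a VCPU, while the lines 289 and 382 matches again special-case hardware-mapped SGIs, whose pending state lives in the physical distributor. A compressed sketch of the set-pending loop (the real handler also covers vGIC-v2 per-source SGI bits and pushes hardware state via irq_set_irqchip_state(), both elided):

    void vgic_mmio_write_spending(struct kvm_vcpu *vcpu,
                                  gpa_t addr, unsigned int len,
                                  unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
            unsigned long flags;
            int i;

            for_each_set_bit(i, &val, len * 8) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
                            /* GICv4.1 vSGI: make the HW interrupt pending (elided) */
                    } else {
                            irq->pending_latch = true;
                    }
                    /* Queue to a vcpu if the IRQ is now deliverable. */
                    vgic_queue_irq_unlock(vcpu->kvm, irq, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }
    }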
vgic_access_active_prepare():
   451:  static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid)   [argument]
   454:  intid >= VGIC_NR_PRIVATE_IRQS)

vgic_access_active_finish():
   459:  static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid)   [argument]
   462:  intid >= VGIC_NR_PRIVATE_IRQS)
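This prepare/finish pair brackets every guest active-state access. Reading or writing the active bit of an interrupt that may be live on another CPU is racy, so the vgic halts all VCPUs first and resumes them afterwards; private interrupts (intid below VGIC_NR_PRIVATE_IRQS) on a GICv2 can skip the halt because they can only be active on the accessed VCPU. Reconstructed around the line 454/462 fragments (the GIC-model half of the condition is the usual in-tree completion):

    static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid)
    {
            if (vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3 ||
                intid >= VGIC_NR_PRIVATE_IRQS)
                    kvm_arm_halt_guest(vcpu->kvm);
    }

    static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid)
    {
            if (vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3 ||
                intid >= VGIC_NR_PRIVATE_IRQS)
                    kvm_arm_resume_guest(vcpu->kvm);
    }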
__vgic_mmio_read_active():
   469:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   475:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_read_active():
   493:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   497:  vgic_access_active_prepare(vcpu, intid);
   501:  vgic_access_active_finish(vcpu, intid);

vgic_mmio_change_active():
   532:  if (irq->hw && !vgic_irq_is_sgi(irq->intid)) {
   534:  } else if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
   561:  active && vgic_irq_is_sgi(irq->intid))

__vgic_mmio_write_cactive():
   575:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   579:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_cactive():
   589:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   592:  vgic_access_active_prepare(vcpu, intid);
   596:  vgic_access_active_finish(vcpu, intid);

__vgic_mmio_write_sactive():
   612:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   616:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_sactive():
   626:  u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   [local]
   629:  vgic_access_active_prepare(vcpu, intid);
   633:  vgic_access_active_finish(vcpu, intid);
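Each guest-facing active-state handler is a thin wrapper: it serializes against concurrent accessors, brackets the real work with the prepare/finish pair above, and delegates to the __-prefixed helper (lines 469, 575, 612), which is also called directly for userspace accesses that must not halt the guest. A sketch of the wrapper shape; note the lock used here has varied across kernel versions (kvm->lock in older trees, a dedicated config lock later), so treat that detail as an assumption:

    void vgic_mmio_write_cactive(struct kvm_vcpu *vcpu,
                                 gpa_t addr, unsigned int len,
                                 unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);

            mutex_lock(&vcpu->kvm->lock);            /* version-dependent lock */
            vgic_access_active_prepare(vcpu, intid); /* halt VCPUs if needed */

            __vgic_mmio_write_cactive(vcpu, addr, len, val);

            vgic_access_active_finish(vcpu, intid);  /* resume VCPUs */
            mutex_unlock(&vcpu->kvm->lock);
    }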
vgic_mmio_read_priority():
   648:  u32 intid = VGIC_ADDR_TO_INTID(addr, 8);   [local]
   653:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_priority():
   674:  u32 intid = VGIC_ADDR_TO_INTID(addr, 8);   [local]
   679:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
   684:  if (irq->hw && vgic_irq_is_sgi(irq->intid))
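The priority registers pack 8 bits per IRQ, hence VGIC_ADDR_TO_INTID(addr, 8): each byte of the access maps to one IRQ, and the loop runs over bytes (len) rather than bits. A reconstruction of the write side, assuming the usual VGIC_PRI_BITS narrowing (only the top implemented priority bits are kept):

    void vgic_mmio_write_priority(struct kvm_vcpu *vcpu,
                                  gpa_t addr, unsigned int len,
                                  unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 8);   /* 8 bits per IRQ */
            unsigned long flags;
            int i;

            for (i = 0; i < len; i++) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    /* Narrow to the implemented priority bits. */
                    irq->priority = (val >> (i * 8)) & GENMASK(7, 8 - VGIC_PRI_BITS);
                    if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
                            /* GICv4.1 vSGI: propagate the new priority to HW (elided) */
                    }
                    raw_spin_unlock_irqrestore(&irq->irq_lock, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }
    }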
vgic_mmio_read_config():
   695:  u32 intid = VGIC_ADDR_TO_INTID(addr, 2);   [local]
   700:  struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_mmio_write_config():
   715:  u32 intid = VGIC_ADDR_TO_INTID(addr, 2);   [local]
   728:  if (intid + i < VGIC_NR_PRIVATE_IRQS)
   731:  irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
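Configuration registers use 2 bits per IRQ (VGIC_ADDR_TO_INTID(addr, 2)), of which only the upper bit of each field (edge vs. level) is meaningful. The line 728 check skips SGIs and PPIs entirely, since their trigger configuration is fixed in this emulation. A sketch:

    void vgic_mmio_write_config(struct kvm_vcpu *vcpu,
                                gpa_t addr, unsigned int len,
                                unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 2);   /* 2 bits per IRQ */
            unsigned long flags;
            int i;

            for (i = 0; i < len * 4; i++) {
                    struct vgic_irq *irq;

                    /* SGI/PPI configuration is read-only here. */
                    if (intid + i < VGIC_NR_PRIVATE_IRQS)
                            continue;

                    irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
                    raw_spin_lock_irqsave(&irq->irq_lock, flags);

                    /* Bit 1 of each 2-bit field selects edge-triggered. */
                    if (test_bit(i * 2 + 1, &val))
                            irq->config = VGIC_CONFIG_EDGE;
                    else
                            irq->config = VGIC_CONFIG_LEVEL;

                    raw_spin_unlock_irqrestore(&irq->irq_lock, flags);
                    vgic_put_irq(vcpu->kvm, irq);
            }
    }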
vgic_read_irq_line_level_info():
   744:  u64 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid)   [argument]
   753:  if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
   756:  irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

vgic_write_irq_line_level_info():
   766:  void vgic_write_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid,   [argument]
   777:  if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
   780:  irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
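The two line-level helpers back the userspace save/restore API for the input level of level-triggered interrupts, 32 IRQs per word. The shared check at lines 753/777 skips SGIs (which have no line level) and IDs beyond the configured range. A hedged sketch of the read side:

    u64 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid)
    {
            int nr_irqs = vcpu->kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS;
            u64 val = 0;
            int i;

            for (i = 0; i < 32; i++) {
                    struct vgic_irq *irq;

                    /* SGIs have no line level; ignore out-of-range IDs. */
                    if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
                            continue;

                    irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
                    if (irq->config == VGIC_CONFIG_LEVEL && irq->line_level)
                            val |= (1U << i);

                    vgic_put_irq(vcpu->kvm, irq);
            }

            return val;
    }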