Lines matching full:intid (identifier cross-reference)
Each entry below gives the line number in KVM's vgic-mmio.c, the matching source line, the enclosing function, and whether intid is declared as a local variable or a function argument there.
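Every handler in this listing starts by decoding the faulting guest address into the first INTID covered by the accessed register; the second macro argument is the number of register bits per interrupt (1 for the group/enable/pending/active bitmaps, 2 for the config registers, 8 for the byte-per-IRQ priority registers). As a sketch, the macro is defined along these lines in vgic-mmio.h (check the exact constants against your tree):

    /*
     * A GIC register frame describes up to 1024 interrupts using
     * 'bits' register bits each.  Mask the offset down to that frame,
     * then scale bytes -> interrupts (* 8 / bits) to get the first
     * INTID the accessed word covers.
     */
    #define VGIC_ADDR_IRQ_MASK(bits)  (((bits) * 1024 / 8) - 1)
    #define VGIC_ADDR_TO_INTID(addr, bits) \
            (((addr) & VGIC_ADDR_IRQ_MASK(bits)) * 8 >> ilog2(bits))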
47 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_read_group() local
53 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_read_group()
72 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_group() local
77 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_group()
81 if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_write_group()
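The group accessors show the loop every match in this listing repeats: fetch one refcounted struct vgic_irq per covered INTID, act on it, drop the reference. A condensed sketch of the read side, paraphrasing vgic_mmio_read_group (helpers and the irq->group field are upstream; the body is abridged):

    unsigned long vgic_mmio_read_group(struct kvm_vcpu *vcpu,
                                       gpa_t addr, unsigned int len)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);   /* 1 bit per IRQ */
            u32 value = 0;
            int i;

            /* a len-byte access covers len * 8 interrupts */
            for (i = 0; i < len * 8; i++) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    if (irq->group)
                            value |= BIT(i);

                    vgic_put_irq(vcpu->kvm, irq);      /* drop the reference */
            }

            return value;
    }

The write side (lines 72-81) walks the written bits the same way and, for hardware-mapped SGIs, also updates the backing GICv4.1 vSGI configuration.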
99 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_read_enable() local
105 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_read_enable()
120 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_senable() local
125 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_senable()
128 if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_write_senable()
169 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_cenable() local
174 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_cenable()
177 if (irq->hw && vgic_irq_is_sgi(irq->intid) && irq->enabled) in vgic_mmio_write_cenable()
191 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_uaccess_write_senable() local
196 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_uaccess_write_senable()
212 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_uaccess_write_cenable() local
217 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_uaccess_write_cenable()
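The ISENABLER/ICENABLER handlers split set ("s") and clear ("c") semantics across two functions; the uaccess variants at lines 191 and 212 service userspace (KVM device attribute) accesses and skip the hardware side effects. A condensed sketch of the set-enable path, with the hw-mapped SGI branch from line 128 elided:

    void vgic_mmio_write_senable(struct kvm_vcpu *vcpu,
                                 gpa_t addr, unsigned int len,
                                 unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
            unsigned long flags;
            int i;

            /* set-enable semantics: only bits written as 1 take effect */
            for_each_set_bit(i, &val, len * 8) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    /* hw-mapped vSGIs also enable the host IRQ here (elided) */
                    irq->enabled = true;
                    /* drops irq_lock; queues the IRQ if it is now deliverable */
                    vgic_queue_irq_unlock(vcpu->kvm, irq, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }
    }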
233 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in __read_pending() local
239 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in __read_pending()
244 if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in __read_pending()
281 return (vgic_irq_is_sgi(irq->intid) && in is_vgic_v2_sgi()
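vgic_irq_is_sgi(), tested at lines 81, 128, 177 and 244, is a plain range check, and is_vgic_v2_sgi() at line 281 additionally keys on which GIC model userspace created; SGIs need extra care on GICv2 because their pending state is banked per source vCPU. A sketch, assuming the usual VGIC_NR_SGIS == 16:

    #define VGIC_NR_SGIS            16
    #define vgic_irq_is_sgi(intid)  ((intid) < VGIC_NR_SGIS)

    static bool is_vgic_v2_sgi(struct kvm_vcpu *vcpu, struct vgic_irq *irq)
    {
            /* GICv2 banks SGI pending state per source vCPU */
            return (vgic_irq_is_sgi(irq->intid) &&
                    vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V2);
    }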
289 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_spending() local
294 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_spending()
304 if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_write_spending()
331 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_uaccess_write_spending() local
336 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_uaccess_write_spending()
382 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_cpending() local
387 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_cpending()
397 if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_write_cpending()
425 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_uaccess_write_cpending() local
430 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_uaccess_write_cpending()
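The ISPENDR/ICPENDR writers reuse the for_each_set_bit walk; setting pending is what actually queues an interrupt for injection. A sketch of the set-pending core (the GICv2 SGI source bookkeeping and the irq->hw forwarding at lines 304 and 397 are elided):

            for_each_set_bit(i, &val, len * 8) {
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    irq->pending_latch = true;
                    /* drops irq_lock and inserts the IRQ into the target
                     * vCPU's ap_list if it is now pending and enabled */
                    vgic_queue_irq_unlock(vcpu->kvm, irq, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }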
466 static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid) in vgic_access_active_prepare() argument
469 intid >= VGIC_NR_PRIVATE_IRQS) in vgic_access_active_prepare()
474 static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid) in vgic_access_active_finish() argument
477 intid >= VGIC_NR_PRIVATE_IRQS) in vgic_access_active_finish()
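The prepare/finish pair exists because active state lives in the list registers of whichever physical CPU is running the target vCPU; for any interrupt that may be resident in another vCPU's LRs, the whole guest is stopped to get a stable view. A sketch matching the upstream logic (private GICv2 interrupts are only ever on the trapping vCPU, so they skip the halt):

    static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid)
    {
            if (vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3 ||
                intid >= VGIC_NR_PRIVATE_IRQS)
                    kvm_arm_halt_guest(vcpu->kvm);
    }

    static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid)
    {
            if (vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3 ||
                intid >= VGIC_NR_PRIVATE_IRQS)
                    kvm_arm_resume_guest(vcpu->kvm);
    }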
484 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in __vgic_mmio_read_active() local
490 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in __vgic_mmio_read_active()
508 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_read_active() local
512 vgic_access_active_prepare(vcpu, intid); in vgic_mmio_read_active()
516 vgic_access_active_finish(vcpu, intid); in vgic_mmio_read_active()
547 if (irq->hw && !vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_change_active()
549 } else if (irq->hw && vgic_irq_is_sgi(irq->intid)) { in vgic_mmio_change_active()
576 active && vgic_irq_is_sgi(irq->intid)) in vgic_mmio_change_active()
590 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in __vgic_mmio_write_cactive() local
594 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in __vgic_mmio_write_cactive()
604 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_cactive() local
607 vgic_access_active_prepare(vcpu, intid); in vgic_mmio_write_cactive()
611 vgic_access_active_finish(vcpu, intid); in vgic_mmio_write_cactive()
627 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in __vgic_mmio_write_sactive() local
631 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in __vgic_mmio_write_sactive()
641 u32 intid = VGIC_ADDR_TO_INTID(addr, 1); in vgic_mmio_write_sactive() local
644 vgic_access_active_prepare(vcpu, intid); in vgic_mmio_write_sactive()
648 vgic_access_active_finish(vcpu, intid); in vgic_mmio_write_sactive()
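The exported cactive/sactive handlers at lines 604 and 641 are thin wrappers: take a lock, halt the guest if needed, run the __-prefixed worker, resume. The exact lock taken here has moved across kernel versions (kvm->lock historically, vcpu->kvm->arch.config_lock in recent trees), so treat this as a shape sketch only:

    void vgic_mmio_write_cactive(struct kvm_vcpu *vcpu,
                                 gpa_t addr, unsigned int len,
                                 unsigned long val)
    {
            u32 intid = VGIC_ADDR_TO_INTID(addr, 1);

            mutex_lock(&vcpu->kvm->arch.config_lock);    /* version-dependent */
            vgic_access_active_prepare(vcpu, intid);

            __vgic_mmio_write_cactive(vcpu, addr, len, val);

            vgic_access_active_finish(vcpu, intid);
            mutex_unlock(&vcpu->kvm->arch.config_lock);
    }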
663 u32 intid = VGIC_ADDR_TO_INTID(addr, 8); in vgic_mmio_read_priority() local
668 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_read_priority()
689 u32 intid = VGIC_ADDR_TO_INTID(addr, 8); in vgic_mmio_write_priority() local
694 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_priority()
699 if (irq->hw && vgic_irq_is_sgi(irq->intid)) in vgic_mmio_write_priority()
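Note the (addr, 8) decode at lines 663 and 689: priority registers spend a full byte per interrupt, so byte i of the access maps to INTID intid + i. A sketch of the write loop (the masking to the implemented VGIC_PRI_BITS follows upstream but should be verified against your tree; the GICv4.1 vSGI update at line 699 is elided):

            for (i = 0; i < len; i++) {     /* one byte per IRQ */
                    struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

                    raw_spin_lock_irqsave(&irq->irq_lock, flags);
                    /* keep only the priority bits the vGIC implements */
                    irq->priority = (val >> (i * 8)) & GENMASK(7, 8 - VGIC_PRI_BITS);
                    raw_spin_unlock_irqrestore(&irq->irq_lock, flags);

                    vgic_put_irq(vcpu->kvm, irq);
            }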
710 u32 intid = VGIC_ADDR_TO_INTID(addr, 2); in vgic_mmio_read_config() local
715 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_read_config()
730 u32 intid = VGIC_ADDR_TO_INTID(addr, 2); in vgic_mmio_write_config() local
743 if (intid + i < VGIC_NR_PRIVATE_IRQS) in vgic_mmio_write_config()
746 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_mmio_write_config()
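The config registers use 2 bits per interrupt, hence (addr, 2), and the guard at line 743 makes SGI/PPI configuration read-only: SGIs are architecturally edge-triggered, and the arch timer relies on its PPI staying level-triggered. A condensed sketch of the write side:

            for (i = 0; i < len * 4; i++) {         /* 2 bits per IRQ */
                    struct vgic_irq *irq;

                    /* SGI/PPI config is fixed from the guest's viewpoint */
                    if (intid + i < VGIC_NR_PRIVATE_IRQS)
                            continue;

                    irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
                    raw_spin_lock_irqsave(&irq->irq_lock, flags);

                    /* the upper bit of each 2-bit field selects edge */
                    if (test_bit(i * 2 + 1, &val))
                            irq->config = VGIC_CONFIG_EDGE;
                    else
                            irq->config = VGIC_CONFIG_LEVEL;

                    raw_spin_unlock_irqrestore(&irq->irq_lock, flags);
                    vgic_put_irq(vcpu->kvm, irq);
            }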
759 u64 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid) in vgic_read_irq_line_level_info() argument
768 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs) in vgic_read_irq_line_level_info()
771 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_read_irq_line_level_info()
781 void vgic_write_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid, in vgic_write_irq_line_level_info() argument
792 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs) in vgic_write_irq_line_level_info()
795 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i); in vgic_write_irq_line_level_info()
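The final pair backs the KVM_DEV_ARM_VGIC_GRP_LEVEL_INFO userspace attribute, exposing the emulated line level of 32 consecutive interrupts as a bitmap; the guard at lines 768 and 792 skips SGIs (which have no line level) and INTIDs beyond the number of allocated SPIs. A sketch of the read side:

    u64 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid)
    {
            int nr_irqs = vcpu->kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS;
            u64 val = 0;
            int i;

            for (i = 0; i < 32; i++) {
                    struct vgic_irq *irq;

                    /* SGIs have no line level; skip out-of-range INTIDs */
                    if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
                            continue;

                    irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
                    if (irq->config == VGIC_CONFIG_LEVEL && irq->line_level)
                            val |= (1U << i);

                    vgic_put_irq(vcpu->kvm, irq);
            }

            return val;
    }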