Lines Matching refs:vgic_cpu

96 		return &vcpu->arch.vgic_cpu.private_irqs[intid];  in vgic_get_irq()
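
vgic_get_irq() above indexes a per-VCPU array for private interrupts (SGIs and PPIs), while shared interrupts live in VM-wide state; the real function also handles LPIs via a separate lookup, which this sketch skips. A minimal user-space sketch of the private/shared dispatch, assuming the architectural 32-entry private range; the type and array names are invented, not the kernel's:

#include <stdio.h>

#define NR_PRIVATE_IRQS 32               /* SGIs 0-15 + PPIs 16-31, banked per VCPU */
#define NR_SHARED_IRQS  256              /* illustrative SPI count */

struct irq  { unsigned int intid; };
struct vcpu { struct irq private_irqs[NR_PRIVATE_IRQS]; };

static struct irq shared_irqs[NR_SHARED_IRQS];   /* one copy for the whole VM */

/* Private INTIDs resolve to the caller's banked copy; everything else
 * resolves to the VM-wide array. */
static struct irq *get_irq(struct vcpu *vcpu, unsigned int intid)
{
	if (intid < NR_PRIVATE_IRQS)
		return &vcpu->private_irqs[intid];
	return &shared_irqs[intid - NR_PRIVATE_IRQS];
}

int main(void)
{
	struct vcpu vcpu = {0};
	printf("intid 5 private: %d\n", get_irq(&vcpu, 5) == &vcpu.private_irqs[5]);
	printf("intid 40 shared: %d\n", get_irq(&vcpu, 40) == &shared_irqs[8]);
	return 0;
}
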
152 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_pending_lpis() local
156 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
158 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_pending_lpis()
168 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
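
vgic_flush_pending_lpis() takes ap_list_lock with interrupts saved and walks the list with the deletion-safe iterator so LPIs can be dropped mid-walk. A simplified user-space sketch of that lock-then-prune pattern, with a mutex standing in for the raw spinlock and invented field names (like the later mutex-based sketches, build with -pthread):

#include <pthread.h>
#include <stdlib.h>

struct ap_irq {
	int intid;
	int is_lpi;                    /* only LPIs are flushed in this model */
	struct ap_irq *next;
};

struct vcpu_ap_list {
	pthread_mutex_t lock;          /* stands in for ap_list_lock */
	struct ap_irq  *head;          /* stands in for ap_list_head */
};

/* Unlink every LPI while walking the list.  Keeping a pointer to the "next"
 * slot lets the current node be freed without losing its successor, which is
 * the job list_for_each_entry_safe() does in the kernel. */
static void flush_pending_lpis(struct vcpu_ap_list *ap)
{
	pthread_mutex_lock(&ap->lock);
	for (struct ap_irq **pp = &ap->head; *pp; ) {
		struct ap_irq *irq = *pp;
		if (irq->is_lpi) {
			*pp = irq->next;
			free(irq);
		} else {
			pp = &irq->next;
		}
	}
	pthread_mutex_unlock(&ap->lock);
}

int main(void)
{
	struct vcpu_ap_list ap = { PTHREAD_MUTEX_INITIALIZER, NULL };
	struct ap_irq *lpi = calloc(1, sizeof(*lpi));

	if (!lpi)
		return 1;
	lpi->intid = 8192;             /* LPIs start at INTID 8192 */
	lpi->is_lpi = 1;
	lpi->next = ap.head;
	ap.head = lpi;
	flush_pending_lpis(&ap);
	return ap.head != NULL;        /* 0: the LPI was dropped */
}
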
299 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_sort_ap_list() local
301 lockdep_assert_held(&vgic_cpu->ap_list_lock); in vgic_sort_ap_list()
303 list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp); in vgic_sort_ap_list()
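
vgic_sort_ap_list() only asserts that the caller already holds ap_list_lock and then hands the whole list to list_sort() with the vgic_irq_cmp callback. A rough user-space analogue using qsort() over an array of pointers; the ordering shown is the general GIC rule (lower numeric priority is more urgent), not the kernel's full vgic_irq_cmp() logic:

#include <stdio.h>
#include <stdlib.h>

struct ap_irq { int intid; unsigned char priority; };

/* On the GIC a lower numeric priority value is more urgent, so it sorts first. */
static int irq_cmp(const void *a, const void *b)
{
	const struct ap_irq *ia = *(const struct ap_irq * const *)a;
	const struct ap_irq *ib = *(const struct ap_irq * const *)b;

	return (int)ia->priority - (int)ib->priority;
}

int main(void)
{
	struct ap_irq x = { 27, 0xa0 }, y = { 30, 0x20 };
	struct ap_irq *list[] = { &x, &y };

	qsort(list, 2, sizeof(list[0]), irq_cmp);           /* list_sort() plays this role */
	printf("most urgent: intid %d\n", list[0]->intid);  /* prints 30 */
	return 0;
}
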
379 raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
396 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, in vgic_queue_irq_unlock()
408 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head); in vgic_queue_irq_unlock()
412 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
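
vgic_queue_irq_unlock() ends up taking the target VCPU's ap_list_lock, appending the IRQ's ap_list node with list_add_tail(), and dropping the lock. A minimal sketch of that append-under-the-owner's-lock step, with invented names and a mutex in place of the raw spinlock:

#include <pthread.h>
#include <stddef.h>

struct ap_irq {
	int intid;
	struct ap_irq *next;
};

struct vcpu {
	pthread_mutex_t ap_list_lock;
	struct ap_irq  *ap_list_head;
};

/* Append the IRQ to the list of the VCPU that will present it to the guest.
 * Only that VCPU's lock is needed because the node is not yet on any list. */
static void queue_irq(struct vcpu *vcpu, struct ap_irq *irq)
{
	pthread_mutex_lock(&vcpu->ap_list_lock);

	irq->next = NULL;
	struct ap_irq **pp = &vcpu->ap_list_head;
	while (*pp)
		pp = &(*pp)->next;              /* walk to the tail, like list_add_tail() */
	*pp = irq;

	pthread_mutex_unlock(&vcpu->ap_list_lock);
}

int main(void)
{
	struct vcpu vcpu = { PTHREAD_MUTEX_INITIALIZER, NULL };
	struct ap_irq irq = { 42, NULL };

	queue_irq(&vcpu, &irq);
	return vcpu.ap_list_head != &irq;       /* 0 on success */
}
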
620 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_prune_ap_list() local
626 raw_spin_lock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
628 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_prune_ap_list()
667 raw_spin_unlock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
681 raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
682 raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock, in vgic_prune_ap_list()
696 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu; in vgic_prune_ap_list()
705 raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
706 raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
716 raw_spin_unlock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
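
The prune path is the delicate locking case: when an interrupt queued on one VCPU must move to another, vgic_prune_ap_list() drops the single lock it holds and re-takes both VCPUs' ap_list_locks, the second via raw_spin_lock_nested() so lockdep tolerates two locks of the same class, and it takes the pair in a fixed order so two CPUs migrating in opposite directions cannot deadlock. A simplified user-space sketch of that order-then-move step; the vcpu_idx ordering key and helper name are assumptions, and callers must guarantee src != dst:

#include <pthread.h>
#include <stddef.h>

struct ap_irq { int intid; struct ap_irq *next; };

struct vcpu {
	int              vcpu_idx;       /* stable key used only for lock ordering */
	pthread_mutex_t  ap_list_lock;
	struct ap_irq   *ap_list_head;
};

/* Move one IRQ from src's list to dst's list.  Both locks are taken in a
 * fixed global order (lower vcpu_idx first) so concurrent migrations in
 * opposite directions cannot deadlock; callers guarantee src != dst. */
static void migrate_irq(struct vcpu *src, struct vcpu *dst, struct ap_irq *irq)
{
	struct vcpu *a = src->vcpu_idx < dst->vcpu_idx ? src : dst;
	struct vcpu *b = (a == src) ? dst : src;

	pthread_mutex_lock(&a->ap_list_lock);
	pthread_mutex_lock(&b->ap_list_lock);    /* the kernel uses the _nested variant here */

	/* unlink from src ... */
	for (struct ap_irq **pp = &src->ap_list_head; *pp; pp = &(*pp)->next) {
		if (*pp == irq) {
			*pp = irq->next;
			break;
		}
	}
	/* ... and push onto dst */
	irq->next = dst->ap_list_head;
	dst->ap_list_head = irq;

	pthread_mutex_unlock(&b->ap_list_lock);
	pthread_mutex_unlock(&a->ap_list_lock);
}

int main(void)
{
	struct vcpu v0 = { 0, PTHREAD_MUTEX_INITIALIZER, NULL };
	struct vcpu v1 = { 1, PTHREAD_MUTEX_INITIALIZER, NULL };
	struct ap_irq irq = { 33, NULL };

	v0.ap_list_head = &irq;
	migrate_irq(&v0, &v1, &irq);
	return !(v0.ap_list_head == NULL && v1.ap_list_head == &irq);   /* 0 on success */
}
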
759 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in compute_ap_list_depth() local
765 lockdep_assert_held(&vgic_cpu->ap_list_lock); in compute_ap_list_depth()
767 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in compute_ap_list_depth()
784 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_lr_state() local
791 lockdep_assert_held(&vgic_cpu->ap_list_lock); in vgic_flush_lr_state()
799 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_lr_state()
825 &vgic_cpu->ap_list_head)) in vgic_flush_lr_state()
836 vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count; in vgic_flush_lr_state()
838 vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count; in vgic_flush_lr_state()
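
vgic_flush_lr_state() copies at most the hardware's number of list registers out of the sorted ap_list and records how many it consumed in used_lrs, in either the vgic_v2 or vgic_v3 sub-struct depending on the GIC model. A toy sketch of "fill up to NR_LRS slots and remember the count"; the LR encoding is omitted and all names are invented:

#include <stddef.h>

#define NR_LRS 4                             /* real hardware typically has 4 or 16 */

struct ap_irq { int intid; struct ap_irq *next; };

struct vcpu {
	struct ap_irq *ap_list_head;         /* assumed already sorted, most urgent first */
	unsigned long  lr[NR_LRS];           /* stand-in for the real LR encoding */
	unsigned int   used_lrs;
};

/* Program one LR per queued IRQ until the hardware runs out of slots;
 * whatever does not fit simply stays on the list for a later entry. */
static void flush_lr_state(struct vcpu *vcpu)
{
	unsigned int count = 0;

	for (struct ap_irq *irq = vcpu->ap_list_head;
	     irq && count < NR_LRS; irq = irq->next)
		vcpu->lr[count++] = (unsigned long)irq->intid;

	vcpu->used_lrs = count;              /* read back later when syncing state */
}

int main(void)
{
	struct ap_irq b = { 30, NULL }, a = { 27, &b };
	struct vcpu vcpu = { &a, { 0 }, 0 };

	flush_lr_state(&vcpu);
	return vcpu.used_lrs != 2;           /* 0 on success */
}
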
856 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3); in vgic_save_state()
865 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) in kvm_vgic_sync_hwstate()
872 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs; in kvm_vgic_sync_hwstate()
874 used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs; in kvm_vgic_sync_hwstate()
886 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3); in vgic_restore_state()
904 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) && in kvm_vgic_flush_hwstate()
910 if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) { in kvm_vgic_flush_hwstate()
911 raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock); in kvm_vgic_flush_hwstate()
913 raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock); in kvm_vgic_flush_hwstate()
958 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_pending_irq() local
967 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last) in kvm_vgic_vcpu_pending_irq()
972 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in kvm_vgic_vcpu_pending_irq()
974 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in kvm_vgic_vcpu_pending_irq()
985 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags); in kvm_vgic_vcpu_pending_irq()
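
kvm_vgic_vcpu_pending_irq() first consults vgic_v3.its_vpe.pending_last (the GICv4 direct-injection case) and otherwise walks the ap_list under ap_list_lock looking for an interrupt that could actually be delivered, which feeds the decision whether a blocked VCPU should be woken. A compact sketch of that scan; the pending/enabled flags are a deliberate simplification of the kernel's per-IRQ state:

#include <pthread.h>

struct ap_irq {
	int pending, enabled;
	struct ap_irq *next;
};

struct vcpu {
	pthread_mutex_t ap_list_lock;
	struct ap_irq  *ap_list_head;
};

/* Returns nonzero when at least one queued interrupt could fire right now. */
static int vcpu_pending_irq(struct vcpu *vcpu)
{
	int pending = 0;

	pthread_mutex_lock(&vcpu->ap_list_lock);
	for (struct ap_irq *irq = vcpu->ap_list_head; irq; irq = irq->next) {
		if (irq->pending && irq->enabled) {
			pending = 1;
			break;
		}
	}
	pthread_mutex_unlock(&vcpu->ap_list_lock);

	return pending;
}

int main(void)
{
	struct ap_irq irq = { 1, 1, 0 };
	struct vcpu vcpu = { PTHREAD_MUTEX_INITIALIZER, &irq };

	return vcpu_pending_irq(&vcpu) ? 0 : 1;   /* 0: a deliverable IRQ was found */
}
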