Lines Matching refs:vgic_cpu
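
The matches below appear to come from KVM's core vGIC code (vgic.c, under arch/arm64/kvm/vgic/ in recent kernels). Each entry shows the source line number, the matching line, and the enclosing function; "local" marks a local variable declaration.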

96 return &vcpu->arch.vgic_cpu.private_irqs[intid]; in vgic_get_irq()
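
Private interrupts (SGIs and PPIs, intid 0..31) are stored in a fixed per-vCPU array, so vgic_get_irq() resolves them by direct indexing with no lookup structure or locking. A minimal sketch of that branch, assuming the field names from include/kvm/arm_vgic.h (vgic_get_private_irq is a hypothetical name; the real logic is one case inside vgic_get_irq()):

    #include <linux/kvm_host.h>
    #include <kvm/arm_vgic.h>

    /* SGIs and PPIs live in a per-vCPU array, so no global table or
     * locking is needed to find the object. */
    static struct vgic_irq *vgic_get_private_irq(struct kvm_vcpu *vcpu, u32 intid)
    {
            if (intid >= VGIC_NR_PRIVATE_IRQS)
                    return NULL;    /* SPIs and LPIs are looked up elsewhere */
            return &vcpu->arch.vgic_cpu.private_irqs[intid];
    }
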
153 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_pending_lpis() local
157 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
159 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_pending_lpis()
169 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
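
The pattern at lines 153-169 is the standard ap_list walk: take ap_list_lock with interrupt state saved, iterate with the _safe variant because entries are unlinked mid-walk, then unlock. A sketch under those assumptions (simplified; VGIC_MIN_LPI is 8192 in the real header, and the real function also drops the IRQ's reference via vgic_put_irq()):

    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <kvm/arm_vgic.h>

    /* Unlink every LPI from this vCPU's ap_list. The _safe iterator is
     * required because list_del() is called on the current entry. */
    static void flush_pending_lpis_sketch(struct kvm_vcpu *vcpu)
    {
            struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
            struct vgic_irq *irq, *tmp;
            unsigned long flags;

            raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
            list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
                    if (irq->intid >= VGIC_MIN_LPI) {
                            raw_spin_lock(&irq->irq_lock);
                            list_del(&irq->ap_list);
                            irq->vcpu = NULL;
                            raw_spin_unlock(&irq->irq_lock);
                            /* real code: vgic_put_irq() drops the reference */
                    }
            }
            raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
    }
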
300 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_sort_ap_list() local
302 lockdep_assert_held(&vgic_cpu->ap_list_lock); in vgic_sort_ap_list()
304 list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp); in vgic_sort_ap_list()
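
vgic_sort_ap_list() does not take the lock itself; lockdep_assert_held() documents and enforces that the caller already holds it, and list_sort() receives a three-way comparator. A sketch with a simplified comparator (the real vgic_irq_cmp() also takes both per-IRQ locks and sorts pending-and-enabled interrupts first; comparator signature as in kernels from v5.13 on):

    #include <linux/list_sort.h>
    #include <kvm/arm_vgic.h>

    /* Simplified comparator: lower priority value = more urgent, sorts first. */
    static int irq_cmp_sketch(void *priv, const struct list_head *a,
                              const struct list_head *b)
    {
            struct vgic_irq *irqa = container_of(a, struct vgic_irq, ap_list);
            struct vgic_irq *irqb = container_of(b, struct vgic_irq, ap_list);

            return irqa->priority - irqb->priority;
    }

    static void sort_ap_list_sketch(struct vgic_cpu *vgic_cpu)
    {
            lockdep_assert_held(&vgic_cpu->ap_list_lock); /* caller locks */
            list_sort(NULL, &vgic_cpu->ap_list_head, irq_cmp_sketch);
    }
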
380 raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
397 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, in vgic_queue_irq_unlock()
409 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head); in vgic_queue_irq_unlock()
413 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
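
vgic_queue_irq_unlock() enters holding only the IRQ's own lock, but the locking order is ap_list_lock before irq_lock, so it must drop the IRQ lock, take both in order, and re-validate before queueing; that is why lines 380-413 lock and unlock more than once. A sketch of the final queueing step under those assumptions (queue_irq_sketch is a hypothetical name and the re-validation is simplified):

    /* Append an unqueued IRQ to the target vCPU's ap_list, with both
     * locks held in the required order: ap_list_lock, then irq_lock. */
    static void queue_irq_sketch(struct kvm_vcpu *vcpu, struct vgic_irq *irq)
    {
            unsigned long flags;

            raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
            raw_spin_lock(&irq->irq_lock);
            if (!irq->vcpu) {       /* not already on some ap_list */
                    irq->vcpu = vcpu;
                    list_add_tail(&irq->ap_list,
                                  &vcpu->arch.vgic_cpu.ap_list_head);
            }
            raw_spin_unlock(&irq->irq_lock);
            raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
    }
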
621 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_prune_ap_list() local
627 raw_spin_lock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
629 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_prune_ap_list()
668 raw_spin_unlock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
682 raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
683 raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock, in vgic_prune_ap_list()
697 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu; in vgic_prune_ap_list()
706 raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
707 raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock); in vgic_prune_ap_list()
717 raw_spin_unlock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
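
The vcpuA/vcpuB pair at lines 682-707 is deadlock avoidance: migrating an interrupt between two vCPUs' ap_lists needs both locks, and two CPUs pruning toward each other would ABBA-deadlock without a global order. The code orders by vcpu_id and uses raw_spin_lock_nested() because both locks belong to the same lockdep class. A sketch of just the ordering (lock_both_ap_lists is a hypothetical helper; in the real code this is inline in vgic_prune_ap_list()):

    #include <linux/spinlock.h>

    /* Always lock the lower vcpu_id first; SINGLE_DEPTH_NESTING tells
     * lockdep that a second acquisition of this lock class is intended. */
    static void lock_both_ap_lists(struct kvm_vcpu *vcpu,
                                   struct kvm_vcpu *target_vcpu)
    {
            struct kvm_vcpu *vcpuA, *vcpuB;

            if (vcpu->vcpu_id < target_vcpu->vcpu_id) {
                    vcpuA = vcpu;
                    vcpuB = target_vcpu;
            } else {
                    vcpuA = target_vcpu;
                    vcpuB = vcpu;
            }

            raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
            raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
                                 SINGLE_DEPTH_NESTING);
            /* ... move irq->ap_list onto target_vcpu's list, then unlock
             * in reverse order (lines 706-707 above) ... */
            raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
            raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
    }
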
760 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in compute_ap_list_depth() local
766 lockdep_assert_held(&vgic_cpu->ap_list_lock); in compute_ap_list_depth()
768 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in compute_ap_list_depth()
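
compute_ap_list_depth() is another caller-locked walk: it sizes the ap_list against the number of hardware list registers. A simplified sketch counting one LR per interrupt (the real code weights GICv2 SGIs with multiple pending sources as more than one):

    /* Count entries on the ap_list; the caller holds ap_list_lock. */
    static int ap_list_depth_sketch(struct kvm_vcpu *vcpu)
    {
            struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
            struct vgic_irq *irq;
            int count = 0;

            lockdep_assert_held(&vgic_cpu->ap_list_lock);

            list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list)
                    count++;

            return count;
    }
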
785 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_lr_state() local
792 lockdep_assert_held(&vgic_cpu->ap_list_lock); in vgic_flush_lr_state()
800 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_lr_state()
826 &vgic_cpu->ap_list_head)) in vgic_flush_lr_state()
837 vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count; in vgic_flush_lr_state()
839 vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count; in vgic_flush_lr_state()
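
Lines 837-839 show that vgic_v2 and vgic_v3 are alternative per-CPU register states: vgic_flush_lr_state() records the number of list registers it filled in whichever one the host hardware uses. A sketch using a plain type check (the real code gates this with a static branch on kvm_vgic_global_state.gicv3_cpuif):

    #include <kvm/arm_vgic.h>

    /* Record how many list registers were populated for this guest entry. */
    static void set_used_lrs_sketch(struct kvm_vcpu *vcpu, int count)
    {
            if (kvm_vgic_global_state.type == VGIC_V2)
                    vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count;
            else
                    vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count;
    }
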
857 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3); in vgic_save_state()
866 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) in kvm_vgic_sync_hwstate()
873 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs; in kvm_vgic_sync_hwstate()
875 used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs; in kvm_vgic_sync_hwstate()
887 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3); in vgic_restore_state()
905 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) && in kvm_vgic_flush_hwstate()
911 if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) { in kvm_vgic_flush_hwstate()
912 raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock); in kvm_vgic_flush_hwstate()
914 raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock); in kvm_vgic_flush_hwstate()
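
kvm_vgic_flush_hwstate() checks list_empty() without the lock first (line 905): the vCPU races only with remote insertions, and those kick the vCPU, so a stale empty reading is harmless. Note also the plain raw_spin_lock at line 912 rather than the irqsave variant; interrupts are already disabled on the guest entry path. A sketch under those assumptions (the real function has an extra early-out condition around direct MSI injection):

    /* Populate list registers before entering the guest. */
    static void flush_hwstate_sketch(struct kvm_vcpu *vcpu)
    {
            /* Lockless fast path: nothing queued, nothing to flush. */
            if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
                    return;

            raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
            /* ... vgic_flush_lr_state(vcpu) fills the LRs here ... */
            raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
    }
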
956 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_pending_irq() local
965 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last) in kvm_vgic_vcpu_pending_irq()
970 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in kvm_vgic_vcpu_pending_irq()
972 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) { in kvm_vgic_vcpu_pending_irq()
983 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags); in kvm_vgic_vcpu_pending_irq()
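
kvm_vgic_vcpu_pending_irq() is the read-only counterpart of the walks above: it takes ap_list_lock with irqsave, peeks at each queued interrupt under its own lock, and stops at the first one that could fire. A simplified sketch (the real check also rejects active IRQs and compares priority against the PMR, and has a GICv4 fast path via its_vpe.pending_last, line 965):

    /* Return true if any queued interrupt could be delivered. */
    static bool vcpu_pending_sketch(struct kvm_vcpu *vcpu)
    {
            struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
            struct vgic_irq *irq;
            unsigned long flags;
            bool pending = false;

            raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
            list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
                    raw_spin_lock(&irq->irq_lock);
                    pending = irq->pending_latch && irq->enabled;
                    raw_spin_unlock(&irq->irq_lock);
                    if (pending)
                            break;
            }
            raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);

            return pending;
    }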