Lines matching refs:xc (cross-references to xc, the per-vcpu struct kvmppc_xive_vcpu pointer):
98 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_attach_escalation() local
99 struct xive_q *q = &xc->queues[prio]; in xive_attach_escalation()
104 if (xc->esc_virq[prio]) in xive_attach_escalation()
108 xc->esc_virq[prio] = irq_create_mapping(NULL, q->esc_irq); in xive_attach_escalation()
109 if (!xc->esc_virq[prio]) { in xive_attach_escalation()
111 prio, xc->server_num); in xive_attach_escalation()
121 vcpu->kvm->arch.lpid, xc->server_num, prio); in xive_attach_escalation()
124 prio, xc->server_num); in xive_attach_escalation()
128 rc = request_irq(xc->esc_virq[prio], xive_esc_irq, in xive_attach_escalation()
132 prio, xc->server_num); in xive_attach_escalation()
135 xc->esc_virq_names[prio] = name; in xive_attach_escalation()
138 irq_dispose_mapping(xc->esc_virq[prio]); in xive_attach_escalation()
139 xc->esc_virq[prio] = 0; in xive_attach_escalation()
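The refs above span the whole of xive_attach_escalation(). Below is a hedged sketch of how those fragments plausibly fit together; it is a reconstruction around the matched lines, not the listed file itself. The kasprintf() name format, the IRQF_NO_THREAD flag and the error codes are assumptions.

static int xive_attach_escalation(struct kvm_vcpu *vcpu, u8 prio)
{
        struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu;
        struct xive_q *q = &xc->queues[prio];
        char *name = NULL;
        int rc;

        /* Already hooked up? (ref at line 104) */
        if (xc->esc_virq[prio])
                return 0;

        /* Map the HW escalation interrupt into a Linux virq */
        xc->esc_virq[prio] = irq_create_mapping(NULL, q->esc_irq);
        if (!xc->esc_virq[prio]) {
                pr_err("Failed to map escalation interrupt for queue %d of VCPU %d\n",
                       prio, xc->server_num);
                return -EIO;                            /* error code assumed */
        }

        /* Build a name from lpid/server/prio (format assumed) */
        name = kasprintf(GFP_KERNEL, "kvm-%d-%d-%d",
                         vcpu->kvm->arch.lpid, xc->server_num, prio);
        if (!name) {
                pr_err("Failed to allocate escalation irq name for queue %d of VCPU %d\n",
                       prio, xc->server_num);
                rc = -ENOMEM;
                goto error;
        }

        rc = request_irq(xc->esc_virq[prio], xive_esc_irq,
                         IRQF_NO_THREAD, name, vcpu);   /* flags assumed */
        if (rc) {
                pr_err("Failed to request escalation interrupt for queue %d of VCPU %d\n",
                       prio, xc->server_num);
                goto error;
        }
        xc->esc_virq_names[prio] = name;
        return 0;

error:
        irq_dispose_mapping(xc->esc_virq[prio]);
        xc->esc_virq[prio] = 0;
        kfree(name);
        return rc;
}

The error path visible at lines 138/139 undoes the irq_create_mapping() and clears the slot so a later attach can retry cleanly.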
146 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_provision_queue() local
147 struct kvmppc_xive *xive = xc->xive; in xive_provision_queue()
148 struct xive_q *q = &xc->queues[prio]; in xive_provision_queue()
159 prio, xc->server_num); in xive_provision_queue()
171 rc = xive_native_configure_queue(xc->vp_id, q, prio, qpage, in xive_provision_queue()
175 prio, xc->server_num); in xive_provision_queue()
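xive_provision_queue() follows the same shape: allocate backing storage for the event queue, then hand it to the hardware layer. A hedged sketch follows; the __get_free_pages() allocation, the q_order/q_page_order fields and the trailing arguments of xive_native_configure_queue() are assumptions, only the calls and messages matched above come from the listing.

static int xive_provision_queue(struct kvm_vcpu *vcpu, u8 prio)
{
        struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu;
        struct kvmppc_xive *xive = xc->xive;
        struct xive_q *q = &xc->queues[prio];
        __be32 *qpage;
        int rc;

        if (WARN_ON(q->qpage))
                return 0;

        /* Allocate and clear the backing page (allocator assumed) */
        qpage = (__be32 *)__get_free_pages(GFP_KERNEL, xive->q_page_order);
        if (!qpage) {
                pr_err("Failed to allocate queue %d for VCPU %d\n",
                       prio, xc->server_num);
                return -ENOMEM;
        }
        memset(qpage, 0, 1 << xive->q_order);

        /* Point the HW queue descriptor for this priority at the page */
        rc = xive_native_configure_queue(xc->vp_id, q, prio, qpage,
                                         xive->q_order, true);
        if (rc)
                pr_err("Failed to configure queue %d for VCPU %d\n",
                       prio, xc->server_num);
        return rc;
}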
214 struct kvmppc_xive_vcpu *xc; in xive_inc_q_pending() local
223 xc = vcpu->arch.xive_vcpu; in xive_inc_q_pending()
224 if (WARN_ON(!xc)) in xive_inc_q_pending()
227 q = &xc->queues[prio]; in xive_inc_q_pending()
233 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_try_pick_queue() local
237 if (WARN_ON(!xc)) in xive_try_pick_queue()
239 if (!xc->valid) in xive_try_pick_queue()
242 q = &xc->queues[prio]; in xive_try_pick_queue()
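xive_inc_q_pending() and xive_try_pick_queue() share the guard pattern visible above before indexing xc->queues[prio]. A hedged sketch of the pick side; only the guards come from the matched lines, the capacity accounting (q->count, q->msk, XIVE_Q_GAP) is assumed.

static int xive_try_pick_queue(struct kvm_vcpu *vcpu, u8 prio)
{
        struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu;
        struct xive_q *q;
        u32 max;

        /* Guards visible in the refs above */
        if (WARN_ON(!xc))
                return -ENXIO;
        if (!xc->valid)
                return -ENXIO;

        q = &xc->queues[prio];
        if (WARN_ON(!q->qpage))
                return -ENXIO;

        /*
         * Reserve a slot in the queue; the exact accounting below
         * (msk-based capacity minus a reserved gap) is an assumption.
         */
        max = (q->msk + 1) - XIVE_Q_GAP;
        return atomic_add_unless(&q->count, 1, max) ? 0 : -EBUSY;
}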
721 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in kvmppc_xive_get_icp() local
723 if (!xc) in kvmppc_xive_get_icp()
727 return (u64)xc->cppr << KVM_REG_PPC_ICP_CPPR_SHIFT | in kvmppc_xive_get_icp()
728 (u64)xc->mfrr << KVM_REG_PPC_ICP_MFRR_SHIFT | in kvmppc_xive_get_icp()
734 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in kvmppc_xive_set_icp() local
739 if (!xc || !xive) in kvmppc_xive_set_icp()
749 xc->server_num, cppr, mfrr, xisr); in kvmppc_xive_set_icp()
760 xc->hw_cppr = xc->cppr = cppr; in kvmppc_xive_set_icp()
768 xc->mfrr = mfrr; in kvmppc_xive_set_icp()
770 xive_irq_trigger(&xc->vp_ipi_data); in kvmppc_xive_set_icp()
782 xc->delayed_irq = xisr; in kvmppc_xive_set_icp()
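The get/set ICP refs show the emulated XICS presenter state being packed into a single u64: CPPR and MFRR are shifted into the one-reg value. A hedged sketch of the pack/unpack halves; icp_pack_state()/icp_unpack_state() are hypothetical helper names for illustration, and the XISR/PPRI fields are assumed from the KVM_REG_PPC_ICP_STATE layout (only the CPPR and MFRR shifts appear in the matched lines).

/* Pack the per-vcpu state the way kvmppc_xive_get_icp() returns it */
static u64 icp_pack_state(struct kvmppc_xive_vcpu *xc)
{
        return (u64)xc->cppr << KVM_REG_PPC_ICP_CPPR_SHIFT |
               (u64)xc->mfrr << KVM_REG_PPC_ICP_MFRR_SHIFT |
               (u64)0xff << KVM_REG_PPC_ICP_PPRI_SHIFT; /* PPRI field assumed */
}

/* Split an incoming value the way kvmppc_xive_set_icp() consumes it */
static void icp_unpack_state(u64 icpval, u8 *cppr, u8 *mfrr, u32 *xisr)
{
        *cppr = icpval >> KVM_REG_PPC_ICP_CPPR_SHIFT;
        *mfrr = icpval >> KVM_REG_PPC_ICP_MFRR_SHIFT;
        *xisr = (icpval >> KVM_REG_PPC_ICP_XISR_SHIFT) &
                KVM_REG_PPC_ICP_XISR_MASK;              /* XISR field assumed */
}

On the set side the matched lines then write xc->hw_cppr and xc->cppr, update xc->mfrr, apparently trigger the VP IPI via xive_irq_trigger() when the new MFRR is in favour, and stash a still-pending XISR in xc->delayed_irq, which xive_check_delayed_irq() (line 1431) later clears.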
955 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in kvmppc_xive_disable_vcpu_interrupts() local
972 if (state->act_server != xc->server_num) in kvmppc_xive_disable_vcpu_interrupts()
991 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in kvmppc_xive_cleanup_vcpu() local
992 struct kvmppc_xive *xive = xc->xive; in kvmppc_xive_cleanup_vcpu()
995 pr_devel("cleanup_vcpu(cpu=%d)\n", xc->server_num); in kvmppc_xive_cleanup_vcpu()
998 xc->valid = false; in kvmppc_xive_cleanup_vcpu()
1002 xive_vm_esb_load(&xc->vp_ipi_data, XIVE_ESB_SET_PQ_01); in kvmppc_xive_cleanup_vcpu()
1006 if (xc->esc_virq[i]) { in kvmppc_xive_cleanup_vcpu()
1007 free_irq(xc->esc_virq[i], vcpu); in kvmppc_xive_cleanup_vcpu()
1008 irq_dispose_mapping(xc->esc_virq[i]); in kvmppc_xive_cleanup_vcpu()
1009 kfree(xc->esc_virq_names[i]); in kvmppc_xive_cleanup_vcpu()
1014 xive_native_disable_vp(xc->vp_id); in kvmppc_xive_cleanup_vcpu()
1018 struct xive_q *q = &xc->queues[i]; in kvmppc_xive_cleanup_vcpu()
1020 xive_native_disable_queue(xc->vp_id, q, i); in kvmppc_xive_cleanup_vcpu()
1029 if (xc->vp_ipi) { in kvmppc_xive_cleanup_vcpu()
1030 xive_cleanup_irq_data(&xc->vp_ipi_data); in kvmppc_xive_cleanup_vcpu()
1031 xive_native_free_irq(xc->vp_ipi); in kvmppc_xive_cleanup_vcpu()
1034 kfree(xc); in kvmppc_xive_cleanup_vcpu()
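The cleanup refs give the teardown order for a vcpu's XIVE state: invalidate, mask the IPI, drop the escalation interrupts, disable the VP, then the queues, then the IPI, then the structure itself. A hedged sketch of that order; the KVMPPC_XIVE_Q_COUNT bound, the call into kvmppc_xive_disable_vcpu_interrupts() and the freeing of queue pages are assumptions.

void kvmppc_xive_cleanup_vcpu(struct kvm_vcpu *vcpu)
{
        struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu;
        struct kvmppc_xive *xive = xc->xive;
        int i;

        pr_devel("cleanup_vcpu(cpu=%d)\n", xc->server_num);

        /* Stop routing new interrupts to this VP, then mask its IPI (PQ=01) */
        xc->valid = false;
        kvmppc_xive_disable_vcpu_interrupts(vcpu);      /* call site assumed */
        xive_vm_esb_load(&xc->vp_ipi_data, XIVE_ESB_SET_PQ_01);

        /* Free the escalation interrupts */
        for (i = 0; i < KVMPPC_XIVE_Q_COUNT; i++) {
                if (xc->esc_virq[i]) {
                        free_irq(xc->esc_virq[i], vcpu);
                        irq_dispose_mapping(xc->esc_virq[i]);
                        kfree(xc->esc_virq_names[i]);
                }
        }

        /* Disable the VP, then tear down its event queues */
        xive_native_disable_vp(xc->vp_id);
        for (i = 0; i < KVMPPC_XIVE_Q_COUNT; i++) {
                struct xive_q *q = &xc->queues[i];

                xive_native_disable_queue(xc->vp_id, q, i);
                if (q->qpage) {                         /* page freeing assumed */
                        free_pages((unsigned long)q->qpage,
                                   xive->q_page_order);
                        q->qpage = NULL;
                }
        }

        /* Release the VP IPI, then the per-vcpu structure itself */
        if (xc->vp_ipi) {
                xive_cleanup_irq_data(&xc->vp_ipi_data);
                xive_native_free_irq(xc->vp_ipi);
        }
        kfree(xc);
}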
1041 struct kvmppc_xive_vcpu *xc; in kvmppc_xive_connect_vcpu() local
1062 xc = kzalloc(sizeof(*xc), GFP_KERNEL); in kvmppc_xive_connect_vcpu()
1063 if (!xc) in kvmppc_xive_connect_vcpu()
1068 vcpu->arch.xive_vcpu = xc; in kvmppc_xive_connect_vcpu()
1069 xc->xive = xive; in kvmppc_xive_connect_vcpu()
1070 xc->vcpu = vcpu; in kvmppc_xive_connect_vcpu()
1071 xc->server_num = cpu; in kvmppc_xive_connect_vcpu()
1072 xc->vp_id = xive->vp_base + cpu; in kvmppc_xive_connect_vcpu()
1073 xc->mfrr = 0xff; in kvmppc_xive_connect_vcpu()
1074 xc->valid = true; in kvmppc_xive_connect_vcpu()
1076 r = xive_native_get_vp_info(xc->vp_id, &xc->vp_cam, &xc->vp_chip_id); in kvmppc_xive_connect_vcpu()
1082 vcpu->arch.xive_cam_word = cpu_to_be32(xc->vp_cam | TM_QW1W2_VO); in kvmppc_xive_connect_vcpu()
1085 xc->vp_ipi = xive_native_alloc_irq(); in kvmppc_xive_connect_vcpu()
1086 if (!xc->vp_ipi) { in kvmppc_xive_connect_vcpu()
1090 pr_devel(" IPI=0x%x\n", xc->vp_ipi); in kvmppc_xive_connect_vcpu()
1092 r = xive_native_populate_irq_data(xc->vp_ipi, &xc->vp_ipi_data); in kvmppc_xive_connect_vcpu()
1103 struct xive_q *q = &xc->queues[i]; in kvmppc_xive_connect_vcpu()
1113 r = xive_native_configure_queue(xc->vp_id, in kvmppc_xive_connect_vcpu()
1129 r = xive_native_enable_vp(xc->vp_id); in kvmppc_xive_connect_vcpu()
1134 r = xive_native_configure_irq(xc->vp_ipi, xc->vp_id, 0, XICS_IPI); in kvmppc_xive_connect_vcpu()
1136 xive_vm_esb_load(&xc->vp_ipi_data, XIVE_ESB_SET_PQ_00); in kvmppc_xive_connect_vcpu()
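kvmppc_xive_connect_vcpu() is the mirror image: allocate the per-vcpu structure, fill in its identity, then bring up the IPI, queues and VP. A hedged sketch of the happy path drawn from the refs above; the kvm_device plumbing, error codes and unwinding are simplified, and the queue-provisioning loop is elided.

int kvmppc_xive_connect_vcpu(struct kvm_device *dev,
                             struct kvm_vcpu *vcpu, u32 cpu)
{
        struct kvmppc_xive *xive = dev->private;        /* plumbing assumed */
        struct kvmppc_xive_vcpu *xc;
        int r;

        xc = kzalloc(sizeof(*xc), GFP_KERNEL);
        if (!xc)
                return -ENOMEM;

        vcpu->arch.xive_vcpu = xc;
        xc->xive = xive;
        xc->vcpu = vcpu;
        xc->server_num = cpu;
        xc->vp_id = xive->vp_base + cpu;
        xc->mfrr = 0xff;
        xc->valid = true;

        /* Learn the CAM line and chip of the backing VP */
        r = xive_native_get_vp_info(xc->vp_id, &xc->vp_cam, &xc->vp_chip_id);
        if (r)
                goto bail;
        vcpu->arch.xive_cam_word = cpu_to_be32(xc->vp_cam | TM_QW1W2_VO);

        /* Allocate the IPI backing the emulated XICS presenter */
        xc->vp_ipi = xive_native_alloc_irq();
        if (!xc->vp_ipi) {
                r = -EIO;                               /* error code assumed */
                goto bail;
        }
        pr_devel(" IPI=0x%x\n", xc->vp_ipi);

        r = xive_native_populate_irq_data(xc->vp_ipi, &xc->vp_ipi_data);
        if (r)
                goto bail;

        /*
         * Queue provisioning elided; the refs at 1103/1113 show it
         * walking xc->queues[i] and calling
         * xive_native_configure_queue(xc->vp_id, ...).
         */

        /* Enable the VP, route the IPI to it, then unmask it (PQ=00) */
        r = xive_native_enable_vp(xc->vp_id);
        if (r)
                goto bail;
        r = xive_native_configure_irq(xc->vp_ipi, xc->vp_id, 0, XICS_IPI);
        if (!r)
                xive_vm_esb_load(&xc->vp_ipi_data, XIVE_ESB_SET_PQ_00);
bail:
        return r;                                       /* unwinding simplified */
}

Note the PQ bracketing: the IPI is only switched to PQ=00 (unmasked) once the VP is enabled and the interrupt is routed to it, mirroring the PQ=01 masking at the start of cleanup.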
1258 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_pre_save_scan() local
1259 if (!xc) in xive_pre_save_scan()
1262 if (xc->queues[j].qpage) in xive_pre_save_scan()
1263 xive_pre_save_queue(xive, &xc->queues[j]); in xive_pre_save_scan()
1426 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_check_delayed_irq() local
1428 if (!xc) in xive_check_delayed_irq()
1431 if (xc->delayed_irq == irq) { in xive_check_delayed_irq()
1432 xc->delayed_irq = 0; in xive_check_delayed_irq()
1799 struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu; in xive_debug_show() local
1801 if (!xc) in xive_debug_show()
1806 xc->server_num, xc->cppr, xc->hw_cppr, in xive_debug_show()
1807 xc->mfrr, xc->pending, in xive_debug_show()
1808 xc->stat_rm_h_xirr, xc->stat_vm_h_xirr); in xive_debug_show()
1810 t_rm_h_xirr += xc->stat_rm_h_xirr; in xive_debug_show()
1811 t_rm_h_ipoll += xc->stat_rm_h_ipoll; in xive_debug_show()
1812 t_rm_h_cppr += xc->stat_rm_h_cppr; in xive_debug_show()
1813 t_rm_h_eoi += xc->stat_rm_h_eoi; in xive_debug_show()
1814 t_rm_h_ipi += xc->stat_rm_h_ipi; in xive_debug_show()
1815 t_vm_h_xirr += xc->stat_vm_h_xirr; in xive_debug_show()
1816 t_vm_h_ipoll += xc->stat_vm_h_ipoll; in xive_debug_show()
1817 t_vm_h_cppr += xc->stat_vm_h_cppr; in xive_debug_show()
1818 t_vm_h_eoi += xc->stat_vm_h_eoi; in xive_debug_show()
1819 t_vm_h_ipi += xc->stat_vm_h_ipi; in xive_debug_show()
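The tail of the listing is xive_debug_show(): per-vcpu presenter state plus real-mode (rm) and virtual-mode (vm) hcall counters folded into running totals. A hedged sketch of that accumulation; xive_debug_accumulate() is a hypothetical wrapper for illustration, and the seq_printf() format, the summary line and the kvm_for_each_vcpu() iteration are assumed around the matched counter lines.

static void xive_debug_accumulate(struct seq_file *m, struct kvm *kvm)
{
        u64 t_rm_h_xirr = 0, t_rm_h_ipoll = 0, t_rm_h_cppr = 0;
        u64 t_rm_h_eoi = 0, t_rm_h_ipi = 0;
        u64 t_vm_h_xirr = 0, t_vm_h_ipoll = 0, t_vm_h_cppr = 0;
        u64 t_vm_h_eoi = 0, t_vm_h_ipi = 0;
        struct kvm_vcpu *vcpu;
        unsigned long i;        /* index type varies by kernel version */

        kvm_for_each_vcpu(i, vcpu, kvm) {
                struct kvmppc_xive_vcpu *xc = vcpu->arch.xive_vcpu;

                if (!xc)
                        continue;

                seq_printf(m, "cpu server %#x CPPR:%#x HWCPPR:%#x"
                           " MFRR:%#x PEND:%#x h_xirr: R=%lld V=%lld\n",
                           xc->server_num, xc->cppr, xc->hw_cppr,
                           xc->mfrr, xc->pending,
                           xc->stat_rm_h_xirr, xc->stat_vm_h_xirr);

                /* Fold the per-vcpu hcall counters into the totals */
                t_rm_h_xirr += xc->stat_rm_h_xirr;
                t_rm_h_ipoll += xc->stat_rm_h_ipoll;
                t_rm_h_cppr += xc->stat_rm_h_cppr;
                t_rm_h_eoi += xc->stat_rm_h_eoi;
                t_rm_h_ipi += xc->stat_rm_h_ipi;
                t_vm_h_xirr += xc->stat_vm_h_xirr;
                t_vm_h_ipoll += xc->stat_vm_h_ipoll;
                t_vm_h_cppr += xc->stat_vm_h_cppr;
                t_vm_h_eoi += xc->stat_vm_h_eoi;
                t_vm_h_ipi += xc->stat_vm_h_ipi;
        }

        /* Summary print assumed; the real function dumps all totals */
        seq_printf(m, "Hcalls totals: xirr R=%lld V=%lld\n",
                   t_rm_h_xirr, t_vm_h_xirr);
}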