Lines matching references to ics
75 struct kvmppc_ics *ics; in ics_deliver_irq() local
81 ics = kvmppc_xics_find_ics(xics, irq, &src); in ics_deliver_irq()
82 if (!ics) { in ics_deliver_irq()
86 state = &ics->irq_state[src]; in ics_deliver_irq()
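
Nearly every hit in this listing follows the same shape, first visible in ics_deliver_irq(): resolve a global interrupt number to its ICS block and source index with kvmppc_xics_find_ics(), bail out if no ICS covers that range, then work on ics->irq_state[src]. The user-space sketch below models only that pattern; the type names, the 4-bit split, and the constants are placeholders chosen for the sketch, not the kernel's definitions.

    #include <stdint.h>
    #include <stdio.h>

    #define ICS_SHIFT   4                     /* placeholder: high bits pick the ICS */
    #define SRC_MASK    ((1u << ICS_SHIFT) - 1)
    #define IRQ_PER_ICS (SRC_MASK + 1)
    #define MAX_ICS     16

    struct src_state  { uint32_t server; uint8_t priority; };
    struct ics_block  { uint16_t icsid; struct src_state irq_state[IRQ_PER_ICS]; };
    struct xics_model { struct ics_block *ics[MAX_ICS]; };

    /* Models kvmppc_xics_find_ics(): map a global irq to (ICS block, source index). */
    static struct ics_block *find_ics(struct xics_model *xics, uint32_t irq,
                                      uint16_t *src)
    {
        uint32_t icsid = irq >> ICS_SHIFT;

        if (icsid >= MAX_ICS || !xics->ics[icsid])
            return NULL;                      /* caller handles "no such ICS" */
        *src = (uint16_t)(irq & SRC_MASK);
        return xics->ics[icsid];
    }

    /* Same early-out shape as ics_deliver_irq() above. */
    static void deliver(struct xics_model *xics, uint32_t irq)
    {
        uint16_t src;
        struct ics_block *ics = find_ics(xics, irq, &src);

        if (!ics) {
            printf("irq %u: no ICS registered\n", (unsigned)irq);
            return;
        }
        printf("irq %u -> ics %u src %u priority %u\n", (unsigned)irq,
               (unsigned)ics->icsid, (unsigned)src,
               (unsigned)ics->irq_state[src].priority);
    }

    int main(void)
    {
        static struct ics_block ics0;         /* zero-initialized: icsid 0 */
        struct xics_model xics = { .ics = { &ics0 } };

        deliver(&xics, 3);                    /* hits ICS 0, source 3      */
        deliver(&xics, 1u << ICS_SHIFT);      /* ICS 1 was never created   */
        return 0;
    }

Any C99 compiler builds this; the second deliver() call exercises the same "no such ICS" early-out seen in ics_deliver_irq() above.
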
128 static void ics_check_resend(struct kvmppc_xics *xics, struct kvmppc_ics *ics, in ics_check_resend() argument
134 struct ics_irq_state *state = &ics->irq_state[i]; in ics_check_resend()
143 static bool write_xive(struct kvmppc_xics *xics, struct kvmppc_ics *ics, in write_xive() argument
151 arch_spin_lock(&ics->lock); in write_xive()
163 arch_spin_unlock(&ics->lock); in write_xive()
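
write_xive() is the routine in this listing that updates a source's routing, and it does so strictly inside the per-ICS lock (the arch_spin_lock(&ics->lock) / arch_spin_unlock(&ics->lock) pair above). Below is a minimal user-space model of that critical section, with a pthread mutex standing in for the arch spinlock; the return-value logic is simplified (the real code also considers whether a delivery was pending while the source was masked), so treat it as a sketch of the locking shape, not the kernel routine.

    #include <stdint.h>
    #include <stdbool.h>
    #include <pthread.h>

    #define MASKED 0xff                       /* placeholder masked-priority value */

    struct src_state { uint32_t server; uint8_t priority; uint8_t saved_priority; };

    struct ics_block {                        /* stand-in for struct kvmppc_ics    */
        pthread_mutex_t lock;                 /* models arch_spinlock_t ics->lock  */
        struct src_state irq_state[16];
    };

    /*
     * Shape of write_xive(): update one source's routing while holding the ICS
     * lock, and tell the caller whether the interrupt should now be delivered.
     * Simplified: the real code only asks for redelivery when something was
     * pending or rejected while the source was masked.
     */
    static bool write_xive_model(struct ics_block *ics, uint16_t src,
                                 uint32_t server, uint8_t priority,
                                 uint8_t saved_priority)
    {
        struct src_state *state = &ics->irq_state[src];
        bool deliver;

        pthread_mutex_lock(&ics->lock);
        state->server = server;
        state->priority = priority;
        state->saved_priority = saved_priority;
        deliver = (priority != MASKED);
        pthread_mutex_unlock(&ics->lock);

        return deliver;
    }
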
173 struct kvmppc_ics *ics; in kvmppc_xics_set_xive() local
180 ics = kvmppc_xics_find_ics(xics, irq, &src); in kvmppc_xics_set_xive()
181 if (!ics) in kvmppc_xics_set_xive()
183 state = &ics->irq_state[src]; in kvmppc_xics_set_xive()
193 if (write_xive(xics, ics, state, server, priority, priority)) in kvmppc_xics_set_xive()
202 struct kvmppc_ics *ics; in kvmppc_xics_get_xive() local
210 ics = kvmppc_xics_find_ics(xics, irq, &src); in kvmppc_xics_get_xive()
211 if (!ics) in kvmppc_xics_get_xive()
213 state = &ics->irq_state[src]; in kvmppc_xics_get_xive()
216 arch_spin_lock(&ics->lock); in kvmppc_xics_get_xive()
219 arch_spin_unlock(&ics->lock); in kvmppc_xics_get_xive()
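
kvmppc_xics_get_xive() is the read side of the same locking discipline: the lock/unlock pair above brackets a short copy-out of the source's current routing. The fields copied are not shown in this listing, so the sketch assumes the server/priority pair written by the set_xive path, with the same simplifications as the write_xive model.

    #include <stdint.h>
    #include <pthread.h>

    struct src_state { uint32_t server; uint8_t priority; };

    struct ics_block {
        pthread_mutex_t lock;                 /* models arch_spinlock_t ics->lock */
        struct src_state irq_state[16];
    };

    /* Shape of kvmppc_xics_get_xive(): copy routing out under the ICS lock. */
    static void get_xive_model(struct ics_block *ics, uint16_t src,
                               uint32_t *server, uint8_t *priority)
    {
        struct src_state *state = &ics->irq_state[src];

        pthread_mutex_lock(&ics->lock);
        *server = state->server;
        *priority = state->priority;
        pthread_mutex_unlock(&ics->lock);
    }
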
229 struct kvmppc_ics *ics; in kvmppc_xics_int_on() local
236 ics = kvmppc_xics_find_ics(xics, irq, &src); in kvmppc_xics_int_on()
237 if (!ics) in kvmppc_xics_int_on()
239 state = &ics->irq_state[src]; in kvmppc_xics_int_on()
245 if (write_xive(xics, ics, state, state->server, state->saved_priority, in kvmppc_xics_int_on()
255 struct kvmppc_ics *ics; in kvmppc_xics_int_off() local
262 ics = kvmppc_xics_find_ics(xics, irq, &src); in kvmppc_xics_int_off()
263 if (!ics) in kvmppc_xics_int_off()
265 state = &ics->irq_state[src]; in kvmppc_xics_int_off()
267 write_xive(xics, ics, state, state->server, MASKED, state->priority); in kvmppc_xics_int_off()
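
kvmppc_xics_int_off() and kvmppc_xics_int_on() implement masking purely through write_xive()'s priority arguments: int_off passes MASKED as the new priority and the current priority as saved_priority, and int_on later passes saved_priority back in as the live priority. Stripped of the lookup and locking shown earlier, the state change amounts to the sketch below (MASKED's value is a placeholder here).

    #include <stdint.h>

    #define MASKED 0xff                       /* placeholder masked-priority value */

    struct src_state { uint8_t priority; uint8_t saved_priority; };

    /* int_off shape: remember the live priority, then mask the source. */
    static void int_off_model(struct src_state *s)
    {
        s->saved_priority = s->priority;      /* passed as write_xive's saved_priority */
        s->priority = MASKED;
    }

    /* int_on shape: restore whatever priority was in force when it was masked. */
    static void int_on_model(struct src_state *s)
    {
        s->priority = s->saved_priority;
    }
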
329 struct kvmppc_ics *ics = xics->ics[icsid]; in icp_check_resend() local
333 if (!ics) in icp_check_resend()
335 ics_check_resend(xics, ics, icp); in icp_check_resend()
384 struct kvmppc_ics *ics; in icp_deliver_irq() local
406 ics = kvmppc_xics_find_ics(xics, new_irq, &src); in icp_deliver_irq()
407 if (!ics) { in icp_deliver_irq()
411 state = &ics->irq_state[src]; in icp_deliver_irq()
415 arch_spin_lock(&ics->lock); in icp_deliver_irq()
476 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
494 set_bit(ics->icsid, icp->resend_map); in icp_deliver_irq()
505 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
512 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
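
icp_deliver_irq() shows the deferred path: when the interrupt cannot be presented right away, the ICS id is recorded with set_bit(ics->icsid, icp->resend_map) before the lock is dropped, and icp_check_resend() (the xics->ics[icsid] walk earlier in the listing) later revisits each marked block via ics_check_resend(). A small model of that bookkeeping with a single-word bitmap; the helper names and the printout are illustrative only.

    #include <stdio.h>

    #define MAX_ICS 16

    struct icp_model {
        unsigned long resend_map;             /* bit n set => ICS n has queued work */
    };

    /* Models set_bit(ics->icsid, icp->resend_map) on the rejected-delivery path. */
    static void note_resend(struct icp_model *icp, unsigned int icsid)
    {
        icp->resend_map |= 1UL << icsid;
    }

    /* Models the icp_check_resend() walk: clear each bit, then re-scan that ICS. */
    static void check_resend(struct icp_model *icp)
    {
        for (unsigned int icsid = 0; icsid < MAX_ICS; icsid++) {
            if (!(icp->resend_map & (1UL << icsid)))
                continue;
            icp->resend_map &= ~(1UL << icsid);
            printf("re-scan ICS %u for pending sources\n", icsid);
            /* the real walk calls ics_check_resend(xics, xics->ics[icsid], icp) */
        }
    }
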
786 struct kvmppc_ics *ics; in ics_eoi() local
799 ics = kvmppc_xics_find_ics(xics, irq, &src); in ics_eoi()
800 if (!ics) { in ics_eoi()
804 state = &ics->irq_state[src]; in ics_eoi()
994 struct kvmppc_ics *ics = xics->ics[icsid]; in xics_debug_show() local
996 if (!ics) in xics_debug_show()
1003 arch_spin_lock(&ics->lock); in xics_debug_show()
1006 struct ics_irq_state *irq = &ics->irq_state[i]; in xics_debug_show()
1014 arch_spin_unlock(&ics->lock); in xics_debug_show()
1052 struct kvmppc_ics *ics; in kvmppc_xics_create_ics() local
1060 if (xics->ics[icsid]) in kvmppc_xics_create_ics()
1064 ics = kzalloc(sizeof(struct kvmppc_ics), GFP_KERNEL); in kvmppc_xics_create_ics()
1065 if (!ics) in kvmppc_xics_create_ics()
1068 ics->icsid = icsid; in kvmppc_xics_create_ics()
1071 ics->irq_state[i].number = (icsid << KVMPPC_XICS_ICS_SHIFT) | i; in kvmppc_xics_create_ics()
1072 ics->irq_state[i].priority = MASKED; in kvmppc_xics_create_ics()
1073 ics->irq_state[i].saved_priority = MASKED; in kvmppc_xics_create_ics()
1076 xics->ics[icsid] = ics; in kvmppc_xics_create_ics()
1083 return xics->ics[icsid]; in kvmppc_xics_create_ics()
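
kvmppc_xics_create_ics() allocates a zeroed block on demand: it hands back the existing ICS if one is already installed, otherwise kzalloc()s a new one, stamps icsid, gives every source the absolute number (icsid << KVMPPC_XICS_ICS_SHIFT) | i with both priorities MASKED, and publishes it in xics->ics[icsid]. A user-space approximation with calloc() in place of kzalloc() and the shift/limits reduced to placeholder values; whatever serialization the real function uses around creation is omitted here.

    #include <stdint.h>
    #include <stdlib.h>

    #define ICS_SHIFT   4                     /* placeholder for KVMPPC_XICS_ICS_SHIFT */
    #define IRQ_PER_ICS (1u << ICS_SHIFT)
    #define MAX_ICS     16
    #define MASKED      0xff

    struct src_state  { uint32_t number; uint8_t priority; uint8_t saved_priority; };
    struct ics_block  { uint16_t icsid; struct src_state irq_state[IRQ_PER_ICS]; };
    struct xics_model { struct ics_block *ics[MAX_ICS]; };

    static struct ics_block *create_ics(struct xics_model *xics, uint32_t irq)
    {
        uint32_t icsid = irq >> ICS_SHIFT;
        struct ics_block *ics;

        if (icsid >= MAX_ICS)
            return NULL;
        if (xics->ics[icsid])                 /* already created: reuse the old one */
            return xics->ics[icsid];

        ics = calloc(1, sizeof(*ics));        /* calloc() standing in for kzalloc() */
        if (!ics)
            return NULL;

        ics->icsid = (uint16_t)icsid;
        for (uint32_t i = 0; i < IRQ_PER_ICS; i++) {
            ics->irq_state[i].number = (icsid << ICS_SHIFT) | i;
            ics->irq_state[i].priority = MASKED;
            ics->irq_state[i].saved_priority = MASKED;
        }
        xics->ics[icsid] = ics;
        return xics->ics[icsid];
    }
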
1130 struct kvmppc_ics *ics; in kvmppc_xics_set_icp() local
1155 ics = kvmppc_xics_find_ics(xics, xisr, &src); in kvmppc_xics_set_icp()
1156 if (!ics) in kvmppc_xics_set_icp()
1204 struct kvmppc_ics *ics; in xics_get_source() local
1211 ics = kvmppc_xics_find_ics(xics, irq, &idx); in xics_get_source()
1212 if (!ics) in xics_get_source()
1215 irqp = &ics->irq_state[idx]; in xics_get_source()
1217 arch_spin_lock(&ics->lock); in xics_get_source()
1242 arch_spin_unlock(&ics->lock); in xics_get_source()
1253 struct kvmppc_ics *ics; in xics_set_source() local
1265 ics = kvmppc_xics_find_ics(xics, irq, &idx); in xics_set_source()
1266 if (!ics) { in xics_set_source()
1267 ics = kvmppc_xics_create_ics(xics->kvm, xics, irq); in xics_set_source()
1268 if (!ics) in xics_set_source()
1271 irqp = &ics->irq_state[idx]; in xics_set_source()
1282 arch_spin_lock(&ics->lock); in xics_set_source()
1300 arch_spin_unlock(&ics->lock); in xics_set_source()
1365 kfree(xics->ics[i]); in kvmppc_xics_free()
1456 struct kvmppc_ics *ics; in kvmppc_xics_set_mapped() local
1459 ics = kvmppc_xics_find_ics(xics, irq, &idx); in kvmppc_xics_set_mapped()
1460 if (!ics) in kvmppc_xics_set_mapped()
1463 ics->irq_state[idx].host_irq = host_irq; in kvmppc_xics_set_mapped()
1464 ics->irq_state[idx].intr_cpu = -1; in kvmppc_xics_set_mapped()
1472 struct kvmppc_ics *ics; in kvmppc_xics_clr_mapped() local
1475 ics = kvmppc_xics_find_ics(xics, irq, &idx); in kvmppc_xics_clr_mapped()
1476 if (!ics) in kvmppc_xics_clr_mapped()
1479 ics->irq_state[idx].host_irq = 0; in kvmppc_xics_clr_mapped()
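
kvmppc_xics_set_mapped() and kvmppc_xics_clr_mapped() close out the listing: after the usual kvmppc_xics_find_ics() lookup (and an early return when it fails), they simply record or clear the backing host interrupt number, resetting intr_cpu to -1 on map. Reduced to the state change itself:

    #include <stdint.h>

    struct src_state { uint32_t host_irq; int intr_cpu; };

    /* Shape of kvmppc_xics_set_mapped(): record the backing host interrupt. */
    static void set_mapped_model(struct src_state *s, uint32_t host_irq)
    {
        s->host_irq = host_irq;
        s->intr_cpu = -1;                     /* matches the intr_cpu reset above */
    }

    /* Shape of kvmppc_xics_clr_mapped(): forget the host interrupt. */
    static void clr_mapped_model(struct src_state *s)
    {
        s->host_irq = 0;
    }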