
Searched refs:ack_irq (Results 1 – 13 of 13) sorted by relevance

/drivers/mailbox/
hi6220-mailbox.c
    61   unsigned int dir, dst_irq, ack_irq;  [member]
    186  writel(BIT(mchan->ack_irq), ACK_INT_CLR_REG(mbox->ipc));  in hi6220_mbox_interrupt()
    201  writel(BIT(mchan->ack_irq), ACK_INT_ENA_REG(mbox->ipc));  in hi6220_mbox_startup()
    211  writel(BIT(mchan->ack_irq), ACK_INT_DIS_REG(mbox->ipc));  in hi6220_mbox_shutdown()
    212  mbox->irq_map_chan[mchan->ack_irq] = NULL;  in hi6220_mbox_shutdown()
    230  unsigned int ack_irq = spec->args[2];  [local]  in hi6220_mbox_xlate()
    234  ack_irq >= mbox->chan_num) {  in hi6220_mbox_xlate()
    237  i, dst_irq, ack_irq);  in hi6220_mbox_xlate()
    243  if (mbox->irq_map_chan[ack_irq] == (void *)chan) {  in hi6220_mbox_xlate()
    250  mchan->ack_irq = ack_irq;  in hi6220_mbox_xlate()
[all …]
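Read together, these hits show the hi6220 pattern: each channel carries an ack_irq bit index taken from the third cell of the devicetree mbox specifier (spec->args[2]), and the driver enables, clears, and disables that one bit in dedicated ack-interrupt registers. A minimal sketch of the pattern follows; the register offsets and the trimmed channel struct are assumptions for illustration, not the driver's actual definitions (the real ACK_INT_*_REG() macros derive addresses from the mailbox's ipc block):

#include <linux/bitops.h>
#include <linux/io.h>

/* Hypothetical offsets standing in for the real ACK_INT_*_REG() macros. */
#define DEMO_ACK_INT_ENA_REG(base)  ((base) + 0x500)
#define DEMO_ACK_INT_DIS_REG(base)  ((base) + 0x504)
#define DEMO_ACK_INT_CLR_REG(base)  ((base) + 0x508)

struct demo_mbox_chan {
    unsigned int ack_irq;   /* per-channel bit index, from spec->args[2] */
    void __iomem *ipc;      /* shared ipc register block */
};

/* startup: unmask this channel's ack interrupt (cf. hi6220_mbox_startup) */
static void demo_chan_startup(struct demo_mbox_chan *mchan)
{
    writel(BIT(mchan->ack_irq), DEMO_ACK_INT_ENA_REG(mchan->ipc));
}

/* interrupt path: write-1-to-clear the pending ack bit
 * (cf. hi6220_mbox_interrupt) */
static void demo_chan_ack(struct demo_mbox_chan *mchan)
{
    writel(BIT(mchan->ack_irq), DEMO_ACK_INT_CLR_REG(mchan->ipc));
}

/* shutdown: mask the ack interrupt again (cf. hi6220_mbox_shutdown) */
static void demo_chan_shutdown(struct demo_mbox_chan *mchan)
{
    writel(BIT(mchan->ack_irq), DEMO_ACK_INT_DIS_REG(mchan->ipc));
}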
hi3660-mailbox.c
    57   unsigned int ack_irq;  [member]
    106  writel(BIT(mchan->ack_irq), base + MBOX_ICLR_REG);  in hi3660_mbox_check_state()
    143  writel(BIT(mchan->ack_irq), base + MBOX_SRC_REG);  in hi3660_mbox_acquire_channel()
    147  if (val & BIT(mchan->ack_irq))  in hi3660_mbox_acquire_channel()
    201  writel(BIT(mchan->ack_irq), base + MBOX_SEND_REG);  in hi3660_mbox_send_data()
    224  mchan->ack_irq = spec->args[2];  in hi3660_mbox_xlate()
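On hi3660, ack_irq again comes from spec->args[2], but it also serves as the channel's identity bit when claiming the channel: hi3660_mbox_acquire_channel() writes BIT(ack_irq) to MBOX_SRC_REG and then checks that the bit reads back. A sketch of that claim-and-poll step, assuming an illustrative register offset and retry bound:

#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/io.h>

#define DEMO_MBOX_SRC_REG  0x00    /* assumed offset */

static int demo_acquire_channel(void __iomem *base, unsigned int ack_irq)
{
    unsigned int retries = 10;     /* assumed bound */

    /* claim the channel by writing our ack bit to the source register */
    writel(BIT(ack_irq), base + DEMO_MBOX_SRC_REG);

    /* poll until the hardware reflects our ownership */
    while (retries--) {
        if (readl(base + DEMO_MBOX_SRC_REG) & BIT(ack_irq))
            return 0;
        udelay(5);
    }
    return -EBUSY;
}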
/drivers/gpu/drm/amd/amdgpu/
mxgpu_nv.c
    382  adev->virt.ack_irq.num_types = 1;  in xgpu_nv_mailbox_set_irq_funcs()
    383  adev->virt.ack_irq.funcs = &xgpu_nv_mailbox_ack_irq_funcs;  in xgpu_nv_mailbox_set_irq_funcs()
    396  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);  in xgpu_nv_mailbox_add_irq_id()
    412  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_nv_mailbox_get_irq()
    425  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_nv_mailbox_put_irq()
mxgpu_ai.c
    355  adev->virt.ack_irq.num_types = 1;  in xgpu_ai_mailbox_set_irq_funcs()
    356  adev->virt.ack_irq.funcs = &xgpu_ai_mailbox_ack_irq_funcs;  in xgpu_ai_mailbox_set_irq_funcs()
    369  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);  in xgpu_ai_mailbox_add_irq_id()
    385  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_get_irq()
    398  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_put_irq()
mxgpu_vi.c
    585  adev->virt.ack_irq.num_types = 1;  in xgpu_vi_mailbox_set_irq_funcs()
    586  adev->virt.ack_irq.funcs = &xgpu_vi_mailbox_ack_irq_funcs;  in xgpu_vi_mailbox_set_irq_funcs()
    599  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 138, &adev->virt.ack_irq);  in xgpu_vi_mailbox_add_irq_id()
    615  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_vi_mailbox_get_irq()
    628  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_vi_mailbox_put_irq()
amdgpu_virt.h
    235  struct amdgpu_irq_src ack_irq;  [member]
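The three mxgpu files repeat one lifecycle around the adev->virt.ack_irq source declared in amdgpu_virt.h: point it at a per-ASIC funcs table with a single type, bind it to hardware source id 138 (on the BIF client for the SOC15 parts, the legacy client on VI), and refcount it with amdgpu_irq_get()/amdgpu_irq_put(). A sketch of that lifecycle; the callback bodies are hypothetical stubs standing in for the real per-ASIC tables such as xgpu_nv_mailbox_ack_irq_funcs:

#include "amdgpu.h"   /* plus the SOC15 headers defining SOC15_IH_CLIENTID_BIF */

/* Hypothetical callbacks standing in for the per-ASIC implementations. */
static int demo_set_mailbox_ack_irq(struct amdgpu_device *adev,
                                    struct amdgpu_irq_src *src,
                                    unsigned int type,
                                    enum amdgpu_interrupt_state state)
{
    return 0;   /* a real .set would (un)mask the mailbox ack bit */
}

static int demo_mailbox_ack_irq(struct amdgpu_device *adev,
                                struct amdgpu_irq_src *source,
                                struct amdgpu_iv_entry *entry)
{
    return 0;   /* a real .process would complete the host handshake */
}

static const struct amdgpu_irq_src_funcs demo_mailbox_ack_irq_funcs = {
    .set = demo_set_mailbox_ack_irq,
    .process = demo_mailbox_ack_irq,
};

static void demo_mailbox_set_irq_funcs(struct amdgpu_device *adev)
{
    adev->virt.ack_irq.num_types = 1;
    adev->virt.ack_irq.funcs = &demo_mailbox_ack_irq_funcs;
}

static int demo_mailbox_add_irq_id(struct amdgpu_device *adev)
{
    /* source id 138 on the BIF client, as in mxgpu_ai/mxgpu_nv */
    return amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138,
                             &adev->virt.ack_irq);
}

static int demo_mailbox_get_irq(struct amdgpu_device *adev)
{
    return amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);
}

static void demo_mailbox_put_irq(struct amdgpu_device *adev)
{
    amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);
}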
/drivers/misc/ocxl/
link.c
    118  static void ack_irq(struct spa *spa, enum xsl_response r)  [function]
    186  ack_irq(spa, r);  in xsl_fault_handler_bh()
    214  ack_irq(spa, ADDRESS_ERROR);  in xsl_fault_handler()
    233  ack_irq(spa, ADDRESS_ERROR);  in xsl_fault_handler()
    244  ack_irq(spa, ADDRESS_ERROR);  in xsl_fault_handler()
    261  ack_irq(spa, ADDRESS_ERROR);  in xsl_fault_handler()
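Unlike the other hits, ocxl's ack_irq is a private helper in link.c rather than a struct member: every exit path of the XSL translation-fault handler funnels through it, passing ADDRESS_ERROR on the failure branches. A sketch of that shape; the spa layout, response encoding, register write, and context-lookup helper are all assumptions, not the driver's real definitions:

#include <linux/interrupt.h>
#include <linux/io.h>

/* Assumed response encoding; the real enum xsl_response lives in link.c. */
enum demo_xsl_response { DEMO_RESTART, DEMO_ADDRESS_ERROR };

struct demo_spa {
    void __iomem *reg_tfc;   /* assumed fault-completion register */
};

static bool demo_find_pe_context(struct demo_spa *spa);   /* assumed helper */

static void demo_ack_irq(struct demo_spa *spa, enum demo_xsl_response r)
{
    /* report to the hardware how the fault was resolved */
    writel(r, spa->reg_tfc);
}

static irqreturn_t demo_xsl_fault_handler(int irq, void *data)
{
    struct demo_spa *spa = data;

    /* every failure branch acks with ADDRESS_ERROR, as in link.c */
    if (!demo_find_pe_context(spa)) {
        demo_ack_irq(spa, DEMO_ADDRESS_ERROR);
        return IRQ_HANDLED;
    }

    /* ...resolve the fault (possibly in a bottom half), then... */
    demo_ack_irq(spa, DEMO_RESTART);
    return IRQ_HANDLED;
}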
/drivers/misc/cxl/
fault.c
    101  cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_AE, 0);  in cxl_ack_ae()
    125  cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_R, 0);  in cxl_handle_segment_miss()
    190  cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_R, 0);  in cxl_handle_page_fault()
irq.c
    75   cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_A, 0);  in cxl_irq_psl9()
    161  cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_A, 0);  in cxl_irq_psl8()
guest.c
    67    return cxl_ops->ack_irq(ctx, 0, errstat);  in guest_handle_psl_slice_error()
    1188  .ack_irq = guest_ack_irq,
native.c
    1150  return cxl_ops->ack_irq(ctx, 0, errstat);  in native_handle_psl_slice_error()
    1576  .ack_irq = native_ack_irq,
cxl.h
    1079  int (*ack_irq)(struct cxl_context *ctx, u64 tfc, u64 psl_reset_mask);  [member]
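The cxl hits show the other common shape: ack_irq as an entry in a backend ops table (cxl.h line 1079 gives the signature), filled in by the native and guest backends and always invoked through cxl_ops with a PSL_TFC completion flag. A trimmed stand-in for that indirection, with assumed flag values and a stub backend:

#include <linux/bits.h>
#include <linux/types.h>

struct demo_cxl_context;                  /* opaque for this sketch */

/* Assumed stand-ins for the CXL_PSL_TFC_An_* completion flags. */
#define DEMO_TFC_An_A   BIT_ULL(61)       /* abort */
#define DEMO_TFC_An_R   BIT_ULL(62)       /* restart */

struct demo_cxl_backend_ops {
    int (*ack_irq)(struct demo_cxl_context *ctx, u64 tfc,
                   u64 psl_reset_mask);
};

/* A stub backend; native_ack_irq()/guest_ack_irq() fill this role. */
static int demo_native_ack_irq(struct demo_cxl_context *ctx, u64 tfc,
                               u64 psl_reset_mask)
{
    return 0;   /* the real backend completes the fault in hardware */
}

static const struct demo_cxl_backend_ops demo_native_ops = {
    .ack_irq = demo_native_ack_irq,
};

static const struct demo_cxl_backend_ops *demo_cxl_ops = &demo_native_ops;

/* Common code never cares which backend is active: it restarts a
 * resolved fault the way cxl_handle_page_fault() does above. */
static void demo_handle_page_fault(struct demo_cxl_context *ctx)
{
    demo_cxl_ops->ack_irq(ctx, DEMO_TFC_An_R, 0);
}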
/drivers/media/pci/cx18/
cx18-mailbox.c
    385  u32 ack_irq, req;  [local]  in mb_ack_irq()
    389  ack_irq = IRQ_EPU_TO_APU_ACK;  in mb_ack_irq()
    393  ack_irq = IRQ_EPU_TO_CPU_ACK;  in mb_ack_irq()
    412  cx18_write_reg_expect(cx, ack_irq, SW2_INT_SET, ack_irq, ack_irq);  in mb_ack_irq()
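Finally, cx18's mb_ack_irq() picks the ack bit according to which processor the mailbox targets (APU vs CPU) and sets it in SW2_INT_SET through a write-and-verify helper. A sketch with assumed bit values and register offset, and a simplified stand-in for cx18_write_reg_expect():

#include <linux/bitops.h>
#include <linux/bug.h>
#include <linux/io.h>

enum demo_rpu { DEMO_APU, DEMO_CPU };        /* assumed processor ids */

#define DEMO_IRQ_EPU_TO_APU_ACK  BIT(2)      /* assumed bit values */
#define DEMO_IRQ_EPU_TO_CPU_ACK  BIT(3)
#define DEMO_SW2_INT_SET         0x00c4      /* assumed offset */

/* Simplified stand-in for cx18_write_reg_expect(): write, then warn
 * if the masked readback does not show the expected value. */
static void demo_write_reg_expect(void __iomem *base, u32 val, u32 reg,
                                  u32 eval, u32 mask)
{
    writel(val, base + reg);
    WARN_ON_ONCE((readl(base + reg) & mask) != eval);
}

static void demo_mb_ack_irq(void __iomem *base, enum demo_rpu rpu)
{
    /* choose the ack bit for the mailbox's owning processor */
    u32 ack_irq = (rpu == DEMO_APU) ? DEMO_IRQ_EPU_TO_APU_ACK
                                    : DEMO_IRQ_EPU_TO_CPU_ACK;

    demo_write_reg_expect(base, ack_irq, DEMO_SW2_INT_SET,
                          ack_irq, ack_irq);
}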