Searched refs: sdma_engine (results 1 – 25 of 40, sorted by relevance)

/drivers/infiniband/hw/hfi1/
sdma.h
311 struct sdma_engine { struct
432 static inline int sdma_empty(struct sdma_engine *sde) in sdma_empty() argument
437 static inline u16 sdma_descq_freecnt(struct sdma_engine *sde) in sdma_descq_freecnt()
444 static inline u16 sdma_descq_inprocess(struct sdma_engine *sde) in sdma_descq_inprocess()
453 static inline int __sdma_running(struct sdma_engine *engine) in __sdma_running()
469 static inline int sdma_running(struct sdma_engine *engine) in sdma_running()
846 int sdma_send_txreq(struct sdma_engine *sde,
850 int sdma_send_txlist(struct sdma_engine *sde,
855 int sdma_ahg_alloc(struct sdma_engine *sde);
856 void sdma_ahg_free(struct sdma_engine *sde, int ahg_index);
[all …]
sdma.c
237 static void sdma_set_state(struct sdma_engine *, enum sdma_states);
238 static void sdma_start_hw_clean_up(struct sdma_engine *);
240 static void sdma_sendctrl(struct sdma_engine *, unsigned);
241 static void init_sdma_regs(struct sdma_engine *, u32, uint);
243 struct sdma_engine *sde,
246 struct sdma_engine *sde,
248 static void dump_sdma_state(struct sdma_engine *sde);
249 static void sdma_make_progress(struct sdma_engine *sde, u64 status);
250 static void sdma_desc_avail(struct sdma_engine *sde, uint avail);
251 static void sdma_flush_descq(struct sdma_engine *sde);
[all …]
trace_tx.h
150 TP_PROTO(struct sdma_engine *sde,
344 TP_PROTO(struct sdma_engine *sde, u64 status),
362 TP_PROTO(struct sdma_engine *sde, u64 status),
367 TP_PROTO(struct sdma_engine *sde, u64 status),
372 TP_PROTO(struct sdma_engine *sde, int aidx),
390 TP_PROTO(struct sdma_engine *sde, int aidx),
394 TP_PROTO(struct sdma_engine *sde, int aidx),
399 TP_PROTO(struct sdma_engine *sde,
437 TP_PROTO(struct sdma_engine *sde,
472 TP_PROTO(struct sdma_engine *sde, u64 sn),
[all …]
sysfs.c
751 ssize_t (*show)(struct sdma_engine *sde, char *buf);
752 ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t cnt);
759 struct sdma_engine *sde = in sde_show()
760 container_of(kobj, struct sdma_engine, kobj); in sde_show()
773 struct sdma_engine *sde = in sde_store()
774 container_of(kobj, struct sdma_engine, kobj); in sde_store()
798 static ssize_t sde_show_cpu_to_sde_map(struct sdma_engine *sde, char *buf) in sde_show_cpu_to_sde_map()
803 static ssize_t sde_store_cpu_to_sde_map(struct sdma_engine *sde, in sde_store_cpu_to_sde_map()
809 static ssize_t sde_show_vl(struct sdma_engine *sde, char *buf) in sde_show_vl()
vnic_sdma.c
91 static noinline int build_vnic_ulp_payload(struct sdma_engine *sde, in build_vnic_ulp_payload()
126 static int build_vnic_tx_desc(struct sdma_engine *sde, in build_vnic_tx_desc()
172 struct sdma_engine *sde = vnic_sdma->sde; in hfi1_vnic_send_dma()
227 static int hfi1_vnic_sdma_sleep(struct sdma_engine *sde, in hfi1_vnic_sdma_sleep()
iowait.h
77 struct sdma_engine;
142 struct sdma_engine *sde,
174 int (*sleep)(struct sdma_engine *sde,
msix.h
58 int msix_request_sdma_irq(struct sdma_engine *sde);
msix.c
229 int msix_request_sdma_irq(struct sdma_engine *sde) in msix_request_sdma_irq()
275 struct sdma_engine *sde = &dd->per_sdma[i]; in msix_request_irqs()
qp.c
68 struct sdma_engine *sde,
489 struct sdma_engine *sde, in iowait_sleep()
591 struct sdma_engine *qp_to_sdma_engine(struct rvt_qp *qp, u8 sc5) in qp_to_sdma_engine()
594 struct sdma_engine *sde; in qp_to_sdma_engine()
655 struct sdma_engine *sde; in qp_iter_print()
verbs_txreq.h
65 struct sdma_engine *sde;
vnic.h
85 struct sdma_engine *sde;
qp.h
136 struct sdma_engine *qp_to_sdma_engine(struct rvt_qp *qp, u8 sc5);
iowait.c
44 int (*sleep)(struct sdma_engine *sde, in iowait_init()
user_sdma.h
185 struct sdma_engine *sde;
affinity.c
791 struct sdma_engine *sde = msix->arg; in hfi1_update_sdma_affinity()
889 struct sdma_engine *sde = NULL; in get_irq_affinity()
901 sde = (struct sdma_engine *)msix->arg; in get_irq_affinity()
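
Aside: the sysfs.c hits above recover the enclosing struct sdma_engine from an embedded kobject via container_of() in sde_show()/sde_store(). Below is a minimal userspace sketch of that idiom; the stub types and the show_engine() helper are illustrative stand-ins, not the real hfi1 definitions.

/*
 * Minimal userspace sketch of the container_of pattern used by the
 * sysfs.c hits above: recover the enclosing engine structure from its
 * embedded kobject. Stub types below are stand-ins, not hfi1 code.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kobj_stub { int refcount; };        /* stand-in for struct kobject */

struct sdma_engine_stub {                  /* stand-in for struct sdma_engine */
	int this_idx;                      /* engine number */
	struct kobj_stub kobj;             /* embedded object handed back by sysfs */
};

static void show_engine(struct kobj_stub *kobj)
{
	/* Same recovery step as sde_show(): member pointer -> containing struct. */
	struct sdma_engine_stub *sde =
		container_of(kobj, struct sdma_engine_stub, kobj);
	printf("engine index: %d\n", sde->this_idx);
}

int main(void)
{
	struct sdma_engine_stub sde = { .this_idx = 3 };

	show_engine(&sde.kobj);            /* prints "engine index: 3" */
	return 0;
}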
/drivers/gpu/drm/amd/amdgpu/
vega20_reg_init.c
73 adev->doorbell_index.sdma_engine[0] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE0; in vega20_doorbell_index_init()
74 adev->doorbell_index.sdma_engine[1] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE1; in vega20_doorbell_index_init()
75 adev->doorbell_index.sdma_engine[2] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE2; in vega20_doorbell_index_init()
76 adev->doorbell_index.sdma_engine[3] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE3; in vega20_doorbell_index_init()
77 adev->doorbell_index.sdma_engine[4] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE4; in vega20_doorbell_index_init()
78 adev->doorbell_index.sdma_engine[5] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE5; in vega20_doorbell_index_init()
79 adev->doorbell_index.sdma_engine[6] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE6; in vega20_doorbell_index_init()
80 adev->doorbell_index.sdma_engine[7] = AMDGPU_VEGA20_DOORBELL_sDMA_ENGINE7; in vega20_doorbell_index_init()
vega10_reg_init.c
73 adev->doorbell_index.sdma_engine[0] = AMDGPU_DOORBELL64_sDMA_ENGINE0; in vega10_doorbell_index_init()
74 adev->doorbell_index.sdma_engine[1] = AMDGPU_DOORBELL64_sDMA_ENGINE1; in vega10_doorbell_index_init()
psp_v11_0.c
757 topology_info_input->nodes[i].sdma_engine = topology->nodes[i].sdma_engine; in psp_v11_0_xgmi_get_topology_info()
772 topology->nodes[i].sdma_engine = topology_info_output->nodes[i].sdma_engine; in psp_v11_0_xgmi_get_topology_info()
799 topology_info_input->nodes[i].sdma_engine = topology->nodes[i].sdma_engine; in psp_v11_0_xgmi_set_topology_info()
ta_xgmi_if.h
75 enum ta_xgmi_assigned_sdma_engine sdma_engine; member
amdgpu_doorbell.h
55 uint32_t sdma_engine[8]; member
/drivers/dma/
imx-sdma.c
302 struct sdma_engine;
363 struct sdma_engine *sdma;
425 struct sdma_engine { struct
617 static inline u32 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event) in chnenbl_ofs()
626 struct sdma_engine *sdma = sdmac->sdma; in sdma_config_ownership()
659 static void sdma_enable_channel(struct sdma_engine *sdma, int channel) in sdma_enable_channel()
667 static int sdma_run_channel0(struct sdma_engine *sdma) in sdma_run_channel0()
689 static int sdma_load_script(struct sdma_engine *sdma, void *buf, int size, in sdma_load_script()
724 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_enable()
736 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_disable()
[all …]
/drivers/gpu/drm/amd/amdkfd/
kfd_kernel_queue_v9.c
246 unsigned int sdma_engine) in pm_unmap_queues_v9() argument
265 if (sdma_engine < 2) { in pm_unmap_queues_v9()
269 engine_sel__mes_unmap_queues__sdma0 + sdma_engine; in pm_unmap_queues_v9()
273 packet->bitfields2.engine_sel = sdma_engine; in pm_unmap_queues_v9()
kfd_kernel_queue_v10.c
213 unsigned int sdma_engine) in pm_unmap_queues_v10() argument
231 engine_sel__mes_unmap_queues__sdma0 + sdma_engine; in pm_unmap_queues_v10()
kfd_kernel_queue_vi.c
244 unsigned int sdma_engine) in pm_unmap_queues_vi() argument
262 engine_sel__mes_unmap_queues__sdma0 + sdma_engine; in pm_unmap_queues_vi()
kfd_packet_manager.c
376 unsigned int sdma_engine) in pm_send_unmap_queue() argument
392 reset, sdma_engine); in pm_send_unmap_queue()
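
Aside: the amdkfd hits share one small piece of arithmetic. The per-ASIC pm_unmap_queues_*() helpers turn an SDMA engine index into the packet's engine_sel field by offsetting from the SDMA0 selector (the v9 variant appears to special-case indices below 2 and otherwise writes the raw index). A standalone sketch of that offset is below; the enum value is a placeholder assumption, as the real encoding lives in the firmware packet headers.

/*
 * Standalone sketch of the engine_sel arithmetic visible in the
 * kfd_kernel_queue_*.c hits above. The enum value is a placeholder.
 */
#include <stdio.h>

enum { engine_sel__mes_unmap_queues__sdma0 = 2 };   /* placeholder value */

static unsigned int sdma_engine_sel(unsigned int sdma_engine)
{
	/* SDMA engine i selects sdma0 + i, as in pm_unmap_queues_v10()/_vi(). */
	return engine_sel__mes_unmap_queues__sdma0 + sdma_engine;
}

int main(void)
{
	printf("engine_sel for SDMA engine 1: %u\n", sdma_engine_sel(1));   /* 3 */
	return 0;
}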
