Searched refs:sdma (Results 1 – 25 of 46) sorted by relevance

/drivers/dma/
imx-sdma.c
440 struct sdma_engine *sdma; member
685 static inline u32 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event) in chnenbl_ofs() argument
687 u32 chnenbl0 = sdma->drvdata->chnenbl0; in chnenbl_ofs()
694 struct sdma_engine *sdma = sdmac->sdma; in sdma_config_ownership() local
701 evt = readl_relaxed(sdma->regs + SDMA_H_EVTOVR); in sdma_config_ownership()
702 mcu = readl_relaxed(sdma->regs + SDMA_H_HOSTOVR); in sdma_config_ownership()
703 dsp = readl_relaxed(sdma->regs + SDMA_H_DSPOVR); in sdma_config_ownership()
720 writel_relaxed(evt, sdma->regs + SDMA_H_EVTOVR); in sdma_config_ownership()
721 writel_relaxed(mcu, sdma->regs + SDMA_H_HOSTOVR); in sdma_config_ownership()
722 writel_relaxed(dsp, sdma->regs + SDMA_H_DSPOVR); in sdma_config_ownership()
[all …]
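
A minimal sketch of the read-modify-write pattern in the sdma_config_ownership() excerpt above, assuming simplified register offsets and a reduced struct sdma_engine (the real imx-sdma.c keeps per-channel state and returns an error code):

#include <linux/io.h>
#include <linux/bitops.h>

/* Offsets are illustrative stand-ins, not the documented i.MX values. */
#define SDMA_H_EVTOVR	0x008
#define SDMA_H_DSPOVR	0x00c
#define SDMA_H_HOSTOVR	0x010

struct sdma_engine {
	void __iomem *regs;
};

static void sdma_set_ownership(struct sdma_engine *sdma, int channel,
			       bool event_override, bool mcu_override,
			       bool dsp_override)
{
	unsigned long evt, mcu, dsp;

	/* Snapshot all three override registers... */
	evt = readl_relaxed(sdma->regs + SDMA_H_EVTOVR);
	mcu = readl_relaxed(sdma->regs + SDMA_H_HOSTOVR);
	dsp = readl_relaxed(sdma->regs + SDMA_H_DSPOVR);

	/* ...flip only this channel's bit in each (a cleared bit enables
	 * the override, matching the excerpt's driver)... */
	if (event_override)
		__clear_bit(channel, &evt);
	else
		__set_bit(channel, &evt);
	if (mcu_override)
		__clear_bit(channel, &mcu);
	else
		__set_bit(channel, &mcu);
	if (dsp_override)
		__clear_bit(channel, &dsp);
	else
		__set_bit(channel, &dsp);

	/* ...and write the updated masks back. */
	writel_relaxed(evt, sdma->regs + SDMA_H_EVTOVR);
	writel_relaxed(mcu, sdma->regs + SDMA_H_HOSTOVR);
	writel_relaxed(dsp, sdma->regs + SDMA_H_DSPOVR);
}
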
TODO
8 - imx-sdma
/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
107 struct prestera_sdma sdma; member
110 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
116 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
128 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
130 return sdma->map_addr + pa; in prestera_sdma_map()
133 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
142 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
150 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
154 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
157 static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_alloc() argument
[all …]
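
The prestera_rxtx.c hits above show two small building blocks: descriptors allocated from a dma_pool, and bus addresses rebased through a device-specific window before being stored little-endian in the descriptor. A sketch under those assumptions (the demo_* names and the two-field descriptor are hypothetical, not the driver's real layout):

#include <linux/dmapool.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_sdma {
	struct dma_pool *desc_pool;
	u32 map_addr;			/* base of the window the engine sees */
};

struct demo_rx_desc {
	__le32 buff;			/* packet buffer, device byte order */
	__le32 next;			/* next descriptor in the ring */
};

/* Rebase a CPU-side DMA address into the SDMA engine's address space. */
static u32 demo_sdma_map(struct demo_sdma *sdma, dma_addr_t pa)
{
	return sdma->map_addr + pa;
}

static int demo_sdma_buf_init(struct demo_sdma *sdma,
			      struct demo_rx_desc **desc_out,
			      dma_addr_t *dma_out)
{
	dma_addr_t dma;
	struct demo_rx_desc *desc;

	desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma);
	if (!desc)
		return -ENOMEM;

	*desc_out = desc;
	*dma_out = dma;
	return 0;
}

/* Chain two descriptors, as in prestera_sdma_rx_desc_set_next(). */
static void demo_rx_desc_set_next(struct demo_sdma *sdma,
				  struct demo_rx_desc *desc, dma_addr_t next)
{
	desc->next = cpu_to_le32(demo_sdma_map(sdma, next));
}
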
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
42 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
43 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
44 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
45 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
55 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
56 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
57 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
82 sdma[ring->idx].sdma_meta_data); in amdgpu_sdma_get_csa_mc_addr()
108 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_ras_late_init()
109 r = amdgpu_irq_get(adev, &adev->sdma.ecc_irq, in amdgpu_sdma_ras_late_init()
[all …]
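
The amdgpu_sdma.c excerpt resolves which SDMA instance owns a ring by comparing the ring pointer against each instance's gfx and page rings. A reduced reconstruction of that loop (demo_* types stand in for amdgpu_device and amdgpu_ring):

struct demo_ring {
	int idx;
};

struct demo_sdma_instance {
	struct demo_ring ring;		/* gfx queue */
	struct demo_ring page;		/* optional page queue */
};

struct demo_device {
	struct {
		int num_instances;
		struct demo_sdma_instance instance[8];
	} sdma;
};

static struct demo_sdma_instance *
demo_sdma_get_instance_from_ring(struct demo_device *adev,
				 struct demo_ring *ring)
{
	int i;

	/* Pointer identity is enough: each ring is embedded in exactly
	 * one instance, so no extra bookkeeping is needed. */
	for (i = 0; i < adev->sdma.num_instances; i++)
		if (ring == &adev->sdma.instance[i].ring ||
		    ring == &adev->sdma.instance[i].page)
			return &adev->sdma.instance[i];

	return NULL;
}

The same loop shape recurs in nearly every sdma_v*.c hit below whenever per-instance state must be walked or resolved from a ring.
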
sdma_v4_4_2.c
105 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_inst_init_golden_registers()
134 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_init_microcode()
288 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_4_2_ring_insert_nop() local
292 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_4_2_ring_insert_nop()
369 << (ring->me % adev->sdma.num_inst_per_aid); in sdma_v4_4_2_ring_emit_hdp_flush()
429 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_4_2_inst_gfx_stop() local
434 sdma[i] = &adev->sdma.instance[i].ring; in sdma_v4_4_2_inst_gfx_stop()
436 if ((adev->mman.buffer_funcs_ring == sdma[i]) && unset != 1) { in sdma_v4_4_2_inst_gfx_stop()
475 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_4_2_inst_page_stop() local
481 sdma[i] = &adev->sdma.instance[i].page; in sdma_v4_4_2_inst_page_stop()
[all …]
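
sdma_v4_4_2_ring_insert_nop() (like its sdma_v4_0.c, cik_sdma.c, sdma_v5_0.c, sdma_v5_2.c and sdma_v6_0.c counterparts below) pads a ring with NOPs, but when the firmware supports burst NOPs the first packet encodes how many following slots it covers. A sketch of that control flow; the DEMO_* packet encoding and demo_ring_write() are invented stand-ins for the real SDMA packet macros and amdgpu_ring_write():

#include <linux/types.h>

#define DEMO_NOP_OP		0x00000000u
#define DEMO_NOP_COUNT(n)	(((u32)(n) & 0x3fff) << 16)	/* assumed field */

struct demo_ring {
	bool burst_nop;		/* set when the fw feature level allows it */
};

static void demo_ring_write(struct demo_ring *ring, u32 pkt)
{
	/* stand-in for amdgpu_ring_write(): queue one dword */
}

static void demo_ring_insert_nop(struct demo_ring *ring, u32 count)
{
	u32 i;

	for (i = 0; i < count; i++) {
		if (ring->burst_nop && i == 0)
			/* One burst NOP announces the remaining count - 1
			 * padding slots to the engine. */
			demo_ring_write(ring, DEMO_NOP_OP |
					      DEMO_NOP_COUNT(count - 1));
		else
			demo_ring_write(ring, DEMO_NOP_OP);
	}
}
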
sdma_v4_0.c
555 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
580 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
737 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_0_ring_insert_nop() local
741 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_insert_nop()
880 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_enable()
916 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
965 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
982 adev->sdma.instance[i].fw_version >= 14) in sdma_v4_0_ctx_switch_enable()
1006 if (adev->sdma.has_page_queue) in sdma_v4_0_enable()
1010 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_enable()
[all …]
sdma_v3_0.c
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
255 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
306 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
311 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v3_0_init_microcode()
314 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
315 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
316 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
317 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v3_0_init_microcode()
318 adev->sdma.instance[i].burst_nop = true; in sdma_v3_0_init_microcode()
322 info->fw = adev->sdma.instance[i].fw; in sdma_v3_0_init_microcode()
[all …]
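
The sdma_v3_0.c hits (and the matching sdma_v2_4.c ones below) show where burst_nop comes from: the firmware blob's header is parsed and the feature is enabled only at feature level 20 or above. A sketch assuming a flattened header; the real struct sdma_firmware_header_v1_0 nests a common header and carries more fields:

#include <linux/firmware.h>
#include <linux/types.h>

/* Simplified stand-in for struct sdma_firmware_header_v1_0. */
struct demo_fw_header {
	__le32 ucode_version;
	__le32 ucode_feature_version;
};

struct demo_sdma_instance {
	const struct firmware *fw;
	u32 fw_version;
	u32 feature_version;
	bool burst_nop;
};

static void demo_sdma_parse_fw(struct demo_sdma_instance *inst)
{
	const struct demo_fw_header *hdr;

	if (!inst->fw)
		return;

	hdr = (const struct demo_fw_header *)inst->fw->data;
	inst->fw_version = le32_to_cpu(hdr->ucode_version);
	inst->feature_version = le32_to_cpu(hdr->ucode_feature_version);

	/* Older firmware mishandles burst NOPs; 20 is the cutoff used
	 * in the excerpts above. */
	if (inst->feature_version >= 20)
		inst->burst_nop = true;
}
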
cik_sdma.c
77 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_free_microcode()
78 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_free_microcode()
134 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
139 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in cik_sdma_init_microcode()
146 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_init_microcode()
147 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
199 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in cik_sdma_ring_insert_nop() local
203 if (sdma && sdma->burst_nop && (i == 0)) in cik_sdma_ring_insert_nop()
313 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_gfx_stop()
370 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_ctx_switch_enable()
[all …]
sdma_v2_4.c
117 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v2_4_free_microcode()
118 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v2_4_free_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
153 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v2_4_init_microcode()
156 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v2_4_init_microcode()
157 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v2_4_init_microcode()
158 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v2_4_init_microcode()
159 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v2_4_init_microcode()
160 adev->sdma.instance[i].burst_nop = true; in sdma_v2_4_init_microcode()
165 info->fw = adev->sdma.instance[i].fw; in sdma_v2_4_init_microcode()
[all …]
si_dma.c
49 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_get_wptr()
57 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_set_wptr()
120 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_stop()
135 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_start()
136 ring = &adev->sdma.instance[i].ring; in si_dma_start()
469 adev->sdma.num_instances = 2; in si_dma_early_init()
487 &adev->sdma.trap_irq); in si_dma_sw_init()
493 &adev->sdma.trap_irq); in si_dma_sw_init()
497 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_sw_init()
498 ring = &adev->sdma.instance[i].ring; in si_dma_sw_init()
[all …]
sdma_v5_2.c
216 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_insert_nop() local
220 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_insert_nop()
381 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
440 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
479 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
507 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
508 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_gfx_resume()
679 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
680 if (!adev->sdma.instance[i].fw) in sdma_v5_2_load_microcode()
683 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v5_2_load_microcode()
[all …]
sdma_v6_0.c
226 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v6_0_ring_insert_nop() local
230 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v6_0_ring_insert_nop()
386 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_stop()
422 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_ctxempty_int_enable()
452 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_enable()
478 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_resume()
479 ring = &adev->sdma.instance[i].ring; in sdma_v6_0_gfx_resume()
548 adev->doorbell_index.sdma_doorbell_range * adev->sdma.num_instances); in sdma_v6_0_gfx_resume()
637 if (!adev->sdma.instance[0].fw) in sdma_v6_0_load_microcode()
646 hdr = (const struct sdma_firmware_header_v2_0 *)adev->sdma.instance[0].fw->data; in sdma_v6_0_load_microcode()
[all …]
sdma_v5_0.c
243 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
401 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_0_ring_insert_nop() local
405 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_0_ring_insert_nop()
564 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_stop()
623 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_ctx_switch_enable()
665 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_enable()
692 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_resume()
693 ring = &adev->sdma.instance[i].ring; in sdma_v5_0_gfx_resume()
867 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_load_microcode()
868 if (!adev->sdma.instance[i].fw) in sdma_v5_0_load_microcode()
[all …]
aqua_vanjaram.c
50 for (i = 0; i < adev->sdma.num_instances; i++) in aqua_vanjaram_doorbell_index_init()
275 { SDMA0_HWIP, adev->sdma.sdma_mask }, in aqua_vanjaram_ip_map_init()
352 num_sdma = adev->sdma.num_instances; in __aqua_vanjaram_get_xcp_ip_info()
632 u32 mask, inst_mask = adev->sdma.sdma_mask; in aqua_vanjaram_init_soc_config()
636 adev->sdma.num_inst_per_aid = 4; in aqua_vanjaram_init_soc_config()
637 adev->sdma.num_instances = NUM_SDMA(adev->sdma.sdma_mask); in aqua_vanjaram_init_soc_config()
640 inst_mask >>= adev->sdma.num_inst_per_aid; in aqua_vanjaram_init_soc_config()
642 for (mask = (1 << adev->sdma.num_inst_per_aid) - 1; inst_mask; in aqua_vanjaram_init_soc_config()
643 inst_mask >>= adev->sdma.num_inst_per_aid, ++i) { in aqua_vanjaram_init_soc_config()
aldebaran.c
365 if (tmp_adev->sdma.ras && in aldebaran_mode2_restore_hwcontext()
366 tmp_adev->sdma.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
367 r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
368 &tmp_adev->sdma.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
sdma_v4_4.c
243 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_reset_ras_error_count()
256 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_query_ras_error_count()
amdgpu_discovery.c
654 adev->sdma.sdma_mask &= in amdgpu_discovery_read_from_harvest_table()
937 harvest = ((1 << inst) & adev->sdma.sdma_mask) == 0; in amdgpu_discovery_get_harvest_info()
1226 adev->sdma.sdma_mask = 0; in amdgpu_discovery_reg_base_init()
1293 if (adev->sdma.num_instances < in amdgpu_discovery_reg_base_init()
1295 adev->sdma.num_instances++; in amdgpu_discovery_reg_base_init()
1296 adev->sdma.sdma_mask |= in amdgpu_discovery_reg_base_init()
1300 adev->sdma.num_instances + 1, in amdgpu_discovery_reg_base_init()
2161 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2183 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2205 adev->sdma.num_instances = 1; in amdgpu_discovery_set_ip_blocks()
[all …]
amdgpu_amdkfd.c
408 return adev->sdma.instance[0].fw_version; in amdgpu_amdkfd_get_fw_version()
411 return adev->sdma.instance[1].fw_version; in amdgpu_amdkfd_get_fw_version()
663 ring = &adev->sdma.instance[0].ring; in amdgpu_amdkfd_submit_ib()
666 ring = &adev->sdma.instance[1].ring; in amdgpu_amdkfd_submit_ib()
amdgpu_mes_ctx.h
104 } __aligned(PAGE_SIZE) sdma[AMDGPU_MES_CTX_MAX_SDMA_RINGS];
gfx_v11_0_3.c
55 ras_if = adev->sdma.ras_if; in gfx_v11_0_3_rlc_gc_fed_irq()
/drivers/infiniband/hw/hfi1/
vnic_sdma.c
30 struct hfi1_vnic_sdma *sdma; member
42 struct hfi1_vnic_sdma *vnic_sdma = tx->sdma; in vnic_sdma_complete()
130 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx]; in hfi1_vnic_send_dma()
147 tx->sdma = vnic_sdma; in hfi1_vnic_send_dma()
231 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx]; in hfi1_vnic_sdma_write_avail()
241 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[i]; in hfi1_vnic_sdma_init()
Kconfig
16 sdma completions for unit testing
Makefile
42 sdma.o \
/drivers/soc/fsl/qe/
qe.c
368 struct sdma __iomem *sdma = &qe_immr->sdma; in qe_sdma_init() local
380 &sdma->sdebcr); in qe_sdma_init()
382 &sdma->sdmr); in qe_sdma_init()
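qe_sdma_init() programs the QE's SDMA block through a struct overlay on the memory-mapped register file: one write points the engine at its buffer (sdebcr), a second sets the mode register (sdmr). A sketch of that style, assuming big-endian registers; the field offsets, the 0x01ffffff address mask and the BIT(31) enable are placeholders, not the real QE constants:

#include <linux/io.h>
#include <linux/bits.h>
#include <linux/types.h>

/* Reduced stand-in for the QE SDMA register block. */
struct demo_sdma_regs {
	__be32 sdmr;			/* SDMA mode register */
	__be32 sdebcr;			/* SDMA buffer base register */
};

static void demo_sdma_init(struct demo_sdma_regs __iomem *sdma,
			   unsigned long buf_offset)
{
	/* Point the engine at its buffer, then enable global mode. */
	iowrite32be(buf_offset & 0x01ffffff, &sdma->sdebcr);
	iowrite32be(BIT(31), &sdma->sdmr);
}
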
/drivers/scsi/isci/
host.c
807 &ihost->scu_registers->sdma.unsolicited_frame_queue_control); in sci_controller_initialize_unsolicited_frame_queue()
816 &ihost->scu_registers->sdma.unsolicited_frame_get_pointer); in sci_controller_initialize_unsolicited_frame_queue()
820 &ihost->scu_registers->sdma.unsolicited_frame_put_pointer); in sci_controller_initialize_unsolicited_frame_queue()
1555 writel(0, &ihost->scu_registers->sdma.unsolicited_frame_get_pointer); in sci_controller_reset_hardware()
2203 val = readl(&ihost->scu_registers->sdma.pdma_configuration); in sci_controller_initialize()
2205 writel(val, &ihost->scu_registers->sdma.pdma_configuration); in sci_controller_initialize()
2207 val = readl(&ihost->scu_registers->sdma.cdma_configuration); in sci_controller_initialize()
2209 writel(val, &ihost->scu_registers->sdma.cdma_configuration); in sci_controller_initialize()
2319 &ihost->scu_registers->sdma.uf_header_base_address_lower); in sci_controller_mem_init()
2321 &ihost->scu_registers->sdma.uf_header_base_address_upper); in sci_controller_mem_init()
[all …]
