/drivers/dma/ |
D | imx-sdma.c |
    423   struct sdma_engine *sdma;  member
    662   static inline u32 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event)  in chnenbl_ofs() argument
    664   u32 chnenbl0 = sdma->drvdata->chnenbl0;  in chnenbl_ofs()
    671   struct sdma_engine *sdma = sdmac->sdma;  in sdma_config_ownership() local
    678   evt = readl_relaxed(sdma->regs + SDMA_H_EVTOVR);  in sdma_config_ownership()
    679   mcu = readl_relaxed(sdma->regs + SDMA_H_HOSTOVR);  in sdma_config_ownership()
    680   dsp = readl_relaxed(sdma->regs + SDMA_H_DSPOVR);  in sdma_config_ownership()
    697   writel_relaxed(evt, sdma->regs + SDMA_H_EVTOVR);  in sdma_config_ownership()
    698   writel_relaxed(mcu, sdma->regs + SDMA_H_HOSTOVR);  in sdma_config_ownership()
    699   writel_relaxed(dsp, sdma->regs + SDMA_H_DSPOVR);  in sdma_config_ownership()
    [all …]
|
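
The imx-sdma.c hits above show two recurring moves: computing a channel-enable register offset from a per-SoC chnenbl0 base, and handing a channel to the host by read-modify-writing the EVTOVR/HOSTOVR/DSPOVR override registers. Below is a minimal user-space sketch of that pattern; the register offsets, the 4-byte stride, and the bit polarity are illustrative assumptions, not the actual i.MX SDMA register map.

    /*
     * User-space model: per-SoC base + per-event stride selects a
     * channel-enable register; channel ownership is a read-modify-write
     * of three override registers.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define SDMA_H_EVTOVR  0x00   /* hypothetical offsets for this model */
    #define SDMA_H_DSPOVR  0x04
    #define SDMA_H_HOSTOVR 0x08

    struct sdma_model {
            uint32_t regs[64];    /* stands in for the ioremapped regs */
            uint32_t chnenbl0;    /* per-SoC base of channel-enable regs */
    };

    /* offset of the channel-enable register for a given event line */
    static uint32_t chnenbl_ofs(struct sdma_model *sdma, unsigned int event)
    {
            return sdma->chnenbl0 + event * 4;   /* assumed 4-byte stride */
    }

    /* give one channel to the host: read-modify-write the OVR registers */
    static void config_ownership(struct sdma_model *sdma, int channel)
    {
            uint32_t evt = sdma->regs[SDMA_H_EVTOVR / 4];   /* readl_relaxed() in the driver */
            uint32_t mcu = sdma->regs[SDMA_H_HOSTOVR / 4];
            uint32_t dsp = sdma->regs[SDMA_H_DSPOVR / 4];

            mcu |= 1u << channel;      /* polarity chosen for the example only */
            evt &= ~(1u << channel);
            dsp &= ~(1u << channel);

            sdma->regs[SDMA_H_EVTOVR / 4] = evt;            /* writel_relaxed() in the driver */
            sdma->regs[SDMA_H_HOSTOVR / 4] = mcu;
            sdma->regs[SDMA_H_DSPOVR / 4] = dsp;
    }

    int main(void)
    {
            struct sdma_model sdma = { .chnenbl0 = 0x40 };

            config_ownership(&sdma, 3);
            printf("chnenbl for event 5 at 0x%x, HOSTOVR=0x%x\n",
                   chnenbl_ofs(&sdma, 5), sdma.regs[SDMA_H_HOSTOVR / 4]);
            return 0;
    }
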
D | TODO | 8 - imx-sdma
|
D | Makefile | 42 obj-$(CONFIG_IMX_SDMA) += imx-sdma.o
|
/drivers/net/ethernet/marvell/prestera/ |
D | prestera_rxtx.c |
    110   struct prestera_sdma sdma;  member
    113   static int prestera_sdma_buf_init(struct prestera_sdma *sdma,  in prestera_sdma_buf_init() argument
    119   desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma);  in prestera_sdma_buf_init()
    131   static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa)  in prestera_sdma_map() argument
    133   return sdma->map_addr + pa;  in prestera_sdma_map()
    136   static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma,  in prestera_sdma_rx_desc_init() argument
    145   desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf));  in prestera_sdma_rx_desc_init()
    153   static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma,  in prestera_sdma_rx_desc_set_next() argument
    157   desc->next = cpu_to_le32(prestera_sdma_map(sdma, next));  in prestera_sdma_rx_desc_set_next()
    160   static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma,  in prestera_sdma_rx_skb_alloc() argument
    [all …]
|
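
The prestera_rxtx.c matches above show descriptors being filled with DMA addresses that are first rebased through a constant window (map_addr + pa) and then stored little-endian with cpu_to_le32. A minimal user-space sketch of that descriptor-setup pattern follows; the struct layout and helper names are simplified assumptions, not the driver's actual definitions.

    /*
     * User-space model: rebase a CPU-visible DMA address into the
     * window the device sees, and store it little-endian in the
     * RX descriptor.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct rx_desc {
            uint32_t buff;   /* LE32 mapped buffer address */
            uint32_t next;   /* LE32 mapped address of the next descriptor */
    };

    struct sdma_model {
            uint32_t map_addr;   /* base of the window the device sees */
    };

    static uint32_t cpu_to_le32_model(uint32_t v)
    {
            return v;   /* assume a little-endian host for this sketch */
    }

    static uint32_t sdma_map(struct sdma_model *sdma, uint32_t pa)
    {
            return sdma->map_addr + pa;   /* rebase the DMA address */
    }

    static void rx_desc_init(struct sdma_model *sdma, struct rx_desc *desc,
                             uint32_t buf_pa)
    {
            desc->buff = cpu_to_le32_model(sdma_map(sdma, buf_pa));
    }

    static void rx_desc_set_next(struct sdma_model *sdma, struct rx_desc *desc,
                                 uint32_t next_pa)
    {
            desc->next = cpu_to_le32_model(sdma_map(sdma, next_pa));
    }

    int main(void)
    {
            struct sdma_model sdma = { .map_addr = 0x80000000u };
            struct rx_desc ring[2] = { { 0, 0 }, { 0, 0 } };

            rx_desc_init(&sdma, &ring[0], 0x1000);
            rx_desc_set_next(&sdma, &ring[0], 0x2000);
            printf("desc0: buff=0x%x next=0x%x\n", ring[0].buff, ring[0].next);
            return 0;
    }
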
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_sdma.c |
    41    for (i = 0; i < adev->sdma.num_instances; i++)  in amdgpu_sdma_get_instance_from_ring()
    42    if (ring == &adev->sdma.instance[i].ring ||  in amdgpu_sdma_get_instance_from_ring()
    43    ring == &adev->sdma.instance[i].page)  in amdgpu_sdma_get_instance_from_ring()
    44    return &adev->sdma.instance[i];  in amdgpu_sdma_get_instance_from_ring()
    54    for (i = 0; i < adev->sdma.num_instances; i++) {  in amdgpu_sdma_get_index_from_ring()
    55    if (ring == &adev->sdma.instance[i].ring ||  in amdgpu_sdma_get_index_from_ring()
    56    ring == &adev->sdma.instance[i].page) {  in amdgpu_sdma_get_index_from_ring()
    101   if (!adev->sdma.ras_if) {  in amdgpu_sdma_ras_late_init()
    102   adev->sdma.ras_if = kmalloc(sizeof(struct ras_common_if), GFP_KERNEL);  in amdgpu_sdma_ras_late_init()
    103   if (!adev->sdma.ras_if)  in amdgpu_sdma_ras_late_init()
    [all …]
|
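
The amdgpu_sdma.c hits above resolve an SDMA instance from a ring pointer by comparing it against each instance's main and page rings. A minimal sketch of that lookup follows; the types are reduced to the bare minimum and are not the real amdgpu definitions.

    /*
     * User-space model: walk the instance array and return the instance
     * that owns the given ring (either its main queue or its page queue).
     */
    #include <stddef.h>
    #include <stdio.h>

    struct ring { int id; };

    struct sdma_instance {
            struct ring ring;   /* main queue */
            struct ring page;   /* page queue */
    };

    struct sdma_block {
            struct sdma_instance instance[2];
            int num_instances;
    };

    static struct sdma_instance *
    get_instance_from_ring(struct sdma_block *sdma, struct ring *ring)
    {
            for (int i = 0; i < sdma->num_instances; i++)
                    if (ring == &sdma->instance[i].ring ||
                        ring == &sdma->instance[i].page)
                            return &sdma->instance[i];
            return NULL;   /* ring does not belong to this IP block */
    }

    int main(void)
    {
            struct sdma_block sdma = { .num_instances = 2 };
            struct sdma_instance *inst =
                    get_instance_from_ring(&sdma, &sdma.instance[1].page);

            printf("found instance %ld\n",
                   inst ? (long)(inst - sdma.instance) : -1L);
            return 0;
    }
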
D | sdma_v4_0.c |
    554   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_setup_ulv()
    586   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_destroy_inst_ctx()
    587   release_firmware(adev->sdma.instance[i].fw);  in sdma_v4_0_destroy_inst_ctx()
    588   adev->sdma.instance[i].fw = NULL;  in sdma_v4_0_destroy_inst_ctx()
    597   memset((void *)adev->sdma.instance, 0,  in sdma_v4_0_destroy_inst_ctx()
    659   err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev);  in sdma_v4_0_init_microcode()
    663   err = sdma_v4_0_init_inst_ctx(&adev->sdma.instance[0]);  in sdma_v4_0_init_microcode()
    667   for (i = 1; i < adev->sdma.num_instances; i++) {  in sdma_v4_0_init_microcode()
    672   memcpy((void *)&adev->sdma.instance[i],  in sdma_v4_0_init_microcode()
    673   (void *)&adev->sdma.instance[0],  in sdma_v4_0_init_microcode()
    [all …]
|
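
sdma_v4_0_init_microcode() above uses a "load once, share across instances" pattern: the microcode is requested for instance 0 only and the parsed context is then copied to the remaining instances. The sketch below models that flow; load_fw() is a stand-in for request_firmware() plus header parsing, and the fields are illustrative rather than the real amdgpu_sdma_instance layout.

    /*
     * User-space model: parse the firmware context for instance 0, then
     * duplicate it into every other instance with memcpy.
     */
    #include <string.h>
    #include <stdio.h>

    #define MAX_INSTANCES 8

    struct sdma_instance {
            const void *fw;          /* firmware blob, if loaded */
            unsigned int fw_version;
            int burst_nop;
    };

    struct sdma_block {
            struct sdma_instance instance[MAX_INSTANCES];
            int num_instances;
    };

    static int load_fw(struct sdma_instance *inst, const char *fw_name)
    {
            static const char blob[] = "fake-sdma-ucode";   /* placeholder blob */

            inst->fw = blob;
            inst->fw_version = 42;   /* would come from the ucode header */
            inst->burst_nop = 1;
            printf("loaded %s for instance 0\n", fw_name);
            return 0;
    }

    static int init_microcode(struct sdma_block *sdma, const char *fw_name)
    {
            int err = load_fw(&sdma->instance[0], fw_name);

            if (err)
                    return err;

            /* all instances run the same microcode: duplicate the context */
            for (int i = 1; i < sdma->num_instances; i++)
                    memcpy(&sdma->instance[i], &sdma->instance[0],
                           sizeof(struct sdma_instance));
            return 0;
    }

    int main(void)
    {
            struct sdma_block sdma = { .num_instances = 4 };

            init_microcode(&sdma, "sdma_ucode.bin");
            printf("instance 3 fw_version = %u\n", sdma.instance[3].fw_version);
            return 0;
    }
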
D | sdma_v3_0.c |
    253   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v3_0_free_microcode()
    254   release_firmware(adev->sdma.instance[i].fw);  in sdma_v3_0_free_microcode()
    255   adev->sdma.instance[i].fw = NULL;  in sdma_v3_0_free_microcode()
    307   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v3_0_init_microcode()
    312   err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev);  in sdma_v3_0_init_microcode()
    315   err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);  in sdma_v3_0_init_microcode()
    318   hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v3_0_init_microcode()
    319   adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);  in sdma_v3_0_init_microcode()
    320   adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);  in sdma_v3_0_init_microcode()
    321   if (adev->sdma.instance[i].feature_version >= 20)  in sdma_v3_0_init_microcode()
    [all …]
|
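
In contrast to the v4.0 code, sdma_v3_0_init_microcode() above loads a blob per instance, reads the version fields little-endian from a header at the start of the blob, and gates a feature flag on feature_version. The sketch below models that header parsing; the header layout is a simplified stand-in, not the real sdma_firmware_header_v1_0.

    /*
     * User-space model: read version fields from a firmware header and
     * enable burst_nop when the feature version is new enough.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct fw_header {                     /* assumed simplified header layout */
            uint32_t ucode_version;        /* little-endian on disk */
            uint32_t ucode_feature_version;
    };

    struct sdma_instance {
            uint32_t fw_version;
            uint32_t feature_version;
            int burst_nop;
    };

    static uint32_t le32_to_cpu_model(uint32_t v)
    {
            return v;   /* assume a little-endian host for this sketch */
    }

    static void parse_one(struct sdma_instance *inst, const void *blob)
    {
            const struct fw_header *hdr = blob;

            inst->fw_version = le32_to_cpu_model(hdr->ucode_version);
            inst->feature_version = le32_to_cpu_model(hdr->ucode_feature_version);
            if (inst->feature_version >= 20)
                    inst->burst_nop = 1;   /* newer ucode supports burst NOPs */
    }

    int main(void)
    {
            struct fw_header blob = {
                    .ucode_version = 0x15,
                    .ucode_feature_version = 31,
            };
            struct sdma_instance inst = { 0 };

            parse_one(&inst, &blob);
            printf("fw %u feature %u burst_nop %d\n",
                   inst.fw_version, inst.feature_version, inst.burst_nop);
            return 0;
    }
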
D | cik_sdma.c |
    76    for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_sdma_free_microcode()
    77    release_firmware(adev->sdma.instance[i].fw);  in cik_sdma_free_microcode()
    78    adev->sdma.instance[i].fw = NULL;  in cik_sdma_free_microcode()
    135   for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_sdma_init_microcode()
    140   err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev);  in cik_sdma_init_microcode()
    143   err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);  in cik_sdma_init_microcode()
    148   for (i = 0; i < adev->sdma.num_instances; i++) {  in cik_sdma_init_microcode()
    149   release_firmware(adev->sdma.instance[i].fw);  in cik_sdma_init_microcode()
    150   adev->sdma.instance[i].fw = NULL;  in cik_sdma_init_microcode()
    203   struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in cik_sdma_ring_insert_nop() local
    [all …]
|
D | sdma_v2_4.c |
    116   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v2_4_free_microcode()
    117   release_firmware(adev->sdma.instance[i].fw);  in sdma_v2_4_free_microcode()
    118   adev->sdma.instance[i].fw = NULL;  in sdma_v2_4_free_microcode()
    149   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v2_4_init_microcode()
    154   err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev);  in sdma_v2_4_init_microcode()
    157   err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);  in sdma_v2_4_init_microcode()
    160   hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v2_4_init_microcode()
    161   adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);  in sdma_v2_4_init_microcode()
    162   adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);  in sdma_v2_4_init_microcode()
    163   if (adev->sdma.instance[i].feature_version >= 20)  in sdma_v2_4_init_microcode()
    [all …]
|
D | sdma_v5_2.c |
    111   release_firmware(adev->sdma.instance[0].fw);  in sdma_v5_2_destroy_inst_ctx()
    113   memset((void *)adev->sdma.instance, 0,  in sdma_v5_2_destroy_inst_ctx()
    164   err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev);  in sdma_v5_2_init_microcode()
    168   err = sdma_v5_2_init_inst_ctx(&adev->sdma.instance[0]);  in sdma_v5_2_init_microcode()
    172   for (i = 1; i < adev->sdma.num_instances; i++)  in sdma_v5_2_init_microcode()
    173   memcpy((void *)&adev->sdma.instance[i],  in sdma_v5_2_init_microcode()
    174   (void *)&adev->sdma.instance[0],  in sdma_v5_2_init_microcode()
    184   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_2_init_microcode()
    187   info->fw = adev->sdma.instance[i].fw;  in sdma_v5_2_init_microcode()
    318   struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);  in sdma_v5_2_ring_insert_nop() local
    [all …]
|
D | sdma_v5_0.c |
    276   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_init_microcode()
    281   err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev);  in sdma_v5_0_init_microcode()
    284   err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);  in sdma_v5_0_init_microcode()
    287   hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data;  in sdma_v5_0_init_microcode()
    288   adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version);  in sdma_v5_0_init_microcode()
    289   adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version);  in sdma_v5_0_init_microcode()
    290   if (adev->sdma.instance[i].feature_version >= 20)  in sdma_v5_0_init_microcode()
    291   adev->sdma.instance[i].burst_nop = true;  in sdma_v5_0_init_microcode()
    298   info->fw = adev->sdma.instance[i].fw;  in sdma_v5_0_init_microcode()
    307   for (i = 0; i < adev->sdma.num_instances; i++) {  in sdma_v5_0_init_microcode()
    [all …]
|
D | si_dma.c |
    49    u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;  in si_dma_ring_get_wptr()
    57    u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;  in si_dma_ring_set_wptr()
    120   for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_stop()
    121   ring = &adev->sdma.instance[i].ring;  in si_dma_stop()
    139   for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_start()
    140   ring = &adev->sdma.instance[i].ring;  in si_dma_start()
    475   adev->sdma.num_instances = 2;  in si_dma_early_init()
    493   &adev->sdma.trap_irq);  in si_dma_sw_init()
    499   &adev->sdma.trap_irq);  in si_dma_sw_init()
    503   for (i = 0; i < adev->sdma.num_instances; i++) {  in si_dma_sw_init()
    [all …]
|
D | amdgpu_amdkfd.c |
    389   return adev->sdma.instance[0].fw_version;  in amdgpu_amdkfd_get_fw_version()
    392   return adev->sdma.instance[1].fw_version;  in amdgpu_amdkfd_get_fw_version()
    693   ring = &adev->sdma.instance[0].ring;  in amdgpu_amdkfd_submit_ib()
    696   ring = &adev->sdma.instance[1].ring;  in amdgpu_amdkfd_submit_ib()
|
D | amdgpu_cgs.c |
    167   fw_version = adev->sdma.instance[0].fw_version;  in amdgpu_get_firmware_version()
    170   fw_version = adev->sdma.instance[1].fw_version;  in amdgpu_get_firmware_version()
|
D | amdgpu_kms.c |
    380   if (query_fw->index >= adev->sdma.num_instances)  in amdgpu_firmware_info()
    382   fw_info->ver = adev->sdma.instance[query_fw->index].fw_version;  in amdgpu_firmware_info()
    383   fw_info->feature = adev->sdma.instance[query_fw->index].feature_version;  in amdgpu_firmware_info()
    443   for (i = 0; i < adev->sdma.num_instances; i++)  in amdgpu_hw_ip_info()
    444   if (adev->sdma.instance[i].ring.sched.ready)  in amdgpu_hw_ip_info()
    1585  for (i = 0; i < adev->sdma.num_instances; i++) {  in amdgpu_debugfs_firmware_info_show()
|
D | sdma_v4_4.c | 239 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_reset_ras_error_count()
|
D | amdgpu_ras.c |
    836   if (adev->sdma.funcs->query_ras_error_count) {  in amdgpu_ras_query_error_status()
    837   for (i = 0; i < adev->sdma.num_instances; i++)  in amdgpu_ras_query_error_status()
    838   adev->sdma.funcs->query_ras_error_count(adev, i,  in amdgpu_ras_query_error_status()
    953   if (adev->sdma.funcs->reset_ras_error_count)  in amdgpu_ras_reset_error_status()
    954   adev->sdma.funcs->reset_ras_error_count(adev);  in amdgpu_ras_reset_error_status()
|
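
The amdgpu_ras.c hits above show the RAS core driving the SDMA block through an ops table: a hook is called only when its function pointer is non-NULL, once per instance for the query and once for the reset. The sketch below models that callback pattern; the names and signatures are simplified assumptions, not the real amdgpu interfaces.

    /*
     * User-space model: optional function pointers in an ops table,
     * invoked per instance when present.
     */
    #include <stddef.h>
    #include <stdio.h>

    struct device;

    struct sdma_ras_funcs {
            void (*query_ras_error_count)(struct device *dev, int instance,
                                          unsigned long *count);
            void (*reset_ras_error_count)(struct device *dev);
    };

    struct device {
            const struct sdma_ras_funcs *funcs;
            int num_instances;
    };

    static void my_query(struct device *dev, int instance, unsigned long *count)
    {
            (void)dev;
            *count += instance;   /* fake per-instance error count */
    }

    static void my_reset(struct device *dev)
    {
            printf("reset error counters on %d instances\n", dev->num_instances);
    }

    static const struct sdma_ras_funcs my_funcs = {
            .query_ras_error_count = my_query,
            .reset_ras_error_count = my_reset,
    };

    int main(void)
    {
            struct device dev = { .funcs = &my_funcs, .num_instances = 2 };
            unsigned long count = 0;

            if (dev.funcs->query_ras_error_count)          /* optional hook */
                    for (int i = 0; i < dev.num_instances; i++)
                            dev.funcs->query_ras_error_count(&dev, i, &count);

            if (dev.funcs->reset_ras_error_count)
                    dev.funcs->reset_ras_error_count(&dev);

            printf("total errors %lu\n", count);
            return 0;
    }
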
D | amdgpu_virt.c |
    540   POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA, adev->sdma.instance[0].fw_version);  in amdgpu_virt_populate_vf2pf_ucode_info()
    541   POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA2, adev->sdma.instance[1].fw_version);  in amdgpu_virt_populate_vf2pf_ucode_info()
|
/drivers/infiniband/hw/hfi1/ |
D | vnic_sdma.c |
    30    struct hfi1_vnic_sdma *sdma;  member
    42    struct hfi1_vnic_sdma *vnic_sdma = tx->sdma;  in vnic_sdma_complete()
    130   struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx];  in hfi1_vnic_send_dma()
    147   tx->sdma = vnic_sdma;  in hfi1_vnic_send_dma()
    231   struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx];  in hfi1_vnic_sdma_write_avail()
    241   struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[i];  in hfi1_vnic_sdma_init()
|
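
The vnic_sdma.c matches above show a per-queue context pattern: the vnic keeps one hfi1_vnic_sdma per TX queue, the send path selects it by queue index and records it in the tx request, and the completion path finds its queue back through that pointer. A minimal sketch follows; the types and the MAX_TXQ value are simplified assumptions standing in for the hfi1 definitions.

    /*
     * User-space model: select a per-queue context by index on send,
     * stash a back-pointer in the tx request, use it at completion.
     */
    #include <stdio.h>

    #define MAX_TXQ 4   /* stands in for HFI1_VNIC_MAX_TXQ */

    struct vnic_sdma {
            int q_idx;
            int pending;
    };

    struct vnic_info {
            struct vnic_sdma sdma[MAX_TXQ];   /* one context per TX queue */
    };

    struct tx_req {
            struct vnic_sdma *sdma;   /* back-pointer used at completion time */
    };

    static void send_dma(struct vnic_info *vinfo, int q_idx, struct tx_req *tx)
    {
            struct vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx];

            tx->sdma = vnic_sdma;     /* remember which queue this tx used */
            vnic_sdma->pending++;
    }

    static void sdma_complete(struct tx_req *tx)
    {
            struct vnic_sdma *vnic_sdma = tx->sdma;

            vnic_sdma->pending--;     /* completion lands on the right queue */
    }

    int main(void)
    {
            struct vnic_info vinfo = { 0 };
            struct tx_req tx;

            for (int i = 0; i < MAX_TXQ; i++)
                    vinfo.sdma[i].q_idx = i;

            send_dma(&vinfo, 2, &tx);
            sdma_complete(&tx);
            printf("queue 2 pending = %d\n", vinfo.sdma[2].pending);
            return 0;
    }
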
D | Kconfig | 16 sdma completions for unit testing
|
D | Makefile | 41 sdma.o \
|
D | vnic.h | 95 struct hfi1_vnic_sdma sdma[HFI1_VNIC_MAX_TXQ]; member
|
/drivers/soc/fsl/qe/ |
D | qe.c |
    367   struct sdma __iomem *sdma = &qe_immr->sdma;  in qe_sdma_init() local
    379   &sdma->sdebcr);  in qe_sdma_init()
    381   &sdma->sdmr);  in qe_sdma_init()
|
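
qe_sdma_init() above locates the SDMA register block inside the QE internal memory map and programs its buffer-descriptor base (sdebcr) and mode register (sdmr). The sketch below models that initialization; the register block, mask, and mode value are illustrative placeholders, not the real QE definitions.

    /*
     * User-space model: find the SDMA sub-block in the internal map and
     * program its base and mode registers.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct sdma_regs {         /* stands in for struct sdma __iomem */
            uint32_t sdebcr;   /* SDMA external buffer base */
            uint32_t sdmr;     /* SDMA mode register */
    };

    struct qe_immr_model {
            struct sdma_regs sdma;
    };

    static void out_be32_model(uint32_t *reg, uint32_t val)
    {
            *reg = val;        /* the driver would use a big-endian MMIO write */
    }

    static void qe_sdma_init_model(struct qe_immr_model *immr, uint32_t buf_offset)
    {
            struct sdma_regs *sdma = &immr->sdma;

            out_be32_model(&sdma->sdebcr, buf_offset & 0x00ffffffu); /* assumed mask */
            out_be32_model(&sdma->sdmr, 1u << 31);                   /* assumed enable bit */
    }

    int main(void)
    {
            struct qe_immr_model immr = { { 0, 0 } };

            qe_sdma_init_model(&immr, 0x4000);
            printf("sdebcr=0x%x sdmr=0x%x\n", immr.sdma.sdebcr, immr.sdma.sdmr);
            return 0;
    }
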
/drivers/scsi/isci/ |
D | host.c |
    807   &ihost->scu_registers->sdma.unsolicited_frame_queue_control);  in sci_controller_initialize_unsolicited_frame_queue()
    816   &ihost->scu_registers->sdma.unsolicited_frame_get_pointer);  in sci_controller_initialize_unsolicited_frame_queue()
    820   &ihost->scu_registers->sdma.unsolicited_frame_put_pointer);  in sci_controller_initialize_unsolicited_frame_queue()
    1555  writel(0, &ihost->scu_registers->sdma.unsolicited_frame_get_pointer);  in sci_controller_reset_hardware()
    2203  val = readl(&ihost->scu_registers->sdma.pdma_configuration);  in sci_controller_initialize()
    2205  writel(val, &ihost->scu_registers->sdma.pdma_configuration);  in sci_controller_initialize()
    2207  val = readl(&ihost->scu_registers->sdma.cdma_configuration);  in sci_controller_initialize()
    2209  writel(val, &ihost->scu_registers->sdma.cdma_configuration);  in sci_controller_initialize()
    2319  &ihost->scu_registers->sdma.uf_header_base_address_lower);  in sci_controller_mem_init()
    2321  &ihost->scu_registers->sdma.uf_header_base_address_upper);  in sci_controller_mem_init()
    [all …]
|
/drivers/gpu/drm/radeon/ |
D | radeon_ucode.h | 216 struct sdma_firmware_header_v1_0 sdma; member
|