
Searched full:sdma (Results 1 – 25 of 541) sorted by relevance


/kernel/linux/linux-5.10/drivers/dma/
imx-sdma.c
3 // drivers/dma/imx-sdma.c
38 #include <linux/platform_data/dma-imx-sdma.h>
47 /* SDMA registers */
104 * Error bit set in the CCB status field by the SDMA,
145 * 28 Lower WML Event(LWE) SDMA events reg to check for
149 * 29 Higher WML Event(HWE) SDMA events reg to check for
208 * @unused: padding. The SDMA engine expects an array of 128 byte
218 * struct sdma_state_registers - SDMA context for a channel
247 * struct sdma_context_data - sdma context specific to a channel
331 * struct sdma_channel - housekeeping for a SDMA channel
[all …]
sirf-dma.c
238 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_execute() local
247 base = sdma->base; in sirfsoc_dma_execute()
253 if (sdma->type == SIRFSOC_DMA_VER_A7V2) in sirfsoc_dma_execute()
257 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
266 struct sirfsoc_dma *sdma = data; in sirfsoc_dma_irq() local
274 switch (sdma->type) { in sirfsoc_dma_irq()
277 is = readl(sdma->base + SIRFSOC_DMA_CH_INT); in sirfsoc_dma_irq()
278 reg = sdma->base + SIRFSOC_DMA_CH_INT; in sirfsoc_dma_irq()
282 schan = &sdma->channels[ch]; in sirfsoc_dma_irq()
300 is = readl(sdma->base + SIRFSOC_DMA_INT_ATLAS7); in sirfsoc_dma_irq()
[all …]
/kernel/linux/linux-6.6/drivers/dma/
imx-sdma.c
3 // drivers/dma/imx-sdma.c
46 /* SDMA registers */
104 * Error bit set in the CCB status field by the SDMA,
145 * 28 Lower WML Event(LWE) SDMA events reg to check for
149 * 29 Higher WML Event(HWE) SDMA events reg to check for
193 * struct sdma_script_start_addrs - SDMA script start pointers
196 * address space of the SDMA engine.
275 * @unused: padding. The SDMA engine expects an array of 128 byte
285 * struct sdma_state_registers - SDMA context for a channel
314 * struct sdma_context_data - sdma context specific to a channel
[all …]
/kernel/linux/linux-5.10/Documentation/devicetree/bindings/dma/
fsl-imx-sdma.txt
1 * Freescale Smart Direct Memory Access (SDMA) Controller for i.MX
5 "fsl,imx25-sdma"
6 "fsl,imx31-sdma", "fsl,imx31-to1-sdma", "fsl,imx31-to2-sdma"
7 "fsl,imx35-sdma", "fsl,imx35-to1-sdma", "fsl,imx35-to2-sdma"
8 "fsl,imx51-sdma"
9 "fsl,imx53-sdma"
10 "fsl,imx6q-sdma"
11 "fsl,imx7d-sdma"
12 "fsl,imx8mq-sdma"
13 "fsl,imx8mm-sdma"
[all …]
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
104 /* protect SDMA with concurrrent access from multiple CPUs */
109 struct prestera_sdma sdma; member
112 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
118 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
130 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
132 return sdma->map_addr + pa; in prestera_sdma_map()
135 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
144 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
152 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
156 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
[all …]
/kernel/linux/linux-6.6/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
102 /* protect SDMA with concurrent access from multiple CPUs */
107 struct prestera_sdma sdma; member
110 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
116 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
128 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
130 return sdma->map_addr + pa; in prestera_sdma_map()
133 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
142 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
150 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
154 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
[all …]
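The prestera excerpts above show the driver's address-window pattern: descriptors store device-visible addresses obtained by rebasing the CPU-side DMA address with a fixed offset (map_addr) and converting it to little-endian. Below is a minimal, self-contained sketch of that pattern; the toy_* types and the le32 helper are simplified stand-ins invented for illustration, not the driver's real definitions.

```c
#include <stdint.h>

typedef uint32_t le32;
typedef uint64_t dma_addr_t;

struct toy_rx_desc {
	le32 buff;	/* device-visible buffer address */
	le32 next;	/* device-visible address of the next descriptor */
};

struct toy_sdma {
	uint32_t map_addr;	/* base of the SDMA address window */
};

/* stand-in for the kernel's cpu_to_le32() on a little-endian build */
static le32 toy_cpu_to_le32(uint32_t v) { return v; }

/* mirrors prestera_sdma_map(): rebase a DMA address into the SDMA window */
static uint32_t toy_sdma_map(struct toy_sdma *sdma, dma_addr_t pa)
{
	return sdma->map_addr + (uint32_t)pa;	/* low 32 bits, as in the excerpt */
}

/* mirrors prestera_sdma_rx_desc_set_next(): chain descriptors by address */
static void toy_rx_desc_set_next(struct toy_sdma *sdma,
				 struct toy_rx_desc *desc, dma_addr_t next)
{
	desc->next = toy_cpu_to_le32(toy_sdma_map(sdma, next));
}
```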
/kernel/linux/linux-6.6/Documentation/devicetree/bindings/dma/
fsl,imx-sdma.yaml
4 $id: http://devicetree.org/schemas/dma/fsl,imx-sdma.yaml#
7 title: Freescale Smart Direct Memory Access (SDMA) Controller for i.MX
20 - fsl,imx50-sdma
21 - fsl,imx51-sdma
22 - fsl,imx53-sdma
23 - fsl,imx6q-sdma
24 - fsl,imx7d-sdma
25 - const: fsl,imx35-sdma
28 - fsl,imx6sx-sdma
29 - fsl,imx6sl-sdma
[all …]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
30 /* SDMA CSA reside in the 3rd page of CSA */
34 * GPU SDMA IP block helpers function.
42 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
43 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
44 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
45 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
55 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
56 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
57 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
74 /* don't enable OS preemption on SDMA under SRIOV */ in amdgpu_sdma_get_csa_mc_addr()
[all …]
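The amdgpu_sdma.c matches above show how the driver resolves which SDMA instance owns a given ring: walk every instance and compare the ring pointer against the instance's main ring and its page-queue ring. Here is a simplified, self-contained sketch of that lookup; the toy_* structs are reduced stand-ins for the real amdgpu types (which live in the driver headers and carry many more fields).

```c
#include <stddef.h>

struct toy_ring { int id; };

struct toy_sdma_instance {
	struct toy_ring ring;	/* main SDMA gfx ring */
	struct toy_ring page;	/* optional page-queue ring */
};

struct toy_sdma {
	int num_instances;
	struct toy_sdma_instance instance[8];
};

/*
 * Same pattern as amdgpu_sdma_get_instance_from_ring() in the excerpt:
 * return the instance that owns either ring, or NULL if the ring does
 * not belong to any SDMA instance.
 */
static struct toy_sdma_instance *
toy_sdma_get_instance_from_ring(struct toy_sdma *sdma, struct toy_ring *ring)
{
	int i;

	for (i = 0; i < sdma->num_instances; i++)
		if (ring == &sdma->instance[i].ring ||
		    ring == &sdma->instance[i].page)
			return &sdma->instance[i];

	return NULL;
}
```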
sdma_v4_4_2.c
34 #include "sdma/sdma_4_4_2_offset.h"
35 #include "sdma/sdma_4_4_2_sh_mask.h"
105 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_inst_init_golden_registers()
134 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_init_microcode()
288 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_4_2_ring_insert_nop() local
292 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_4_2_ring_insert_nop()
369 << (ring->me % adev->sdma.num_inst_per_aid); in sdma_v4_4_2_ring_emit_hdp_flush()
429 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_4_2_inst_gfx_stop() local
434 sdma[i] = &adev->sdma.instance[i].ring; in sdma_v4_4_2_inst_gfx_stop()
436 if ((adev->mman.buffer_funcs_ring == sdma[i]) && unset != 1) { in sdma_v4_4_2_inst_gfx_stop()
[all …]
sdma_v4_0.c
555 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
580 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
584 for every SDMA instance */ in sdma_v4_0_init_microcode()
737 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_0_ring_insert_nop() local
741 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_insert_nop()
870 * @enable: enable SDMA RB/IB
880 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_enable()
916 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
965 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
977 * Enable SDMA utilization. Its only supported on in sdma_v4_0_ctx_switch_enable()
[all …]
sdma_v3_0.c
182 * sDMA - System DMA
190 * (ring buffer, IBs, etc.), but sDMA has it's own
192 * used by the CP. sDMA supports copying data, writing
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
255 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
306 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
311 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v3_0_init_microcode()
314 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
315 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
316 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
[all …]
cik_sdma.c
77 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_free_microcode()
78 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_free_microcode()
82 * sDMA - System DMA
90 * (ring buffer, IBs, etc.), but sDMA has it's own
92 * used by the CP. sDMA supports copying data, writing
134 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
139 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in cik_sdma_init_microcode()
146 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_init_microcode()
147 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
199 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in cik_sdma_ring_insert_nop() local
[all …]
sdma_v2_4.c
81 * sDMA - System DMA
89 * (ring buffer, IBs, etc.), but sDMA has it's own
91 * used by the CP. sDMA supports copying data, writing
117 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v2_4_free_microcode()
118 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v2_4_free_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
153 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v2_4_init_microcode()
156 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v2_4_init_microcode()
157 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v2_4_init_microcode()
158 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v2_4_init_microcode()
[all …]
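Several of the matches above (sdma_v2_4.c, sdma_v3_0.c) cast the raw firmware blob to a sdma_firmware_header_v1_0 and read its version fields with le32_to_cpu(). The sketch below illustrates that little-endian header read in a self-contained way; the two-field header layout and the toy_* helpers are assumptions made for illustration and are not the actual amdgpu firmware header definition.

```c
#include <stddef.h>
#include <stdint.h>

/* Illustrative header with only two fields; the real header has more. */
struct toy_sdma_fw_header {
	uint32_t ucode_version;		/* stored little-endian in the blob */
	uint32_t ucode_feature_version;	/* stored little-endian in the blob */
};

/* portable stand-in for le32_to_cpu(): assemble a u32 from LE bytes */
static uint32_t toy_le32_to_cpu(const uint8_t *p)
{
	return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
	       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

/* Pull both version fields out of a raw firmware image. */
static void toy_parse_fw(const uint8_t *fw_data,
			 uint32_t *fw_version, uint32_t *feature_version)
{
	*fw_version = toy_le32_to_cpu(fw_data +
			offsetof(struct toy_sdma_fw_header, ucode_version));
	*feature_version = toy_le32_to_cpu(fw_data +
			offsetof(struct toy_sdma_fw_header, ucode_feature_version));
}
```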
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
29 /* SDMA CSA reside in the 3rd page of CSA */
33 * GPU SDMA IP block helpers function.
41 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
42 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
43 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
44 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
54 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
55 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
56 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
73 /* don't enable OS preemption on SDMA under SRIOV */ in amdgpu_sdma_get_csa_mc_addr()
[all …]
cik_sdma.c
76 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_free_microcode()
77 release_firmware(adev->sdma.instance[i].fw); in cik_sdma_free_microcode()
78 adev->sdma.instance[i].fw = NULL; in cik_sdma_free_microcode()
83 * sDMA - System DMA
91 * (ring buffer, IBs, etc.), but sDMA has it's own
93 * used by the CP. sDMA supports copying data, writing
135 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
140 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in cik_sdma_init_microcode()
143 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
[all …]
sdma_v4_0.c
530 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
562 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_destroy_inst_ctx()
563 release_firmware(adev->sdma.instance[i].fw); in sdma_v4_0_destroy_inst_ctx()
564 adev->sdma.instance[i].fw = NULL; in sdma_v4_0_destroy_inst_ctx()
567 all SDMA isntances */ in sdma_v4_0_destroy_inst_ctx()
572 memset((void*)adev->sdma.instance, 0, in sdma_v4_0_destroy_inst_ctx()
634 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v4_0_init_microcode()
638 err = sdma_v4_0_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v4_0_init_microcode()
642 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
645 for every SDMA instance */ in sdma_v4_0_init_microcode()
[all …]
sdma_v2_4.c
81 * sDMA - System DMA
89 * (ring buffer, IBs, etc.), but sDMA has it's own
91 * used by the CP. sDMA supports copying data, writing
116 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_free_microcode()
117 release_firmware(adev->sdma.instance[i].fw); in sdma_v2_4_free_microcode()
118 adev->sdma.instance[i].fw = NULL; in sdma_v2_4_free_microcode()
149 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
154 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v2_4_init_microcode()
157 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v2_4_init_microcode()
160 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v2_4_init_microcode()
[all …]
sdma_v3_0.c
182 * sDMA - System DMA
190 * (ring buffer, IBs, etc.), but sDMA has it's own
192 * used by the CP. sDMA supports copying data, writing
253 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_free_microcode()
254 release_firmware(adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
255 adev->sdma.instance[i].fw = NULL; in sdma_v3_0_free_microcode()
307 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
312 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v3_0_init_microcode()
315 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
318 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
[all …]
sdma_v5_2.c
119 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_destroy_inst_ctx()
120 release_firmware(adev->sdma.instance[i].fw); in sdma_v5_2_destroy_inst_ctx()
121 adev->sdma.instance[i].fw = NULL; in sdma_v5_2_destroy_inst_ctx()
127 memset((void*)adev->sdma.instance, 0, in sdma_v5_2_destroy_inst_ctx()
169 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v5_2_init_microcode()
173 err = sdma_v5_2_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v5_2_init_microcode()
177 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v5_2_init_microcode()
180 memcpy((void*)&adev->sdma.instance[i], in sdma_v5_2_init_microcode()
181 (void*)&adev->sdma.instance[0], in sdma_v5_2_init_microcode()
185 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v5_2_init_microcode()
[all …]
/kernel/linux/patches/linux-5.10/imx8mm_patch/patches/drivers/
0017_linux_drivers_dma_dmabuf.patch
1730 diff --git a/drivers/dma/imx-sdma.c b/drivers/dma/imx-sdma.c
1732 --- a/drivers/dma/imx-sdma.c
1733 +++ b/drivers/dma/imx-sdma.c
1788 struct sdma_engine *sdma;
1828 + * ecspi ERR009165 fixed should be done in sdma script
1855 /* clock ratio for AHB:SDMA core. 1:1 is 1, 2:1 is 0*/
1913 .name = "imx25-sdma",
1916 .name = "imx6q-sdma",
1919 + .name = "imx6sx-sdma",
1922 .name = "imx7d-sdma",
[all …]
/kernel/linux/linux-5.10/arch/arm/boot/dts/
omap2.dtsi
65 dmas = <&sdma 9 &sdma 10>;
103 sdma: dma-controller@0 { label
104 compatible = "ti,omap2420-sdma", "ti,omap-sdma";
123 dmas = <&sdma 27 &sdma 28>;
134 dmas = <&sdma 29 &sdma 30>;
143 dmas = <&sdma 35 &sdma 36 &sdma 37 &sdma 38
144 &sdma 39 &sdma 40 &sdma 41 &sdma 42>;
154 dmas = <&sdma 43 &sdma 44 &sdma 45 &sdma 46>;
170 dmas = <&sdma 13>;
179 dmas = <&sdma 49 &sdma 50>;
[all …]
omap3.dtsi
184 dmas = <&sdma 9 &sdma 10>;
210 dmas = <&sdma 65 &sdma 66>;
292 sdma: dma-controller@0 { label
293 compatible = "ti,omap3430-sdma", "ti,omap-sdma";
376 dmas = <&sdma 49 &sdma 50>;
386 dmas = <&sdma 51 &sdma 52>;
396 dmas = <&sdma 53 &sdma 54>;
406 dmas = <&sdma 27 &sdma 28>;
417 dmas = <&sdma 29 &sdma 30>;
428 dmas = <&sdma 25 &sdma 26>;
[all …]
/kernel/linux/linux-6.6/arch/arm/boot/dts/ti/omap/
omap2.dtsi
62 dmas = <&sdma 9 &sdma 10>;
100 sdma: dma-controller@0 { label
101 compatible = "ti,omap2420-sdma", "ti,omap-sdma";
136 dmas = <&sdma 35 &sdma 36 &sdma 37 &sdma 38
137 &sdma 39 &sdma 40 &sdma 41 &sdma 42>;
147 dmas = <&sdma 43 &sdma 44 &sdma 45 &sdma 46>;
163 dmas = <&sdma 13>;
172 dmas = <&sdma 49 &sdma 50>;
182 dmas = <&sdma 51 &sdma 52>;
192 dmas = <&sdma 53 &sdma 54>;
omap2430.dtsi
186 dmas = <&sdma 31>,
187 <&sdma 32>;
202 dmas = <&sdma 33>,
203 <&sdma 34>;
218 dmas = <&sdma 17>,
219 <&sdma 18>;
234 dmas = <&sdma 19>,
235 <&sdma 20>;
250 dmas = <&sdma 21>,
251 <&sdma 22>;
[all …]
/kernel/linux/linux-6.6/drivers/infiniband/hw/hfi1/
vnic_sdma.c
7 * This file contains HFI1 support for VNIC SDMA functionality
10 #include "sdma.h"
21 * @txreq: sdma transmit request
22 * @sdma: vnic sdma pointer
30 struct hfi1_vnic_sdma *sdma; member
42 struct hfi1_vnic_sdma *vnic_sdma = tx->sdma; in vnic_sdma_complete()
130 struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx]; in hfi1_vnic_send_dma()
147 tx->sdma = vnic_sdma; in hfi1_vnic_send_dma()
157 /* When -ECOMM, sdma callback will be called with ABORT status */ in hfi1_vnic_send_dma()
179 * hfi1_vnic_sdma_sleep - vnic sdma sleep function
[all …]
