Searched refs:vchan (Results 1 – 16 of 16) sorted by relevance

/drivers/dma/
sun6i-dma.c
138 struct sun6i_vchan *vchan; member
318 static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan, in sun6i_dma_dump_lli() argument
323 dev_dbg(chan2dev(&vchan->vc.chan), in sun6i_dma_dump_lli()
358 static int sun6i_dma_start_desc(struct sun6i_vchan *vchan) in sun6i_dma_start_desc() argument
360 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device); in sun6i_dma_start_desc()
361 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc); in sun6i_dma_start_desc()
362 struct sun6i_pchan *pchan = vchan->phy; in sun6i_dma_start_desc()
379 sun6i_dma_dump_lli(vchan, pchan->desc->v_lli); in sun6i_dma_start_desc()
401 struct sun6i_vchan *vchan; in sun6i_dma_tasklet() local
406 list_for_each_entry(vchan, &sdev->slave.channels, vc.chan.device_node) { in sun6i_dma_tasklet()
[all …]
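The sun6i hits above centre on sun6i_dma_start_desc(), which pulls the next issued descriptor off the virtual channel with vchan_next_desc(); the edma and hsu entries further down use the same step. A minimal sketch of that pattern, assuming a hypothetical driver (struct my_chan and my_start_desc are made-up names; the vchan_* helpers and struct virt_dma_chan come from drivers/dma/virt-dma.h):

        #include <linux/dmaengine.h>
        #include <linux/list.h>
        #include "virt-dma.h"                /* struct virt_dma_chan, vchan_next_desc() */

        struct my_chan {
                struct virt_dma_chan vc;     /* embedded virtual channel */
                struct virt_dma_desc *desc;  /* descriptor currently on the hardware */
        };

        /* Called with c->vc.lock held, e.g. from issue_pending or the IRQ path. */
        static void my_start_desc(struct my_chan *c)
        {
                struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

                if (!vd) {                   /* nothing issued: leave the channel idle */
                        c->desc = NULL;
                        return;
                }

                list_del(&vd->node);         /* take it off the issued list */
                c->desc = vd;
                /* ...program the controller registers from vd here... */
        }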
sun4i-dma.c
133 struct sun4i_dma_vchan *vchan; member
216 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); in sun4i_dma_free_chan_resources() local
218 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
222 struct sun4i_dma_vchan *vchan) in find_and_use_pchan() argument
232 if (vchan->is_dedicated) { in find_and_use_pchan()
243 pchan->vchan = vchan; in find_and_use_pchan()
260 pchan->vchan = NULL; in release_pchan()
324 struct sun4i_dma_vchan *vchan) in __execute_vchan_pending() argument
332 lockdep_assert_held(&vchan->vc.lock); in __execute_vchan_pending()
335 pchan = find_and_use_pchan(priv, vchan); in __execute_vchan_pending()
[all …]
dma-jz4740.c
116 struct virt_dma_chan vchan; member
137 return container_of(chan->vchan.chan.device, struct jz4740_dma_dev, in jz4740_dma_chan_get_dev()
143 return container_of(c, struct jz4740_dmaengine_chan, vchan.chan); in to_jz4740_dma_chan()
277 spin_lock_irqsave(&chan->vchan.lock, flags); in jz4740_dma_terminate_all()
281 vchan_get_all_descriptors(&chan->vchan, &head); in jz4740_dma_terminate_all()
282 spin_unlock_irqrestore(&chan->vchan.lock, flags); in jz4740_dma_terminate_all()
284 vchan_dma_desc_free_list(&chan->vchan, &head); in jz4740_dma_terminate_all()
300 vdesc = vchan_next_desc(&chan->vchan); in jz4740_dma_start_transfer()
340 spin_lock(&chan->vchan.lock); in jz4740_dma_chan_irq()
353 spin_unlock(&chan->vchan.lock); in jz4740_dma_chan_irq()
[all …]
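The jz4740 hits show the standard terminate_all sequence for virt-dma drivers, also visible in the fsl-edma, axi-dmac and edma entries below: collect every queued descriptor while holding vc.lock, then free the list after dropping the lock. A sketch continuing the hypothetical my_chan example above (my_terminate_all is a made-up name):

        static int my_terminate_all(struct dma_chan *chan)
        {
                struct my_chan *c = container_of(chan, struct my_chan, vc.chan);
                unsigned long flags;
                LIST_HEAD(head);

                spin_lock_irqsave(&c->vc.lock, flags);
                /* ...halt the hardware channel here... */
                c->desc = NULL;
                vchan_get_all_descriptors(&c->vc, &head);   /* drain submitted/issued lists */
                spin_unlock_irqrestore(&c->vc.lock, flags);

                vchan_dma_desc_free_list(&c->vc, &head);    /* free with the lock dropped */
                return 0;
        }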
idma64.c
111 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_stop_transfer()
118 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_start_transfer()
122 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
147 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_chan_irq()
164 spin_unlock_irqrestore(&idma64c->vchan.lock, flags); in idma64_chan_irq()
336 return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags); in idma64_prep_slave_sg()
344 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
345 if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc) in idma64_issue_pending()
347 spin_unlock_irqrestore(&idma64c->vchan.lock, flags); in idma64_issue_pending()
388 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_tx_status()
[all …]
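The idma64 hits show the other two halves of the descriptor lifecycle: a prep callback ends with vchan_tx_prep() to hand the descriptor to the virt-dma framework, and issue_pending moves queued descriptors onto the issued list with vchan_issue_pending() before kicking the hardware if it is idle. The jz4780 entry below follows the same vchan_tx_prep pattern for its slave_sg, cyclic and memcpy preps. A sketch, still using the hypothetical my_chan layout (it additionally needs <linux/slab.h>; a real driver would allocate its own, larger descriptor type rather than a bare virt_dma_desc):

        static struct dma_async_tx_descriptor *
        my_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                         unsigned int sg_len, enum dma_transfer_direction dir,
                         unsigned long flags, void *context)
        {
                struct my_chan *c = container_of(chan, struct my_chan, vc.chan);
                struct virt_dma_desc *vd;

                vd = kzalloc(sizeof(*vd), GFP_NOWAIT);       /* prep may run in atomic context */
                if (!vd)
                        return NULL;
                /* ...translate sgl into hardware link-list items here... */

                return vchan_tx_prep(&c->vc, vd, flags);     /* registers vd, returns &vd->tx */
        }

        static void my_issue_pending(struct dma_chan *chan)
        {
                struct my_chan *c = container_of(chan, struct my_chan, vc.chan);
                unsigned long flags;

                spin_lock_irqsave(&c->vc.lock, flags);
                if (vchan_issue_pending(&c->vc) && !c->desc)  /* work queued, hardware idle */
                        my_start_desc(c);
                spin_unlock_irqrestore(&c->vc.lock, flags);
        }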
dma-jz4780.c
126 struct virt_dma_chan vchan; member
156 return container_of(chan, struct jz4780_dma_chan, vchan.chan); in to_jz4780_dma_chan()
168 return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev, in jz4780_dma_chan_parent()
346 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_slave_sg()
396 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_cyclic()
423 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_memcpy()
434 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
490 spin_lock_irqsave(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
492 if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc) in jz4780_dma_issue_pending()
495 spin_unlock_irqrestore(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
[all …]
fsl-edma.c
148 struct virt_dma_chan vchan; member
220 return container_of(chan, struct fsl_edma_chan, vchan.chan); in to_fsl_edma_chan()
231 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
240 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
249 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
254 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
298 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
301 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
302 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
303 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
[all …]
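fsl-edma is the one set of hits here where the embedded channel's chan_id feeds back into hardware routing: line 254 computes the offset within a DMAMUX as chan_id % chans_per_mux, and the mux itself is presumably selected by chan_id / chans_per_mux, which the truncated hits do not show. As a purely illustrative worked example, if 32 channels were split across two muxes (chans_per_mux = 16), chan_id 21 would land in the second mux at offset 21 % 16 = 5.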
dma-axi-dmac.c
96 struct virt_dma_chan vchan; member
128 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
134 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
195 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
275 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
286 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
298 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
301 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
303 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
305 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all()
[all …]
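The axi-dmac interrupt-handler hits (lines 275 and 286 above) follow the usual virt-dma completion shape: take vc.lock in the IRQ path, retire the finished descriptor, and start the next one. vchan_cookie_complete() is the virt-dma helper normally used for the retire step, though it does not appear in the truncated hits; the sketch below continues the hypothetical my_chan example.

        /* Hardware-completion path, e.g. called from the driver's IRQ handler. */
        static void my_chan_complete(struct my_chan *c)
        {
                spin_lock(&c->vc.lock);
                if (c->desc) {
                        vchan_cookie_complete(c->desc);   /* marks the cookie done, schedules the callback */
                        c->desc = NULL;
                }
                my_start_desc(c);                         /* kick the next issued descriptor, if any */
                spin_unlock(&c->vc.lock);
        }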
edma.c
210 struct virt_dma_chan vchan; member
729 return container_of(c, struct edma_chan, vchan.chan); in to_edma_chan()
748 struct device *dev = echan->vchan.chan.device->dev; in edma_execute()
753 vdesc = vchan_next_desc(&echan->vchan); in edma_execute()
842 spin_lock_irqsave(&echan->vchan.lock, flags); in edma_terminate_all()
862 vchan_get_all_descriptors(&echan->vchan, &head); in edma_terminate_all()
863 spin_unlock_irqrestore(&echan->vchan.lock, flags); in edma_terminate_all()
864 vchan_dma_desc_free_list(&echan->vchan, &head); in edma_terminate_all()
1118 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags); in edma_prep_slave_sg()
1231 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags); in edma_prep_dma_memcpy()
[all …]
pxa_dma.c
102 struct pxad_chan *vchan; member
151 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
159 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
166 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
445 if (!phy->vchan) { in lookup_phy()
446 phy->vchan = pchan; in lookup_phy()
486 chan->phy->vchan = NULL; in pxad_free_phy()
516 if (!phy->vchan) in phy_enable()
519 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_enable()
523 pdev = to_pxad_dev(phy->vchan->vc.chan.device); in phy_enable()
[all …]
mmp_pdma.c
122 struct mmp_pdma_chan *vchan; member
154 if (!phy->vchan) in enable_chan()
157 reg = DRCMR(phy->vchan->drcmr); in enable_chan()
161 if (phy->vchan->byte_align) in enable_chan()
194 if ((dcsr & DCSR_BUSERR) && (phy->vchan)) in clear_chan_irq()
195 dev_warn(phy->vchan->dev, "DCSR_BUSERR\n"); in clear_chan_irq()
207 tasklet_schedule(&phy->vchan->tasklet); in mmp_pdma_chan_handler()
259 if (!phy->vchan) { in lookup_phy()
260 phy->vchan = pchan; in lookup_phy()
286 pchan->phy->vchan = NULL; in mmp_pdma_free_phy()
k3dma.c
89 struct k3_dma_chan *vchan; member
204 c = p->vchan; in k3_dma_int_handler()
277 p->vchan = NULL; in k3_dma_tasklet()
288 if (p->vchan == NULL && !list_empty(&d->chan_pending)) { in k3_dma_tasklet()
295 p->vchan = c; in k3_dma_tasklet()
305 c = p->vchan; in k3_dma_tasklet()
589 p->vchan = NULL; in k3_dma_terminate_all()
idma64.h
130 struct virt_dma_chan vchan; member
145 return container_of(chan, struct idma64_chan, vchan.chan); in to_idma64_chan()
zx296702_dma.c
115 struct zx_dma_chan *vchan; member
237 p->vchan = NULL; in zx_dma_task()
248 if (!p->vchan) { in zx_dma_task()
253 p->vchan = c; in zx_dma_task()
264 c = p->vchan; in zx_dma_task()
289 c = p->vchan; in zx_dma_int_handler()
684 p->vchan = NULL; in zx_dma_terminate_all()
sa11x0-dma.c
109 struct sa11x0_dma_chan *vchan; member
279 c = p->vchan; in sa11x0_dma_irq()
350 p->vchan = NULL; in sa11x0_dma_tasklet()
360 if (p->vchan == NULL && !list_empty(&d->chan_pending)) { in sa11x0_dma_tasklet()
368 p->vchan = c; in sa11x0_dma_tasklet()
378 c = p->vchan; in sa11x0_dma_tasklet()
791 p->vchan = NULL; in sa11x0_dma_device_terminate_all()
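pxa_dma, mmp_pdma, k3dma, zx296702_dma and sa11x0-dma keep the converse pointer: each physical channel records which virtual channel currently owns it (phy->vchan), set when a pending vchan is scheduled onto free hardware and cleared on completion or terminate_all. A minimal sketch of that claim/release step, with hypothetical my_phy names:

        /*
         * Hypothetical physical-channel bookkeeping in the style of the
         * drivers above: a fixed pool of hardware channels, each
         * remembering the virtual channel that currently owns it.
         */
        struct my_phy {
                unsigned int id;
                struct my_chan *vchan;       /* owner, NULL when the channel is free */
        };

        /* Claim a free physical channel for @c; called under the driver's lock. */
        static struct my_phy *my_get_phy(struct my_phy *pool, unsigned int nr_phy,
                                         struct my_chan *c)
        {
                unsigned int i;

                for (i = 0; i < nr_phy; i++) {
                        if (!pool[i].vchan) {
                                pool[i].vchan = c;       /* pair phy <-> vchan */
                                return &pool[i];
                        }
                }
                return NULL;                             /* all hardware channels busy */
        }

        /* Release on completion or terminate_all, as in k3_dma_terminate_all(). */
        static void my_put_phy(struct my_phy *p)
        {
                p->vchan = NULL;
        }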
/drivers/dma/hsu/
hsu.c
116 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
134 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_chan_get_sr()
136 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_chan_get_sr()
170 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_irq()
183 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_irq()
238 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
246 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
247 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
249 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
290 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
[all …]
hsu.h
81 struct virt_dma_chan vchan; member
94 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
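hsu.h and idma64.h show the remaining boilerplate: the driver channel embeds a struct virt_dma_chan and converts back from the generic struct dma_chan with container_of(). At probe time each channel is registered with vchan_init(), which also expects a desc_free callback so the framework can free descriptors returned through the vchan_* helpers; neither call appears in the hits above, but both live in virt-dma.h/virt-dma.c. A closing sketch along those lines, again with hypothetical names (kfree needs <linux/slab.h>):

        /* Back-conversion helper, mirroring to_hsu_dma_chan()/to_idma64_chan(). */
        static inline struct my_chan *to_my_chan(struct dma_chan *chan)
        {
                return container_of(chan, struct my_chan, vc.chan);
        }

        /* virt-dma frees completed or terminated descriptors through this callback. */
        static void my_desc_free(struct virt_dma_desc *vd)
        {
                kfree(vd);                   /* a real driver frees its own descriptor type */
        }

        /* Per-channel setup during probe, after the dma_device has been set up. */
        static void my_chan_setup(struct my_chan *c, struct dma_device *dd)
        {
                c->desc = NULL;
                c->vc.desc_free = my_desc_free;
                vchan_init(&c->vc, dd);      /* hooks c->vc.chan into dd->channels */
        }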