
Searched refs: vchan (results 1 – 17 of 17), sorted by relevance

/drivers/dma/
sun6i-dma.c
   138: struct sun6i_vchan *vchan;   member
   305: static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan,   in sun6i_dma_dump_lli() argument
   310: dev_dbg(chan2dev(&vchan->vc.chan),   in sun6i_dma_dump_lli()
   345: static int sun6i_dma_start_desc(struct sun6i_vchan *vchan)   in sun6i_dma_start_desc() argument
   347: struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device);   in sun6i_dma_start_desc()
   348: struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc);   in sun6i_dma_start_desc()
   349: struct sun6i_pchan *pchan = vchan->phy;   in sun6i_dma_start_desc()
   366: sun6i_dma_dump_lli(vchan, pchan->desc->v_lli);   in sun6i_dma_start_desc()
   371: vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE;   in sun6i_dma_start_desc()
   376: irq_val |= vchan->irq_type << (irq_offset * DMA_IRQ_CHAN_WIDTH);   in sun6i_dma_start_desc()
   [all …]
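The sun6i hits show the transfer-start step common to virt-dma based drivers: peek at the next issued virtual descriptor and commit it to the physical channel. A minimal sketch of that pattern follows; the my_* types and to_my_desc() are hypothetical stand-ins shaped like the driver above, and only the virt_dma_* structs and vchan_next_desc() are real API (drivers/dma/virt-dma.h):

#include <linux/dmaengine.h>
#include "virt-dma.h"                   /* drivers/dma/virt-dma.h */

/* Hypothetical driver types for this sketch. */
struct my_desc  { struct virt_dma_desc vdesc; };
struct my_pchan { struct my_desc *desc; };
struct my_vchan { struct virt_dma_chan vc; struct my_pchan *phy; };

static struct my_desc *to_my_desc(struct virt_dma_desc *vd)
{
        return container_of(vd, struct my_desc, vdesc);
}

/* Called with vc.lock held, like sun6i_dma_start_desc() above. */
static int my_dma_start_desc(struct my_vchan *vchan)
{
        struct virt_dma_desc *vd = vchan_next_desc(&vchan->vc);
        struct my_pchan *pchan = vchan->phy;

        if (!vd) {
                pchan->desc = NULL;     /* nothing issued: go idle */
                return -EAGAIN;
        }

        list_del(&vd->node);            /* take it off the issued list */
        pchan->desc = to_my_desc(vd);

        /* ...program the hardware from pchan->desc here... */
        return 0;
}

vchan_next_desc() only peeks at the issued list, so the caller does the list_del() itself once it commits the descriptor to hardware.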
sun4i-dma.c
   133: struct sun4i_dma_vchan *vchan;   member
   216: struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan);   in sun4i_dma_free_chan_resources() local
   218: vchan_free_chan_resources(&vchan->vc);   in sun4i_dma_free_chan_resources()
   222: struct sun4i_dma_vchan *vchan)   in find_and_use_pchan() argument
   232: if (vchan->is_dedicated) {   in find_and_use_pchan()
   243: pchan->vchan = vchan;   in find_and_use_pchan()
   260: pchan->vchan = NULL;   in release_pchan()
   324: struct sun4i_dma_vchan *vchan)   in __execute_vchan_pending() argument
   332: lockdep_assert_held(&vchan->vc.lock);   in __execute_vchan_pending()
   335: pchan = find_and_use_pchan(priv, vchan);   in __execute_vchan_pending()
   [all …]
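sun4i-dma.c shows the many-virtual-onto-few-physical mapping: a physical channel is claimed by writing the owning vchan into pchan->vchan and released by clearing it. A hedged sketch of that claim/release pair, with hypothetical my_* types redeclared for just this sketch:

#include <linux/spinlock.h>

struct my_vchan;
struct my_pchan { struct my_vchan *vchan; /* owner, NULL = free */ };
struct my_dev {
        spinlock_t lock;
        int num_pchans;
        struct my_pchan *pchans;
};

/* Returns NULL when every pchan is busy; the vchan then stays queued
 * until a later release lets the pending work run again. */
static struct my_pchan *my_find_and_use_pchan(struct my_dev *dev,
                                              struct my_vchan *vchan)
{
        struct my_pchan *pchan = NULL;
        unsigned long flags;
        int i;

        spin_lock_irqsave(&dev->lock, flags);
        for (i = 0; i < dev->num_pchans; i++) {
                if (!dev->pchans[i].vchan) {
                        pchan = &dev->pchans[i];
                        pchan->vchan = vchan;   /* claim: marks it busy */
                        break;
                }
        }
        spin_unlock_irqrestore(&dev->lock, flags);

        return pchan;
}

static void my_release_pchan(struct my_pchan *pchan)
{
        pchan->vchan = NULL;            /* free again */
}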
dma-jz4740.c
   114: struct virt_dma_chan vchan;   member
   135: return container_of(chan->vchan.chan.device, struct jz4740_dma_dev,   in jz4740_dma_chan_get_dev()
   141: return container_of(c, struct jz4740_dmaengine_chan, vchan.chan);   in to_jz4740_dma_chan()
   275: spin_lock_irqsave(&chan->vchan.lock, flags);   in jz4740_dma_terminate_all()
   279: vchan_get_all_descriptors(&chan->vchan, &head);   in jz4740_dma_terminate_all()
   280: spin_unlock_irqrestore(&chan->vchan.lock, flags);   in jz4740_dma_terminate_all()
   282: vchan_dma_desc_free_list(&chan->vchan, &head);   in jz4740_dma_terminate_all()
   298: vdesc = vchan_next_desc(&chan->vchan);   in jz4740_dma_start_transfer()
   338: spin_lock(&chan->vchan.lock);   in jz4740_dma_chan_irq()
   351: spin_unlock(&chan->vchan.lock);   in jz4740_dma_chan_irq()
   [all …]
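dma-jz4740.c (lines 275–282) is the canonical virt-dma terminate_all shape, repeated almost verbatim by fsl-edma, axi-dmac, stm32 and edma below: collect every descriptor under vchan.lock, then free the batch outside the lock. A sketch, with struct my_chan and to_my_chan() as hypothetical driver glue:

#include <linux/dmaengine.h>
#include "virt-dma.h"

struct my_chan { struct virt_dma_chan vchan; };

static struct my_chan *to_my_chan(struct dma_chan *c)
{
        return container_of(c, struct my_chan, vchan.chan);
}

static int my_dma_terminate_all(struct dma_chan *c)
{
        struct my_chan *chan = to_my_chan(c);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&chan->vchan.lock, flags);
        /* ...stop the hardware, drop the in-flight descriptor... */
        vchan_get_all_descriptors(&chan->vchan, &head);
        spin_unlock_irqrestore(&chan->vchan.lock, flags);

        /* Free (and complete) outside the lock: descriptor free
         * callbacks must not run under vchan.lock. */
        vchan_dma_desc_free_list(&chan->vchan, &head);

        return 0;
}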
fsl-edma.c
   152: struct virt_dma_chan vchan;   member
   227: return container_of(chan, struct fsl_edma_chan, vchan.chan);   in to_fsl_edma_chan()
   238: u32 ch = fsl_chan->vchan.chan.chan_id;   in fsl_edma_enable_request()
   247: u32 ch = fsl_chan->vchan.chan.chan_id;   in fsl_edma_disable_request()
   256: u32 ch = fsl_chan->vchan.chan.chan_id;   in fsl_edma_chan_mux()
   261: ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux;   in fsl_edma_chan_mux()
   305: spin_lock_irqsave(&fsl_chan->vchan.lock, flags);   in fsl_edma_terminate_all()
   309: vchan_get_all_descriptors(&fsl_chan->vchan, &head);   in fsl_edma_terminate_all()
   310: spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);   in fsl_edma_terminate_all()
   311: vchan_dma_desc_free_list(&fsl_chan->vchan, &head);   in fsl_edma_terminate_all()
   [all …]
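fsl-edma.c adds one more idiom: the channel number assigned by the dmaengine core (vchan.chan.chan_id) doubles as the hardware channel and mux index, as in fsl_edma_chan_mux() at line 261 above. A tiny sketch of that id arithmetic, reusing struct my_chan from the sketch above; my_chan_mux_slot() is hypothetical:

static unsigned int my_chan_mux_slot(struct my_chan *chan,
                                     unsigned int chans_per_mux)
{
        /* chan_id is handed out by the dmaengine core at registration;
         * these drivers reuse it as the hardware channel index. */
        u32 ch = chan->vchan.chan.chan_id;

        return ch % chans_per_mux;      /* slot within its mux block */
}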
idma64.c
   111: struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);   in idma64_stop_transfer()
   118: struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);   in idma64_start_transfer()
   122: vdesc = vchan_next_desc(&idma64c->vchan);   in idma64_start_transfer()
   147: spin_lock_irqsave(&idma64c->vchan.lock, flags);   in idma64_chan_irq()
   164: spin_unlock_irqrestore(&idma64c->vchan.lock, flags);   in idma64_chan_irq()
   331: return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags);   in idma64_prep_slave_sg()
   339: spin_lock_irqsave(&idma64c->vchan.lock, flags);   in idma64_issue_pending()
   340: if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc)   in idma64_issue_pending()
   342: spin_unlock_irqrestore(&idma64c->vchan.lock, flags);   in idma64_issue_pending()
   383: spin_lock_irqsave(&idma64c->vchan.lock, flags);   in idma64_tx_status()
   [all …]
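idma64.c shows the standard device_issue_pending shape (lines 339–342): move submitted descriptors to the issued list and kick the hardware only if the channel is idle. Sketch below; my_dma_start_transfer() is hypothetical, and struct my_chan is assumed to have grown a 'desc' pointer tracking the in-flight descriptor:

static void my_dma_issue_pending(struct dma_chan *c)
{
        struct my_chan *chan = to_my_chan(c);
        unsigned long flags;

        spin_lock_irqsave(&chan->vchan.lock, flags);
        /* vchan_issue_pending() returns true when descriptors moved
         * onto the issued list; only start if nothing is in flight. */
        if (vchan_issue_pending(&chan->vchan) && !chan->desc)
                my_dma_start_transfer(chan);
        spin_unlock_irqrestore(&chan->vchan.lock, flags);
}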
dma-jz4780.c
   126: struct virt_dma_chan vchan;   member
   156: return container_of(chan, struct jz4780_dma_chan, vchan.chan);   in to_jz4780_dma_chan()
   168: return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev,   in jz4780_dma_chan_parent()
   348: return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);   in jz4780_dma_prep_slave_sg()
   400: return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);   in jz4780_dma_prep_dma_cyclic()
   427: return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);   in jz4780_dma_prep_dma_memcpy()
   438: vdesc = vchan_next_desc(&jzchan->vchan);   in jz4780_dma_begin()
   494: spin_lock_irqsave(&jzchan->vchan.lock, flags);   in jz4780_dma_issue_pending()
   496: if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc)   in jz4780_dma_issue_pending()
   499: spin_unlock_irqrestore(&jzchan->vchan.lock, flags);   in jz4780_dma_issue_pending()
   [all …]
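dma-jz4780.c ends all three prep callbacks (slave_sg, cyclic, memcpy) identically: the freshly built descriptor is handed to vchan_tx_prep(), which fills in the dma_async_tx_descriptor and defers cookie assignment to tx_submit(). A sketch of one such callback; my_desc_alloc() is a hypothetical builder that translates the scatterlist into hardware segments:

static struct dma_async_tx_descriptor *
my_dma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
                     unsigned int sg_len,
                     enum dma_transfer_direction direction,
                     unsigned long flags, void *context)
{
        struct my_chan *chan = to_my_chan(c);
        struct my_desc *desc;

        desc = my_desc_alloc(chan, sgl, sg_len, direction);
        if (!desc)
                return NULL;

        /* Links desc->vdesc to the vchan; the cookie appears later,
         * when the client calls dmaengine_submit(). */
        return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}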
dma-axi-dmac.c
    96: struct virt_dma_chan vchan;   member
   128: return container_of(chan->vchan.chan.device, struct axi_dmac,   in chan_to_axi_dmac()
   134: return container_of(c, struct axi_dmac_chan, vchan.chan);   in to_axi_dmac_chan()
   195: vdesc = vchan_next_desc(&chan->vchan);   in axi_dmac_start_transfer()
   278: spin_lock(&dmac->chan.vchan.lock);   in axi_dmac_interrupt_handler()
   289: spin_unlock(&dmac->chan.vchan.lock);   in axi_dmac_interrupt_handler()
   301: spin_lock_irqsave(&chan->vchan.lock, flags);   in axi_dmac_terminate_all()
   304: vchan_get_all_descriptors(&chan->vchan, &head);   in axi_dmac_terminate_all()
   306: spin_unlock_irqrestore(&chan->vchan.lock, flags);   in axi_dmac_terminate_all()
   308: vchan_dma_desc_free_list(&chan->vchan, &head);   in axi_dmac_terminate_all()
   [all …]
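The axi-dmac interrupt handler takes vchan.lock with a plain spin_lock (it already runs in hard-irq context) to complete the finished descriptor and chain the next one. A hedged sketch of that completion path; vchan_cookie_complete() is the real virt-dma helper that marks the cookie done and schedules the client callback, while the rest is hypothetical glue reusing the my_* types above:

static irqreturn_t my_dmac_interrupt(int irq, void *devid)
{
        struct my_chan *chan = devid;

        spin_lock(&chan->vchan.lock);
        if (chan->desc) {
                vchan_cookie_complete(&chan->desc->vdesc);
                chan->desc = NULL;
                my_dma_start_transfer(chan);    /* chain the next one */
        }
        spin_unlock(&chan->vchan.lock);

        return IRQ_HANDLED;
}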
stm32-dma.c
   164: struct virt_dma_chan vchan;   member
   186: return container_of(chan->vchan.chan.device, struct stm32_dma_device,   in stm32_dma_get_dev()
   192: return container_of(c, struct stm32_dma_chan, vchan.chan);   in to_stm32_dma_chan()
   202: return &chan->vchan.chan.dev->device;   in chan2dev()
   392: spin_lock_irqsave(&chan->vchan.lock, flags);   in stm32_dma_terminate_all()
   399: vchan_get_all_descriptors(&chan->vchan, &head);   in stm32_dma_terminate_all()
   400: spin_unlock_irqrestore(&chan->vchan.lock, flags);   in stm32_dma_terminate_all()
   401: vchan_dma_desc_free_list(&chan->vchan, &head);   in stm32_dma_terminate_all()
   438: vdesc = vchan_next_desc(&chan->vchan);   in stm32_dma_start_transfer()
   532: spin_lock(&chan->vchan.lock);   in stm32_dma_chan_irq()
   [all …]
edma.c
   213: struct virt_dma_chan vchan;   member
   738: return container_of(c, struct edma_chan, vchan.chan);   in to_edma_chan()
   757: struct device *dev = echan->vchan.chan.device->dev;   in edma_execute()
   762: vdesc = vchan_next_desc(&echan->vchan);   in edma_execute()
   851: spin_lock_irqsave(&echan->vchan.lock, flags);   in edma_terminate_all()
   871: vchan_get_all_descriptors(&echan->vchan, &head);   in edma_terminate_all()
   872: spin_unlock_irqrestore(&echan->vchan.lock, flags);   in edma_terminate_all()
   873: vchan_dma_desc_free_list(&echan->vchan, &head);   in edma_terminate_all()
   882: vchan_synchronize(&echan->vchan);   in edma_synchronize()
  1135: return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);   in edma_prep_slave_sg()
   [all …]
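edma.c is the only hit here wiring up device_synchronize (line 882): after terminate_all, a descriptor callback may still be in flight, and vchan_synchronize() waits for the vchan's completion work to drain. The whole callback is typically one line:

static void my_dma_synchronize(struct dma_chan *c)
{
        /* No descriptor callback can be running after this returns. */
        vchan_synchronize(&to_my_chan(c)->vchan);
}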
pxa_dma.c
   103: struct pxad_chan *vchan;   member
   155: dev_vdbg(&phy->vchan->vc.chan.dev->device, \
   163: dev_vdbg(&phy->vchan->vc.chan.dev->device, \
   170: dev_vdbg(&phy->vchan->vc.chan.dev->device, \
   448: if (!phy->vchan) {   in lookup_phy()
   449: phy->vchan = pchan;   in lookup_phy()
   489: chan->phy->vchan = NULL;   in pxad_free_phy()
   519: if (!phy->vchan)   in phy_enable()
   522: dev_dbg(&phy->vchan->vc.chan.dev->device,   in phy_enable()
   526: pdev = to_pxad_dev(phy->vchan->vc.chan.device);   in phy_enable()
   [all …]
mmp_pdma.c
   122: struct mmp_pdma_chan *vchan;   member
   154: if (!phy->vchan)   in enable_chan()
   157: reg = DRCMR(phy->vchan->drcmr);   in enable_chan()
   161: if (phy->vchan->byte_align)   in enable_chan()
   194: if ((dcsr & DCSR_BUSERR) && (phy->vchan))   in clear_chan_irq()
   195: dev_warn(phy->vchan->dev, "DCSR_BUSERR\n");   in clear_chan_irq()
   207: tasklet_schedule(&phy->vchan->tasklet);   in mmp_pdma_chan_handler()
   259: if (!phy->vchan) {   in lookup_phy()
   260: phy->vchan = pchan;   in lookup_phy()
   286: pchan->phy->vchan = NULL;   in mmp_pdma_free_phy()
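In pxa_dma.c and mmp_pdma.c the member named vchan sits on the physical channel and points back at its current owner, so the per-phy interrupt handler can route completion to the right virtual channel. A sketch of the mmp_pdma handler shape; the my_* types are redeclared minimally here and my_clear_chan_irq() is a hypothetical ack helper:

#include <linux/interrupt.h>

struct my_owner { struct tasklet_struct tasklet; /* ... */ };
struct my_phy   { struct my_owner *vchan; /* owner, NULL when free */ };

/* Hypothetical: ack the hardware, nonzero if the irq was spurious. */
static int my_clear_chan_irq(struct my_phy *phy);

static irqreturn_t my_pdma_chan_handler(int irq, void *dev_id)
{
        struct my_phy *phy = dev_id;

        /* Unowned phy or no interrupt condition: not ours. */
        if (!phy->vchan || my_clear_chan_irq(phy) != 0)
                return IRQ_NONE;

        /* Defer descriptor bookkeeping to the owner's tasklet. */
        tasklet_schedule(&phy->vchan->tasklet);

        return IRQ_HANDLED;
}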
idma64.h
   131: struct virt_dma_chan vchan;   member
   146: return container_of(chan, struct idma64_chan, vchan.chan);   in to_idma64_chan()
zx296702_dma.c
   115: struct zx_dma_chan *vchan;   member
   237: p->vchan = NULL;   in zx_dma_task()
   248: if (!p->vchan) {   in zx_dma_task()
   253: p->vchan = c;   in zx_dma_task()
   264: c = p->vchan;   in zx_dma_task()
   289: c = p->vchan;   in zx_dma_int_handler()
   684: p->vchan = NULL;   in zx_dma_terminate_all()
k3dma.c
    99: struct k3_dma_chan *vchan;   member
   222: c = p->vchan;   in k3_dma_int_handler()
   304: p->vchan = NULL;   in k3_dma_tasklet()
   315: if (p->vchan == NULL && !list_empty(&d->chan_pending)) {   in k3_dma_tasklet()
   322: p->vchan = c;   in k3_dma_tasklet()
   332: c = p->vchan;   in k3_dma_tasklet()
   721: p->vchan = NULL;   in k3_dma_terminate_all()
sa11x0-dma.c
   109: struct sa11x0_dma_chan *vchan;   member
   279: c = p->vchan;   in sa11x0_dma_irq()
   350: p->vchan = NULL;   in sa11x0_dma_tasklet()
   360: if (p->vchan == NULL && !list_empty(&d->chan_pending)) {   in sa11x0_dma_tasklet()
   368: p->vchan = c;   in sa11x0_dma_tasklet()
   378: c = p->vchan;   in sa11x0_dma_tasklet()
   791: p->vchan = NULL;   in sa11x0_dma_device_terminate_all()
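zx296702_dma.c, k3dma.c and sa11x0-dma.c share a scheduler tasklet: every idle physical channel (p->vchan == NULL) is handed to the first virtual channel on the device's pending list. A sketch of that loop, with hypothetical my_* types carrying the same fields (standalone sketch; types redeclared with only what is used here):

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_phy;
struct my_chan { struct my_phy *phy; struct list_head node; };
struct my_phy  { struct my_chan *vchan; /* owner, NULL when idle */ };
struct my_dev {
        spinlock_t lock;
        unsigned int num_phy;
        struct my_phy *phy;
        struct list_head chan_pending;
};

static void my_dma_tasklet(unsigned long arg)
{
        struct my_dev *d = (struct my_dev *)arg;
        unsigned int i;

        spin_lock_irq(&d->lock);
        for (i = 0; i < d->num_phy; i++) {
                struct my_phy *p = &d->phy[i];

                if (p->vchan == NULL && !list_empty(&d->chan_pending)) {
                        struct my_chan *c;

                        c = list_first_entry(&d->chan_pending,
                                             struct my_chan, node);
                        list_del_init(&c->node);        /* not pending */
                        p->vchan = c;                   /* pair them up */
                        c->phy = p;
                }
        }
        spin_unlock_irq(&d->lock);

        /* ...then start a transfer on each freshly paired channel... */
}

terminate_all in these drivers simply clears p->vchan (lines 684, 721 and 791 above), returning the phy to the pool.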
/drivers/dma/hsu/
hsu.c
   116: vdesc = vchan_next_desc(&hsuc->vchan);   in hsu_dma_start_transfer()
   163: spin_lock_irqsave(&hsuc->vchan.lock, flags);   in hsu_dma_get_status()
   165: spin_unlock_irqrestore(&hsuc->vchan.lock, flags);   in hsu_dma_get_status()
   217: spin_lock_irqsave(&hsuc->vchan.lock, flags);   in hsu_dma_do_irq()
   230: spin_unlock_irqrestore(&hsuc->vchan.lock, flags);   in hsu_dma_do_irq()
   287: return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);   in hsu_dma_prep_slave_sg()
   295: spin_lock_irqsave(&hsuc->vchan.lock, flags);   in hsu_dma_issue_pending()
   296: if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc)   in hsu_dma_issue_pending()
   298: spin_unlock_irqrestore(&hsuc->vchan.lock, flags);   in hsu_dma_issue_pending()
   331: spin_lock_irqsave(&hsuc->vchan.lock, flags);   in hsu_dma_tx_status()
   [all …]
hsu.h
    86: struct virt_dma_chan vchan;   member
    99: return container_of(chan, struct hsu_dma_chan, vchan.chan);   in to_hsu_dma_chan()