
Searched refs:dchan (Results 1 – 13 of 13) sorted by relevance

/drivers/rapidio/devices/
tsi721_dma.c
52 return container_of(chan, struct tsi721_bdma_chan, dchan); in to_tsi721_chan()
77 struct device *dev = bdma_chan->dchan.device->dev; in tsi721_bdma_ch_init()
83 struct tsi721_device *priv = to_tsi721(bdma_chan->dchan.device); in tsi721_bdma_ch_init()
218 struct tsi721_device *priv = to_tsi721(bdma_chan->dchan.device); in tsi721_bdma_ch_free()
242 dma_free_coherent(bdma_chan->dchan.device->dev, in tsi721_bdma_ch_free()
248 dma_free_coherent(bdma_chan->dchan.device->dev, in tsi721_bdma_ch_free()
313 dev_err(bdma_chan->dchan.device->dev, in tsi721_start_dma()
319 dev_err(bdma_chan->dchan.device->dev, in tsi721_start_dma()
324 dev_dbg(bdma_chan->dchan.device->dev, in tsi721_start_dma()
416 struct dma_chan *dchan = desc->txd.chan; in tsi721_submit_sg() local
[all …]
tsi721.h
674 struct dma_chan dchan; member
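
Every hit above follows the same dmaengine provider idiom: the driver embeds the core's struct dma_chan (named dchan, see tsi721.h line 674) in its own channel structure and recovers the wrapper with container_of(), as to_tsi721_chan() does at tsi721_dma.c line 52. A minimal sketch of that pattern; foo_dma_chan, to_foo_chan() and the regs field are hypothetical stand-ins, not anything taken from the tsi721 driver:

#include <linux/dmaengine.h>

/* Hypothetical driver-private channel that embeds the generic channel. */
struct foo_dma_chan {
	struct dma_chan dchan;	/* generic dmaengine channel, as in tsi721.h */
	void __iomem *regs;	/* example driver-private state */
};

/* The dmaengine core hands every callback a struct dma_chan *; convert it
 * back to the wrapping driver structure. */
static inline struct foo_dma_chan *to_foo_chan(struct dma_chan *chan)
{
	return container_of(chan, struct foo_dma_chan, dchan);
}
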
/drivers/dma/
nbpfaxi.c
571 static void nbpf_issue_pending(struct dma_chan *dchan) in nbpf_issue_pending() argument
573 struct nbpf_channel *chan = nbpf_to_chan(dchan); in nbpf_issue_pending()
576 dev_dbg(dchan->device->dev, "Entry %s()\n", __func__); in nbpf_issue_pending()
595 static enum dma_status nbpf_tx_status(struct dma_chan *dchan, in nbpf_tx_status() argument
598 struct nbpf_channel *chan = nbpf_to_chan(dchan); in nbpf_tx_status()
599 enum dma_status status = dma_cookie_status(dchan, cookie, state); in nbpf_tx_status()
610 dev_dbg(dchan->device->dev, "%s(): residue %u\n", __func__, in nbpf_tx_status()
661 struct dma_chan *dchan = &chan->dma_chan; in nbpf_desc_page_alloc() local
669 struct device *dev = dchan->device->dev; in nbpf_desc_page_alloc()
682 ldesc->hwdesc_dma_addr = dma_map_single(dchan->device->dev, in nbpf_desc_page_alloc()
[all …]
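
The nbpfaxi hits also show callbacks reaching device state through dchan->device->dev, for example when a CPU-built hardware descriptor is mapped for the controller in nbpf_desc_page_alloc() around line 682. A small sketch of that mapping step; foo_hwdesc, its fields and the DMA_TO_DEVICE direction are assumptions for illustration, not the nbpfaxi layout:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/* Hypothetical hardware descriptor laid out for the controller. */
struct foo_hwdesc {
	u32 cfg;
	u32 src;
	u32 dst;
	u32 len;
};

/* Map the descriptor so the device can fetch it, checking the result as
 * drivers are expected to; returns 0 on mapping failure. */
static dma_addr_t foo_map_hwdesc(struct dma_chan *dchan,
				 struct foo_hwdesc *hwdesc)
{
	struct device *dev = dchan->device->dev;
	dma_addr_t addr;

	addr = dma_map_single(dev, hwdesc, sizeof(*hwdesc), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, addr))
		return 0;
	return addr;
}
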
mmp_pdma.c
138 #define to_mmp_pdma_chan(dchan) \ argument
139 container_of(dchan, struct mmp_pdma_chan, chan)
391 static int mmp_pdma_alloc_chan_resources(struct dma_chan *dchan) in mmp_pdma_alloc_chan_resources() argument
393 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); in mmp_pdma_alloc_chan_resources()
398 chan->desc_pool = dma_pool_create(dev_name(&dchan->dev->device), in mmp_pdma_alloc_chan_resources()
425 static void mmp_pdma_free_chan_resources(struct dma_chan *dchan) in mmp_pdma_free_chan_resources() argument
427 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); in mmp_pdma_free_chan_resources()
444 mmp_pdma_prep_memcpy(struct dma_chan *dchan, in mmp_pdma_prep_memcpy() argument
452 if (!dchan) in mmp_pdma_prep_memcpy()
458 chan = to_mmp_pdma_chan(dchan); in mmp_pdma_prep_memcpy()
[all …]
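
The mmp_pdma hits show the resource-management pair a provider wires into struct dma_device: device_alloc_chan_resources() creates a descriptor dma_pool named after the channel (line 398) and device_free_chan_resources() tears it down. A rough sketch of that pairing; foo_chan, the descriptor size/alignment and the return value of 1 are assumptions, not values from mmp_pdma.c:

#include <linux/dmaengine.h>
#include <linux/dmapool.h>

/* Hypothetical per-channel state; desc_pool mirrors mmp_pdma's field. */
struct foo_chan {
	struct dma_chan chan;
	struct dma_pool *desc_pool;
};

#define to_foo_chan(dchan) container_of(dchan, struct foo_chan, chan)

/* .device_alloc_chan_resources: create a pool for hardware descriptors,
 * named after the channel as mmp_pdma_alloc_chan_resources() does. */
static int foo_alloc_chan_resources(struct dma_chan *dchan)
{
	struct foo_chan *chan = to_foo_chan(dchan);

	chan->desc_pool = dma_pool_create(dev_name(&dchan->dev->device),
					  dchan->device->dev,
					  64 /* assumed descriptor size */,
					  16 /* assumed alignment */, 0);
	if (!chan->desc_pool)
		return -ENOMEM;
	return 1;	/* descriptors made available up front */
}

/* .device_free_chan_resources: release whatever alloc set up. */
static void foo_free_chan_resources(struct dma_chan *dchan)
{
	struct foo_chan *chan = to_foo_chan(dchan);

	dma_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}
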
fsldma.c
370 int fsl_dma_external_start(struct dma_chan *dchan, int enable) in fsl_dma_external_start() argument
374 if (!dchan) in fsl_dma_external_start()
377 chan = to_fsl_chan(dchan); in fsl_dma_external_start()
694 static int fsl_dma_alloc_chan_resources(struct dma_chan *dchan) in fsl_dma_alloc_chan_resources() argument
696 struct fsldma_chan *chan = to_fsl_chan(dchan); in fsl_dma_alloc_chan_resources()
747 static void fsl_dma_free_chan_resources(struct dma_chan *dchan) in fsl_dma_free_chan_resources() argument
749 struct fsldma_chan *chan = to_fsl_chan(dchan); in fsl_dma_free_chan_resources()
764 fsl_dma_prep_memcpy(struct dma_chan *dchan, in fsl_dma_prep_memcpy() argument
772 if (!dchan) in fsl_dma_prep_memcpy()
778 chan = to_fsl_chan(dchan); in fsl_dma_prep_memcpy()
[all …]
pxa_dma.c
138 #define to_pxad_chan(dchan) \ argument
139 container_of(dchan, struct pxad_chan, vc.chan)
735 static int pxad_alloc_chan_resources(struct dma_chan *dchan) in pxad_alloc_chan_resources() argument
737 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_alloc_chan_resources()
743 chan->desc_pool = dma_pool_create(dma_chan_name(dchan), in pxad_alloc_chan_resources()
758 static void pxad_free_chan_resources(struct dma_chan *dchan) in pxad_free_chan_resources() argument
760 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_free_chan_resources()
874 static void pxad_issue_pending(struct dma_chan *dchan) in pxad_issue_pending() argument
876 struct pxad_chan *chan = to_pxad_chan(dchan); in pxad_issue_pending()
965 pxad_prep_memcpy(struct dma_chan *dchan, in pxad_prep_memcpy() argument
[all …]
xgene-dma.c
202 #define to_dma_chan(dchan) \ argument
203 container_of(dchan, struct xgene_dma_chan, dma_chan)
833 static int xgene_dma_alloc_chan_resources(struct dma_chan *dchan) in xgene_dma_alloc_chan_resources() argument
835 struct xgene_dma_chan *chan = to_dma_chan(dchan); in xgene_dma_alloc_chan_resources()
870 static void xgene_dma_free_chan_resources(struct dma_chan *dchan) in xgene_dma_free_chan_resources() argument
872 struct xgene_dma_chan *chan = to_dma_chan(dchan); in xgene_dma_free_chan_resources()
897 struct dma_chan *dchan, struct scatterlist *dst_sg, in xgene_dma_prep_sg() argument
907 if (unlikely(!dchan)) in xgene_dma_prep_sg()
916 chan = to_dma_chan(dchan); in xgene_dma_prep_sg()
1005 struct dma_chan *dchan, dma_addr_t dst, dma_addr_t *src, in xgene_dma_prep_xor() argument
[all …]
at_hdmac_regs.h
280 static inline struct at_dma_chan *to_at_dma_chan(struct dma_chan *dchan) in to_at_dma_chan() argument
282 return container_of(dchan, struct at_dma_chan, chan_common); in to_at_dma_chan()
mmp_tdma.c
140 #define to_mmp_tdma_chan(dchan) container_of(dchan, struct mmp_tdma_chan, chan) argument
at_xdmac.c
272 static inline struct at_xdmac_chan *to_at_xdmac_chan(struct dma_chan *dchan) in to_at_xdmac_chan() argument
274 return container_of(dchan, struct at_xdmac_chan, chan); in to_at_xdmac_chan()
ste_dma40.c
2236 d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src, in d40_prep_sg() argument
2240 struct d40_chan *chan = container_of(dchan, struct d40_chan, chan); in d40_prep_sg()
/drivers/dma/xilinx/
xilinx_vdma.c
426 static void xilinx_vdma_free_chan_resources(struct dma_chan *dchan) in xilinx_vdma_free_chan_resources() argument
428 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_free_chan_resources()
489 static int xilinx_vdma_alloc_chan_resources(struct dma_chan *dchan) in xilinx_vdma_alloc_chan_resources() argument
491 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_alloc_chan_resources()
512 dma_cookie_init(dchan); in xilinx_vdma_alloc_chan_resources()
524 static enum dma_status xilinx_vdma_tx_status(struct dma_chan *dchan, in xilinx_vdma_tx_status() argument
528 return dma_cookie_status(dchan, cookie, txstate); in xilinx_vdma_tx_status()
730 static void xilinx_vdma_issue_pending(struct dma_chan *dchan) in xilinx_vdma_issue_pending() argument
732 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); in xilinx_vdma_issue_pending()
930 xilinx_vdma_dma_prep_interleaved(struct dma_chan *dchan, in xilinx_vdma_dma_prep_interleaved() argument
[all …]
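
The xilinx_vdma hits show the cookie bookkeeping most providers lean on: dma_cookie_init() in alloc_chan_resources (line 512) and dma_cookie_status() in tx_status (line 528); both helpers are declared in the in-tree header drivers/dma/dmaengine.h. A stripped-down sketch, with the foo_ names hypothetical:

#include <linux/dmaengine.h>
#include "../dmaengine.h"	/* cookie helpers, included this way from drivers/dma/xilinx/ */

/* .device_alloc_chan_resources: start cookie bookkeeping for the channel. */
static int foo_alloc_chan_resources(struct dma_chan *dchan)
{
	dma_cookie_init(dchan);
	return 0;
}

/* .device_tx_status: with no hardware residue to report, the generic
 * cookie state is a sufficient answer. */
static enum dma_status foo_tx_status(struct dma_chan *dchan,
				     dma_cookie_t cookie,
				     struct dma_tx_state *txstate)
{
	return dma_cookie_status(dchan, cookie, txstate);
}
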
/drivers/rapidio/
rio.c
1552 void rio_release_dma(struct dma_chan *dchan) in rio_release_dma() argument
1554 dma_release_channel(dchan); in rio_release_dma()
1572 struct dma_async_tx_descriptor *rio_dma_prep_xfer(struct dma_chan *dchan, in rio_dma_prep_xfer() argument
1578 if (dchan->device->device_prep_slave_sg == NULL) { in rio_dma_prep_xfer()
1588 return dmaengine_prep_rio_sg(dchan, data->sg, data->sg_len, in rio_dma_prep_xfer()
1608 struct dma_chan *dchan, struct rio_dma_data *data, in rio_dma_prep_slave_sg() argument
1611 return rio_dma_prep_xfer(dchan, rdev->destid, data, direction, flags); in rio_dma_prep_slave_sg()
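
On the consumer side, rio.c guards rio_dma_prep_xfer() by checking that the channel's provider actually implements device_prep_slave_sg (line 1578) and hands channels back with dma_release_channel() (line 1554). A caller-side sketch of those two steps; foo_use_channel() and its error handling are hypothetical:

#include <linux/dmaengine.h>

/* Verify the provider supports slave scatter-gather transfers before
 * building one, then release the channel when finished. */
static int foo_use_channel(struct dma_chan *dchan)
{
	if (!dchan->device->device_prep_slave_sg) {
		pr_err("%s: channel has no prep_slave_sg\n", __func__);
		dma_release_channel(dchan);
		return -EINVAL;
	}

	/* ... build the scatterlist and call the prep routine here ... */

	dma_release_channel(dchan);
	return 0;
}
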