Searched refs:tx_submit (Results 1 – 25 of 66) sorted by relevance
113 intr_tx->tx_submit(intr_tx); in async_tx_channel_switch()
121 tx->tx_submit(tx); in async_tx_channel_switch()
196 tx->tx_submit(tx); in async_tx_submit()
201 tx->tx_submit(tx); in async_tx_submit()
63 vd->tx.tx_submit = vchan_tx_submit; in vchan_tx_prep()
392 desc->txd.tx_submit = plx_dma_tx_submit; in plx_dma_alloc_desc()
827 sw_desc->async_tx.tx_submit = mv_xor_v2_tx_submit; in mv_xor_v2_probe()
339 td_desc->txd.tx_submit = td_tx_submit; in td_alloc_init_desc()
387 tdmac->desc.tx_submit = mmp_tdma_tx_submit; in mmp_tdma_alloc_chan_resources()
672 desc->async_tx.tx_submit = msgdma_tx_submit; in msgdma_alloc_chan_resources()
422 mxs_chan->desc.tx_submit = mxs_dma_tx_submit; in mxs_dma_alloc_chan_resources()
254 desc->async_tx.tx_submit = fsl_re_tx_submit; in fsl_re_init_desc()
250 cookie = rxdesc->tx_submit(rxdesc); in at91_usart_spi_dma_transfer()
254 cookie = txdesc->tx_submit(txdesc); in at91_usart_spi_dma_transfer()
784 cookie = rxdesc->tx_submit(rxdesc); in atmel_spi_next_xfer_dma_submit()
787 cookie = txdesc->tx_submit(txdesc); in atmel_spi_next_xfer_dma_submit()
1117 desc_rx->tx_submit(desc_rx); in pch_spi_handle_dma()
1118 desc_tx->tx_submit(desc_tx); in pch_spi_handle_dma()
364 cookie = tx->tx_submit(tx); in ioat_dma_self_test()
879 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
947 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
1000 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
269 desc->txd.tx_submit = idxd_dma_tx_submit; in idxd_register_dma_channel()
172 cppi41_channel->cookie = dma_desc->tx_submit(dma_desc); in cppi41_trans_done()
472 cppi41_channel->cookie = dma_desc->tx_submit(dma_desc); in cppi41_configure_channel()
121 ux500_channel->cookie = dma_desc->tx_submit(dma_desc); in ux500_configure_channel()
2018 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_vdma_dma_prep_interleaved()
2093 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_cdma_prep_memcpy()
2156 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_slave_sg()
2263 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_dma_cyclic()
2363 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_mcdma_prep_slave_sg()
603 dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx); member
1200 return desc->tx_submit(desc); in dmaengine_submit()
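
The two hits above are the core of the interface: tx_submit is a member of struct dma_async_tx_descriptor in include/linux/dmaengine.h, and dmaengine_submit() is the thin client-facing wrapper around it. Below is a minimal client-side sketch of how that wrapper is normally used; the channel setup, the DMA-mapped addresses, and the example_memcpy() name are illustrative assumptions, not code taken from the results in this listing.

/*
 * Hedged sketch: typical client-side path through tx_submit via the
 * dmaengine_submit() wrapper shown above.  The channel and the dst/src
 * DMA addresses are assumed to have been obtained elsewhere.
 */
#include <linux/dmaengine.h>

static int example_memcpy(struct dma_chan *chan,
			  dma_addr_t dst, dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	/* Prepare a descriptor; the provider fills in tx->tx_submit here. */
	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -ENOMEM;

	/* dmaengine_submit() is just "return desc->tx_submit(desc);". */
	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	/* Submission only queues the descriptor; kick the hardware. */
	dma_async_issue_pending(chan);

	return 0;
}

Note that tx_submit() itself does not start the transfer: it hands back a cookie and queues the work, and the hardware is only started once dma_async_issue_pending() is called on the channel.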
413 cookie = mx3_fbi->txd->tx_submit(mx3_fbi->txd); in sdc_enable_channel()
417 if (!mx3_fbi->txd || !mx3_fbi->txd->tx_submit) { in sdc_enable_channel()
1218 cookie = txd->tx_submit(txd); in mx3fb_pan_display()
315 cookie = dma_tx->tx_submit(dma_tx); in mpc512x_lpbfifo_kick()
214 cookie = tx->tx_submit(tx); in hist_buf_dma()
343 cookie = tx->tx_submit(tx); in omap2_onenand_dma_transfer()
456 ctl->adesc->tx_submit(ctl->adesc); in ks8842_tx_frame_dma()
570 ctl->adesc->tx_submit(ctl->adesc); in __ks8842_start_new_rx_dma()
106 such a transfer tx_submit() will be queued on the submitted queue, and
343 desc->tx_desc.tx_submit = ccp_tx_submit; in ccp_alloc_dma_desc()
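
On the provider side, the assignments in the driver hits above (plx_dma_alloc_desc(), mv_xor_v2_probe(), xilinx_dma_prep_slave_sg(), ccp_alloc_dma_desc(), and so on) all follow the same shape: a prep or alloc routine points tx_submit at a driver-specific submit function, and that function assigns a cookie and queues the descriptor until issue_pending runs. The following is a hedged sketch of that shape using hypothetical foo_* names rather than any of the drivers listed, and it assumes the channel's lock and pending list are initialised at channel setup.

/*
 * Hedged sketch (hypothetical foo_* driver): the tx_submit assignment
 * pattern visible in the driver hits above.
 */
#include <linux/kernel.h>
#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include <linux/list.h>
#include "dmaengine.h"		/* private drivers/dma helper: dma_cookie_assign() */

struct foo_desc {
	struct dma_async_tx_descriptor txd;
	struct list_head node;
};

struct foo_chan {
	struct dma_chan chan;
	spinlock_t lock;		/* assumed initialised at channel setup */
	struct list_head pending;	/* assumed initialised at channel setup */
};

static dma_cookie_t foo_tx_submit(struct dma_async_tx_descriptor *txd)
{
	struct foo_desc *desc = container_of(txd, struct foo_desc, txd);
	struct foo_chan *fc = container_of(txd->chan, struct foo_chan, chan);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&fc->lock, flags);
	cookie = dma_cookie_assign(txd);	/* hand the client its cookie */
	list_add_tail(&desc->node, &fc->pending);
	spin_unlock_irqrestore(&fc->lock, flags);

	return cookie;
}

static void foo_init_desc(struct foo_chan *fc, struct foo_desc *desc)
{
	dma_async_tx_descriptor_init(&desc->txd, &fc->chan);
	desc->txd.tx_submit = foo_tx_submit;	/* as in the hits above */
}

dma_cookie_assign() lives in the private drivers/dma/dmaengine.h helper header; drivers built on the virt-dma layer (the vchan_tx_submit hit above) get an equivalent submit implementation from that layer instead of open-coding one.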