/crypto/async_tx/
async_tx.c
     79  struct dma_async_tx_descriptor *tx)   in async_tx_channel_switch() (argument)
     87  if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {   in async_tx_channel_switch()
     88  txd_chain(depend_tx, tx);   in async_tx_channel_switch()
    113  txd_chain(intr_tx, tx);   in async_tx_channel_switch()
    134  tx->tx_submit(tx);   in async_tx_channel_switch()
    156  async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,   in async_tx_submit() (argument)
    161  tx->callback = submit->cb_fn;   in async_tx_submit()
    162  tx->callback_param = submit->cb_param;   in async_tx_submit()
    174  txd_parent(tx));   in async_tx_submit()
    186  txd_chain(depend_tx, tx);   in async_tx_submit()
    [all …]
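The async_tx_submit()/async_tx_channel_switch() hits above are the core's dependency handling: a descriptor submitted with a depend_tx gets chained onto its parent, and an interrupt descriptor is inserted when the two operations land on different channels. A minimal caller-side sketch of that pattern, assuming a kernel-module context; the helper name and the blocks/new_data/parity pages are illustrative, not from the source:

#include <linux/async_tx.h>

/* hypothetical helper: copy fresh data into a stripe block, then recompute
 * the stripe's XOR parity once the copy has finished */
static void sketch_update_and_parity(struct page **blocks, int nblocks,
                                     struct page *new_data, struct page *parity,
                                     size_t len, addr_conv_t *addr_conv)
{
    struct dma_async_tx_descriptor *tx;
    struct async_submit_ctl submit;

    /* first operation: no dependency, no completion callback */
    init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
    tx = async_memcpy(blocks[0], new_data, 0, 0, len, &submit);

    /* second operation: passing tx as depend_tx lets async_tx_submit()
     * chain the descriptors, or insert an interrupt descriptor when the
     * two operations end up on different channels; ASYNC_TX_XOR_ZERO_DST
     * marks parity as output-only for the synchronous fallback */
    init_async_submit(&submit, ASYNC_TX_ACK | ASYNC_TX_XOR_ZERO_DST,
                      tx, NULL, NULL, addr_conv);
    tx = async_xor(parity, blocks, 0, nblocks, len, &submit);

    /* flush pending descriptors on every channel involved */
    async_tx_issue_pending_all();
}
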
async_raid6_recov.c
     45  struct dma_async_tx_descriptor *tx;   in async_sum_product() (local)
     53  tx = dma->device_prep_dma_pq(chan, dma_dest, dma_src, 2, coef,   in async_sum_product()
     55  if (tx) {   in async_sum_product()
     56  async_tx_submit(chan, tx, submit);   in async_sum_product()
     57  return tx;   in async_sum_product()
     99  struct dma_async_tx_descriptor *tx;   in async_mult() (local)
    106  tx = dma->device_prep_dma_pq(chan, dma_dest, dma_src, 1, &coef,   in async_mult()
    108  if (tx) {   in async_mult()
    109  async_tx_submit(chan, tx, submit);   in async_mult()
    110  return tx;   in async_mult()
    [all …]
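async_sum_product() and async_mult() are internal helpers of the RAID-6 recovery paths; callers use the public entry points async_raid6_datap_recov() and async_raid6_2data_recov(). A sketch of the data-plus-P recovery call, assuming ptrs[] holds the data pages first, then P, then Q, with the failed slots already pointing at the pages that should receive the rebuilt contents; the wrapper name is hypothetical:

#include <linux/async_tx.h>

/* hypothetical wrapper: rebuild one lost data block and P from the
 * surviving data blocks and Q, optionally chained behind depend_tx */
static struct dma_async_tx_descriptor *
sketch_datap_recov(int disks, size_t bytes, int faila, struct page **ptrs,
                   addr_conv_t *addr_conv,
                   struct dma_async_tx_descriptor *depend_tx)
{
    struct async_submit_ctl submit;

    init_async_submit(&submit, 0, depend_tx, NULL, NULL, addr_conv);
    return async_raid6_datap_recov(disks, bytes, faila, ptrs, &submit);
}
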
async_xor.c
     41  struct dma_async_tx_descriptor *tx = NULL;   in do_async_xor() (local)
     92  tx = dma->device_prep_dma_xor(chan, dma_dest, &dma_src[src_off],   in do_async_xor()
     95  if (unlikely(!tx))   in do_async_xor()
     99  while (unlikely(!tx)) {   in do_async_xor()
    101  tx = dma->device_prep_dma_xor(chan, dma_dest,   in do_async_xor()
    107  async_tx_submit(chan, tx, submit);   in do_async_xor()
    108  submit->depend_tx = tx;   in do_async_xor()
    122  return tx;   in do_async_xor()
    270  struct dma_async_tx_descriptor *tx = NULL;   in async_xor_val() (local)
    295  tx = device->device_prep_dma_xor_val(chan, dma_src, src_cnt,   in async_xor_val()
    [all …]
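do_async_xor() is the engine-backed path behind async_xor(); callers only see the page-level API. A minimal sketch, assuming dest is a pure destination, so ASYNC_TX_XOR_ZERO_DST is set for the synchronous fallback (which always xors dest in and therefore zeroes it first); the helper name and srcs/addr_conv arrays are illustrative:

#include <linux/async_tx.h>

/* hypothetical helper: xor src_cnt source pages into dest at offset 0 */
static struct dma_async_tx_descriptor *
sketch_xor_pages(struct page *dest, struct page **srcs, int src_cnt,
                 size_t len, addr_conv_t *addr_conv)
{
    struct async_submit_ctl submit;

    init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST | ASYNC_TX_ACK,
                      NULL, NULL, NULL, addr_conv);
    return async_xor(dest, srcs, 0, src_cnt, len, &submit);
}
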
raid6test.c
     72  struct dma_async_tx_descriptor *tx = NULL;   in raid6_dual_recov() (local)
     82  tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);   in raid6_dual_recov()
    100  tx = async_xor(dest, blocks, 0, count, bytes, &submit);   in raid6_dual_recov()
    102  init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv);   in raid6_dual_recov()
    103  tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);   in raid6_dual_recov()
    109  tx = async_raid6_datap_recov(disks, bytes, faila, ptrs, &submit);   in raid6_dual_recov()
    113  tx = async_raid6_2data_recov(disks, bytes, faila, failb, ptrs, &submit);   in raid6_dual_recov()
    117  init_async_submit(&submit, ASYNC_TX_ACK, tx, callback, &cmp, addr_conv);   in raid6_dual_recov()
    118  tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit);   in raid6_dual_recov()
    119  async_tx_issue_pending(tx);   in raid6_dual_recov()
    [all …]
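raid6_dual_recov() in the self-test chains recovery and validation through depend_tx and waits on a completion fired by the last callback. A compressed sketch of that flow for the two-data-block failure case, assuming ptrs[], spare and addr_conv are set up by the (omitted) caller as in the test; the sketch_* names are hypothetical:

#include <linux/async_tx.h>
#include <linux/completion.h>

static void sketch_recov_done(void *param)
{
    complete(param);
}

/* hypothetical helper: rebuild two failed data blocks, then check P/Q */
static enum sum_check_flags
sketch_recover_and_check(int disks, size_t bytes, int faila, int failb,
                         struct page **ptrs, struct page *spare,
                         addr_conv_t *addr_conv)
{
    struct dma_async_tx_descriptor *tx;
    struct async_submit_ctl submit;
    struct completion cmp;
    enum sum_check_flags result = ~0;

    init_completion(&cmp);

    /* rebuild both missing data blocks from the survivors plus P and Q */
    init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
    tx = async_raid6_2data_recov(disks, bytes, faila, failb, ptrs, &submit);

    /* chain a syndrome check on the recovery; the callback fires last */
    init_async_submit(&submit, ASYNC_TX_ACK, tx, sketch_recov_done, &cmp,
                      addr_conv);
    tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit);
    async_tx_issue_pending(tx);
    wait_for_completion(&cmp);

    return result;  /* 0 means both P and Q checked out */
}
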
async_pq.c
     54  struct dma_async_tx_descriptor *tx = NULL;   in do_async_gen_syndrome() (local)
    121  tx = dma->device_prep_dma_pq(chan, dma_dest,   in do_async_gen_syndrome()
    126  if (likely(tx))   in do_async_gen_syndrome()
    132  async_tx_submit(chan, tx, submit);   in do_async_gen_syndrome()
    133  submit->depend_tx = tx;   in do_async_gen_syndrome()
    142  return tx;   in do_async_gen_syndrome()
    277  struct dma_async_tx_descriptor *tx;   in async_syndrome_val() (local)
    323  tx = device->device_prep_dma_pq_val(chan, pq, dma_src,   in async_syndrome_val()
    328  if (likely(tx))   in async_syndrome_val()
    333  async_tx_submit(chan, tx, submit);   in async_syndrome_val()
    [all …]
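async_gen_syndrome() and async_syndrome_val() are the public faces of the do_async_gen_syndrome() path above. A sketch of P/Q generation chained behind an earlier descriptor, assuming the async_pq layout with blocks[disks-2] = P and blocks[disks-1] = Q; the helper name is hypothetical:

#include <linux/async_tx.h>

/* hypothetical helper: (re)compute P and Q over one stripe */
static struct dma_async_tx_descriptor *
sketch_gen_pq(struct page **blocks, int disks, size_t len,
              addr_conv_t *addr_conv,
              struct dma_async_tx_descriptor *depend_tx)
{
    struct async_submit_ctl submit;

    init_async_submit(&submit, ASYNC_TX_ACK, depend_tx, NULL, NULL, addr_conv);
    return async_gen_syndrome(blocks, 0, disks, len, &submit);
}
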
async_memcpy.c
     52  struct dma_async_tx_descriptor *tx = NULL;   in async_memcpy() (local)
     68  tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src,   in async_memcpy()
     70  if (!tx) {   in async_memcpy()
     78  if (tx) {   in async_memcpy()
     80  async_tx_submit(chan, tx, submit);   in async_memcpy()
     99  return tx;   in async_memcpy()
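async_memcpy() preps a DMA copy when a capable channel is available and otherwise falls back to a synchronous copy, running the completion callback either way. A sketch that sleeps until the copy is done, assuming process context; the sketch_* names are hypothetical:

#include <linux/async_tx.h>
#include <linux/completion.h>

static void sketch_copy_done(void *param)
{
    complete(param);
}

/* hypothetical helper: copy one page and wait for the result */
static void sketch_copy_page(struct page *dest, struct page *src, size_t len)
{
    struct dma_async_tx_descriptor *tx;
    struct async_submit_ctl submit;
    struct completion cmp;

    init_completion(&cmp);
    /* memcpy needs no scribble space, so the addr_conv pointer can be NULL */
    init_async_submit(&submit, ASYNC_TX_ACK, NULL,
                      sketch_copy_done, &cmp, NULL);
    tx = async_memcpy(dest, src, 0, 0, len, &submit);
    async_tx_issue_pending(tx);   /* safe even if tx is NULL (sync fallback) */
    wait_for_completion(&cmp);
}
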
async_memset.c
     49  struct dma_async_tx_descriptor *tx = NULL;   in async_memset() (local)
     62  tx = device->device_prep_dma_memset(chan, dma_dest, val, len,   in async_memset()
     66  if (tx) {   in async_memset()
     68  async_tx_submit(chan, tx, submit);   in async_memset()
     83  return tx;   in async_memset()
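async_memset() follows the same shape as async_memcpy(). A sketch for kernels that still carry this file, assuming its signature mirrors async_memcpy(): destination page, value, offset, length, submit context; the helper name is hypothetical:

#include <linux/async_tx.h>

/* hypothetical helper: clear one page, optionally chained behind depend_tx */
static struct dma_async_tx_descriptor *
sketch_zero_page(struct page *dest, size_t len,
                 struct dma_async_tx_descriptor *depend_tx)
{
    struct async_submit_ctl submit;

    init_async_submit(&submit, ASYNC_TX_ACK, depend_tx, NULL, NULL, NULL);
    return async_memset(dest, 0, 0, len, &submit);
}
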