/crypto/async_tx/
async_tx.c
     66  struct dma_async_tx_descriptor *tx)   [in async_tx_channel_switch(), argument]
     74  if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {   [in async_tx_channel_switch()]
     75  txd_chain(depend_tx, tx);   [in async_tx_channel_switch()]
    100  txd_chain(intr_tx, tx);   [in async_tx_channel_switch()]
    121  tx->tx_submit(tx);   [in async_tx_channel_switch()]
    143  async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,   [in async_tx_submit(), argument]
    148  tx->callback = submit->cb_fn;   [in async_tx_submit()]
    149  tx->callback_param = submit->cb_param;   [in async_tx_submit()]
    161  txd_parent(tx));   [in async_tx_submit()]
    173  txd_chain(depend_tx, tx);   [in async_tx_submit()]
    [all …]
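The two routines matched here, async_tx_channel_switch() and async_tx_submit(), are the dependency-resolution core of the API. A rough client-side sketch of what drives that path follows; it is not from the tree, and notify_done()/append_completion_callback() are illustrative names:

#include <linux/async_tx.h>
#include <linux/printk.h>

static void notify_done(void *param)
{
	pr_debug("async_tx chain %p completed\n", param);
}

static struct dma_async_tx_descriptor *
append_completion_callback(struct dma_async_tx_descriptor *prior, void *token)
{
	struct async_submit_ctl submit;

	/*
	 * Passing "prior" as depend_tx is what makes async_tx_submit()
	 * chain the descriptors, or go through async_tx_channel_switch()
	 * when the two operations ended up on different DMA channels.
	 */
	init_async_submit(&submit, ASYNC_TX_ACK, prior, notify_done, token, NULL);
	return async_trigger_callback(&submit);
}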
async_raid6_recov.c
     36  struct dma_async_tx_descriptor *tx;   [in async_sum_product(), local]
     54  tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef,   [in async_sum_product()]
     56  if (tx) {   [in async_sum_product()]
     57  dma_set_unmap(tx, unmap);   [in async_sum_product()]
     58  async_tx_submit(chan, tx, submit);   [in async_sum_product()]
     60  return tx;   [in async_sum_product()]
    104  struct dma_async_tx_descriptor *tx;   [in async_mult(), local]
    122  tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr,   [in async_mult()]
    125  if (tx) {   [in async_mult()]
    126  dma_set_unmap(tx, unmap);   [in async_mult()]
    [all …]
async_xor.c
     27  struct dma_async_tx_descriptor *tx = NULL;   [in do_async_xor(), local]
     65  tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,   [in do_async_xor()]
     69  if (unlikely(!tx))   [in do_async_xor()]
     73  while (unlikely(!tx)) {   [in do_async_xor()]
     75  tx = dma->device_prep_dma_xor(chan, dma_dest,   [in do_async_xor()]
     82  dma_set_unmap(tx, unmap);   [in do_async_xor()]
     83  async_tx_submit(chan, tx, submit);   [in do_async_xor()]
     84  submit->depend_tx = tx;   [in do_async_xor()]
     96  return tx;   [in do_async_xor()]
    199  struct dma_async_tx_descriptor *tx;   [in async_xor_offs(), local]
    [all …]
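For context, the client-facing entry point that funnels into do_async_xor() above can be driven as in the following sketch (not from the tree; NSRCS and xor_pages() are illustrative). When no XOR-capable DMA channel is available the API falls back to the CPU xor_blocks() path:

#include <linux/async_tx.h>

#define NSRCS 4	/* illustrative source count */

static struct dma_async_tx_descriptor *
xor_pages(struct page *dest, struct page *srcs[NSRCS], size_t len)
{
	struct async_submit_ctl submit;

	/*
	 * ASYNC_TX_XOR_ZERO_DST: dest is not one of the sources; the
	 * synchronous fallback zeroes it before accumulating into it.
	 */
	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL,
			  NULL, NULL, NULL);
	return async_xor(dest, srcs, 0, NSRCS, len, &submit);
}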
async_pq.c
     41  struct dma_async_tx_descriptor *tx = NULL;   [in do_async_gen_syndrome(), local]
     78  tx = dma->device_prep_dma_pq(chan, dma_dest,   [in do_async_gen_syndrome()]
     83  if (likely(tx))   [in do_async_gen_syndrome()]
     89  dma_set_unmap(tx, unmap);   [in do_async_gen_syndrome()]
     90  async_tx_submit(chan, tx, submit);   [in do_async_gen_syndrome()]
     91  submit->depend_tx = tx;   [in do_async_gen_syndrome()]
    100  return tx;   [in do_async_gen_syndrome()]
    197  struct dma_async_tx_descriptor *tx;   [in async_gen_syndrome(), local]
    244  tx = do_async_gen_syndrome(chan, coefs, j, unmap, dma_flags, submit);   [in async_gen_syndrome()]
    246  return tx;   [in async_gen_syndrome()]
    [all …]
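A minimal caller of the syndrome path above might look like the sketch below. DISKS and compute_pq() are illustrative, the offset-array form of async_gen_syndrome() is assumed from the matches shown here, and the usual layout (data pages first, then P, then Q) is assumed rather than quoted:

#include <linux/async_tx.h>

#define DISKS 6	/* illustrative: 4 data pages + P + Q */

static struct dma_async_tx_descriptor *
compute_pq(struct page *blocks[DISKS], unsigned int offsets[DISKS], size_t len)
{
	struct async_submit_ctl submit;

	/* No dependency, no callback, no scribble buffer for this sketch. */
	init_async_submit(&submit, 0, NULL, NULL, NULL, NULL);
	return async_gen_syndrome(blocks, offsets, DISKS, len, &submit);
}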
raid6test.c
     62  struct dma_async_tx_descriptor *tx = NULL;   [in raid6_dual_recov(), local]
     72  tx = async_gen_syndrome(ptrs, offs,   [in raid6_dual_recov()]
     93  tx = async_xor(dest, blocks, 0, count, bytes, &submit);   [in raid6_dual_recov()]
     95  init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv);   [in raid6_dual_recov()]
     96  tx = async_gen_syndrome(ptrs, offs,   [in raid6_dual_recov()]
    103  tx = async_raid6_datap_recov(disks, bytes,   [in raid6_dual_recov()]
    108  tx = async_raid6_2data_recov(disks, bytes,   [in raid6_dual_recov()]
    113  init_async_submit(&submit, ASYNC_TX_ACK, tx, callback, &cmp, addr_conv);   [in raid6_dual_recov()]
    114  tx = async_syndrome_val(ptrs, offs,   [in raid6_dual_recov()]
    116  async_tx_issue_pending(tx);   [in raid6_dual_recov()]
    [all …]
async_memcpy.c
     39  struct dma_async_tx_descriptor *tx = NULL;   [in async_memcpy(), local]
     61  tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],   [in async_memcpy()]
     66  if (tx) {   [in async_memcpy()]
     69  dma_set_unmap(tx, unmap);   [in async_memcpy()]
     70  async_tx_submit(chan, tx, submit);   [in async_memcpy()]
     91  return tx;   [in async_memcpy()]
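The corresponding client call is the simplest of the family. A sketch, with copy_page_async() as an illustrative wrapper that also kicks the DMA engines so the prepared descriptor actually runs:

#include <linux/async_tx.h>

static struct dma_async_tx_descriptor *
copy_page_async(struct page *dest, struct page *src, size_t len)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
	tx = async_memcpy(dest, src, 0, 0, len, &submit);

	/* Flush pending descriptors on all channels used so far. */
	async_tx_issue_pending_all();
	return tx;
}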
/crypto/
ecc.c
   1396  u64 tx[ECC_MAX_DIGITS];   [in ecc_point_mult_shamir(), local]
   1400  vli_set(tx, point->x, ndigits);   [in ecc_point_mult_shamir()]
   1402  apply_z(tx, ty, z, curve);   [in ecc_point_mult_shamir()]
   1403  vli_mod_sub(tz, rx, tx, curve->p, ndigits);   [in ecc_point_mult_shamir()]
   1404  xycz_add(tx, ty, rx, ry, curve);   [in ecc_point_mult_shamir()]
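Here tx/ty/tz are scratch coordinates for the co-Z point arithmetic in ecc_point_mult_shamir(). Assuming the usual Jacobian-style convention for this code (an assumption, not stated in the matches above), a projective point (X, Y, Z) corresponds to the affine point

    x = X / Z^2,  y = Y / Z^3

so apply_z() rescales (tx, ty) by (z^2, z^3) to bring both operands onto a common Z before the co-Z addition in xycz_add().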