Searched refs:device (Results 1 – 4 of 4) sorted by relevance
/crypto/async_tx/
async_tx.c
     57  dma_has_cap(tx_type, depend_tx->chan->device->cap_mask)) in __async_tx_find_channel()
     87  struct dma_device *device; in async_tx_channel_switch() local
    103  device = chan->device; in async_tx_channel_switch()
    108  if (dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_tx_channel_switch()
    109  intr_tx = device->device_prep_dma_interrupt(chan, 0); in async_tx_channel_switch()
    245  struct dma_device *device; in async_trigger_callback() local
    250  device = chan->device; in async_trigger_callback()
    255  if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_trigger_callback()
    256  device = NULL; in async_trigger_callback()
    258  tx = device ? device->device_prep_dma_interrupt(chan, 0) : NULL; in async_trigger_callback()
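The async_tx.c hits all follow one pattern: reach the struct dma_device through chan->device, test its cap_mask with dma_has_cap(), and only then call the matching device_prep_* hook. A minimal sketch of the capability-gated interrupt-descriptor step seen in async_trigger_callback(); the helper name is made up for illustration, the calls are only those visible in the hits above:

    #include <linux/dmaengine.h>

    /*
     * Sketch of the pattern at lines 250-258: use the channel's dma_device
     * only if it advertises DMA_INTERRUPT, otherwise return NULL so the
     * caller takes the synchronous path.
     */
    static struct dma_async_tx_descriptor *
    prep_interrupt_if_capable(struct dma_chan *chan)
    {
    	struct dma_device *device = chan ? chan->device : NULL;

    	if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask))
    		device = NULL;

    	return device ? device->device_prep_dma_interrupt(chan, 0) : NULL;
    }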
async_memcpy.c
     51  struct dma_device *device = chan ? chan->device : NULL; in async_memcpy() local
     54  if (device) { in async_memcpy()
     58  dma_dest = dma_map_page(device->dev, dest, dest_offset, len, in async_memcpy()
     61  dma_src = dma_map_page(device->dev, src, src_offset, len, in async_memcpy()
     64  tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src, in async_memcpy()
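The async_memcpy() hits show the offload path proper: when a channel with a DMA device is available, both pages are mapped against device->dev and the resulting bus addresses are handed to device_prep_dma_memcpy(). A hedged reconstruction of that step, with the helper name, flags value and DMA directions assumed rather than taken from the hits:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    /*
     * Sketch of the mapping + prep step at lines 58-64: both pages are
     * mapped against the DMA device's struct device before the memcpy
     * descriptor is prepared. Directions and flags are illustrative.
     */
    static struct dma_async_tx_descriptor *
    prep_page_copy(struct dma_chan *chan, struct page *dest, unsigned int dest_offset,
    	       struct page *src, unsigned int src_offset, size_t len)
    {
    	struct dma_device *device = chan ? chan->device : NULL;
    	dma_addr_t dma_dest, dma_src;

    	if (!device)
    		return NULL;	/* caller falls back to a CPU memcpy */

    	dma_dest = dma_map_page(device->dev, dest, dest_offset, len,
    				DMA_FROM_DEVICE);
    	dma_src = dma_map_page(device->dev, src, src_offset, len,
    			       DMA_TO_DEVICE);

    	return device->device_prep_dma_memcpy(chan, dma_dest, dma_src, len, 0);
    }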
async_memset.c
     51  struct dma_device *device = chan ? chan->device : NULL; in async_memset() local
     54  if (device) { in async_memset()
     58  dma_dest = dma_map_page(device->dev, dest, offset, len, in async_memset()
     61  tx = device->device_prep_dma_memset(chan, dma_dest, val, len, in async_memset()
async_xor.c
     44  struct dma_device *dma = chan->device; in do_async_xor()
    251  struct dma_device *device = chan ? chan->device : NULL; in async_xor_zero_sum() local
    256  if (device && src_cnt <= device->max_xor) { in async_xor_zero_sum()
    264  dma_src[i] = dma_map_page(device->dev, src_list[i], in async_xor_zero_sum()
    267  tx = device->device_prep_dma_zero_sum(chan, dma_src, src_cnt, in async_xor_zero_sum()
    275  tx = device->device_prep_dma_zero_sum(chan, in async_xor_zero_sum()
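The async_xor_zero_sum() hits extend the same check to multiple sources: the operation is offloaded only when src_cnt fits within device->max_xor, each source page is mapped against device->dev, and the mapped-address array is passed to device_prep_dma_zero_sum(). A sketch of that shape; the parameters after src_cnt (len, result pointer, flags) and the fixed source bound are assumptions, since the hits truncate the call:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    /*
     * Sketch of the zero-sum offload check at lines 251-267: the hardware
     * path is taken only when the channel's device can XOR src_cnt sources
     * at once. Everything after src_cnt in the prep call is assumed.
     */
    static struct dma_async_tx_descriptor *
    prep_zero_sum(struct dma_chan *chan, struct page **src_list, unsigned int offset,
    	      int src_cnt, size_t len, u32 *result)
    {
    	struct dma_device *device = chan ? chan->device : NULL;
    	dma_addr_t dma_src[8];	/* illustrative fixed bound on src_cnt */
    	int i;

    	if (!device || src_cnt > device->max_xor || src_cnt > 8)
    		return NULL;	/* caller falls back to the synchronous path */

    	for (i = 0; i < src_cnt; i++)
    		dma_src[i] = dma_map_page(device->dev, src_list[i], offset,
    					  len, DMA_TO_DEVICE);

    	return device->device_prep_dma_zero_sum(chan, dma_src, src_cnt,
    						len, result, 0);
    }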