Searched refs: device (Results 1 – 6 of 6), sorted by relevance
/crypto/async_tx/
async_tx.c
     50  dma_has_cap(tx_type, depend_tx->chan->device->cap_mask))  in __async_tx_find_channel()
     69  struct dma_device *device = chan->device;  in async_tx_channel_switch() local
     82  device->device_issue_pending(chan);  in async_tx_channel_switch()
     89  if (dma_has_cap(DMA_INTERRUPT, device->cap_mask))  in async_tx_channel_switch()
     90  intr_tx = device->device_prep_dma_interrupt(chan, 0);  in async_tx_channel_switch()
    116  device->device_issue_pending(chan);  in async_tx_channel_switch()
    224  struct dma_device *device;  in async_trigger_callback() local
    230  device = chan->device;  in async_trigger_callback()
    235  if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask))  in async_trigger_callback()
    236  device = NULL;  in async_trigger_callback()
    [all …]
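
The async_tx_channel_switch() hits above show a standard dmaengine idiom: check the channel's capability mask before asking it to prepare a descriptor, then kick the hardware with device_issue_pending(). A minimal sketch of that idiom; the helper name is invented for illustration.

    #include <linux/dmaengine.h>

    /* Illustrative helper (not in the kernel): prepare an interrupt
     * descriptor only when the channel advertises DMA_INTERRUPT, then
     * flush anything already queued out to the hardware. */
    static struct dma_async_tx_descriptor *
    example_prep_interrupt(struct dma_chan *chan)
    {
            struct dma_device *device = chan->device;
            struct dma_async_tx_descriptor *intr_tx = NULL;

            if (dma_has_cap(DMA_INTERRUPT, device->cap_mask))
                    intr_tx = device->device_prep_dma_interrupt(chan, 0);

            device->device_issue_pending(chan);
            return intr_tx;
    }
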
async_xor.c
     26  struct dma_device *dma = chan->device;  in do_async_xor()
    166  struct dma_device *device = chan ? chan->device : NULL;  in async_xor() local
    171  if (device)  in async_xor()
    172  unmap = dmaengine_get_unmap_data(device->dev, src_cnt+1, GFP_NOWAIT);  in async_xor()
    174  if (unmap && is_dma_xor_aligned(device, offset, 0, len)) {  in async_xor()
    186  unmap->addr[j++] = dma_map_page(device->dev, src_list[i],  in async_xor()
    191  unmap->addr[j] = dma_map_page(device->dev, dest, offset, len,  in async_xor()
    261  struct dma_device *device = chan ? chan->device : NULL;  in async_xor_val() local
    267  if (device)  in async_xor_val()
    268  unmap = dmaengine_get_unmap_data(device->dev, src_cnt, GFP_NOWAIT);  in async_xor_val()
    [all …]
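
async_xor() reserves src_cnt + 1 unmap slots because the XOR destination must be mapped alongside the sources, and bidirectionally, since the engine both reads and rewrites it. A hedged sketch of that mapping step, with the helper name invented and error handling beyond the allocation check trimmed:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    /* Illustrative helper: map src_cnt xor sources plus the destination,
     * mirroring the async_xor() hits above. Assumes the caller already
     * verified alignment with is_dma_xor_aligned(). */
    static struct dmaengine_unmap_data *
    example_map_xor(struct dma_device *device, struct page *dest,
                    struct page **src_list, int src_cnt,
                    unsigned int offset, size_t len)
    {
            struct dmaengine_unmap_data *unmap;
            int i;

            unmap = dmaengine_get_unmap_data(device->dev, src_cnt + 1,
                                             GFP_NOWAIT);
            if (!unmap)
                    return NULL;

            for (i = 0; i < src_cnt; i++)
                    unmap->addr[i] = dma_map_page(device->dev, src_list[i],
                                                  offset, len, DMA_TO_DEVICE);

            /* destination is read and written by the xor engine */
            unmap->addr[src_cnt] = dma_map_page(device->dev, dest, offset,
                                                len, DMA_BIDIRECTIONAL);
            return unmap;
    }
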
async_memcpy.c
     38  struct dma_device *device = chan ? chan->device : NULL;  in async_memcpy() local
     42  if (device)  in async_memcpy()
     43  unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOWAIT);  in async_memcpy()
     45  if (unmap && is_dma_copy_aligned(device, src_offset, dest_offset, len)) {  in async_memcpy()
     54  unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len,  in async_memcpy()
     57  unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len,  in async_memcpy()
     61  tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],  in async_memcpy()
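
The memcpy path is the simplest instance of the pattern: two unmap slots, an alignment check, then device_prep_dma_memcpy() on the mapped addresses. A sketch under those assumptions (the helper name is illustrative):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    /* Illustrative helper: the async_memcpy() offload path condensed.
     * Source and destination are mapped into a two-entry unmap block,
     * then the channel is asked for a memcpy descriptor. */
    static struct dma_async_tx_descriptor *
    example_prep_copy(struct dma_chan *chan, struct page *dest,
                      unsigned int dest_offset, struct page *src,
                      unsigned int src_offset, size_t len)
    {
            struct dma_device *device = chan->device;
            struct dmaengine_unmap_data *unmap;

            unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOWAIT);
            if (!unmap)
                    return NULL;
            if (!is_dma_copy_aligned(device, src_offset, dest_offset, len)) {
                    dmaengine_unmap_put(unmap);
                    return NULL;
            }

            unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len,
                                          DMA_TO_DEVICE);
            unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len,
                                          DMA_FROM_DEVICE);

            return device->device_prep_dma_memcpy(chan, unmap->addr[1],
                                                  unmap->addr[0], len, 0);
    }
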
async_pq.c
     42  struct dma_device *dma = chan->device;  in do_async_gen_syndrome()
    170  struct dma_device *device = chan ? chan->device : NULL;  in async_gen_syndrome() local
    175  if (device)  in async_gen_syndrome()
    176  unmap = dmaengine_get_unmap_data(device->dev, disks, GFP_NOWAIT);  in async_gen_syndrome()
    180  (src_cnt <= dma_maxpq(device, 0) ||  in async_gen_syndrome()
    181  dma_maxpq(device, DMA_PREP_CONTINUE) > 0) &&  in async_gen_syndrome()
    182  is_dma_pq_aligned(device, offset, 0, len)) {  in async_gen_syndrome()
    199  unmap->addr[j] = dma_map_page(device->dev, blocks[i], offset,  in async_gen_syndrome()
    212  unmap->addr[j++] = dma_map_page(device->dev, P(blocks, disks),  in async_gen_syndrome()
    221  unmap->addr[j++] = dma_map_page(device->dev, Q(blocks, disks),  in async_gen_syndrome()
    [all …]
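
async_gen_syndrome() only offloads when the engine can take all sources in a single descriptor (dma_maxpq(device, 0)) or supports continuation (DMA_PREP_CONTINUE), and when the buffers satisfy the engine's P+Q alignment rules. Condensed into an illustrative predicate (the helper name is invented):

    #include <linux/dmaengine.h>

    /* Illustrative predicate: can this engine generate a P+Q syndrome
     * over src_cnt sources at this offset and length? Mirrors the test
     * quoted from async_gen_syndrome(). */
    static bool example_pq_offload_ok(struct dma_device *device, int src_cnt,
                                      unsigned int offset, size_t len)
    {
            return device &&
                   (src_cnt <= dma_maxpq(device, 0) ||
                    dma_maxpq(device, DMA_PREP_CONTINUE) > 0) &&
                   is_dma_pq_aligned(device, offset, 0, len);
    }
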
async_raid6_recov.c
     23  struct dma_device *dma = chan ? chan->device : NULL;  in async_sum_product()
     33  struct device *dev = dma->dev;  in async_sum_product()
     88  struct dma_device *dma = chan ? chan->device : NULL;  in async_mult()
     98  struct device *dev = dma->dev;  in async_mult()
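
Both recovery helpers rely on the chan ? chan->device : NULL idiom: the dma pointer doubles as the offload/fallback switch, and dma->dev is dereferenced only on the hardware branch. A skeletal sketch of that control flow; both branch bodies are stand-ins:

    #include <linux/dmaengine.h>

    /* Skeletal sketch of the offload-or-fallback split in
     * async_sum_product() and async_mult(): dma is NULL when no
     * capable channel was found. */
    static void example_offload_or_cpu(struct dma_chan *chan)
    {
            struct dma_device *dma = chan ? chan->device : NULL;

            if (dma) {
                    struct device *dev = dma->dev;

                    /* hardware path: map buffers against dev, prep, submit */
                    (void)dev;
                    return;
            }

            /* no channel: run the synchronous CPU fallback instead */
    }
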
/crypto/
crypto_engine.c
    426  struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)  in crypto_engine_alloc_init()
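
crypto_engine_alloc_init() is the driver-facing constructor: it allocates an engine bound to dev, and rt selects a realtime-priority kthread for the queue. A hedged probe-time sketch; the probe function and its use of platform_device are illustrative:

    #include <crypto/engine.h>
    #include <linux/errno.h>
    #include <linux/platform_device.h>

    /* Illustrative probe: allocate a crypto engine bound to this
     * device, asking for a realtime queue thread, then start it. */
    static int example_probe(struct platform_device *pdev)
    {
            struct crypto_engine *engine;

            engine = crypto_engine_alloc_init(&pdev->dev, true);
            if (!engine)
                    return -ENOMEM;

            return crypto_engine_start(engine);
    }
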