Searched refs:addr (Results 1 – 20 of 20) sorted by relevance

/crypto/
blkcipher.c
41 walk->src.virt.addr = scatterwalk_map(&walk->in); in blkcipher_map_src()
46 walk->dst.virt.addr = scatterwalk_map(&walk->out); in blkcipher_map_dst()
51 scatterwalk_unmap(walk->src.virt.addr); in blkcipher_unmap_src()
56 scatterwalk_unmap(walk->dst.virt.addr); in blkcipher_unmap_dst()
71 u8 *addr; in blkcipher_done_slow() local
73 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); in blkcipher_done_slow()
74 addr = blkcipher_get_spot(addr, bsize); in blkcipher_done_slow()
75 scatterwalk_copychunks(addr, &walk->out, bsize, 1); in blkcipher_done_slow()
83 memcpy(walk->dst.virt.addr, walk->page, n); in blkcipher_done_fast()
161 walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer, in blkcipher_next_slow()
[all …]
cfb.c
48 u8 *src = walk->src.virt.addr; in crypto_cfb_final()
49 u8 *dst = walk->dst.virt.addr; in crypto_cfb_final()
62 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_segment()
63 u8 *dst = walk->dst.virt.addr; in crypto_cfb_encrypt_segment()
85 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_inplace()
112 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_cfb_encrypt()
132 u8 *src = walk->src.virt.addr; in crypto_cfb_decrypt_segment()
133 u8 *dst = walk->dst.virt.addr; in crypto_cfb_decrypt_segment()
155 u8 *src = walk->src.virt.addr; in crypto_cfb_decrypt_inplace()
172 if (walk->src.virt.addr == walk->dst.virt.addr) in crypto_cfb_decrypt_blocks()
pcbc.c
25 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
26 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_encrypt_segment()
47 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace()
74 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_pcbc_encrypt()
92 u8 *src = walk->src.virt.addr; in crypto_pcbc_decrypt_segment()
93 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_decrypt_segment()
114 u8 *src = walk->src.virt.addr; in crypto_pcbc_decrypt_inplace()
141 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_pcbc_decrypt()
ofb.c
28 const u8 *src = walk.src.virt.addr; in crypto_ofb_crypt()
29 u8 *dst = walk.dst.virt.addr; in crypto_ofb_crypt()
45 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv, in crypto_ofb_crypt()
skcipher.c
60 walk->src.virt.addr = skcipher_map(&walk->in); in skcipher_map_src()
65 walk->dst.virt.addr = skcipher_map(&walk->out); in skcipher_map_dst()
70 skcipher_unmap(&walk->in, walk->src.virt.addr); in skcipher_unmap_src()
75 skcipher_unmap(&walk->out, walk->dst.virt.addr); in skcipher_unmap_dst()
95 u8 *addr; in skcipher_done_slow() local
97 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); in skcipher_done_slow()
98 addr = skcipher_get_spot(addr, bsize); in skcipher_done_slow()
99 scatterwalk_copychunks(addr, &walk->out, bsize, in skcipher_done_slow()
128 memcpy(walk->dst.virt.addr, walk->page, n); in skcipher_walk_done()
273 walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1); in skcipher_next_slow()
[all …]
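
The skcipher.c hits above are the walk helpers that map the current scatterlist entry and publish the resulting pointers as walk->src.virt.addr and walk->dst.virt.addr; the mode implementations elsewhere in these results (cfb.c, pcbc.c, ctr.c, ecb.c, arc4.c, ...) consume those pointers inside the standard walk loop. A minimal sketch of that consumer pattern, modelled on the arc4.c hit below (process_bytes() is a hypothetical stand-in for the actual cipher routine):

#include <crypto/internal/skcipher.h>

/* Sketch of the standard skcipher walk loop: skcipher_walk_virt() maps the
 * current scatterlist segment, the transform runs on the mapped virtual
 * addresses, and skcipher_walk_done() unmaps and advances the walk. */
static int example_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		/* process_bytes() is hypothetical; arc4.c calls arc4_crypt()
		 * here with exactly these two pointers. */
		process_bytes(walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
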
crypto_null.c
84 if (walk.src.virt.addr != walk.dst.virt.addr) in null_skcipher_crypt()
85 memcpy(walk.dst.virt.addr, walk.src.virt.addr, in null_skcipher_crypt()
ctr.c
35 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_final()
36 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_final()
52 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_segment()
53 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_segment()
80 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_inplace()
110 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_ctr_crypt()
arc4.c
34 arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr, in crypto_arc4_crypt()
ecb.c
27 const u8 *src = walk.src.virt.addr; in crypto_ecb_crypt()
28 u8 *dst = walk.dst.virt.addr; in crypto_ecb_crypt()
chacha_generic.c
51 chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, in chacha_stream_xor()
salsa20_generic.c
171 salsa20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, in salsa20_crypt()
lrw.c
175 wsrc = w.src.virt.addr; in xor_tweak()
176 wdst = w.dst.virt.addr; in xor_tweak()
aegis128-core.c
340 ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr, in crypto_aegis128_process_crypt()
xts.c
109 wsrc = w.src.virt.addr; in xor_tweak()
110 wdst = w.dst.virt.addr; in xor_tweak()
testmgr.c
190 static inline void testmgr_poison(void *addr, size_t len) in testmgr_poison() argument
192 memset(addr, TESTMGR_POISON_BYTE, len); in testmgr_poison()
196 static inline bool testmgr_is_poison(const void *addr, size_t len) in testmgr_is_poison() argument
198 return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL; in testmgr_is_poison()
572 void *addr; in build_test_sglist() local
584 addr = &tsgl->bufs[i][offset]; in build_test_sglist()
585 sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length); in build_test_sglist()
594 copied = copy_from_iter(addr, copy_len, data); in build_test_sglist()
597 testmgr_poison(addr + copy_len, partitions[i].length + in build_test_sglist()
600 testmgr_poison(addr, partitions[i].length + in build_test_sglist()
adiantum.c
278 err = crypto_shash_update(hash_desc, miter.addr, n); in adiantum_hash_message()
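
The adiantum.c hit feeds miter.addr, the per-entry pointer exposed by the scatterlist mapping iterator, into a shash update. A hedged sketch of that iterator pattern, assuming a hash_desc that has already been initialised (the helper name hash_sg_data is invented for illustration):

#include <linux/scatterlist.h>
#include <crypto/hash.h>

/* Sketch: hash nbytes of data spread across a scatterlist by walking it
 * with sg_miter, which maps each entry and exposes it as miter.addr. */
static int hash_sg_data(struct shash_desc *hash_desc,
			struct scatterlist *sgl, unsigned int nbytes)
{
	struct sg_mapping_iter miter;
	unsigned int n;
	int err = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	while (nbytes && sg_miter_next(&miter)) {
		n = min_t(unsigned int, miter.length, nbytes);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
		nbytes -= n;
	}
	sg_miter_stop(&miter);
	return err;
}
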
/crypto/async_tx/
async_pq.c
76 dma_dest[0] = unmap->addr[disks - 2]; in do_async_gen_syndrome()
77 dma_dest[1] = unmap->addr[disks - 1]; in do_async_gen_syndrome()
79 &unmap->addr[src_off], in do_async_gen_syndrome()
199 unmap->addr[j] = dma_map_page(device->dev, blocks[i], offset, in async_gen_syndrome()
212 unmap->addr[j++] = dma_map_page(device->dev, P(blocks, disks), in async_gen_syndrome()
215 unmap->addr[j++] = 0; in async_gen_syndrome()
221 unmap->addr[j++] = dma_map_page(device->dev, Q(blocks, disks), in async_gen_syndrome()
224 unmap->addr[j++] = 0; in async_gen_syndrome()
309 unmap->addr[j] = dma_map_page(dev, blocks[i], in async_syndrome_val()
325 unmap->addr[j++] = pq[0]; in async_syndrome_val()
[all …]
async_memcpy.c
54 unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len, in async_memcpy()
57 unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len, in async_memcpy()
61 tx = device->device_prep_dma_memcpy(chan, unmap->addr[1], in async_memcpy()
62 unmap->addr[0], len, in async_memcpy()
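
Throughout async_tx, the DMA-mapped page addresses are collected in the addr[] array of a struct dmaengine_unmap_data, so everything can be unmapped in one place when the descriptor completes. A minimal sketch of that map / prep / attach pattern, modelled on the async_memcpy.c hits above (error handling and the synchronous-fallback path omitted; the helper name is invented):

#include <linux/dmaengine.h>

/* Sketch: map src and dest pages into unmap->addr[], hand the DMA addresses
 * to the driver's memcpy prep hook, and attach the unmap data to the
 * descriptor so it is released on completion. */
static struct dma_async_tx_descriptor *
sketch_dma_memcpy(struct dma_chan *chan, struct page *dest, struct page *src,
		  size_t len)
{
	struct dma_device *device = chan->device;
	struct dmaengine_unmap_data *unmap;
	struct dma_async_tx_descriptor *tx;

	unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOWAIT);
	if (!unmap)
		return NULL;

	unmap->addr[0] = dma_map_page(device->dev, src, 0, len, DMA_TO_DEVICE);
	unmap->to_cnt = 1;
	unmap->addr[1] = dma_map_page(device->dev, dest, 0, len, DMA_FROM_DEVICE);
	unmap->from_cnt = 1;
	unmap->len = len;

	tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],
					    unmap->addr[0], len, 0);
	if (tx)
		dma_set_unmap(tx, unmap);	/* unmap runs at completion */

	dmaengine_unmap_put(unmap);		/* drop the local reference */
	return tx;
}
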
async_xor.c
34 dma_addr_t dma_dest = unmap->addr[unmap->to_cnt]; in do_async_xor()
35 dma_addr_t *src_list = unmap->addr; in do_async_xor()
63 if (src_list > unmap->addr) in do_async_xor()
186 unmap->addr[j++] = dma_map_page(device->dev, src_list[i], in async_xor()
191 unmap->addr[j] = dma_map_page(device->dev, dest, offset, len, in async_xor()
283 unmap->addr[i] = dma_map_page(device->dev, src_list[i], in async_xor_val()
289 tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt, in async_xor_val()
298 unmap->addr, src_cnt, len, result, in async_xor_val()
async_raid6_recov.c
40 unmap->addr[0] = dma_map_page(dev, srcs[0], 0, len, DMA_TO_DEVICE); in async_sum_product()
41 unmap->addr[1] = dma_map_page(dev, srcs[1], 0, len, DMA_TO_DEVICE); in async_sum_product()
44 unmap->addr[2] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL); in async_sum_product()
47 pq[1] = unmap->addr[2]; in async_sum_product()
50 tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef, in async_sum_product()
104 unmap->addr[0] = dma_map_page(dev, src, 0, len, DMA_TO_DEVICE); in async_mult()
106 unmap->addr[1] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL); in async_mult()
107 dma_dest[1] = unmap->addr[1]; in async_mult()
115 tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr, in async_mult()
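
In async_raid6_recov.c the same unmap->addr[] array doubles as the source list for the hardware P+Q engine: the mapped sources are scaled by GF(2^8) coefficients and only the Q output is produced, with P generation disabled. A hedged sketch of the prep call issued by async_sum_product(), assuming the three pages have already been mapped into unmap->addr[] as in the hits above (the wrapper name is invented):

#include <linux/dmaengine.h>

/* Sketch: two mapped sources in unmap->addr[0..1], scaled by coef[], are
 * combined into the Q destination in unmap->addr[2]; DMA_PREP_PQ_DISABLE_P
 * tells the engine not to compute a P (XOR) result. */
static struct dma_async_tx_descriptor *
sketch_sum_product_prep(struct dma_chan *chan,
			struct dmaengine_unmap_data *unmap,
			const unsigned char coef[2], size_t len)
{
	struct dma_device *dma = chan->device;
	dma_addr_t pq[2];

	pq[0] = 0;			/* no P output */
	pq[1] = unmap->addr[2];		/* Q output: the DMA_BIDIRECTIONAL dest */

	return dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef,
				       len, DMA_PREP_PQ_DISABLE_P);
}
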