
Searched refs:addr (Results 1 – 21 of 21) sorted by relevance

/crypto/
cfb.c
49 u8 *src = walk->src.virt.addr; in crypto_cfb_final()
50 u8 *dst = walk->dst.virt.addr; in crypto_cfb_final()
63 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_segment()
64 u8 *dst = walk->dst.virt.addr; in crypto_cfb_encrypt_segment()
86 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_inplace()
113 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_cfb_encrypt()
133 u8 *src = walk->src.virt.addr; in crypto_cfb_decrypt_segment()
134 u8 *dst = walk->dst.virt.addr; in crypto_cfb_decrypt_segment()
156 u8 *src = walk->src.virt.addr; in crypto_cfb_decrypt_inplace()
173 if (walk->src.virt.addr == walk->dst.virt.addr) in crypto_cfb_decrypt_blocks()
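These cfb.c hits, like the pcbc.c, cbc.c, xctr.c and ctr.c hits that follow, all come from the same skcipher_walk pattern: the walk maps one chunk at a time, and comparing src.virt.addr against dst.virt.addr selects the in-place or the segment path. A minimal sketch of that loop, assuming the in-tree skcipher walk API; the example_* helpers are hypothetical placeholders, not from the listing:

        #include <linux/string.h>
        #include <crypto/internal/skcipher.h>

        /* Placeholder transforms; a real mode would encrypt/XOR here.
         * Each returns the number of bytes left unprocessed. */
        static unsigned int example_crypt_segment(struct skcipher_walk *walk)
        {
                memcpy(walk->dst.virt.addr, walk->src.virt.addr, walk->nbytes);
                return 0;
        }

        static unsigned int example_crypt_inplace(struct skcipher_walk *walk)
        {
                return 0; /* data already sits in the one shared buffer */
        }

        static int example_crypt(struct skcipher_request *req)
        {
                struct skcipher_walk walk;
                int err;

                err = skcipher_walk_virt(&walk, req, false);

                while (walk.nbytes) {
                        unsigned int left;

                        /* the aliasing test seen throughout the hits above */
                        if (walk.src.virt.addr == walk.dst.virt.addr)
                                left = example_crypt_inplace(&walk);
                        else
                                left = example_crypt_segment(&walk);

                        err = skcipher_walk_done(&walk, left);
                }
                return err;
        }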
pcbc.c
26 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
27 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_encrypt_segment()
48 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace()
75 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_pcbc_encrypt()
93 u8 *src = walk->src.virt.addr; in crypto_pcbc_decrypt_segment()
94 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_decrypt_segment()
115 u8 *src = walk->src.virt.addr; in crypto_pcbc_decrypt_inplace()
142 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_pcbc_decrypt()
cbc.c
23 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_segment()
24 u8 *dst = walk->dst.virt.addr; in crypto_cbc_encrypt_segment()
51 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_inplace()
82 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_cbc_encrypt()
98 u8 *src = walk->src.virt.addr; in crypto_cbc_decrypt_segment()
99 u8 *dst = walk->dst.virt.addr; in crypto_cbc_decrypt_segment()
128 u8 *src = walk->src.virt.addr; in crypto_cbc_decrypt_inplace()
164 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_cbc_decrypt()
xctr.c
38 const u8 *src = walk->src.virt.addr; in crypto_xctr_crypt_final()
39 u8 *dst = walk->dst.virt.addr; in crypto_xctr_crypt_final()
54 const u8 *src = walk->src.virt.addr; in crypto_xctr_crypt_segment()
55 u8 *dst = walk->dst.virt.addr; in crypto_xctr_crypt_segment()
81 u8 *data = walk->src.virt.addr; in crypto_xctr_crypt_inplace()
112 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_xctr_crypt()
skcipher.c
61 walk->src.virt.addr = skcipher_map(&walk->in); in skcipher_map_src()
66 walk->dst.virt.addr = skcipher_map(&walk->out); in skcipher_map_dst()
71 skcipher_unmap(&walk->in, walk->src.virt.addr); in skcipher_unmap_src()
76 skcipher_unmap(&walk->out, walk->dst.virt.addr); in skcipher_unmap_dst()
96 u8 *addr; in skcipher_done_slow() local
98 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); in skcipher_done_slow()
99 addr = skcipher_get_spot(addr, bsize); in skcipher_done_slow()
100 scatterwalk_copychunks(addr, &walk->out, bsize, in skcipher_done_slow()
129 memcpy(walk->dst.virt.addr, walk->page, n); in skcipher_walk_done()
274 walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1); in skcipher_next_slow()
[all …]
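The skcipher.c hits at lines 98-99 and 274 rely on the bounce-buffer alignment idiom: over-allocate by alignmask bytes, then round the working pointer up to the next (alignmask + 1) boundary, where alignmask is a power of two minus one (e.g. 15 for a cipher wanting 16-byte alignment). A hedged sketch of the trick in isolation (alloc_aligned is a hypothetical name):

        #include <linux/types.h>
        #include <linux/kernel.h> /* PTR_ALIGN() */
        #include <linux/slab.h>

        /* Returns an aligned working pointer inside a fresh allocation;
         * *buffer receives the raw pointer to pass to kfree() later. */
        static u8 *alloc_aligned(unsigned int bsize, unsigned int alignmask,
                                 u8 **buffer)
        {
                *buffer = kmalloc(bsize + alignmask, GFP_ATOMIC);
                if (!*buffer)
                        return NULL;
                return PTR_ALIGN(*buffer, alignmask + 1);
        }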
ofb.c
29 const u8 *src = walk.src.virt.addr; in crypto_ofb_crypt()
30 u8 *dst = walk.dst.virt.addr; in crypto_ofb_crypt()
46 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv, in crypto_ofb_crypt()
ctr.c
36 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_final()
37 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_final()
53 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_segment()
54 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_segment()
81 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_inplace()
111 if (walk.src.virt.addr == walk.dst.virt.addr) in crypto_ctr_crypt()
crypto_null.c
84 if (walk.src.virt.addr != walk.dst.virt.addr) in null_skcipher_crypt()
85 memcpy(walk.dst.virt.addr, walk.src.virt.addr, in null_skcipher_crypt()
arc4.c
36 arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr, in crypto_arc4_crypt()
ecb.c
28 const u8 *src = walk.src.virt.addr; in crypto_ecb_crypt()
29 u8 *dst = walk.dst.virt.addr; in crypto_ecb_crypt()
chacha_generic.c
32 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_stream_xor()
33 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_stream_xor()
lrw.c
173 wsrc = w.src.virt.addr; in lrw_xor_tweak()
174 wdst = w.dst.virt.addr; in lrw_xor_tweak()
xts.c
105 wsrc = w.src.virt.addr; in xts_xor_tweak()
106 wdst = w.dst.virt.addr; in xts_xor_tweak()
aegis128-core.c
337 crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes); in crypto_aegis128_process_crypt()
testmgr.c
208 static inline void testmgr_poison(void *addr, size_t len) in testmgr_poison() argument
210 memset(addr, TESTMGR_POISON_BYTE, len); in testmgr_poison()
214 static inline bool testmgr_is_poison(const void *addr, size_t len) in testmgr_is_poison() argument
216 return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL; in testmgr_is_poison()
602 void *addr; in build_test_sglist() local
614 addr = &tsgl->bufs[i][offset]; in build_test_sglist()
615 sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length); in build_test_sglist()
624 copied = copy_from_iter(addr, copy_len, data); in build_test_sglist()
627 testmgr_poison(addr + copy_len, partitions[i].length + in build_test_sglist()
630 testmgr_poison(addr, partitions[i].length + in build_test_sglist()
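A usage sketch for the testmgr.c poison helpers above, matching the pattern of the hits at lines 624-630 (buf, buf_len and copy_len are hypothetical names): poison the tail beyond the bytes actually copied in, then verify it after the operation under test to catch overruns. memchr_inv() returns NULL only when every byte still holds the sentinel.

        /* after copying copy_len bytes into a buf of buf_len bytes: */
        testmgr_poison(buf + copy_len, buf_len - copy_len);

        /* ... run the operation under test against buf ... */

        if (!testmgr_is_poison(buf + copy_len, buf_len - copy_len))
                pr_err("operation wrote past the %zu bytes it was given\n",
                       copy_len);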
hctr2.c
211 err = crypto_shash_update(hash_desc, miter.addr, n); in hctr2_hash_message()
adiantum.c
270 err = crypto_shash_update(hash_desc, miter.addr, n); in adiantum_hash_message()
/crypto/async_tx/
async_pq.c
76 dma_dest[0] = unmap->addr[disks - 2]; in do_async_gen_syndrome()
77 dma_dest[1] = unmap->addr[disks - 1]; in do_async_gen_syndrome()
79 &unmap->addr[src_off], in do_async_gen_syndrome()
213 unmap->addr[j] = dma_map_page(device->dev, blocks[i], in async_gen_syndrome()
226 unmap->addr[j++] = dma_map_page(device->dev, P(blocks, disks), in async_gen_syndrome()
230 unmap->addr[j++] = 0; in async_gen_syndrome()
236 unmap->addr[j++] = dma_map_page(device->dev, Q(blocks, disks), in async_gen_syndrome()
240 unmap->addr[j++] = 0; in async_gen_syndrome()
326 unmap->addr[j] = dma_map_page(dev, blocks[i], in async_syndrome_val()
342 unmap->addr[j++] = pq[0]; in async_syndrome_val()
[all …]
async_memcpy.c
54 unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len, in async_memcpy()
57 unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len, in async_memcpy()
61 tx = device->device_prep_dma_memcpy(chan, unmap->addr[1], in async_memcpy()
62 unmap->addr[0], len, in async_memcpy()
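The async_memcpy.c hits show the dmaengine_unmap_data bookkeeping around unmap->addr[]: the tracking structure is allocated, source and destination pages are mapped into the addr[] slots, and the device's prep hook consumes those DMA handles. A minimal sketch, assuming chan is a channel with memcpy capability and with error handling of the mappings trimmed (sketch_dma_memcpy is a hypothetical name):

        #include <linux/dmaengine.h>

        static struct dma_async_tx_descriptor *
        sketch_dma_memcpy(struct dma_chan *chan, struct page *dest,
                          struct page *src, size_t len)
        {
                struct dma_device *device = chan->device;
                struct dmaengine_unmap_data *unmap;
                struct dma_async_tx_descriptor *tx;

                unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOWAIT);
                if (!unmap)
                        return NULL;

                unmap->to_cnt = 1;   /* one source mapping */
                unmap->from_cnt = 1; /* one destination mapping */
                unmap->len = len;
                unmap->addr[0] = dma_map_page(device->dev, src, 0, len,
                                              DMA_TO_DEVICE);
                unmap->addr[1] = dma_map_page(device->dev, dest, 0, len,
                                              DMA_FROM_DEVICE);

                tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],
                                                    unmap->addr[0], len,
                                                    DMA_PREP_INTERRUPT);
                if (tx)
                        dma_set_unmap(tx, unmap); /* tx takes its own ref */
                dmaengine_unmap_put(unmap);       /* drop ours */
                return tx;
        }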
async_xor.c
34 dma_addr_t dma_dest = unmap->addr[unmap->to_cnt]; in do_async_xor()
35 dma_addr_t *src_list = unmap->addr; in do_async_xor()
63 if (src_list > unmap->addr) in do_async_xor()
210 unmap->addr[j++] = dma_map_page(device->dev, src_list[i], in async_xor_offs()
216 unmap->addr[j] = dma_map_page(device->dev, dest, offset, len, in async_xor_offs()
343 unmap->addr[i] = dma_map_page(device->dev, src_list[i], in async_xor_val_offs()
350 tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt, in async_xor_val_offs()
359 unmap->addr, src_cnt, len, result, in async_xor_val_offs()
async_raid6_recov.c
41 unmap->addr[0] = dma_map_page(dev, srcs[0], src_offs[0], in async_sum_product()
43 unmap->addr[1] = dma_map_page(dev, srcs[1], src_offs[1], in async_sum_product()
47 unmap->addr[2] = dma_map_page(dev, dest, d_off, in async_sum_product()
51 pq[1] = unmap->addr[2]; in async_sum_product()
54 tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef, in async_sum_product()
109 unmap->addr[0] = dma_map_page(dev, src, s_off, in async_mult()
112 unmap->addr[1] = dma_map_page(dev, dest, d_off, in async_mult()
114 dma_dest[1] = unmap->addr[1]; in async_mult()
122 tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr, in async_mult()
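For the P/Q hits in async_pq.c and async_raid6_recov.c, the distinctive part is the prep call itself: pq[0] and pq[1] carry the P and Q destination handles, and each source gets a GF(256) coefficient from the scf array. A thin hedged sketch, assuming chan's device advertises PQ capability (sketch_prep_pq is a hypothetical name; mapping the sources via dma_map_page(), as the hits above do, is omitted):

        #include <linux/dmaengine.h>

        static struct dma_async_tx_descriptor *
        sketch_prep_pq(struct dma_chan *chan, dma_addr_t pq[2],
                       dma_addr_t *srcs, unsigned int src_cnt,
                       const unsigned char *coefs, size_t len)
        {
                /* pq[0] = P destination, pq[1] = Q destination;
                 * coefs[i] multiplies srcs[i] in the syndrome sum */
                return chan->device->device_prep_dma_pq(chan, pq, srcs,
                                                        src_cnt, coefs, len,
                                                        DMA_PREP_INTERRUPT);
        }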