Searched refs:walk (Results 1 – 16 of 16) sorted by relevance

/crypto/
skcipher.c:43 static int skcipher_walk_next(struct skcipher_walk *walk);
45 static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr) in skcipher_unmap() argument
47 if (PageHighMem(scatterwalk_page(walk))) in skcipher_unmap()
51 static inline void *skcipher_map(struct scatter_walk *walk) in skcipher_map() argument
53 struct page *page = scatterwalk_page(walk); in skcipher_map()
56 offset_in_page(walk->offset); in skcipher_map()
59 static inline void skcipher_map_src(struct skcipher_walk *walk) in skcipher_map_src() argument
61 walk->src.virt.addr = skcipher_map(&walk->in); in skcipher_map_src()
64 static inline void skcipher_map_dst(struct skcipher_walk *walk) in skcipher_map_dst() argument
66 walk->dst.virt.addr = skcipher_map(&walk->out); in skcipher_map_dst()
[all …]
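
skcipher.c implements the walker itself; most of the hits below consume it. As a reference point, here is a minimal sketch of the consumer-side loop, assuming a hypothetical per-chunk helper my_cipher_do_chunk() that is not part of the kernel API:

    #include <crypto/internal/skcipher.h>

    /* Hypothetical per-chunk worker standing in for a real cipher. */
    void my_cipher_do_chunk(void *ctx, u8 *dst, const u8 *src,
                            unsigned int nbytes, u8 *iv);

    static int my_cipher_crypt(struct skcipher_request *req)
    {
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        void *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        /* Map the request's scatterlists one virtually contiguous chunk
         * at a time; atomic=false means the walk may sleep between chunks. */
        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
            my_cipher_do_chunk(ctx, walk.dst.virt.addr,
                               walk.src.virt.addr, walk.nbytes, walk.iv);
            /* 0 = nothing left unprocessed in this chunk. */
            err = skcipher_walk_done(&walk, 0);
        }

        return err;
    }
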
cfb.c:43 static void crypto_cfb_final(struct skcipher_walk *walk, in crypto_cfb_final() argument
49 u8 *src = walk->src.virt.addr; in crypto_cfb_final()
50 u8 *dst = walk->dst.virt.addr; in crypto_cfb_final()
51 u8 *iv = walk->iv; in crypto_cfb_final()
52 unsigned int nbytes = walk->nbytes; in crypto_cfb_final()
58 static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk, in crypto_cfb_encrypt_segment() argument
62 unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_segment()
63 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_segment()
64 u8 *dst = walk->dst.virt.addr; in crypto_cfb_encrypt_segment()
65 u8 *iv = walk->iv; in crypto_cfb_encrypt_segment()
[all …]
cbc.c:17 static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk, in crypto_cbc_encrypt_segment() argument
22 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment()
23 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_segment()
24 u8 *dst = walk->dst.virt.addr; in crypto_cbc_encrypt_segment()
27 u8 *iv = walk->iv; in crypto_cbc_encrypt_segment()
45 static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk, in crypto_cbc_encrypt_inplace() argument
50 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace()
51 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_inplace()
54 u8 *iv = walk->iv; in crypto_cbc_encrypt_inplace()
68 memcpy(walk->iv, iv, bsize); in crypto_cbc_encrypt_inplace()
[all …]
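
The cbc.c hits split the work into a segment path (dst != src) and an in-place path. A rough sketch of the per-block chaining in the segment case, assuming a plain crypto_cipher handle; the real code receives the single-block encrypt function as a callback instead:

    #include <crypto/algapi.h>              /* crypto_xor() */
    #include <crypto/internal/skcipher.h>
    #include <linux/crypto.h>
    #include <linux/string.h>

    /* Caller guarantees walk->nbytes >= bsize. */
    static unsigned int cbc_encrypt_segment_sketch(struct crypto_cipher *tfm,
                                                   struct skcipher_walk *walk)
    {
        unsigned int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 *iv = walk->iv;

        do {
            crypto_xor(iv, src, bsize);                /* iv ^= plaintext   */
            crypto_cipher_encrypt_one(tfm, dst, iv);   /* dst = E_k(iv)     */
            memcpy(iv, dst, bsize);                    /* ciphertext chains */

            src += bsize;
            dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;    /* leftover byte count for skcipher_walk_done() */
    }
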
pcbc.c:21 struct skcipher_walk *walk, in crypto_pcbc_encrypt_segment() argument
25 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment()
26 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
27 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_encrypt_segment()
28 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_segment()
43 struct skcipher_walk *walk, in crypto_pcbc_encrypt_inplace() argument
47 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace()
48 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace()
49 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_inplace()
68 struct skcipher_walk walk; in crypto_pcbc_encrypt() local
[all …]
ahash.c:41 static int hash_walk_next(struct crypto_hash_walk *walk) in hash_walk_next() argument
43 unsigned int alignmask = walk->alignmask; in hash_walk_next()
44 unsigned int offset = walk->offset; in hash_walk_next()
45 unsigned int nbytes = min(walk->entrylen, in hash_walk_next()
48 walk->data = kmap_atomic(walk->pg); in hash_walk_next()
49 walk->data += offset; in hash_walk_next()
58 walk->entrylen -= nbytes; in hash_walk_next()
62 static int hash_walk_new_entry(struct crypto_hash_walk *walk) in hash_walk_new_entry() argument
66 sg = walk->sg; in hash_walk_new_entry()
67 walk->offset = sg->offset; in hash_walk_new_entry()
[all …]
ofb.c:23 struct skcipher_walk walk; in crypto_ofb_crypt() local
26 err = skcipher_walk_virt(&walk, req, false); in crypto_ofb_crypt()
28 while (walk.nbytes >= bsize) { in crypto_ofb_crypt()
29 const u8 *src = walk.src.virt.addr; in crypto_ofb_crypt()
30 u8 *dst = walk.dst.virt.addr; in crypto_ofb_crypt()
31 u8 * const iv = walk.iv; in crypto_ofb_crypt()
32 unsigned int nbytes = walk.nbytes; in crypto_ofb_crypt()
41 err = skcipher_walk_done(&walk, nbytes); in crypto_ofb_crypt()
44 if (walk.nbytes) { in crypto_ofb_crypt()
45 crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv); in crypto_ofb_crypt()
[all …]
ctr.c:28 static void crypto_ctr_crypt_final(struct skcipher_walk *walk, in crypto_ctr_crypt_final() argument
33 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_final()
36 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_final()
37 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_final()
38 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_final()
46 static int crypto_ctr_crypt_segment(struct skcipher_walk *walk, in crypto_ctr_crypt_segment() argument
52 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_segment()
53 u8 *src = walk->src.virt.addr; in crypto_ctr_crypt_segment()
54 u8 *dst = walk->dst.virt.addr; in crypto_ctr_crypt_segment()
55 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_segment()
[all …]
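
ctr.c keeps the big-endian counter block in walk->iv. A sketch of the per-block step it performs, again assuming a plain crypto_cipher handle and a caller-provided keystream buffer:

    #include <crypto/algapi.h>              /* crypto_xor_cpy(), crypto_inc() */
    #include <crypto/internal/skcipher.h>
    #include <linux/crypto.h>

    static void ctr_block_sketch(struct crypto_cipher *tfm,
                                 struct skcipher_walk *walk,
                                 u8 *keystream, unsigned int bsize)
    {
        u8 *ctrblk = walk->iv;

        /* keystream = E_k(counter); dst = src ^ keystream; counter++ */
        crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
        crypto_xor_cpy(walk->dst.virt.addr, walk->src.virt.addr,
                       keystream, bsize);
        crypto_inc(ctrblk, bsize);
    }
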
scatterwalk.c:26 void scatterwalk_copychunks(void *buf, struct scatter_walk *walk, in scatterwalk_copychunks() argument
30 unsigned int len_this_page = scatterwalk_pagelen(walk); in scatterwalk_copychunks()
37 vaddr = scatterwalk_map(walk); in scatterwalk_copychunks()
42 scatterwalk_advance(walk, len_this_page); in scatterwalk_copychunks()
50 scatterwalk_pagedone(walk, out & 1, 1); in scatterwalk_copychunks()
58 struct scatter_walk walk; in scatterwalk_map_and_copy() local
66 scatterwalk_start(&walk, sg); in scatterwalk_map_and_copy()
67 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
68 scatterwalk_done(&walk, out, 0); in scatterwalk_map_and_copy()
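
scatterwalk.c is the layer underneath both walkers. Its most widely used entry point is scatterwalk_map_and_copy(), which copies between a linear buffer and an arbitrary offset in a scatterlist; a small usage sketch (the 16-byte tag and the offset are illustrative, not taken from the file):

    #include <crypto/scatterwalk.h>

    /* Copy a 16-byte tag between a linear buffer and a scatterlist.
     * The last argument selects direction: 0 = sg -> buf, 1 = buf -> sg. */
    static void copy_tag_sketch(u8 *tag, struct scatterlist *sg,
                                unsigned int offset, bool to_sg)
    {
        scatterwalk_map_and_copy(tag, sg, offset, 16, to_sg ? 1 : 0);
    }
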
arc4.c:30 struct skcipher_walk walk; in crypto_arc4_crypt() local
33 err = skcipher_walk_virt(&walk, req, false); in crypto_arc4_crypt()
35 while (walk.nbytes > 0) { in crypto_arc4_crypt()
36 arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr, in crypto_arc4_crypt()
37 walk.nbytes); in crypto_arc4_crypt()
38 err = skcipher_walk_done(&walk, 0); in crypto_arc4_crypt()
aegis128-core.c:284 struct scatter_walk walk; in crypto_aegis128_process_ad() local
288 scatterwalk_start(&walk, sg_src); in crypto_aegis128_process_ad()
290 unsigned int size = scatterwalk_clamp(&walk, assoclen); in crypto_aegis128_process_ad()
292 void *mapped = scatterwalk_map(&walk); in crypto_aegis128_process_ad()
315 scatterwalk_advance(&walk, size); in crypto_aegis128_process_ad()
316 scatterwalk_done(&walk, 0, assoclen); in crypto_aegis128_process_ad()
328 struct skcipher_walk *walk, in crypto_aegis128_process_crypt() argument
335 while (walk->nbytes) { in crypto_aegis128_process_crypt()
336 unsigned int nbytes = walk->nbytes; in crypto_aegis128_process_crypt()
338 if (nbytes < walk->total) in crypto_aegis128_process_crypt()
[all …]
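
aegis128-core.c walks the associated data by hand with the scatter_walk primitives. A sketch of that map/clamp/advance cycle, with a hypothetical process_ad_chunk() standing in for the AEGIS state update:

    #include <crypto/scatterwalk.h>

    /* Hypothetical consumer of one mapped piece of associated data. */
    void process_ad_chunk(void *state, const u8 *src, unsigned int len);

    static void walk_ad_sketch(void *state, struct scatterlist *sg_src,
                               unsigned int assoclen)
    {
        struct scatter_walk walk;

        scatterwalk_start(&walk, sg_src);
        while (assoclen != 0) {
            /* Clamp to what is left of the current page/entry. */
            unsigned int size = scatterwalk_clamp(&walk, assoclen);
            void *mapped = scatterwalk_map(&walk);

            process_ad_chunk(state, mapped, size);

            scatterwalk_unmap(mapped);
            scatterwalk_advance(&walk, size);
            assoclen -= size;
            /* Nonzero 'more' keeps the walk open for the next chunk. */
            scatterwalk_done(&walk, 0, assoclen);
        }
    }
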
chacha_generic.c:18 struct skcipher_walk walk; in chacha_stream_xor() local
22 err = skcipher_walk_virt(&walk, req, false); in chacha_stream_xor()
26 while (walk.nbytes > 0) { in chacha_stream_xor()
27 unsigned int nbytes = walk.nbytes; in chacha_stream_xor()
29 if (nbytes < walk.total) in chacha_stream_xor()
32 chacha_crypt_generic(state, walk.dst.virt.addr, in chacha_stream_xor()
33 walk.src.virt.addr, nbytes, ctx->nrounds); in chacha_stream_xor()
34 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in chacha_stream_xor()
crypto_null.c:78 struct skcipher_walk walk; in null_skcipher_crypt() local
81 err = skcipher_walk_virt(&walk, req, false); in null_skcipher_crypt()
83 while (walk.nbytes) { in null_skcipher_crypt()
84 if (walk.src.virt.addr != walk.dst.virt.addr) in null_skcipher_crypt()
85 memcpy(walk.dst.virt.addr, walk.src.virt.addr, in null_skcipher_crypt()
86 walk.nbytes); in null_skcipher_crypt()
87 err = skcipher_walk_done(&walk, 0); in null_skcipher_crypt()
salsa20_generic.c:157 struct skcipher_walk walk; in salsa20_crypt() local
161 err = skcipher_walk_virt(&walk, req, false); in salsa20_crypt()
165 while (walk.nbytes > 0) { in salsa20_crypt()
166 unsigned int nbytes = walk.nbytes; in salsa20_crypt()
168 if (nbytes < walk.total) in salsa20_crypt()
169 nbytes = round_down(nbytes, walk.stride); in salsa20_crypt()
171 salsa20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, in salsa20_crypt()
173 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in salsa20_crypt()
ecb.c:21 struct skcipher_walk walk; in crypto_ecb_crypt() local
25 err = skcipher_walk_virt(&walk, req, false); in crypto_ecb_crypt()
27 while ((nbytes = walk.nbytes) != 0) { in crypto_ecb_crypt()
28 const u8 *src = walk.src.virt.addr; in crypto_ecb_crypt()
29 u8 *dst = walk.dst.virt.addr; in crypto_ecb_crypt()
38 err = skcipher_walk_done(&walk, nbytes); in crypto_ecb_crypt()
shash.c:269 struct crypto_hash_walk walk; in shash_ahash_update() local
272 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; in shash_ahash_update()
273 nbytes = crypto_hash_walk_done(&walk, nbytes)) in shash_ahash_update()
274 nbytes = crypto_shash_update(desc, walk.data, nbytes); in shash_ahash_update()
292 struct crypto_hash_walk walk; in shash_ahash_finup() local
295 nbytes = crypto_hash_walk_first(req, &walk); in shash_ahash_finup()
300 nbytes = crypto_hash_walk_last(&walk) ? in shash_ahash_finup()
301 crypto_shash_finup(desc, walk.data, nbytes, in shash_ahash_finup()
303 crypto_shash_update(desc, walk.data, nbytes); in shash_ahash_finup()
304 nbytes = crypto_hash_walk_done(&walk, nbytes); in shash_ahash_finup()
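
shash.c drives the hash-side walker; the shash_ahash_update() hit above is already the canonical loop. Restated as a self-contained sketch:

    #include <crypto/internal/hash.h>

    /* Feed every mapped chunk of an ahash request into the shash core.
     * crypto_hash_walk_done() consumes the previous step's return value
     * and either maps the next chunk or finishes the walk. */
    static int hash_update_sketch(struct ahash_request *req,
                                  struct shash_desc *desc)
    {
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
            nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
    }
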
keywrap.c:102 static void crypto_kw_scatterlist_ff(struct scatter_walk *walk, in crypto_kw_scatterlist_ff() argument
114 scatterwalk_start(walk, sg); in crypto_kw_scatterlist_ff()
115 scatterwalk_advance(walk, skip); in crypto_kw_scatterlist_ff()
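
keywrap.c's crypto_kw_scatterlist_ff() fast-forwards a scatter_walk to a given offset. Because scatterwalk_advance() only adjusts the offset within the current entry, whole scatterlist entries have to be skipped first; a sketch of that idea:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    static void scatter_walk_ff_sketch(struct scatter_walk *walk,
                                       struct scatterlist *sg,
                                       unsigned int skip)
    {
        /* Skip whole entries until 'skip' lands inside one of them. */
        while (sg && skip >= sg->length) {
            skip -= sg->length;
            sg = sg_next(sg);
        }

        scatterwalk_start(walk, sg);
        scatterwalk_advance(walk, skip);
    }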