Search results for refs:nbytes (results 1 – 25 of 28, sorted by relevance)

/crypto/

pcbc.c
24 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment() local
36 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment()
38 return nbytes; in crypto_pcbc_encrypt_segment()
46 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace() local
58 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_inplace()
60 return nbytes; in crypto_pcbc_encrypt_inplace()
68 unsigned int nbytes; in crypto_pcbc_encrypt() local
73 while ((nbytes = walk.nbytes)) { in crypto_pcbc_encrypt()
75 nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
78 nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
[all …]
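
The pcbc.c hits above (and the cfb.c, ctr.c and ecb.c ones further down) all share one idiom: nbytes starts at walk->nbytes, whole blocks are consumed until fewer than bsize bytes remain, and the leftover count is returned to the walk machinery. A minimal standalone sketch of just that arithmetic, not kernel code, with the per-block cipher work stubbed out:

    #include <stdio.h>
    #include <stddef.h>

    /* The "while ((nbytes -= bsize) >= bsize)" shape from the hits above:
     * consume whole blocks and return whatever partial tail is left over. */
    static size_t walk_blocks(size_t nbytes, size_t bsize)
    {
        if (nbytes < bsize)
            return nbytes;              /* nothing block-sized to process */

        do {
            /* real code would XOR with the IV, run the block cipher and
             * advance the src/dst pointers by bsize here */
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;                  /* partial tail handed back to the walk */
    }

    int main(void)
    {
        /* 37 bytes with a 16-byte block size: two full blocks, 5 left over */
        printf("leftover: %zu\n", walk_blocks(37, 16));
        return 0;
    }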

cfb.c
51 unsigned int nbytes = walk->nbytes; in crypto_cfb_final() local
54 crypto_xor_cpy(dst, stream, src, nbytes); in crypto_cfb_final()
61 unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_segment() local
73 } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_segment()
77 return nbytes; in crypto_cfb_encrypt_segment()
84 unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_inplace() local
95 } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_inplace()
99 return nbytes; in crypto_cfb_encrypt_inplace()
111 while (walk.nbytes >= bsize) { in crypto_cfb_encrypt()
119 if (walk.nbytes) { in crypto_cfb_encrypt()
[all …]

scatterwalk.c
18 static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) in memcpy_dir() argument
23 memcpy(dst, src, nbytes); in memcpy_dir()
27 size_t nbytes, int out) in scatterwalk_copychunks() argument
33 if (len_this_page > nbytes) in scatterwalk_copychunks()
34 len_this_page = nbytes; in scatterwalk_copychunks()
44 if (nbytes == len_this_page) in scatterwalk_copychunks()
48 nbytes -= len_this_page; in scatterwalk_copychunks()
56 unsigned int start, unsigned int nbytes, int out) in scatterwalk_map_and_copy() argument
61 if (!nbytes) in scatterwalk_map_and_copy()
67 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
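
The scatterwalk.c hits show the chunked-copy loop: each pass clamps the copy length to what is left in the current page, returns as soon as nbytes is satisfied, and otherwise subtracts and moves on. A hedged userspace approximation of that clamp-and-advance shape, with a made-up page_bytes_left() helper and a fixed simulated page size standing in for the real scatterlist bookkeeping:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define SIM_PAGE_SIZE 4096u   /* assumption: fixed page size for the sketch */

    /* Hypothetical stand-in for the per-page limit the real code derives
     * from the scatterlist: bytes left before the next page boundary. */
    static size_t page_bytes_left(const unsigned char *p)
    {
        return SIM_PAGE_SIZE - (size_t)((uintptr_t)p % SIM_PAGE_SIZE);
    }

    /* Copy nbytes in page-limited chunks; the same clamp/early-return
     * shape as the scatterwalk_copychunks() lines above. */
    static void copy_chunks(unsigned char *dst, const unsigned char *src,
                            size_t nbytes)
    {
        for (;;) {
            size_t len_this_page = page_bytes_left(src);

            if (len_this_page > nbytes)
                len_this_page = nbytes;

            memcpy(dst, src, len_this_page);

            if (nbytes == len_this_page)
                return;                   /* request fully satisfied */

            nbytes -= len_this_page;
            dst += len_this_page;
            src += len_this_page;         /* real code steps the scatterlist */
        }
    }

    int main(void)
    {
        static unsigned char src[8192], dst[8192];

        memset(src, 0xab, sizeof(src));
        copy_chunks(dst, src + 100, 5000);    /* straddles a page boundary */
        printf("copied ok: %d\n", memcmp(dst, src + 100, 5000) == 0);
        return 0;
    }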

ecdh.c
72 size_t copied, nbytes, public_key_sz; in ecdh_compute_value() local
75 nbytes = ctx->ndigits << ECC_DIGITS_TO_BYTES_SHIFT; in ecdh_compute_value()
77 public_key_sz = 2 * nbytes; in ecdh_compute_value()
84 shared_secret = kmalloc(nbytes, GFP_KERNEL); in ecdh_compute_value()
111 nbytes = public_key_sz; in ecdh_compute_value()
118 nbytes = min_t(size_t, nbytes, req->dst_len); in ecdh_compute_value()
120 nbytes), in ecdh_compute_value()
121 buf, nbytes); in ecdh_compute_value()
122 if (copied != nbytes) in ecdh_compute_value()

ctr.c
37 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_final() local
40 crypto_xor_cpy(dst, keystream, src, nbytes); in crypto_ctr_crypt_final()
54 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_segment() local
66 } while ((nbytes -= bsize) >= bsize); in crypto_ctr_crypt_segment()
68 return nbytes; in crypto_ctr_crypt_segment()
78 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_inplace() local
93 } while ((nbytes -= bsize) >= bsize); in crypto_ctr_crypt_inplace()
95 return nbytes; in crypto_ctr_crypt_inplace()
104 unsigned int nbytes; in crypto_ctr_crypt() local
109 while (walk.nbytes >= bsize) { in crypto_ctr_crypt()
[all …]

shash.c
243 int nbytes; in shash_ahash_update() local
245 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; in shash_ahash_update()
246 nbytes = crypto_hash_walk_done(&walk, nbytes)) in shash_ahash_update()
247 nbytes = crypto_shash_update(desc, walk.data, nbytes); in shash_ahash_update()
249 return nbytes; in shash_ahash_update()
266 int nbytes; in shash_ahash_finup() local
268 nbytes = crypto_hash_walk_first(req, &walk); in shash_ahash_finup()
269 if (!nbytes) in shash_ahash_finup()
273 nbytes = crypto_hash_walk_last(&walk) ? in shash_ahash_finup()
274 crypto_shash_finup(desc, walk.data, nbytes, in shash_ahash_finup()
[all …]
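
The shash.c lines are the hash-side walk idiom: nbytes alternates between "length of the next chunk to hash" and "result of hashing it" (zero or a negative error), and crypto_hash_walk_done() turns that result back into the next chunk length, zero at the end, or the propagated error. A toy standalone model of that control flow, with walk_first(), walk_done() and toy_update() as invented stand-ins for the real helpers:

    #include <stdio.h>
    #include <stddef.h>

    /* Toy walk over a flat buffer, chunked to mimic per-segment hashing. */
    struct toy_walk {
        const unsigned char *data;   /* current chunk */
        size_t remaining;            /* bytes not yet handed out */
        size_t chunk;                /* fixed chunk size for the simulation */
        size_t cur;                  /* length of the chunk last handed out */
    };

    static int walk_first(struct toy_walk *w, const unsigned char *buf,
                          size_t len, size_t chunk)
    {
        w->data = buf;
        w->remaining = len;
        w->chunk = chunk;
        w->cur = len < chunk ? len : chunk;
        return (int)w->cur;
    }

    /* err is the result of processing the previous chunk: 0 on success,
     * negative on failure, mirroring how the loop above feeds the update
     * result into crypto_hash_walk_done(). */
    static int walk_done(struct toy_walk *w, int err)
    {
        if (err < 0)
            return err;                      /* propagate the error */
        w->data += w->cur;
        w->remaining -= w->cur;
        w->cur = w->remaining < w->chunk ? w->remaining : w->chunk;
        return (int)w->cur;                  /* 0 once everything is consumed */
    }

    /* toy "update": sum the bytes, return 0 for success */
    static int toy_update(unsigned long *sum, const unsigned char *p, int n)
    {
        while (n-- > 0)
            *sum += *p++;
        return 0;
    }

    int main(void)
    {
        unsigned char msg[10] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        unsigned long sum = 0;
        struct toy_walk walk;
        int nbytes;

        /* Same control flow as the shash_ahash_update() hits above. */
        for (nbytes = walk_first(&walk, msg, sizeof(msg), 4); nbytes > 0;
             nbytes = walk_done(&walk, nbytes))
            nbytes = toy_update(&sum, walk.data, nbytes);

        printf("sum = %lu, final nbytes = %d\n", sum, nbytes);  /* 55, 0 */
        return nbytes;
    }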

ofb.c
27 while (walk.nbytes >= bsize) { in crypto_ofb_crypt()
31 unsigned int nbytes = walk.nbytes; in crypto_ofb_crypt() local
38 } while ((nbytes -= bsize) >= bsize); in crypto_ofb_crypt()
40 err = skcipher_walk_done(&walk, nbytes); in crypto_ofb_crypt()
43 if (walk.nbytes) { in crypto_ofb_crypt()
46 walk.nbytes); in crypto_ofb_crypt()

ahash.c
44 unsigned int nbytes = min(walk->entrylen, in hash_walk_next() local
56 if (nbytes > unaligned) in hash_walk_next()
57 nbytes = unaligned; in hash_walk_next()
60 walk->entrylen -= nbytes; in hash_walk_next()
61 return nbytes; in hash_walk_next()
88 unsigned int nbytes; in crypto_hash_walk_done() local
91 nbytes = min(walk->entrylen, in crypto_hash_walk_done()
93 if (nbytes) { in crypto_hash_walk_done()
94 walk->entrylen -= nbytes; in crypto_hash_walk_done()
96 return nbytes; in crypto_hash_walk_done()
[all …]

keywrap.c
153 unsigned int nbytes = req->cryptlen; in crypto_kw_decrypt() local
155 while (nbytes) { in crypto_kw_decrypt()
157 crypto_kw_scatterlist_ff(&src_walk, src, nbytes); in crypto_kw_decrypt()
170 crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes); in crypto_kw_decrypt()
175 nbytes -= SEMIBSIZE; in crypto_kw_decrypt()
226 unsigned int nbytes = req->cryptlen; in crypto_kw_encrypt() local
231 while (nbytes) { in crypto_kw_encrypt()
247 nbytes -= SEMIBSIZE; in crypto_kw_encrypt()

chacha_generic.c
45 while (walk.nbytes > 0) { in chacha_stream_xor()
46 unsigned int nbytes = walk.nbytes; in chacha_stream_xor() local
48 if (nbytes < walk.total) in chacha_stream_xor()
49 nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE); in chacha_stream_xor()
52 nbytes, ctx->nrounds); in chacha_stream_xor()
53 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in chacha_stream_xor()

salsa20_generic.c
165 while (walk.nbytes > 0) { in salsa20_crypt()
166 unsigned int nbytes = walk.nbytes; in salsa20_crypt() local
168 if (nbytes < walk.total) in salsa20_crypt()
169 nbytes = round_down(nbytes, walk.stride); in salsa20_crypt()
172 nbytes); in salsa20_crypt()
173 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in salsa20_crypt()
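
The chacha_generic.c and salsa20_generic.c hits (and the aegis128-core.c ones below) are the stream-cipher variant: any chunk that is not the end of the request is rounded down to a whole number of stride-sized blocks, and the remainder is handed back through skcipher_walk_done(&walk, walk.nbytes - nbytes) so the walk presents it again next time. A small simulation of that carry-back, with made-up per-step mapping limits and a round_down_to() helper in place of the kernel's round_down():

    #include <stdio.h>

    static unsigned int round_down_to(unsigned int n, unsigned int stride)
    {
        return n - (n % stride);
    }

    static unsigned int min_u(unsigned int a, unsigned int b)
    {
        return a < b ? a : b;
    }

    int main(void)
    {
        unsigned int total = 150;    /* bytes still owed to the request */
        unsigned int stride = 64;    /* block size of the stream cipher */
        /* how many contiguous bytes the walk happens to map per step,
         * e.g. limited by page boundaries (made-up numbers) */
        unsigned int limit[] = { 96, 80, 150 };

        for (unsigned int step = 0; total > 0; step++) {
            unsigned int avail  = min_u(total, limit[step % 3]);
            unsigned int nbytes = avail;

            /* only the final chunk of the request may be a partial block */
            if (nbytes < total)
                nbytes = round_down_to(nbytes, stride);

            printf("step %u: mapped %u, processed %u, handed back %u\n",
                   step, avail, nbytes, avail - nbytes);

            total -= nbytes;         /* unprocessed bytes stay owed */
        }
        return 0;
    }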

ecb.c
21 unsigned int nbytes; in crypto_ecb_crypt() local
26 while ((nbytes = walk.nbytes) != 0) { in crypto_ecb_crypt()
35 } while ((nbytes -= bsize) >= bsize); in crypto_ecb_crypt()
37 err = skcipher_walk_done(&walk, nbytes); in crypto_ecb_crypt()

cts.c
154 unsigned int nbytes = req->cryptlen; in crypto_cts_encrypt() local
159 if (nbytes < bsize) in crypto_cts_encrypt()
162 if (nbytes == bsize) { in crypto_cts_encrypt()
166 skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes, in crypto_cts_encrypt()
171 offset = rounddown(nbytes - 1, bsize); in crypto_cts_encrypt()
248 unsigned int nbytes = req->cryptlen; in crypto_cts_decrypt() local
254 if (nbytes < bsize) in crypto_cts_decrypt()
257 if (nbytes == bsize) { in crypto_cts_decrypt()
261 skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes, in crypto_cts_decrypt()
271 offset = rounddown(nbytes - 1, bsize); in crypto_cts_decrypt()

aegis128-core.c
334 while (walk.nbytes) { in crypto_aegis128_process_crypt()
335 unsigned int nbytes = walk.nbytes; in crypto_aegis128_process_crypt() local
337 if (nbytes < walk.total) in crypto_aegis128_process_crypt()
338 nbytes = round_down(nbytes, walk.stride); in crypto_aegis128_process_crypt()
341 nbytes); in crypto_aegis128_process_crypt()
343 skcipher_walk_done(&walk, walk.nbytes - nbytes); in crypto_aegis128_process_crypt()

skcipher.c
106 unsigned int n = walk->nbytes; in skcipher_walk_done()
107 unsigned int nbytes = 0; in skcipher_walk_done() local
114 nbytes = walk->total - n; in skcipher_walk_done()
139 nbytes = 0; in skcipher_walk_done()
147 walk->total = nbytes; in skcipher_walk_done()
148 walk->nbytes = 0; in skcipher_walk_done()
152 scatterwalk_done(&walk->in, 0, nbytes); in skcipher_walk_done()
153 scatterwalk_done(&walk->out, 1, nbytes); in skcipher_walk_done()
155 if (nbytes) { in skcipher_walk_done()
279 walk->nbytes = bsize; in skcipher_next_slow()
[all …]

ansi_cprng.c
180 static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx, in get_prng_bytes() argument
184 unsigned int byte_count = (unsigned int)nbytes; in get_prng_bytes()
200 if (nbytes < DEFAULT_BLK_SZ) in get_prng_bytes()
218 memset(buf, 0, nbytes); in get_prng_bytes()
245 memset(buf, 0, nbytes); in get_prng_bytes()

arc4.c
33 while (walk.nbytes > 0) { in crypto_arc4_crypt()
35 walk.nbytes); in crypto_arc4_crypt()

blkcipher.c
104 n = walk->nbytes - err; in blkcipher_walk_done()
128 walk->nbytes = 0; in blkcipher_walk_done()
169 walk->nbytes = bsize; in blkcipher_next_slow()
180 memcpy(tmp, walk->src.virt.addr, walk->nbytes); in blkcipher_next_copy()
251 walk->nbytes = n; in blkcipher_walk_next()
323 walk->nbytes = walk->total; in blkcipher_walk_first()
423 return alg->encrypt(&desc, req->dst, req->src, req->nbytes); in async_encrypt()
436 return alg->decrypt(&desc, req->dst, req->src, req->nbytes); in async_decrypt()

hmac.c
106 const u8 *data, unsigned int nbytes) in hmac_update() argument
110 return crypto_shash_update(desc, data, nbytes); in hmac_update()
127 unsigned int nbytes, u8 *out) in hmac_finup() argument
136 return crypto_shash_finup(desc, data, nbytes, out) ?: in hmac_finup()

ablkcipher.c
105 n = walk->nbytes - err; in ablkcipher_walk_done()
129 walk->nbytes = 0; in ablkcipher_walk_done()
168 walk->nbytes = bsize; in ablkcipher_next_slow()
243 walk->nbytes = n; in ablkcipher_walk_next()
269 walk->nbytes = walk->total; in ablkcipher_walk_first()

crypto_null.c
83 while (walk.nbytes) { in null_skcipher_crypt()
86 walk.nbytes); in null_skcipher_crypt()

lrw.c
170 while (w.nbytes) { in xor_tweak()
171 unsigned int avail = w.nbytes; in xor_tweak()
186 if (second_pass && w.nbytes == w.total) { in xor_tweak()

algapi.c
1055 void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret, in crypto_stats_ablkcipher_encrypt() argument
1062 atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen); in crypto_stats_ablkcipher_encrypt()
1068 void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret, in crypto_stats_ablkcipher_decrypt() argument
1075 atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen); in crypto_stats_ablkcipher_decrypt()
1177 void crypto_stats_ahash_update(unsigned int nbytes, int ret, in crypto_stats_ahash_update() argument
1183 atomic64_add(nbytes, &alg->stats.hash.hash_tlen); in crypto_stats_ahash_update()
1188 void crypto_stats_ahash_final(unsigned int nbytes, int ret, in crypto_stats_ahash_final() argument
1195 atomic64_add(nbytes, &alg->stats.hash.hash_tlen); in crypto_stats_ahash_final()

xts.c
104 while (w.nbytes) { in xor_tweak()
105 unsigned int avail = w.nbytes; in xor_tweak()
114 w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) { in xor_tweak()

cbc.c
42 while (walk.nbytes) { in crypto_cbc_decrypt()
