
Searched refs:req (Results 1 – 25 of 53) sorted by relevance


/crypto/
chacha20poly1305.c
42 struct ahash_request req; /* must be last member */ member
48 struct skcipher_request req; /* must be last member */ member
70 static inline void async_done_continue(struct aead_request *req, int err, in async_done_continue() argument
74 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in async_done_continue()
77 err = cont(req); in async_done_continue()
81 aead_request_complete(req, err); in async_done_continue()
84 static void chacha_iv(u8 *iv, struct aead_request *req, u32 icb) in chacha_iv() argument
86 struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); in chacha_iv()
91 memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv, in chacha_iv()
95 static int poly_verify_tag(struct aead_request *req) in poly_verify_tag() argument
[all …]
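
The chacha20poly1305.c matches above all receive an aead_request built by some caller. For orientation, a minimal caller-side sketch of that AEAD request flow follows; the algorithm string "rfc7539(chacha20,poly1305)", the example_aead_encrypt() name and the in-place buffer layout are illustrative assumptions, not code from this file, and error handling is abbreviated.

#include <crypto/aead.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical caller-side sketch: encrypt in place, with the associated
 * data at the start of buf and room for the tag after the plaintext.
 */
static int example_aead_encrypt(u8 *buf, unsigned int assoclen,
				unsigned int cryptlen, u8 *iv,
				const u8 *key, unsigned int keylen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("rfc7539(chacha20,poly1305)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	/* one in-place scatterlist: AAD | plaintext | space for the tag */
	sg_init_one(&sg, buf, assoclen + cryptlen + crypto_aead_authsize(tfm));
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, &sg, &sg, cryptlen, iv);
	aead_request_set_ad(req, assoclen);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_tfm:
	crypto_free_aead(tfm);
	return err;
}
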
seqiv.c
21 static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err) in seqiv_aead_encrypt_complete2() argument
23 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt_complete2()
32 geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt_complete2()
33 memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv)); in seqiv_aead_encrypt_complete2()
42 struct aead_request *req = base->data; in seqiv_aead_encrypt_complete() local
44 seqiv_aead_encrypt_complete2(req, err); in seqiv_aead_encrypt_complete()
45 aead_request_complete(req, err); in seqiv_aead_encrypt_complete()
48 static int seqiv_aead_encrypt(struct aead_request *req) in seqiv_aead_encrypt() argument
50 struct crypto_aead *geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt()
52 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt()
[all …]
gcm.c
60 int (*complete)(struct aead_request *req, u32 flags);
82 static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
85 struct aead_request *req) in crypto_gcm_reqctx() argument
87 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); in crypto_gcm_reqctx()
89 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); in crypto_gcm_reqctx()
105 struct skcipher_request req; in crypto_gcm_setkey() member
123 skcipher_request_set_tfm(&data->req, ctr); in crypto_gcm_setkey()
124 skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | in crypto_gcm_setkey()
128 skcipher_request_set_crypt(&data->req, data->sg, data->sg, in crypto_gcm_setkey()
131 err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), in crypto_gcm_setkey()
[all …]
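
The gcm.c setkey lines above use the standard pattern of filling in a skcipher_request and blocking on it with crypto_wait_req(). A self-contained sketch of the same pattern for a standalone cipher is given below; "ctr(aes)" and the example_skcipher_encrypt() helper are assumptions made purely for illustration.

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Hypothetical caller-side sketch: encrypt a linear buffer in place. */
static int example_skcipher_encrypt(u8 *buf, unsigned int len, u8 *iv,
				    const u8 *key, unsigned int keylen)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					   CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* crypto_wait_req() turns -EINPROGRESS/-EBUSY into a blocking wait */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
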
cts.c
65 static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req) in crypto_cts_reqctx_space() argument
67 struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req); in crypto_cts_reqctx_space()
68 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_cts_reqctx_space()
90 struct skcipher_request *req = areq->data; in cts_cbc_crypt_done() local
95 skcipher_request_complete(req, err); in cts_cbc_crypt_done()
98 static int cts_cbc_encrypt(struct skcipher_request *req) in cts_cbc_encrypt() argument
100 struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req); in cts_cbc_encrypt()
101 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in cts_cbc_encrypt()
110 lastn = req->cryptlen - offset; in cts_cbc_encrypt()
112 sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize); in cts_cbc_encrypt()
[all …]
echainiv.c
26 static int echainiv_encrypt(struct aead_request *req) in echainiv_encrypt() argument
28 struct crypto_aead *geniv = crypto_aead_reqtfm(req); in echainiv_encrypt()
30 struct aead_request *subreq = aead_request_ctx(req); in echainiv_encrypt()
37 if (req->cryptlen < ivsize) in echainiv_encrypt()
42 info = req->iv; in echainiv_encrypt()
44 if (req->src != req->dst) { in echainiv_encrypt()
48 skcipher_request_set_callback(nreq, req->base.flags, in echainiv_encrypt()
50 skcipher_request_set_crypt(nreq, req->src, req->dst, in echainiv_encrypt()
51 req->assoclen + req->cryptlen, in echainiv_encrypt()
59 aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
[all …]
rsa-pkcs1pad.c
174 static int pkcs1pad_encrypt_sign_complete(struct akcipher_request *req, int err) in pkcs1pad_encrypt_sign_complete() argument
176 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); in pkcs1pad_encrypt_sign_complete()
178 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt_sign_complete()
198 sg_copy_to_buffer(req->dst, sg_nents_for_len(req->dst, len), in pkcs1pad_encrypt_sign_complete()
200 sg_copy_from_buffer(req->dst, in pkcs1pad_encrypt_sign_complete()
201 sg_nents_for_len(req->dst, ctx->key_size), in pkcs1pad_encrypt_sign_complete()
206 req->dst_len = ctx->key_size; in pkcs1pad_encrypt_sign_complete()
216 struct akcipher_request *req = child_async_req->data; in pkcs1pad_encrypt_sign_complete_cb() local
221 err = pkcs1pad_encrypt_sign_complete(req, err); in pkcs1pad_encrypt_sign_complete_cb()
224 akcipher_request_complete(req, err); in pkcs1pad_encrypt_sign_complete_cb()
[all …]
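
rsa-pkcs1pad.c layers its padding logic on top of an akcipher_request to the underlying RSA transform. The following is a rough, hedged sketch of how a caller might drive such a request; the algorithm string, the example_akcipher_encrypt() name and the key handling are illustrative assumptions, with the key expected in whatever encoding the rsa driver accepts.

#include <crypto/akcipher.h>
#include <linux/scatterlist.h>

/* Hypothetical example: encrypt msg_len bytes with a preloaded public key. */
static int example_akcipher_encrypt(const u8 *pub_key, unsigned int key_len,
				    u8 *msg, unsigned int msg_len,
				    u8 *out, unsigned int out_len)
{
	struct crypto_akcipher *tfm;
	struct akcipher_request *req;
	struct scatterlist src, dst;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_akcipher("pkcs1pad(rsa,sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_akcipher_set_pub_key(tfm, pub_key, key_len);
	if (err)
		goto out_tfm;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&src, msg, msg_len);
	sg_init_one(&dst, out, out_len);
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	akcipher_request_set_crypt(req, &src, &dst, msg_len, out_len);

	err = crypto_wait_req(crypto_akcipher_encrypt(req), &wait);

	akcipher_request_free(req);
out_tfm:
	crypto_free_akcipher(tfm);
	return err;
}
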
authenc.c
40 static void authenc_request_complete(struct aead_request *req, int err) in authenc_request_complete() argument
43 aead_request_complete(req, err); in authenc_request_complete()
114 struct aead_request *req = areq->data; in authenc_geniv_ahash_done() local
115 struct crypto_aead *authenc = crypto_aead_reqtfm(req); in authenc_geniv_ahash_done()
118 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); in authenc_geniv_ahash_done()
124 scatterwalk_map_and_copy(ahreq->result, req->dst, in authenc_geniv_ahash_done()
125 req->assoclen + req->cryptlen, in authenc_geniv_ahash_done()
129 aead_request_complete(req, err); in authenc_geniv_ahash_done()
132 static int crypto_authenc_genicv(struct aead_request *req, unsigned int flags) in crypto_authenc_genicv() argument
134 struct crypto_aead *authenc = crypto_aead_reqtfm(req); in crypto_authenc_genicv()
[all …]
xts.c
82 static int xts_xor_tweak(struct skcipher_request *req, bool second_pass, in xts_xor_tweak() argument
85 struct xts_request_ctx *rctx = skcipher_request_ctx(req); in xts_xor_tweak()
86 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in xts_xor_tweak()
87 const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); in xts_xor_tweak()
94 req = &rctx->subreq; in xts_xor_tweak()
96 skcipher_request_set_tfm(req, tfm); in xts_xor_tweak()
98 err = skcipher_walk_virt(&w, req, false); in xts_xor_tweak()
133 static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc) in xts_xor_tweak_pre() argument
135 return xts_xor_tweak(req, false, enc); in xts_xor_tweak_pre()
138 static int xts_xor_tweak_post(struct skcipher_request *req, bool enc) in xts_xor_tweak_post() argument
[all …]
authencesn.c
43 static void authenc_esn_request_complete(struct aead_request *req, int err) in authenc_esn_request_complete() argument
46 aead_request_complete(req, err); in authenc_esn_request_complete()
86 static int crypto_authenc_esn_genicv_tail(struct aead_request *req, in crypto_authenc_esn_genicv_tail() argument
89 struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); in crypto_authenc_esn_genicv_tail()
91 struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); in crypto_authenc_esn_genicv_tail()
96 unsigned int assoclen = req->assoclen; in crypto_authenc_esn_genicv_tail()
97 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv_tail()
98 struct scatterlist *dst = req->dst; in crypto_authenc_esn_genicv_tail()
113 struct aead_request *req = areq->data; in authenc_esn_geniv_ahash_done() local
115 err = err ?: crypto_authenc_esn_genicv_tail(req, 0); in authenc_esn_geniv_ahash_done()
[all …]
scompress.c
120 static int scomp_acomp_comp_decomp(struct acomp_req *req, int dir) in scomp_acomp_comp_decomp() argument
122 struct crypto_acomp *tfm = crypto_acomp_reqtfm(req); in scomp_acomp_comp_decomp()
125 void **ctx = acomp_request_ctx(req); in scomp_acomp_comp_decomp()
130 if (!req->src || !req->slen || req->slen > SCOMP_SCRATCH_SIZE) in scomp_acomp_comp_decomp()
133 if (req->dst && !req->dlen) in scomp_acomp_comp_decomp()
136 if (!req->dlen || req->dlen > SCOMP_SCRATCH_SIZE) in scomp_acomp_comp_decomp()
137 req->dlen = SCOMP_SCRATCH_SIZE; in scomp_acomp_comp_decomp()
139 dlen = req->dlen; in scomp_acomp_comp_decomp()
144 scatterwalk_map_and_copy(scratch->src, req->src, 0, req->slen, 0); in scomp_acomp_comp_decomp()
146 ret = crypto_scomp_compress(scomp, scratch->src, req->slen, in scomp_acomp_comp_decomp()
[all …]
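
scompress.c adapts synchronous compressors to the acomp_req interface matched above. A hedged sketch of a caller submitting an acomp_req follows; "deflate", the buffer handling and the example_acomp_compress() name are assumptions for illustration only.

#include <crypto/acompress.h>
#include <linux/scatterlist.h>

/* Hypothetical example: one-shot compression of a linear buffer. */
static int example_acomp_compress(const u8 *in, unsigned int in_len,
				  u8 *out, unsigned int *out_len)
{
	struct crypto_acomp *tfm;
	struct acomp_req *req;
	struct scatterlist src, dst;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_acomp("deflate", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = acomp_request_alloc(tfm);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&src, in, in_len);
	sg_init_one(&dst, out, *out_len);
	acomp_request_set_params(req, &src, &dst, in_len, *out_len);
	acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				   crypto_req_done, &wait);

	err = crypto_wait_req(crypto_acomp_compress(req), &wait);
	if (!err)
		*out_len = req->dlen;	/* bytes actually produced */

	acomp_request_free(req);
out_tfm:
	crypto_free_acomp(tfm);
	return err;
}
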
cryptd.c
76 struct skcipher_request req; member
165 struct crypto_async_request *req, *backlog; in cryptd_queue_worker() local
173 req = crypto_dequeue_request(&cpu_queue->queue); in cryptd_queue_worker()
176 if (!req) in cryptd_queue_worker()
181 req->complete(req, 0); in cryptd_queue_worker()
240 static void cryptd_skcipher_complete(struct skcipher_request *req, int err) in cryptd_skcipher_complete() argument
242 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in cryptd_skcipher_complete()
244 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); in cryptd_skcipher_complete()
248 rctx->complete(&req->base, err); in cryptd_skcipher_complete()
258 struct skcipher_request *req = skcipher_request_cast(base); in cryptd_skcipher_encrypt() local
[all …]
ahash.c
119 int crypto_hash_walk_first(struct ahash_request *req, in crypto_hash_walk_first() argument
122 walk->total = req->nbytes; in crypto_hash_walk_first()
129 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); in crypto_hash_walk_first()
130 walk->sg = req->src; in crypto_hash_walk_first()
131 walk->flags = req->base.flags; in crypto_hash_walk_first()
199 static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt) in ahash_save_req() argument
201 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in ahash_save_req()
207 (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in ahash_save_req()
237 priv->result = req->result; in ahash_save_req()
238 priv->complete = req->base.complete; in ahash_save_req()
[all …]
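
ahash.c walks the scatterlist attached to an ahash_request. For context, here is a minimal caller-side sketch of building and submitting such a request for a one-shot digest; "sha256" and the example_ahash_digest() name are illustrative assumptions.

#include <crypto/hash.h>
#include <linux/scatterlist.h>

/* Hypothetical caller-side sketch: one-shot digest of a linear buffer. */
static int example_ahash_digest(const void *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* out must have room for crypto_ahash_digestsize(tfm) bytes */
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
out_tfm:
	crypto_free_ahash(tfm);
	return err;
}
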
ccm.c
62 struct aead_request *req) in crypto_ccm_reqctx() argument
64 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); in crypto_ccm_reqctx()
66 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); in crypto_ccm_reqctx()
127 static int format_input(u8 *info, struct aead_request *req, in format_input() argument
130 struct crypto_aead *aead = crypto_aead_reqtfm(req); in format_input()
131 unsigned int lp = req->iv[0]; in format_input()
137 memcpy(info, req->iv, 16); in format_input()
143 if (req->assoclen) in format_input()
168 static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, in crypto_ccm_auth() argument
171 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); in crypto_ccm_auth()
[all …]
keywrap.c
124 static int crypto_kw_decrypt(struct skcipher_request *req) in crypto_kw_decrypt() argument
126 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_kw_decrypt()
130 u64 t = 6 * ((req->cryptlen) >> 3); in crypto_kw_decrypt()
138 if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE) in crypto_kw_decrypt()
142 memcpy(&block.A, req->iv, SEMIBSIZE); in crypto_kw_decrypt()
149 src = req->src; in crypto_kw_decrypt()
150 dst = req->dst; in crypto_kw_decrypt()
154 unsigned int nbytes = req->cryptlen; in crypto_kw_decrypt()
180 src = req->dst; in crypto_kw_decrypt()
181 dst = req->dst; in crypto_kw_decrypt()
[all …]
lrw.c
140 static int lrw_xor_tweak(struct skcipher_request *req, bool second_pass) in lrw_xor_tweak() argument
143 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in lrw_xor_tweak()
145 struct lrw_request_ctx *rctx = skcipher_request_ctx(req); in lrw_xor_tweak()
153 req = &rctx->subreq; in lrw_xor_tweak()
155 skcipher_request_set_tfm(req, tfm); in lrw_xor_tweak()
158 err = skcipher_walk_virt(&w, req, false); in lrw_xor_tweak()
198 static int lrw_xor_tweak_pre(struct skcipher_request *req) in lrw_xor_tweak_pre() argument
200 return lrw_xor_tweak(req, false); in lrw_xor_tweak_pre()
203 static int lrw_xor_tweak_post(struct skcipher_request *req) in lrw_xor_tweak_post() argument
205 return lrw_xor_tweak(req, true); in lrw_xor_tweak_post()
[all …]
essiv.c
136 struct skcipher_request *req = areq->data; in essiv_skcipher_done() local
138 skcipher_request_complete(req, err); in essiv_skcipher_done()
141 static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc) in essiv_skcipher_crypt() argument
143 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in essiv_skcipher_crypt()
145 struct skcipher_request *subreq = skcipher_request_ctx(req); in essiv_skcipher_crypt()
147 crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv); in essiv_skcipher_crypt()
150 skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in essiv_skcipher_crypt()
151 req->iv); in essiv_skcipher_crypt()
152 skcipher_request_set_callback(subreq, skcipher_request_flags(req), in essiv_skcipher_crypt()
153 essiv_skcipher_done, req); in essiv_skcipher_crypt()
[all …]
aegis128-core.c
388 static int crypto_aegis128_encrypt_generic(struct aead_request *req) in crypto_aegis128_encrypt_generic() argument
390 struct crypto_aead *tfm = crypto_aead_reqtfm(req); in crypto_aegis128_encrypt_generic()
394 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_encrypt_generic()
398 skcipher_walk_aead_encrypt(&walk, req, false); in crypto_aegis128_encrypt_generic()
399 crypto_aegis128_init(&state, &ctx->key, req->iv); in crypto_aegis128_encrypt_generic()
400 crypto_aegis128_process_ad(&state, req->src, req->assoclen, false); in crypto_aegis128_encrypt_generic()
403 crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); in crypto_aegis128_encrypt_generic()
405 scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen, in crypto_aegis128_encrypt_generic()
410 static int crypto_aegis128_decrypt_generic(struct aead_request *req) in crypto_aegis128_decrypt_generic() argument
413 struct crypto_aead *tfm = crypto_aead_reqtfm(req); in crypto_aegis128_decrypt_generic()
[all …]
adiantum.c
126 struct skcipher_request req; /* must be last */ in adiantum_setkey() member
148 skcipher_request_set_tfm(&data->req, tctx->streamcipher); in adiantum_setkey()
149 skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | in adiantum_setkey()
152 skcipher_request_set_crypt(&data->req, &data->sg, &data->sg, in adiantum_setkey()
154 err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), &data->wait); in adiantum_setkey()
219 static void adiantum_hash_header(struct skcipher_request *req) in adiantum_hash_header() argument
221 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in adiantum_hash_header()
223 struct adiantum_request_ctx *rctx = skcipher_request_ctx(req); in adiantum_hash_header()
224 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_hash_header()
240 poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv, in adiantum_hash_header()
[all …]
pcrypt.c
61 struct aead_request *req = pcrypt_request_ctx(preq); in pcrypt_aead_serial() local
63 aead_request_complete(req->base.data, padata->info); in pcrypt_aead_serial()
68 struct aead_request *req = areq->data; in pcrypt_aead_done() local
69 struct pcrypt_request *preq = aead_request_ctx(req); in pcrypt_aead_done()
80 struct aead_request *req = pcrypt_request_ctx(preq); in pcrypt_aead_enc() local
83 ret = crypto_aead_encrypt(req); in pcrypt_aead_enc()
92 static int pcrypt_aead_encrypt(struct aead_request *req) in pcrypt_aead_encrypt() argument
95 struct pcrypt_request *preq = aead_request_ctx(req); in pcrypt_aead_encrypt()
98 struct crypto_aead *aead = crypto_aead_reqtfm(req); in pcrypt_aead_encrypt()
100 u32 flags = aead_request_flags(req); in pcrypt_aead_encrypt()
[all …]
crypto_engine.c
26 struct crypto_async_request *req, int err) in crypto_finalize_request() argument
40 if (engine->cur_req == req) { in crypto_finalize_request()
48 enginectx = crypto_tfm_ctx(req->tfm); in crypto_finalize_request()
51 ret = enginectx->op.unprepare_request(engine, req); in crypto_finalize_request()
57 crypto_request_complete(req, err); in crypto_finalize_request()
258 struct crypto_async_request *req, in crypto_transfer_request() argument
271 ret = crypto_enqueue_request(&engine->queue, req); in crypto_transfer_request()
287 struct crypto_async_request *req) in crypto_transfer_request_to_engine() argument
289 return crypto_transfer_request(engine, req, true); in crypto_transfer_request_to_engine()
299 struct aead_request *req) in crypto_transfer_aead_request_to_engine() argument
[all …]
ecrdsa.c
71 static int ecrdsa_verify(struct akcipher_request *req) in ecrdsa_verify() argument
73 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); in ecrdsa_verify()
77 unsigned int ndigits = req->dst_len / sizeof(u64); in ecrdsa_verify()
94 !req->src || in ecrdsa_verify()
96 req->dst_len != ctx->digest_len || in ecrdsa_verify()
97 req->dst_len != ctx->curve->g.ndigits * sizeof(u64) || in ecrdsa_verify()
99 req->dst_len * 2 != req->src_len || in ecrdsa_verify()
100 WARN_ON(req->src_len > sizeof(sig)) || in ecrdsa_verify()
101 WARN_ON(req->dst_len > sizeof(digest))) in ecrdsa_verify()
104 sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, req->src_len), in ecrdsa_verify()
[all …]
hctr2.c
170 static int hctr2_hash_tweak(struct skcipher_request *req) in hctr2_hash_tweak() argument
172 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in hctr2_hash_tweak()
174 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); in hctr2_hash_tweak()
177 bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE; in hctr2_hash_tweak()
183 err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE); in hctr2_hash_tweak()
192 static int hctr2_hash_message(struct skcipher_request *req, in hctr2_hash_message() argument
197 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); in hctr2_hash_message()
199 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in hctr2_hash_message()
229 static int hctr2_finish(struct skcipher_request *req) in hctr2_finish() argument
231 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in hctr2_finish()
[all …]
chacha_generic.c
15 static int chacha_stream_xor(struct skcipher_request *req, in chacha_stream_xor() argument
22 err = skcipher_walk_virt(&walk, req, false); in chacha_stream_xor()
40 static int crypto_chacha_crypt(struct skcipher_request *req) in crypto_chacha_crypt() argument
42 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_chacha_crypt()
45 return chacha_stream_xor(req, ctx, req->iv); in crypto_chacha_crypt()
48 static int crypto_xchacha_crypt(struct skcipher_request *req) in crypto_xchacha_crypt() argument
50 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_xchacha_crypt()
57 chacha_init_generic(state, ctx->key, req->iv); in crypto_xchacha_crypt()
62 memcpy(&real_iv[0], req->iv + 24, 8); /* stream position */ in crypto_xchacha_crypt()
63 memcpy(&real_iv[8], req->iv + 16, 8); /* remaining 64 nonce bits */ in crypto_xchacha_crypt()
[all …]
shash.c
257 static int shash_async_init(struct ahash_request *req) in shash_async_init() argument
259 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); in shash_async_init()
260 struct shash_desc *desc = ahash_request_ctx(req); in shash_async_init()
267 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) in shash_ahash_update() argument
272 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; in shash_ahash_update()
280 static int shash_async_update(struct ahash_request *req) in shash_async_update() argument
282 return shash_ahash_update(req, ahash_request_ctx(req)); in shash_async_update()
285 static int shash_async_final(struct ahash_request *req) in shash_async_final() argument
287 return crypto_shash_final(ahash_request_ctx(req), req->result); in shash_async_final()
290 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) in shash_ahash_finup() argument
[all …]
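
shash.c implements the synchronous hash interface that the ahash wrappers above (shash_async_init() and friends) delegate to. A short sketch of using that interface directly with an on-stack descriptor follows; "sha256" and the example_shash_digest() name are assumptions for illustration.

#include <crypto/hash.h>

/* Hypothetical sketch: synchronous digest with an on-stack descriptor. */
static int example_shash_digest(const void *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}
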
pcbc.c
20 static int crypto_pcbc_encrypt_segment(struct skcipher_request *req, in crypto_pcbc_encrypt_segment() argument
42 static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req, in crypto_pcbc_encrypt_inplace() argument
64 static int crypto_pcbc_encrypt(struct skcipher_request *req) in crypto_pcbc_encrypt() argument
66 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_pcbc_encrypt()
72 err = skcipher_walk_virt(&walk, req, false); in crypto_pcbc_encrypt()
76 nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
79 nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
87 static int crypto_pcbc_decrypt_segment(struct skcipher_request *req, in crypto_pcbc_decrypt_segment() argument
109 static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req, in crypto_pcbc_decrypt_inplace() argument
131 static int crypto_pcbc_decrypt(struct skcipher_request *req) in crypto_pcbc_decrypt() argument
[all …]
