
Searched refs:req (Results 1 – 25 of 50) sorted by relevance


/crypto/
chacha20poly1305.c
44 struct ahash_request req; /* must be last member */ member
50 struct skcipher_request req; /* must be last member */ member
72 static inline void async_done_continue(struct aead_request *req, int err, in async_done_continue() argument
76 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in async_done_continue()
79 err = cont(req); in async_done_continue()
83 aead_request_complete(req, err); in async_done_continue()
86 static void chacha_iv(u8 *iv, struct aead_request *req, u32 icb) in chacha_iv() argument
88 struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); in chacha_iv()
93 memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv, in chacha_iv()
97 static int poly_verify_tag(struct aead_request *req) in poly_verify_tag() argument
[all …]
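
The chacha20poly1305.c hits above are from the kernel's rfc7539 AEAD template, which finishes its callers' requests with aead_request_complete(). For orientation, the caller side of such a request usually looks like the following minimal sketch; the algorithm name, buffer layout, and synchronous crypto_wait_req() handling are illustrative assumptions, not code from this file.

#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

/* Minimal sketch (not from chacha20poly1305.c): encrypt one in-place
 * buffer with an AEAD transform and wait synchronously for completion.
 * Buffer layout and algorithm name are illustrative assumptions. */
static int example_aead_encrypt(const u8 *key, unsigned int keylen,
				u8 *buf, unsigned int assoclen,
				unsigned int ptlen, u8 *iv)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("rfc7539(chacha20,poly1305)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf: assoclen bytes of AAD, then ptlen bytes of plaintext, with
	 * room for the authentication tag appended on encryption. */
	sg_init_one(&sg, buf, assoclen + ptlen + crypto_aead_authsize(tfm));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				  CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
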
seqiv.c
23 static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err) in seqiv_aead_encrypt_complete2() argument
25 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt_complete2()
34 geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt_complete2()
35 memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv)); in seqiv_aead_encrypt_complete2()
44 struct aead_request *req = base->data; in seqiv_aead_encrypt_complete() local
46 seqiv_aead_encrypt_complete2(req, err); in seqiv_aead_encrypt_complete()
47 aead_request_complete(req, err); in seqiv_aead_encrypt_complete()
50 static int seqiv_aead_encrypt(struct aead_request *req) in seqiv_aead_encrypt() argument
52 struct crypto_aead *geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt()
54 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt()
[all …]
gcm.c
61 int (*complete)(struct aead_request *req, u32 flags);
83 static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
86 struct aead_request *req) in crypto_gcm_reqctx() argument
88 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); in crypto_gcm_reqctx()
90 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); in crypto_gcm_reqctx()
106 struct skcipher_request req; in crypto_gcm_setkey() member
126 skcipher_request_set_tfm(&data->req, ctr); in crypto_gcm_setkey()
127 skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | in crypto_gcm_setkey()
131 skcipher_request_set_crypt(&data->req, data->sg, data->sg, in crypto_gcm_setkey()
134 err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), in crypto_gcm_setkey()
[all …]
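
The gcm.c hits above show a common setkey-time idiom: a one-shot skcipher_request embedded at the end of a temporary allocation ("must be last member") and driven synchronously through crypto_wait_req(). Reduced to its shape, with illustrative names rather than the gcm.c originals:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>

/* Sketch of the embedded one-shot request pattern seen in gcm.c's
 * setkey path.  The struct layout and function name are illustrative. */
static int example_derive_block(struct crypto_skcipher *child,
				u8 *out, unsigned int blocksize, u8 *iv)
{
	struct {
		struct crypto_wait wait;
		struct scatterlist sg;
		struct skcipher_request req; /* must be last member */
	} *data;
	int err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(child),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	memset(out, 0, blocksize);		/* encrypt an all-zero block */
	sg_init_one(&data->sg, out, blocksize);

	skcipher_request_set_tfm(&data->req, child);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
				      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &data->wait);
	skcipher_request_set_crypt(&data->req, &data->sg, &data->sg,
				   blocksize, iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
			      &data->wait);

	kfree(data);
	return err;
}
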
echainiv.c
26 static int echainiv_encrypt(struct aead_request *req) in echainiv_encrypt() argument
28 struct crypto_aead *geniv = crypto_aead_reqtfm(req); in echainiv_encrypt()
30 struct aead_request *subreq = aead_request_ctx(req); in echainiv_encrypt()
37 if (req->cryptlen < ivsize) in echainiv_encrypt()
42 info = req->iv; in echainiv_encrypt()
44 if (req->src != req->dst) { in echainiv_encrypt()
48 skcipher_request_set_callback(nreq, req->base.flags, in echainiv_encrypt()
50 skcipher_request_set_crypt(nreq, req->src, req->dst, in echainiv_encrypt()
51 req->assoclen + req->cryptlen, in echainiv_encrypt()
59 aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
[all …]
rsa-pkcs1pad.c
173 static int pkcs1pad_encrypt_sign_complete(struct akcipher_request *req, int err) in pkcs1pad_encrypt_sign_complete() argument
175 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); in pkcs1pad_encrypt_sign_complete()
177 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt_sign_complete()
197 sg_copy_to_buffer(req->dst, sg_nents_for_len(req->dst, len), in pkcs1pad_encrypt_sign_complete()
199 sg_copy_from_buffer(req->dst, in pkcs1pad_encrypt_sign_complete()
200 sg_nents_for_len(req->dst, ctx->key_size), in pkcs1pad_encrypt_sign_complete()
205 req->dst_len = ctx->key_size; in pkcs1pad_encrypt_sign_complete()
215 struct akcipher_request *req = child_async_req->data; in pkcs1pad_encrypt_sign_complete_cb() local
221 async_req.data = req->base.data; in pkcs1pad_encrypt_sign_complete_cb()
222 async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req)); in pkcs1pad_encrypt_sign_complete_cb()
[all …]
cts.c
65 static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req) in crypto_cts_reqctx_space() argument
67 struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req); in crypto_cts_reqctx_space()
68 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_cts_reqctx_space()
94 struct skcipher_request *req = areq->data; in cts_cbc_crypt_done() local
99 skcipher_request_complete(req, err); in cts_cbc_crypt_done()
102 static int cts_cbc_encrypt(struct skcipher_request *req) in cts_cbc_encrypt() argument
104 struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req); in cts_cbc_encrypt()
105 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in cts_cbc_encrypt()
114 lastn = req->cryptlen - offset; in cts_cbc_encrypt()
116 sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize); in cts_cbc_encrypt()
[all …]
xts.c
87 static int xor_tweak(struct skcipher_request *req, bool second_pass, bool enc) in xor_tweak() argument
89 struct rctx *rctx = skcipher_request_ctx(req); in xor_tweak()
90 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in xor_tweak()
91 const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); in xor_tweak()
98 req = &rctx->subreq; in xor_tweak()
100 skcipher_request_set_tfm(req, tfm); in xor_tweak()
102 err = skcipher_walk_virt(&w, req, false); in xor_tweak()
137 static int xor_tweak_pre(struct skcipher_request *req, bool enc) in xor_tweak_pre() argument
139 return xor_tweak(req, false, enc); in xor_tweak_pre()
142 static int xor_tweak_post(struct skcipher_request *req, bool enc) in xor_tweak_post() argument
[all …]
authenc.c
40 static void authenc_request_complete(struct aead_request *req, int err) in authenc_request_complete() argument
43 aead_request_complete(req, err); in authenc_request_complete()
124 struct aead_request *req = areq->data; in authenc_geniv_ahash_done() local
125 struct crypto_aead *authenc = crypto_aead_reqtfm(req); in authenc_geniv_ahash_done()
128 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req); in authenc_geniv_ahash_done()
134 scatterwalk_map_and_copy(ahreq->result, req->dst, in authenc_geniv_ahash_done()
135 req->assoclen + req->cryptlen, in authenc_geniv_ahash_done()
139 aead_request_complete(req, err); in authenc_geniv_ahash_done()
142 static int crypto_authenc_genicv(struct aead_request *req, unsigned int flags) in crypto_authenc_genicv() argument
144 struct crypto_aead *authenc = crypto_aead_reqtfm(req); in crypto_authenc_genicv()
[all …]
authencesn.c
43 static void authenc_esn_request_complete(struct aead_request *req, int err) in authenc_esn_request_complete() argument
46 aead_request_complete(req, err); in authenc_esn_request_complete()
96 static int crypto_authenc_esn_genicv_tail(struct aead_request *req, in crypto_authenc_esn_genicv_tail() argument
99 struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req); in crypto_authenc_esn_genicv_tail()
101 struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req); in crypto_authenc_esn_genicv_tail()
106 unsigned int assoclen = req->assoclen; in crypto_authenc_esn_genicv_tail()
107 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv_tail()
108 struct scatterlist *dst = req->dst; in crypto_authenc_esn_genicv_tail()
123 struct aead_request *req = areq->data; in authenc_esn_geniv_ahash_done() local
125 err = err ?: crypto_authenc_esn_genicv_tail(req, 0); in authenc_esn_geniv_ahash_done()
[all …]
ahash.c
129 int crypto_hash_walk_first(struct ahash_request *req, in crypto_hash_walk_first() argument
132 walk->total = req->nbytes; in crypto_hash_walk_first()
139 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); in crypto_hash_walk_first()
140 walk->sg = req->src; in crypto_hash_walk_first()
141 walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_hash_walk_first()
147 int crypto_ahash_walk_first(struct ahash_request *req, in crypto_ahash_walk_first() argument
150 walk->total = req->nbytes; in crypto_ahash_walk_first()
157 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); in crypto_ahash_walk_first()
158 walk->sg = req->src; in crypto_ahash_walk_first()
159 walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_ahash_walk_first()
[all …]
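
ahash.c above provides the scatterlist-walking machinery behind the asynchronous hash API. From a caller's point of view, a one-shot digest over a scatterlist commonly looks like this sketch (the algorithm name and synchronous waiting are illustrative assumptions):

#include <crypto/hash.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/* Sketch: compute a digest over one buffer via the async hash API,
 * waiting synchronously for completion.  The buffer must not live on
 * the stack, since it is mapped through a scatterlist. */
static int example_ahash_digest(const u8 *data_buf, unsigned int len,
				u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, data_buf, len);

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				   CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}
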
scompress.c
120 static int scomp_acomp_comp_decomp(struct acomp_req *req, int dir) in scomp_acomp_comp_decomp() argument
122 struct crypto_acomp *tfm = crypto_acomp_reqtfm(req); in scomp_acomp_comp_decomp()
125 void **ctx = acomp_request_ctx(req); in scomp_acomp_comp_decomp()
129 if (!req->src || !req->slen || req->slen > SCOMP_SCRATCH_SIZE) in scomp_acomp_comp_decomp()
132 if (req->dst && !req->dlen) in scomp_acomp_comp_decomp()
135 if (!req->dlen || req->dlen > SCOMP_SCRATCH_SIZE) in scomp_acomp_comp_decomp()
136 req->dlen = SCOMP_SCRATCH_SIZE; in scomp_acomp_comp_decomp()
141 scatterwalk_map_and_copy(scratch->src, req->src, 0, req->slen, 0); in scomp_acomp_comp_decomp()
143 ret = crypto_scomp_compress(scomp, scratch->src, req->slen, in scomp_acomp_comp_decomp()
144 scratch->dst, &req->dlen, *ctx); in scomp_acomp_comp_decomp()
[all …]
cryptd.c
160 struct crypto_async_request *req, *backlog; in cryptd_queue_worker() local
172 req = crypto_dequeue_request(&cpu_queue->queue); in cryptd_queue_worker()
176 if (!req) in cryptd_queue_worker()
181 req->complete(req, 0); in cryptd_queue_worker()
268 static void cryptd_skcipher_complete(struct skcipher_request *req, int err) in cryptd_skcipher_complete() argument
270 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in cryptd_skcipher_complete()
272 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); in cryptd_skcipher_complete()
276 rctx->complete(&req->base, err); in cryptd_skcipher_complete()
286 struct skcipher_request *req = skcipher_request_cast(base); in cryptd_skcipher_encrypt() local
287 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); in cryptd_skcipher_encrypt()
[all …]
ccm.c
63 struct aead_request *req) in crypto_ccm_reqctx() argument
65 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); in crypto_ccm_reqctx()
67 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); in crypto_ccm_reqctx()
135 static int format_input(u8 *info, struct aead_request *req, in format_input() argument
138 struct crypto_aead *aead = crypto_aead_reqtfm(req); in format_input()
139 unsigned int lp = req->iv[0]; in format_input()
145 memcpy(info, req->iv, 16); in format_input()
151 if (req->assoclen) in format_input()
176 static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, in crypto_ccm_auth() argument
179 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); in crypto_ccm_auth()
[all …]
keywrap.c
123 static int crypto_kw_decrypt(struct skcipher_request *req) in crypto_kw_decrypt() argument
125 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_kw_decrypt()
129 u64 t = 6 * ((req->cryptlen) >> 3); in crypto_kw_decrypt()
137 if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE) in crypto_kw_decrypt()
141 memcpy(&block.A, req->iv, SEMIBSIZE); in crypto_kw_decrypt()
148 src = req->src; in crypto_kw_decrypt()
149 dst = req->dst; in crypto_kw_decrypt()
153 unsigned int nbytes = req->cryptlen; in crypto_kw_decrypt()
179 src = req->dst; in crypto_kw_decrypt()
180 dst = req->dst; in crypto_kw_decrypt()
[all …]
lrw.c
142 static int xor_tweak(struct skcipher_request *req, bool second_pass) in xor_tweak() argument
145 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in xor_tweak()
147 struct rctx *rctx = skcipher_request_ctx(req); in xor_tweak()
155 req = &rctx->subreq; in xor_tweak()
157 skcipher_request_set_tfm(req, tfm); in xor_tweak()
160 err = skcipher_walk_virt(&w, req, false); in xor_tweak()
199 static int xor_tweak_pre(struct skcipher_request *req) in xor_tweak_pre() argument
201 return xor_tweak(req, false); in xor_tweak_pre()
204 static int xor_tweak_post(struct skcipher_request *req) in xor_tweak_post() argument
206 return xor_tweak(req, true); in xor_tweak_post()
[all …]
crypto_engine.c
25 struct crypto_async_request *req, int err) in crypto_finalize_request() argument
33 if (engine->cur_req == req) in crypto_finalize_request()
38 enginectx = crypto_tfm_ctx(req->tfm); in crypto_finalize_request()
41 ret = enginectx->op.unprepare_request(engine, req); in crypto_finalize_request()
51 req->complete(req, err); in crypto_finalize_request()
182 struct crypto_async_request *req, in crypto_transfer_request() argument
195 ret = crypto_enqueue_request(&engine->queue, req); in crypto_transfer_request()
211 struct crypto_async_request *req) in crypto_transfer_request_to_engine() argument
213 return crypto_transfer_request(engine, req, true); in crypto_transfer_request_to_engine()
224 struct ablkcipher_request *req) in crypto_transfer_ablkcipher_request_to_engine() argument
[all …]
essiv.c
153 struct skcipher_request *req = areq->data; in essiv_skcipher_done() local
155 skcipher_request_complete(req, err); in essiv_skcipher_done()
158 static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc) in essiv_skcipher_crypt() argument
160 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in essiv_skcipher_crypt()
162 struct skcipher_request *subreq = skcipher_request_ctx(req); in essiv_skcipher_crypt()
164 crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv); in essiv_skcipher_crypt()
167 skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in essiv_skcipher_crypt()
168 req->iv); in essiv_skcipher_crypt()
169 skcipher_request_set_callback(subreq, skcipher_request_flags(req), in essiv_skcipher_crypt()
170 essiv_skcipher_done, req); in essiv_skcipher_crypt()
[all …]
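
essiv.c above (like echainiv.c earlier) follows the standard template pattern: the outer request's context holds a subrequest that is handed to a child transform, with a completion callback that finishes the outer request. Stripped to its essentials, and with a hypothetical context layout and names:

#include <crypto/internal/skcipher.h>

/* Sketch of delegating to a child skcipher via a subrequest stored in
 * the outer request's context, as essiv.c does.  The tfm context layout
 * and names are hypothetical; the algorithm's reqsize must be set large
 * enough to hold the child's request. */
struct example_tfm_ctx {
	struct crypto_skcipher *child;
};

static void example_subreq_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}

static int example_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct example_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	skcipher_request_set_tfm(subreq, tctx->child);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      example_subreq_done, req);

	return crypto_skcipher_encrypt(subreq);
}
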
pcrypt.c
54 struct aead_request *req = pcrypt_request_ctx(preq); in pcrypt_aead_serial() local
56 aead_request_complete(req->base.data, padata->info); in pcrypt_aead_serial()
61 struct aead_request *req = areq->data; in pcrypt_aead_done() local
62 struct pcrypt_request *preq = aead_request_ctx(req); in pcrypt_aead_done()
66 req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in pcrypt_aead_done()
74 struct aead_request *req = pcrypt_request_ctx(preq); in pcrypt_aead_enc() local
76 padata->info = crypto_aead_encrypt(req); in pcrypt_aead_enc()
84 static int pcrypt_aead_encrypt(struct aead_request *req) in pcrypt_aead_encrypt() argument
87 struct pcrypt_request *preq = aead_request_ctx(req); in pcrypt_aead_encrypt()
90 struct crypto_aead *aead = crypto_aead_reqtfm(req); in pcrypt_aead_encrypt()
[all …]
adiantum.c
126 struct skcipher_request req; /* must be last */ in adiantum_setkey() member
151 skcipher_request_set_tfm(&data->req, tctx->streamcipher); in adiantum_setkey()
152 skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | in adiantum_setkey()
155 skcipher_request_set_crypt(&data->req, &data->sg, &data->sg, in adiantum_setkey()
157 err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), &data->wait); in adiantum_setkey()
227 static void adiantum_hash_header(struct skcipher_request *req) in adiantum_hash_header() argument
229 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in adiantum_hash_header()
231 struct adiantum_request_ctx *rctx = skcipher_request_ctx(req); in adiantum_hash_header()
232 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_hash_header()
248 poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv, in adiantum_hash_header()
[all …]
skcipher.c
449 struct skcipher_request *req) in skcipher_walk_skcipher() argument
451 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in skcipher_walk_skcipher()
453 walk->total = req->cryptlen; in skcipher_walk_skcipher()
455 walk->iv = req->iv; in skcipher_walk_skcipher()
456 walk->oiv = req->iv; in skcipher_walk_skcipher()
461 scatterwalk_start(&walk->in, req->src); in skcipher_walk_skcipher()
462 scatterwalk_start(&walk->out, req->dst); in skcipher_walk_skcipher()
465 walk->flags |= req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? in skcipher_walk_skcipher()
477 struct skcipher_request *req, bool atomic) in skcipher_walk_virt() argument
481 might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP); in skcipher_walk_virt()
[all …]
ecrdsa.c
70 static int ecrdsa_verify(struct akcipher_request *req) in ecrdsa_verify() argument
72 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); in ecrdsa_verify()
76 unsigned int ndigits = req->dst_len / sizeof(u64); in ecrdsa_verify()
93 !req->src || in ecrdsa_verify()
95 req->dst_len != ctx->digest_len || in ecrdsa_verify()
96 req->dst_len != ctx->curve->g.ndigits * sizeof(u64) || in ecrdsa_verify()
98 req->dst_len * 2 != req->src_len || in ecrdsa_verify()
99 WARN_ON(req->src_len > sizeof(sig)) || in ecrdsa_verify()
100 WARN_ON(req->dst_len > sizeof(digest))) in ecrdsa_verify()
103 sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, req->src_len), in ecrdsa_verify()
[all …]
shash.c
230 static int shash_async_init(struct ahash_request *req) in shash_async_init() argument
232 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); in shash_async_init()
233 struct shash_desc *desc = ahash_request_ctx(req); in shash_async_init()
240 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) in shash_ahash_update() argument
245 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; in shash_ahash_update()
253 static int shash_async_update(struct ahash_request *req) in shash_async_update() argument
255 return shash_ahash_update(req, ahash_request_ctx(req)); in shash_async_update()
258 static int shash_async_final(struct ahash_request *req) in shash_async_final() argument
260 return crypto_shash_final(ahash_request_ctx(req), req->result); in shash_async_final()
263 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) in shash_ahash_finup() argument
[all …]
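
shash.c above adapts synchronous hashes so they can also be driven through the ahash request interface. When the data is already in a linear buffer, the synchronous shash API is the simpler entry point; a minimal sketch ("sha256" is an illustrative choice of algorithm):

#include <crypto/hash.h>
#include <linux/err.h>

/* Sketch: one-shot digest of a linear buffer via the synchronous shash
 * API.  Algorithm name and function name are illustrative. */
static int example_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}
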
tcrypt.c
139 static inline int do_one_aead_op(struct aead_request *req, int ret) in do_one_aead_op() argument
141 struct crypto_wait *wait = req->base.data; in do_one_aead_op()
149 struct aead_request *req; member
164 rc[i] = crypto_aead_encrypt(data[i].req); in do_mult_aead_op()
166 rc[i] = crypto_aead_decrypt(data[i].req); in do_mult_aead_op()
315 data[i].req = aead_request_alloc(tfm, GFP_KERNEL); in test_mb_aead_speed()
316 if (!data[i].req) { in test_mb_aead_speed()
320 aead_request_free(data[i].req); in test_mb_aead_speed()
327 aead_request_set_callback(data[i].req, in test_mb_aead_speed()
389 aead_request_set_ad(cur->req, aad_size); in test_mb_aead_speed()
[all …]
testmgr.c
1234 static int do_ahash_op(int (*op)(struct ahash_request *req), in do_ahash_op() argument
1235 struct ahash_request *req, in do_ahash_op()
1243 err = op(req); in do_ahash_op()
1274 struct ahash_request *req, in test_ahash_vec_cfg() argument
1278 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in test_ahash_vec_cfg()
1319 testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm)); in test_ahash_vec_cfg()
1325 ahash_request_set_callback(req, req_flags, crypto_req_done, in test_ahash_vec_cfg()
1327 ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize); in test_ahash_vec_cfg()
1328 err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd); in test_ahash_vec_cfg()
1347 ahash_request_set_callback(req, req_flags, crypto_req_done, &wait); in test_ahash_vec_cfg()
[all …]
pcbc.c
19 static int crypto_pcbc_encrypt_segment(struct skcipher_request *req, in crypto_pcbc_encrypt_segment() argument
41 static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req, in crypto_pcbc_encrypt_inplace() argument
63 static int crypto_pcbc_encrypt(struct skcipher_request *req) in crypto_pcbc_encrypt() argument
65 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); in crypto_pcbc_encrypt()
71 err = skcipher_walk_virt(&walk, req, false); in crypto_pcbc_encrypt()
75 nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
78 nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
86 static int crypto_pcbc_decrypt_segment(struct skcipher_request *req, in crypto_pcbc_decrypt_segment() argument
108 static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req, in crypto_pcbc_decrypt_inplace() argument
130 static int crypto_pcbc_decrypt(struct skcipher_request *req) in crypto_pcbc_decrypt() argument
[all …]
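
pcbc.c above is one of the smallest examples of the skcipher_walk loop that software block-cipher modes are built around: map a stretch of the request with skcipher_walk_virt(), process whole blocks, and report the unprocessed remainder to skcipher_walk_done(). The skeleton of that loop, with a placeholder in place of the real per-block cipher work:

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

/* Sketch of the skcipher_walk skeleton used by modes such as pcbc.c.
 * The per-block body is a placeholder (XOR with the IV); a real mode
 * would invoke the underlying block cipher here. */
static int example_mode_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int bsize = crypto_skcipher_blocksize(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int n = nbytes;

		while (n >= bsize) {
			/* Placeholder for the real per-block work. */
			crypto_xor_cpy(dst, src, walk.iv, bsize);

			src += bsize;
			dst += bsize;
			n -= bsize;
		}

		err = skcipher_walk_done(&walk, n);
	}

	return err;
}
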
