
Searched refs:flags (Results 1 – 25 of 38) sorted by relevance

/crypto/
crypto_engine.c
28 unsigned long flags; in crypto_finalize_request() local
39 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_finalize_request()
44 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_finalize_request()
75 unsigned long flags; in crypto_pump_requests() local
80 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
106 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
112 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
137 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
192 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
222 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
[all …]
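
The crypto_engine.c hits above all follow the interrupt-safe spinlock idiom: a local unsigned long flags captures the saved interrupt state while the engine's queue lock is held. A minimal sketch of that idiom with stand-in names (demo_engine and demo_touch_queue are illustrative, not the real crypto_engine API):

	#include <linux/spinlock.h>

	/* Stand-in for the engine object whose queue_lock appears in the hits above. */
	struct demo_engine {
		spinlock_t queue_lock;
	};

	static void demo_touch_queue(struct demo_engine *engine)
	{
		unsigned long flags;	/* saved IRQ state */

		spin_lock_irqsave(&engine->queue_lock, flags);
		/* ... inspect or modify the request queue ... */
		spin_unlock_irqrestore(&engine->queue_lock, flags);
	}
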
gcm.c
60 int (*complete)(struct aead_request *req, u32 flags);
205 unsigned int len, u32 flags) in gcm_hash_update() argument
210 ahash_request_set_callback(ahreq, flags, compl, req); in gcm_hash_update()
218 crypto_completion_t compl, u32 flags) in gcm_hash_remain() argument
220 return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags); in gcm_hash_remain()
223 static int gcm_hash_len(struct aead_request *req, u32 flags) in gcm_hash_len() argument
234 ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req); in gcm_hash_len()
241 static int gcm_hash_len_continue(struct aead_request *req, u32 flags) in gcm_hash_len_continue() argument
246 return gctx->complete(req, flags); in gcm_hash_len_continue()
264 static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags) in gcm_hash_crypt_remain_continue() argument
[all …]
aes_ti.c
23 unsigned long flags; in aesti_encrypt() local
29 local_irq_save(flags); in aesti_encrypt()
33 local_irq_restore(flags); in aesti_encrypt()
39 unsigned long flags; in aesti_decrypt() local
45 local_irq_save(flags); in aesti_decrypt()
49 local_irq_restore(flags); in aesti_decrypt()
skcipher.c
81 return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC; in skcipher_walk_gfp()
101 (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1); in skcipher_done_slow()
118 if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS | in skcipher_walk_done()
124 } else if (walk->flags & SKCIPHER_WALK_DIFF) { in skcipher_walk_done()
127 } else if (walk->flags & SKCIPHER_WALK_COPY) { in skcipher_walk_done()
131 } else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) { in skcipher_walk_done()
157 crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ? in skcipher_walk_done()
167 if (walk->flags & SKCIPHER_WALK_PHYS) in skcipher_walk_done()
227 bool phys = walk->flags & SKCIPHER_WALK_PHYS; in skcipher_next_slow()
281 walk->flags |= SKCIPHER_WALK_SLOW; in skcipher_next_slow()
[all …]
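
The first skcipher.c hit (line 81) shows another recurring use of flags in this directory: picking blocking or atomic memory allocation depending on whether the walk may sleep. A minimal sketch of that pattern, using a placeholder flag bit rather than the real SKCIPHER_WALK_SLEEP definition:

	#include <linux/gfp.h>
	#include <linux/types.h>

	#define DEMO_WALK_SLEEP 0x1	/* placeholder for the walk's "may sleep" bit */

	/* Blocking allocation when sleeping is allowed, atomic otherwise. */
	static gfp_t demo_walk_gfp(u32 flags)
	{
		return (flags & DEMO_WALK_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	}
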
algif_hash.c
132 int offset, size_t size, int flags) in hash_sendpage() argument
139 if (flags & MSG_SENDPAGE_NOTLAST) in hash_sendpage()
140 flags |= MSG_MORE; in hash_sendpage()
146 if (!(flags & MSG_MORE)) { in hash_sendpage()
155 if (!(flags & MSG_MORE)) { in hash_sendpage()
175 ctx->more = flags & MSG_MORE; in hash_sendpage()
184 int flags) in hash_recvmsg() argument
230 static int hash_accept(struct socket *sock, struct socket *newsock, int flags, in hash_accept() argument
340 int offset, size_t size, int flags) in hash_sendpage_nokey() argument
348 return hash_sendpage(sock, page, offset, size, flags); in hash_sendpage_nokey()
[all …]
algif_skcipher.c
51 size_t ignored, int flags) in _skcipher_recvmsg() argument
65 err = af_alg_wait_for_data(sk, flags, bs); in _skcipher_recvmsg()
77 err = af_alg_get_rsgl(sk, msg, flags, areq, ctx->used, &len); in _skcipher_recvmsg()
150 size_t ignored, int flags) in skcipher_recvmsg() argument
157 int err = _skcipher_recvmsg(sock, msg, ignored, flags); in skcipher_recvmsg()
250 int offset, size_t size, int flags) in skcipher_sendpage_nokey() argument
258 return af_alg_sendpage(sock, page, offset, size, flags); in skcipher_sendpage_nokey()
262 size_t ignored, int flags) in skcipher_recvmsg_nokey() argument
270 return skcipher_recvmsg(sock, msg, ignored, flags); in skcipher_recvmsg_nokey()
ansi_cprng.c
48 u32 flags; member
137 ctx->flags |= PRNG_NEED_RESET; in _get_more_prng_bytes()
192 if (ctx->flags & PRNG_NEED_RESET) in get_prng_bytes()
200 if (ctx->flags & PRNG_FIXED_SIZE) { in get_prng_bytes()
284 ctx->flags |= PRNG_NEED_RESET; in reset_prng_context()
314 ctx->flags &= ~PRNG_NEED_RESET; in reset_prng_context()
340 ctx->flags |= PRNG_NEED_RESET; in cprng_init()
379 if (prng->flags & PRNG_NEED_RESET) in cprng_reset()
chacha20poly1305.c
63 u32 flags; member
76 rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in async_done_continue()
141 skcipher_request_set_callback(&creq->req, rctx->flags, in chacha_decrypt()
181 ahash_request_set_callback(&preq->req, rctx->flags, in poly_tail()
211 ahash_request_set_callback(&preq->req, rctx->flags, in poly_cipherpad()
241 ahash_request_set_callback(&preq->req, rctx->flags, in poly_cipher()
270 ahash_request_set_callback(&preq->req, rctx->flags, in poly_adpad()
294 ahash_request_set_callback(&preq->req, rctx->flags, in poly_ad()
320 ahash_request_set_callback(&preq->req, rctx->flags, in poly_setkey()
344 ahash_request_set_callback(&preq->req, rctx->flags, in poly_init()
[all …]
algif_aead.c
89 size_t ignored, int flags) in _aead_recvmsg() argument
110 err = af_alg_wait_for_data(sk, flags, 0); in _aead_recvmsg()
159 err = af_alg_get_rsgl(sk, msg, flags, areq, outlen, &usedpages); in _aead_recvmsg()
324 size_t ignored, int flags) in aead_recvmsg() argument
331 int err = _aead_recvmsg(sock, msg, ignored, flags); in aead_recvmsg()
424 int offset, size_t size, int flags) in aead_sendpage_nokey() argument
432 return af_alg_sendpage(sock, page, offset, size, flags); in aead_sendpage_nokey()
436 size_t ignored, int flags) in aead_recvmsg_nokey() argument
444 return aead_recvmsg(sock, msg, ignored, flags); in aead_recvmsg_nokey()
drbg.c
120 .flags = DRBG_CTR | DRBG_STRENGTH128,
126 .flags = DRBG_CTR | DRBG_STRENGTH192,
132 .flags = DRBG_CTR | DRBG_STRENGTH256,
141 .flags = DRBG_HASH | DRBG_STRENGTH128,
147 .flags = DRBG_HASH | DRBG_STRENGTH256,
153 .flags = DRBG_HASH | DRBG_STRENGTH256,
159 .flags = DRBG_HASH | DRBG_STRENGTH256,
168 .flags = DRBG_HMAC | DRBG_STRENGTH128,
174 .flags = DRBG_HMAC | DRBG_STRENGTH256,
180 .flags = DRBG_HMAC | DRBG_STRENGTH256,
[all …]
seqiv.c
72 skcipher_request_set_callback(nreq, req->base.flags, in seqiv_aead_encrypt()
85 info = kmemdup(req->iv, ivsize, req->base.flags & in seqiv_aead_encrypt()
95 aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_encrypt()
126 aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_decrypt()
ahash.c
31 u32 flags; member
99 crypto_yield(walk->flags); in crypto_hash_walk_done()
131 walk->flags = req->base.flags; in crypto_hash_walk_first()
207 (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in ahash_save_req()
240 priv->flags = req->base.flags; in ahash_save_req()
267 ahash_request_set_callback(req, priv->flags, in ahash_restore_req()
422 areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in ahash_def_finup_done1()
cts.c
121 skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_encrypt()
159 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
170 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
214 skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_decrypt()
254 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_decrypt()
262 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_decrypt()
echainiv.c
48 skcipher_request_set_callback(nreq, req->base.flags, in echainiv_encrypt()
59 aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
102 aead_request_set_callback(subreq, req->base.flags, compl, data); in echainiv_decrypt()
af_alg.c
338 static int alg_accept(struct socket *sock, struct socket *newsock, int flags, in alg_accept() argument
699 static int af_alg_wait_for_wmem(struct sock *sk, unsigned int flags) in af_alg_wait_for_wmem() argument
705 if (flags & MSG_DONTWAIT) in af_alg_wait_for_wmem()
756 int af_alg_wait_for_data(struct sock *sk, unsigned flags, unsigned min) in af_alg_wait_for_data() argument
764 if (flags & MSG_DONTWAIT) in af_alg_wait_for_data()
989 int offset, size_t size, int flags) in af_alg_sendpage() argument
997 if (flags & MSG_SENDPAGE_NOTLAST) in af_alg_sendpage()
998 flags |= MSG_MORE; in af_alg_sendpage()
1008 err = af_alg_wait_for_wmem(sk, flags); in af_alg_sendpage()
1031 ctx->more = flags & MSG_MORE; in af_alg_sendpage()
[all …]
pcrypt.c
100 u32 flags = aead_request_flags(req); in pcrypt_aead_encrypt() local
111 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_encrypt()
149 u32 flags = aead_request_flags(req); in pcrypt_aead_decrypt() local
160 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_decrypt()
internal.h
176 static inline void crypto_yield(u32 flags) in crypto_yield() argument
178 if (flags & CRYPTO_TFM_REQ_MAY_SLEEP) in crypto_yield()
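The internal.h hit is the sleep gate used by several other files in this list (the ahash.c and skcipher.c hits call crypto_yield() with their walk flags): the CPU is yielded only when the request's CRYPTO_TFM_REQ_MAY_SLEEP bit is set. A minimal illustrative rewrite of that check, not the verbatim kernel helper:

	#include <linux/crypto.h>
	#include <linux/sched.h>

	/* Reschedule only if the request permits sleeping. */
	static inline void demo_yield(u32 flags)
	{
		if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
			cond_resched();
	}
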
authencesn.c
87 unsigned int flags) in crypto_authenc_esn_genicv_tail() argument
120 unsigned int flags) in crypto_authenc_esn_genicv() argument
148 ahash_request_set_callback(ahreq, flags, in crypto_authenc_esn_genicv()
220 unsigned int flags) in crypto_authenc_esn_decrypt_tail() argument
254 skcipher_request_set_callback(skreq, flags, in crypto_authenc_esn_decrypt_tail()
jitterentropy.h
16 unsigned int flags);
xts.c
183 skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done, in xts_cts_final()
206 rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG; in xts_encrypt_done()
226 rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG; in xts_decrypt_done()
251 skcipher_request_set_callback(subreq, req->base.flags, compl, req); in xts_init_crypt()
ccm.c
44 u32 flags; member
200 ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL); in crypto_ccm_auth()
261 pctx->flags = aead_request_flags(req); in crypto_ccm_init_crypt()
310 skcipher_request_set_callback(skreq, pctx->flags, in crypto_ccm_encrypt()
333 pctx->flags = 0; in crypto_ccm_decrypt_done()
375 skcipher_request_set_callback(skreq, pctx->flags, in crypto_ccm_decrypt()
638 aead_request_set_callback(subreq, req->base.flags, req->base.complete, in crypto_rfc4309_crypt()
authenc.c
132 static int crypto_authenc_genicv(struct aead_request *req, unsigned int flags) in crypto_authenc_genicv() argument
150 ahash_request_set_callback(ahreq, flags, in crypto_authenc_genicv()
230 unsigned int flags) in crypto_authenc_decrypt_tail() argument
256 skcipher_request_set_callback(skreq, flags, in crypto_authenc_decrypt_tail()
/crypto/async_tx/
async_xor.c
30 enum async_tx_flags flags_orig = submit->flags; in do_async_xor()
40 submit->flags = flags_orig; in do_async_xor()
46 submit->flags &= ~ASYNC_TX_ACK; in do_async_xor()
47 submit->flags |= ASYNC_TX_FENCE; in do_async_xor()
56 if (submit->flags & ASYNC_TX_FENCE) in do_async_xor()
124 if (submit->flags & ASYNC_TX_XOR_ZERO_DST) in do_sync_xor_offs()
233 if (submit->flags & ASYNC_TX_XOR_DROP_DST) { in async_xor_offs()
339 if (submit->flags & ASYNC_TX_FENCE) in async_xor_val_offs()
366 enum async_tx_flags flags_orig = submit->flags; in async_xor_val_offs()
373 submit->flags |= ASYNC_TX_XOR_DROP_DST; in async_xor_val_offs()
[all …]
async_raid6_recov.c
39 if (submit->flags & ASYNC_TX_FENCE) in async_sum_product()
107 if (submit->flags & ASYNC_TX_FENCE) in async_mult()
163 enum async_tx_flags flags = submit->flags; in __2data_recov_4() local
194 init_async_submit(submit, flags | ASYNC_TX_XOR_ZERO_DST, tx, cb_fn, in __2data_recov_4()
213 enum async_tx_flags flags = submit->flags; in __2data_recov_5() local
286 init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in __2data_recov_5()
304 enum async_tx_flags flags = submit->flags; in __2data_recov_n() local
375 init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in __2data_recov_n()
479 enum async_tx_flags flags = submit->flags; in async_raid6_datap_recov() local
584 init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in async_raid6_datap_recov()
async_pq.c
43 enum async_tx_flags flags_orig = submit->flags; in do_async_gen_syndrome()
52 submit->flags = flags_orig; in do_async_gen_syndrome()
59 submit->flags &= ~ASYNC_TX_ACK; in do_async_gen_syndrome()
60 submit->flags |= ASYNC_TX_FENCE; in do_async_gen_syndrome()
69 if (submit->flags & ASYNC_TX_FENCE) in do_async_gen_syndrome()
133 if (submit->flags & ASYNC_TX_PQ_XOR_DST) { in do_sync_gen_syndrome()
193 if (unmap && !(submit->flags & ASYNC_TX_PQ_XOR_DST) && in async_gen_syndrome()
356 if (submit->flags & ASYNC_TX_FENCE) in async_syndrome_val()
378 enum async_tx_flags flags_orig = submit->flags; in async_syndrome_val()
432 submit->flags = flags_orig; in async_syndrome_val()
