/crypto/
crypto_engine.c
    27  unsigned long flags; in crypto_finalize_request() local
    32  spin_lock_irqsave(&engine->queue_lock, flags); in crypto_finalize_request()
    35  spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_finalize_request()
    45  spin_lock_irqsave(&engine->queue_lock, flags); in crypto_finalize_request()
    48  spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_finalize_request()
    69  unsigned long flags; in crypto_pump_requests() local
    74  spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
   100  spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
   106  spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
   126  spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
   [all …]

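The crypto_engine matches above are the textbook spin_lock_irqsave() pairing: flags is a caller-stack variable that captures the saved interrupt state and must be handed back to the matching spin_unlock_irqrestore(). A minimal sketch of the pattern, using a hypothetical my_engine type rather than the real struct crypto_engine:

#include <linux/list.h>
#include <linux/spinlock.h>

/* Hypothetical stand-in for struct crypto_engine: a request queue
 * protected by a spinlock that is also taken from IRQ context. */
struct my_engine {
	spinlock_t queue_lock;
	struct list_head queue;
};

static void my_engine_pump(struct my_engine *engine)
{
	unsigned long flags;		/* receives the saved IRQ state */

	/* Disable local interrupts and take the lock; 'flags' records
	 * whether interrupts were enabled before we got here. */
	spin_lock_irqsave(&engine->queue_lock, flags);

	/* ... dequeue and dispatch the next request ... */

	/* Drop the lock and restore the exact prior interrupt state. */
	spin_unlock_irqrestore(&engine->queue_lock, flags);
}

Because the saved state travels in a local variable, each caller restores exactly the state it saved, which is why every function above declares its own flags local.
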
aes_ti.c
    23  unsigned long flags; in aesti_encrypt() local
    29  local_irq_save(flags); in aesti_encrypt()
    33  local_irq_restore(flags); in aesti_encrypt()
    39  unsigned long flags; in aesti_decrypt() local
    45  local_irq_save(flags); in aesti_decrypt()
    49  local_irq_restore(flags); in aesti_decrypt()

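aes_ti.c is the generic fixed-time AES implementation; it brackets the table-driven rounds with local_irq_save()/local_irq_restore() so no interrupt handler can run mid-block and evict the just-prefetched lookup tables from cache, which would widen the timing side channel. A sketch of that bracket, with illustrative helper names (the real round code differs):

#include <linux/irqflags.h>

/* Sketch of the aes_ti.c bracket: run the table-based rounds with
 * local interrupts off so the prefetched S-box stays cache-hot.
 * The round function named in the comment below is hypothetical. */
static void fixed_time_encrypt(const u32 *rk, u8 *out, const u8 *in)
{
	unsigned long flags;

	local_irq_save(flags);		/* save state, disable IRQs */
	/* ... prefetch tables, then run the AES rounds on (rk, out, in) ... */
	local_irq_restore(flags);	/* re-enable only if previously on */
}
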
gcm.c
    61  int (*complete)(struct aead_request *req, u32 flags);
   211  unsigned int len, u32 flags) in gcm_hash_update() argument
   216  ahash_request_set_callback(ahreq, flags, compl, req); in gcm_hash_update()
   224  crypto_completion_t compl, u32 flags) in gcm_hash_remain() argument
   226  return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags); in gcm_hash_remain()
   229  static int gcm_hash_len(struct aead_request *req, u32 flags) in gcm_hash_len() argument
   240  ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req); in gcm_hash_len()
   247  static int gcm_hash_len_continue(struct aead_request *req, u32 flags) in gcm_hash_len_continue() argument
   252  return gctx->complete(req, flags); in gcm_hash_len_continue()
   270  static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags) in gcm_hash_crypt_remain_continue() argument
   [all …]

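Nearly every gcm.c hit is the same template rule: the u32 flags received from the outer AEAD request is passed unmodified into each sub-request's callback setup, so properties like CRYPTO_TFM_REQ_MAY_SLEEP survive across the whole hash/encrypt chain. A minimal sketch of one such step (hypothetical function name; the real gcm helpers carry more state):

#include <crypto/hash.h>

/* Sketch of the flag-propagation rule from gcm.c: hand the parent
 * request's flags straight to the sub-request so sleeping/backlog
 * behaviour stays consistent down the chain. */
static int my_hash_step(struct ahash_request *ahreq,
			crypto_completion_t compl, void *data, u32 flags)
{
	ahash_request_set_callback(ahreq, flags, compl, data);
	return crypto_ahash_update(ahreq);
}
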
blkcipher.c
    81  if (walk->flags & BLKCIPHER_WALK_COPY) { in blkcipher_done_fast()
    85  } else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) { in blkcipher_done_fast()
    86  if (walk->flags & BLKCIPHER_WALK_DIFF) in blkcipher_done_fast()
   108  if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW))) { in blkcipher_walk_done()
   123  crypto_yield(desc->flags); in blkcipher_walk_done()
   170  walk->flags |= BLKCIPHER_WALK_SLOW; in blkcipher_next_slow()
   199  if (walk->flags & BLKCIPHER_WALK_PHYS) in blkcipher_next_fast()
   209  walk->flags |= BLKCIPHER_WALK_DIFF; in blkcipher_next_fast()
   225  desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; in blkcipher_walk_next()
   231  walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY | in blkcipher_walk_next()
   [all …]

skcipher.c
    80  return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC; in skcipher_walk_gfp()
   100  (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1); in skcipher_done_slow()
   117  if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS | in skcipher_walk_done()
   123  } else if (walk->flags & SKCIPHER_WALK_DIFF) { in skcipher_walk_done()
   126  } else if (walk->flags & SKCIPHER_WALK_COPY) { in skcipher_walk_done()
   130  } else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) { in skcipher_walk_done()
   156  crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ? in skcipher_walk_done()
   166  if (walk->flags & SKCIPHER_WALK_PHYS) in skcipher_walk_done()
   226  bool phys = walk->flags & SKCIPHER_WALK_PHYS; in skcipher_next_slow()
   280  walk->flags |= SKCIPHER_WALK_SLOW; in skcipher_next_slow()
   [all …]

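Line 80 above is worth a second look: the walk's sleep bit directly selects the allocation class for any bounce buffers. A self-contained sketch of that idiom (MY_WALK_SLEEP is an invented stand-in for SKCIPHER_WALK_SLEEP):

#include <linux/gfp.h>

#define MY_WALK_SLEEP	0x01	/* stand-in for SKCIPHER_WALK_SLEEP */

struct my_walk {
	unsigned int flags;
};

/* Mirrors the skcipher_walk_gfp() pattern: a walk that may sleep can
 * use GFP_KERNEL; one in atomic context must use GFP_ATOMIC. */
static inline gfp_t my_walk_gfp(const struct my_walk *walk)
{
	return (walk->flags & MY_WALK_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
}
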
algif_hash.c
   132  int offset, size_t size, int flags) in hash_sendpage() argument
   139  if (flags & MSG_SENDPAGE_NOTLAST) in hash_sendpage()
   140  flags |= MSG_MORE; in hash_sendpage()
   146  if (!(flags & MSG_MORE)) { in hash_sendpage()
   155  if (!(flags & MSG_MORE)) { in hash_sendpage()
   175  ctx->more = flags & MSG_MORE; in hash_sendpage()
   184  int flags) in hash_recvmsg() argument
   230  static int hash_accept(struct socket *sock, struct socket *newsock, int flags, in hash_accept() argument
   345  int offset, size_t size, int flags) in hash_sendpage_nokey() argument
   353  return hash_sendpage(sock, page, offset, size, flags); in hash_sendpage_nokey()
   [all …]

ahash.c
    30  u32 flags; member
    47  if (walk->flags & CRYPTO_ALG_ASYNC) in hash_walk_next()
   100  if (walk->flags & CRYPTO_ALG_ASYNC) in crypto_hash_walk_done()
   108  crypto_yield(walk->flags); in crypto_hash_walk_done()
   141  walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_hash_walk_first()
   159  walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK; in crypto_ahash_walk_first()
   160  walk->flags |= CRYPTO_ALG_ASYNC; in crypto_ahash_walk_first()
   238  (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in ahash_save_req()
   271  priv->flags = req->base.flags; in ahash_save_req()
   298  ahash_request_set_callback(req, priv->flags, in ahash_restore_req()
   [all …]

algif_skcipher.c
    51  size_t ignored, int flags) in _skcipher_recvmsg() argument
    65  err = af_alg_wait_for_data(sk, flags); in _skcipher_recvmsg()
    77  err = af_alg_get_rsgl(sk, msg, flags, areq, -1, &len); in _skcipher_recvmsg()
   154  size_t ignored, int flags) in skcipher_recvmsg() argument
   161  int err = _skcipher_recvmsg(sock, msg, ignored, flags); in skcipher_recvmsg()
   259  int offset, size_t size, int flags) in skcipher_sendpage_nokey() argument
   267  return af_alg_sendpage(sock, page, offset, size, flags); in skcipher_sendpage_nokey()
   271  size_t ignored, int flags) in skcipher_recvmsg_nokey() argument
   279  return skcipher_recvmsg(sock, msg, ignored, flags); in skcipher_recvmsg_nokey()

ansi_cprng.c
    47  u32 flags; member
   136  ctx->flags |= PRNG_NEED_RESET; in _get_more_prng_bytes()
   191  if (ctx->flags & PRNG_NEED_RESET) in get_prng_bytes()
   199  if (ctx->flags & PRNG_FIXED_SIZE) { in get_prng_bytes()
   283  ctx->flags |= PRNG_NEED_RESET; in reset_prng_context()
   313  ctx->flags &= ~PRNG_NEED_RESET; in reset_prng_context()
   339  ctx->flags |= PRNG_NEED_RESET; in cprng_init()
   378  if (prng->flags & PRNG_NEED_RESET) in cprng_reset()

chacha20poly1305.c
    65  u32 flags; member
    78  rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in async_done_continue()
   143  skcipher_request_set_callback(&creq->req, rctx->flags, in chacha_decrypt()
   183  ahash_request_set_callback(&preq->req, rctx->flags, in poly_tail()
   213  ahash_request_set_callback(&preq->req, rctx->flags, in poly_cipherpad()
   243  ahash_request_set_callback(&preq->req, rctx->flags, in poly_cipher()
   272  ahash_request_set_callback(&preq->req, rctx->flags, in poly_adpad()
   296  ahash_request_set_callback(&preq->req, rctx->flags, in poly_ad()
   322  ahash_request_set_callback(&preq->req, rctx->flags, in poly_setkey()
   346  ahash_request_set_callback(&preq->req, rctx->flags, in poly_init()
   [all …]

algif_aead.c
    89  size_t ignored, int flags) in _aead_recvmsg() argument
   110  err = af_alg_wait_for_data(sk, flags); in _aead_recvmsg()
   159  err = af_alg_get_rsgl(sk, msg, flags, areq, outlen, &usedpages); in _aead_recvmsg()
   323  size_t ignored, int flags) in aead_recvmsg() argument
   330  int err = _aead_recvmsg(sock, msg, ignored, flags); in aead_recvmsg()
   428  int offset, size_t size, int flags) in aead_sendpage_nokey() argument
   436  return af_alg_sendpage(sock, page, offset, size, flags); in aead_sendpage_nokey()
   440  size_t ignored, int flags) in aead_recvmsg_nokey() argument
   448  return aead_recvmsg(sock, msg, ignored, flags); in aead_recvmsg_nokey()

drbg.c
   119  .flags = DRBG_CTR | DRBG_STRENGTH128,
   125  .flags = DRBG_CTR | DRBG_STRENGTH192,
   131  .flags = DRBG_CTR | DRBG_STRENGTH256,
   140  .flags = DRBG_HASH | DRBG_STRENGTH128,
   146  .flags = DRBG_HASH | DRBG_STRENGTH256,
   152  .flags = DRBG_HASH | DRBG_STRENGTH256,
   158  .flags = DRBG_HASH | DRBG_STRENGTH256,
   167  .flags = DRBG_HMAC | DRBG_STRENGTH128,
   173  .flags = DRBG_HMAC | DRBG_STRENGTH256,
   179  .flags = DRBG_HMAC | DRBG_STRENGTH256,
   [all …]

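In drbg.c, flags is not runtime state but a static property mask: each core ORs together one type bit (CTR/HASH/HMAC) and one strength bit, letting shared code pick a behaviour with a single mask test. A self-contained re-creation of the convention (the bit values here are invented; the kernel defines its own DRBG_* constants):

/* Invented values illustrating the drbg.c flags convention. */
#define MY_DRBG_CTR		0x01
#define MY_DRBG_HASH		0x02
#define MY_DRBG_HMAC		0x04
#define MY_DRBG_STRENGTH128	0x10
#define MY_DRBG_STRENGTH256	0x20

struct my_drbg_core {
	unsigned int flags;	/* type bit | strength bit */
	const char *name;
};

static const struct my_drbg_core my_cores[] = {
	{ .flags = MY_DRBG_CTR  | MY_DRBG_STRENGTH128, .name = "ctr_aes128"  },
	{ .flags = MY_DRBG_HMAC | MY_DRBG_STRENGTH256, .name = "hmac_sha256" },
};

/* Shared code branches on the type with one mask test. */
static inline int my_drbg_is_hmac(const struct my_drbg_core *core)
{
	return core->flags & MY_DRBG_HMAC;
}
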
rsa-pkcs1pad.c
   223  async_req.flags = child_async_req->flags; in pkcs1pad_encrypt_sign_complete_cb()
   262  akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_encrypt()
   341  async_req.flags = child_async_req->flags; in pkcs1pad_decrypt_complete_cb()
   363  akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_decrypt()
   421  akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_sign()
   519  async_req.flags = child_async_req->flags; in pkcs1pad_verify_complete_cb()
   551  akcipher_request_set_callback(&req_ctx->child_req, req->base.flags, in pkcs1pad_verify()

seqiv.c
    74  skcipher_request_set_callback(nreq, req->base.flags, in seqiv_aead_encrypt()
    87  info = kmemdup(req->iv, ivsize, req->base.flags & in seqiv_aead_encrypt()
    97  aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_encrypt()
   128  aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_decrypt()

cts.c
   125  skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_encrypt()
   163  skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
   174  skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
   218  skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_decrypt()
   258  skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_decrypt()
   266  skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_decrypt()

pcrypt.c
    66  req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in pcrypt_aead_done()
    92  u32 flags = aead_request_flags(req); in pcrypt_aead_encrypt() local
   100  aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_encrypt()
   134  u32 flags = aead_request_flags(req); in pcrypt_aead_decrypt() local
   142  aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_decrypt()

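The pcrypt hits encode a context rule: its completion callback may run where sleeping is forbidden, so line 66 strips CRYPTO_TFM_REQ_MAY_SLEEP before completing, and the encrypt/decrypt paths mask the bit off when programming the inner request. A sketch of the latter (aead_request_flags() comes from crypto/internal/aead.h):

#include <crypto/internal/aead.h>

/* Sketch of the pcrypt.c masking: an inner request whose completion
 * runs in atomic context must not advertise MAY_SLEEP. */
static void my_setup_inner(struct aead_request *creq,
			   struct aead_request *req,
			   crypto_completion_t done)
{
	u32 flags = aead_request_flags(req);

	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  done, req);
}
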
echainiv.c
    48  skcipher_request_set_callback(nreq, req->base.flags, in echainiv_encrypt()
    59  aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
   102  aead_request_set_callback(subreq, req->base.flags, compl, data); in echainiv_decrypt()

af_alg.c
   326  static int alg_accept(struct socket *sock, struct socket *newsock, int flags, in alg_accept() argument
   687  static int af_alg_wait_for_wmem(struct sock *sk, unsigned int flags) in af_alg_wait_for_wmem() argument
   693  if (flags & MSG_DONTWAIT) in af_alg_wait_for_wmem()
   743  int af_alg_wait_for_data(struct sock *sk, unsigned flags) in af_alg_wait_for_data() argument
   751  if (flags & MSG_DONTWAIT) in af_alg_wait_for_data()
   962  int offset, size_t size, int flags) in af_alg_sendpage() argument
   970  if (flags & MSG_SENDPAGE_NOTLAST) in af_alg_sendpage()
   971  flags |= MSG_MORE; in af_alg_sendpage()
   981  err = af_alg_wait_for_wmem(sk, flags); in af_alg_sendpage()
  1004  ctx->more = flags & MSG_MORE; in af_alg_sendpage()
   [all …]

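Lines 970-971 show the socket-layer idiom shared by the algif_* entries above: the splice path tags every page except the last with MSG_SENDPAGE_NOTLAST, and af_alg_sendpage() folds that into MSG_MORE so a single bit answers "should this operation finalize now?". A rough sketch (the return convention is invented for illustration):

#include <linux/socket.h>
#include <linux/types.h>

/* Sketch of the af_alg_sendpage() idiom: normalize the splice-path
 * flag onto MSG_MORE, then let that one bit gate finalization. */
static bool my_sendpage_more(int flags)
{
	if (flags & MSG_SENDPAGE_NOTLAST)
		flags |= MSG_MORE;

	/* ... queue the page onto the transform's input list ... */

	return flags & MSG_MORE;	/* true: hold off finalizing */
}
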
ablkcipher.c
   109  if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW))) { in ablkcipher_walk_done()
   124  crypto_yield(req->base.flags); in ablkcipher_walk_done()
   169  walk->flags |= ABLKCIPHER_WALK_SLOW; in ablkcipher_next_slow()
   224  req->base.flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; in ablkcipher_walk_next()
   228  walk->flags &= ~ABLKCIPHER_WALK_SLOW; in ablkcipher_walk_next()

authencesn.c
    97  unsigned int flags) in crypto_authenc_esn_genicv_tail() argument
   130  unsigned int flags) in crypto_authenc_esn_genicv() argument
   158  ahash_request_set_callback(ahreq, flags, in crypto_authenc_esn_genicv()
   230  unsigned int flags) in crypto_authenc_esn_decrypt_tail() argument
   264  skcipher_request_set_callback(skreq, flags, in crypto_authenc_esn_decrypt_tail()

sm4_generic.c
   160  u32 *flags = &tfm->crt_flags; in crypto_sm4_set_key() local
   167  *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; in crypto_sm4_set_key()

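sm4_generic.c uses the older tfm result-flag convention: on a bad key length, setkey records CRYPTO_TFM_RES_BAD_KEY_LEN in crt_flags in addition to returning -EINVAL, so the caller can tell why the key was refused. A simplified sketch of that error path (the real crypto_sm4_set_key() also expands the key schedule):

#include <linux/crypto.h>
#include <linux/errno.h>

#define MY_SM4_KEY_SIZE	16	/* SM4 takes a 128-bit key */

static int my_sm4_set_key(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int key_len)
{
	u32 *flags = &tfm->crt_flags;

	if (key_len != MY_SM4_KEY_SIZE) {
		/* Record the reason for the failure for the caller. */
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* ... derive and store the round keys here ... */
	return 0;
}
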
xts.c
   186  skcipher_request_set_callback(subreq, req->base.flags, cts_done, req); in cts_final()
   208  rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in encrypt_done()
   228  rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in decrypt_done()
   251  skcipher_request_set_callback(subreq, req->base.flags, compl, req); in init_crypt()

/crypto/async_tx/
async_xor.c
    30  enum async_tx_flags flags_orig = submit->flags; in do_async_xor()
    40  submit->flags = flags_orig; in do_async_xor()
    46  submit->flags &= ~ASYNC_TX_ACK; in do_async_xor()
    47  submit->flags |= ASYNC_TX_FENCE; in do_async_xor()
    56  if (submit->flags & ASYNC_TX_FENCE) in do_async_xor()
   122  if (submit->flags & ASYNC_TX_XOR_ZERO_DST) in do_sync_xor()
   208  if (submit->flags & ASYNC_TX_XOR_DROP_DST) { in async_xor()
   279  if (submit->flags & ASYNC_TX_FENCE) in async_xor_val()
   305  enum async_tx_flags flags_orig = submit->flags; in async_xor_val()
   312  submit->flags |= ASYNC_TX_XOR_DROP_DST; in async_xor_val()
   [all …]

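The do_async_xor() hits (lines 30-56) trace the chained-submission discipline used across async_tx: save the caller's flags, then for every intermediate descriptor drop ASYNC_TX_ACK (keep it alive for chaining) and set ASYNC_TX_FENCE (results must land before dependents read them), restoring the original flags only for the final descriptor. A sketch of that loop:

#include <linux/async_tx.h>

/* Sketch of the do_async_xor() flag discipline when one logical XOR
 * is split across several hardware descriptors. */
static void my_submit_in_chunks(struct async_submit_ctl *submit, int chunks)
{
	enum async_tx_flags flags_orig = submit->flags;
	int i;

	for (i = 0; i < chunks; i++) {
		submit->flags = flags_orig;
		if (i < chunks - 1) {
			/* Intermediate op: keep un-ACKed and fenced. */
			submit->flags &= ~ASYNC_TX_ACK;
			submit->flags |= ASYNC_TX_FENCE;
		}
		/* ... issue one descriptor using submit->flags ... */
	}
}

The same save/modify/restore shape appears again in async_pq.c below; only the final submission carries the caller's ACK semantics.
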
async_raid6_recov.c
    38  if (submit->flags & ASYNC_TX_FENCE) in async_sum_product()
   102  if (submit->flags & ASYNC_TX_FENCE) in async_mult()
   153  enum async_tx_flags flags = submit->flags; in __2data_recov_4() local
   176  init_async_submit(submit, flags | ASYNC_TX_XOR_ZERO_DST, tx, cb_fn, in __2data_recov_4()
   192  enum async_tx_flags flags = submit->flags; in __2data_recov_5() local
   251  init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in __2data_recov_5()
   266  enum async_tx_flags flags = submit->flags; in __2data_recov_n() local
   319  init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in __2data_recov_n()
   415  enum async_tx_flags flags = submit->flags; in async_raid6_datap_recov() local
   507  init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn, in async_raid6_datap_recov()

async_pq.c
    43  enum async_tx_flags flags_orig = submit->flags; in do_async_gen_syndrome()
    52  submit->flags = flags_orig; in do_async_gen_syndrome()
    59  submit->flags &= ~ASYNC_TX_ACK; in do_async_gen_syndrome()
    60  submit->flags |= ASYNC_TX_FENCE; in do_async_gen_syndrome()
    69  if (submit->flags & ASYNC_TX_FENCE) in do_async_gen_syndrome()
   132  if (submit->flags & ASYNC_TX_PQ_XOR_DST) { in do_sync_gen_syndrome()
   179  if (unmap && !(submit->flags & ASYNC_TX_PQ_XOR_DST) && in async_gen_syndrome()
   339  if (submit->flags & ASYNC_TX_FENCE) in async_syndrome_val()
   359  enum async_tx_flags flags_orig = submit->flags; in async_syndrome_val()
   408  submit->flags = flags_orig; in async_syndrome_val()