/kernel/linux/linux-5.10/drivers/crypto/cavium/nitrox/
  nitrox_skcipher.c
     51  static void free_src_sglist(struct skcipher_request *skreq)  in free_src_sglist() argument
     53  struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);  in free_src_sglist()
     58  static void free_dst_sglist(struct skcipher_request *skreq)  in free_dst_sglist() argument
     60  struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);  in free_dst_sglist()
     67  struct skcipher_request *skreq = arg;  in nitrox_skcipher_callback() local
     69  free_src_sglist(skreq);  in nitrox_skcipher_callback()
     70  free_dst_sglist(skreq);  in nitrox_skcipher_callback()
     76  skcipher_request_complete(skreq, err);  in nitrox_skcipher_callback()
     81  struct skcipher_request *skreq = arg;  in nitrox_cbc_cipher_callback() local
     82  struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);  in nitrox_cbc_cipher_callback()
     [all …]
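The nitrox hits above are the completion side of an asynchronous skcipher driver: the request comes back as an opaque pointer, the driver recovers its per-request bookkeeping with skcipher_request_ctx(), frees the scatterlists it built, and only then completes the request. A minimal sketch of that shape, with invented example_* names and a simplified context (not the driver's actual nitrox_kcrypt_request layout):

#include <crypto/internal/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical per-request driver state kept in the request context. */
struct example_kcrypt_request {
	struct scatterlist *src;	/* scatterlist built for the engine */
	struct scatterlist *dst;	/* separate list when src != dst */
};

/* Completion handler invoked by the hardware layer with the original
 * skcipher_request as the opaque argument. */
static void example_skcipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;
	struct example_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->src);		/* stands in for free_src_sglist() */
	kfree(nkreq->dst);		/* stands in for free_dst_sglist() */

	/* Hand the result back to whoever submitted the request. */
	skcipher_request_complete(skreq, err);
}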
/kernel/linux/linux-5.10/drivers/block/
  skd_main.c
    360  struct skd_request_context *skreq);
    362  struct skd_request_context *skreq);
    376  struct skd_request_context *skreq, const char *event);
    423  struct skd_request_context *skreq)  in skd_prep_zerosize_flush_cdb() argument
    425  skreq->flush_cmd = 1;  in skd_prep_zerosize_flush_cdb()
    486  struct skd_request_context *const skreq = blk_mq_rq_to_pdu(req);  in skd_mq_queue_rq() local
    497  skreq->retries = 0;  in skd_mq_queue_rq()
    506  SKD_ASSERT(skreq->state == SKD_REQ_STATE_IDLE);  in skd_mq_queue_rq()
    512  skreq->id = tag + SKD_ID_RW_REQUEST;  in skd_mq_queue_rq()
    513  skreq->flush_cmd = 0;  in skd_mq_queue_rq()
    [all …]
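Note that skd_main.c is a block driver, so this skreq is a struct skd_request_context rather than a crypto request: it is the per-request PDU that blk-mq allocates alongside each struct request and that the driver fetches with blk_mq_rq_to_pdu(). A hedged sketch of that .queue_rq() pattern, with invented example_* names and blk_mq_unique_tag() standing in for the driver's tag + SKD_ID_RW_REQUEST scheme:

#include <linux/blk-mq.h>

/* Hypothetical per-request PDU, sized via blk_mq_tag_set.cmd_size. */
struct example_request_context {
	u32 id;
	int retries;
	int flush_cmd;
};

static blk_status_t example_queue_rq(struct blk_mq_hw_ctx *hctx,
				     const struct blk_mq_queue_data *bd)
{
	struct request *req = bd->rq;
	struct example_request_context *skreq = blk_mq_rq_to_pdu(req);

	blk_mq_start_request(req);

	/* Reset the per-request state before (re)issuing it. */
	skreq->retries = 0;
	skreq->flush_cmd = 0;
	skreq->id = blk_mq_unique_tag(req);	/* stand-in for the driver's id scheme */

	/* ...translate req into a hardware command and submit it... */

	return BLK_STS_OK;
}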
/kernel/linux/linux-5.10/drivers/crypto/hisilicon/sec/
  sec_algs.c
    424  struct skcipher_request *skreq = container_of(req_base,  in sec_skcipher_alg_callback() local
    427  struct sec_request *sec_req = skcipher_request_ctx(skreq);  in sec_skcipher_alg_callback()
    431  struct crypto_skcipher *atfm = crypto_skcipher_reqtfm(skreq);  in sec_skcipher_alg_callback()
    461  skreq->iv,  in sec_skcipher_alg_callback()
    468  skreq->iv,  in sec_skcipher_alg_callback()
    477  crypto_inc(skreq->iv, 16);  in sec_skcipher_alg_callback()
    532  dma_unmap_sg(dev, skreq->src, sec_req->len_in,  in sec_skcipher_alg_callback()
    534  if (skreq->src != skreq->dst)  in sec_skcipher_alg_callback()
    535  dma_unmap_sg(dev, skreq->dst, sec_req->len_out,  in sec_skcipher_alg_callback()
    537  skreq->base.complete(&skreq->base, sec_req->err);  in sec_skcipher_alg_callback()
    [all …]
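The sec_algs.c callback lines show two chores a DMA-based skcipher driver handles on completion: updating the IV so chained requests continue correctly (crypto_inc() advances the CTR counter, one step per 16-byte block) and unmapping the source and destination scatterlists, skipping the destination when it aliases the source. A simplified, hedged sketch of that sequence; the example_* names, the per-block loop, and the DMA direction are assumptions, not the driver's exact code:

#include <crypto/algapi.h>		/* crypto_inc() */
#include <crypto/skcipher.h>
#include <linux/dma-mapping.h>
#include <linux/kernel.h>

static void example_skcipher_done(struct device *dev,
				  struct skcipher_request *skreq,
				  int nents_in, int nents_out, int err)
{
	unsigned int nblocks = DIV_ROUND_UP(skreq->cryptlen, 16);
	unsigned int i;

	/* CTR chaining: advance the 128-bit counter once per processed block. */
	for (i = 0; i < nblocks; i++)
		crypto_inc(skreq->iv, 16);

	/* Unmap what was mapped for the engine; dst only if it is distinct. */
	dma_unmap_sg(dev, skreq->src, nents_in, DMA_BIDIRECTIONAL);
	if (skreq->src != skreq->dst)
		dma_unmap_sg(dev, skreq->dst, nents_out, DMA_BIDIRECTIONAL);

	skreq->base.complete(&skreq->base, err);
}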
/kernel/linux/linux-5.10/crypto/
  authenc.c
    181  SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);  in crypto_authenc_copy_assoc()
    183  skcipher_request_set_sync_tfm(skreq, ctx->null);  in crypto_authenc_copy_assoc()
    184  skcipher_request_set_callback(skreq, aead_request_flags(req),  in crypto_authenc_copy_assoc()
    186  skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,  in crypto_authenc_copy_assoc()
    189  return crypto_skcipher_encrypt(skreq);  in crypto_authenc_copy_assoc()
    201  struct skcipher_request *skreq = (void *)(areq_ctx->tail +  in crypto_authenc_encrypt() local
    217  skcipher_request_set_tfm(skreq, enc);  in crypto_authenc_encrypt()
    218  skcipher_request_set_callback(skreq, aead_request_flags(req),  in crypto_authenc_encrypt()
    220  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_encrypt()
    222  err = crypto_skcipher_encrypt(skreq);  in crypto_authenc_encrypt()
    [all …]
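crypto_authenc_copy_assoc() uses the ecb(cipher_null) transform as a scatterlist-to-scatterlist copy: "encrypting" with the null cipher just moves bytes from src to dst. A self-contained version of the same idiom, assuming the caller already holds a sync null tfm (the example_* names are not from authenc.c):

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int example_copy_sgl(struct crypto_sync_skcipher *null_tfm,
			    struct scatterlist *src, struct scatterlist *dst,
			    unsigned int len, u32 flags)
{
	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, null_tfm);

	skcipher_request_set_sync_tfm(skreq, null_tfm);
	skcipher_request_set_callback(skreq, flags, NULL, NULL);
	/* ecb(cipher_null) has no IV and simply copies src to dst. */
	skcipher_request_set_crypt(skreq, src, dst, len, NULL);

	return crypto_skcipher_encrypt(skreq);
}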
  authencesn.c
    171  SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);  in crypto_authenc_esn_copy()
    173  skcipher_request_set_sync_tfm(skreq, ctx->null);  in crypto_authenc_esn_copy()
    174  skcipher_request_set_callback(skreq, aead_request_flags(req),  in crypto_authenc_esn_copy()
    176  skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);  in crypto_authenc_esn_copy()
    178  return crypto_skcipher_encrypt(skreq);  in crypto_authenc_esn_copy()
    186  struct skcipher_request *skreq = (void *)(areq_ctx->tail +  in crypto_authenc_esn_encrypt() local
    207  skcipher_request_set_tfm(skreq, enc);  in crypto_authenc_esn_encrypt()
    208  skcipher_request_set_callback(skreq, aead_request_flags(req),  in crypto_authenc_esn_encrypt()
    210  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_esn_encrypt()
    212  err = crypto_skcipher_encrypt(skreq);  in crypto_authenc_esn_encrypt()
    [all …]
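The second group of authencesn.c hits (lines 186 and 207-212) shows how the child skcipher request is carved out of the AEAD request context at a precomputed offset instead of being allocated separately. A hedged sketch of that layout and setup; the example_* names and the exact context members are illustrative only:

#include <crypto/internal/aead.h>
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Hypothetical AEAD request context; the child request starts in tail[]. */
struct example_request_ctx {
	struct scatterlist src[2];
	struct scatterlist dst[2];
	char tail[];
};

static int example_encrypt_payload(struct aead_request *req,
				   struct crypto_skcipher *enc,
				   unsigned int reqoff,
				   struct scatterlist *src,
				   struct scatterlist *dst,
				   unsigned int cryptlen)
{
	struct example_request_ctx *areq_ctx = aead_request_ctx(req);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail + reqoff);

	skcipher_request_set_tfm(skreq, enc);
	/* Forward the original completion so the caller sees one request. */
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);

	return crypto_skcipher_encrypt(skreq);
}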
  ccm.c
     48  struct skcipher_request skreq;  member
    289  struct skcipher_request *skreq = &pctx->skreq;  in crypto_ccm_encrypt() local
    308  skcipher_request_set_tfm(skreq, ctx->ctr);  in crypto_ccm_encrypt()
    309  skcipher_request_set_callback(skreq, pctx->flags,  in crypto_ccm_encrypt()
    311  skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);  in crypto_ccm_encrypt()
    312  err = crypto_skcipher_encrypt(skreq);  in crypto_ccm_encrypt()
    349  struct skcipher_request *skreq = &pctx->skreq;  in crypto_ccm_decrypt() local
    373  skcipher_request_set_tfm(skreq, ctx->ctr);  in crypto_ccm_decrypt()
    374  skcipher_request_set_callback(skreq, pctx->flags,  in crypto_ccm_decrypt()
    376  skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);  in crypto_ccm_decrypt()
    [all …]
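The "member" hit on line 48 is the other common arrangement: ccm.c embeds the CTR skcipher_request directly in its per-request context and reserves space for the CTR implementation's own request context when the AEAD instance is set up. A hedged illustration of that sizing step, with invented example_* names and a simplified context:

#include <crypto/internal/aead.h>
#include <crypto/skcipher.h>

/* Hypothetical per-request context; skreq must stay last, because the
 * CTR implementation's own request context is appended right after it. */
struct example_req_priv_ctx {
	u32 flags;
	u8 odata[16];
	struct skcipher_request skreq;
};

static void example_set_reqsize(struct crypto_aead *aead,
				struct crypto_skcipher *ctr)
{
	crypto_aead_set_reqsize(aead,
				sizeof(struct example_req_priv_ctx) +
				crypto_skcipher_reqsize(ctr));
}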
  gcm.c
     73  struct skcipher_request skreq;  member
    183  struct skcipher_request *skreq = &pctx->u.skreq;  in crypto_gcm_init_crypt() local
    188  skcipher_request_set_tfm(skreq, ctx->ctr);  in crypto_gcm_init_crypt()
    189  skcipher_request_set_crypt(skreq, pctx->src, dst,  in crypto_gcm_init_crypt()
    454  struct skcipher_request *skreq = &pctx->u.skreq;  in crypto_gcm_encrypt() local
    459  skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);  in crypto_gcm_encrypt()
    461  return crypto_skcipher_encrypt(skreq) ?:  in crypto_gcm_encrypt()
    493  struct skcipher_request *skreq = &pctx->u.skreq;  in gcm_dec_hash_continue() local
    497  skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);  in gcm_dec_hash_continue()
    498  return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);  in gcm_dec_hash_continue()
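The crypto_gcm_encrypt() hit on line 461 uses the err ?: next_step() idiom: when the CTR pass completes synchronously (returns 0) the hashing stage runs immediately, while -EINPROGRESS or -EBUSY is returned to the caller and the same stage is later driven from the completion callback. A minimal sketch of that control flow with invented example_* names:

#include <crypto/aead.h>
#include <crypto/skcipher.h>

/* Placeholder for the authentication stage that follows the CTR pass. */
static int example_hash_stage(struct aead_request *req, u32 flags)
{
	/* ...run GHASH over the ciphertext here... */
	return 0;
}

static int example_gcm_encrypt(struct aead_request *req,
			       struct skcipher_request *skreq, u32 flags)
{
	/*
	 * 0 from crypto_skcipher_encrypt() means "done synchronously", so
	 * fall through to the hash stage; any other value (including
	 * -EINPROGRESS/-EBUSY) is returned to the caller as-is.
	 */
	return crypto_skcipher_encrypt(skreq) ?:
	       example_hash_stage(req, flags);
}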
  algif_aead.c
     78  SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, null_tfm);  in crypto_aead_copy_sgl()
     80  skcipher_request_set_sync_tfm(skreq, null_tfm);  in crypto_aead_copy_sgl()
     81  skcipher_request_set_callback(skreq, CRYPTO_TFM_REQ_MAY_SLEEP,  in crypto_aead_copy_sgl()
     83  skcipher_request_set_crypt(skreq, src, dst, len, NULL);  in crypto_aead_copy_sgl()
     85  return crypto_skcipher_encrypt(skreq);  in crypto_aead_copy_sgl()
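algif_aead.c reuses the same null-skcipher copy helper; the shared tfm itself comes from the crypto core's reference-counted default. A short, hedged sketch of the matching get/put pair (the wrapper names are invented):

#include <crypto/null.h>

/* The default null skcipher is a reference-counted singleton. */
static struct crypto_sync_skcipher *example_get_null_tfm(void)
{
	return crypto_get_default_null_skcipher();
}

static void example_put_null_tfm(void)
{
	crypto_put_default_null_skcipher();
}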
/kernel/linux/linux-5.10/drivers/crypto/marvell/cesa/
  cipher.c
    138  struct skcipher_request *skreq = skcipher_request_cast(req);  in mv_cesa_skcipher_process() local
    139  struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);  in mv_cesa_skcipher_process()
    143  return mv_cesa_skcipher_std_process(skreq, status);  in mv_cesa_skcipher_process()
    150  struct skcipher_request *skreq = skcipher_request_cast(req);  in mv_cesa_skcipher_step() local
    151  struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);  in mv_cesa_skcipher_step()
    156  mv_cesa_skcipher_std_step(skreq);  in mv_cesa_skcipher_step()
    181  struct skcipher_request *skreq = skcipher_request_cast(req);  in mv_cesa_skcipher_prepare() local
    182  struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);  in mv_cesa_skcipher_prepare()
    187  mv_cesa_skcipher_dma_prepare(skreq);  in mv_cesa_skcipher_prepare()
    189  mv_cesa_skcipher_std_prepare(skreq);  in mv_cesa_skcipher_prepare()
    [all …]
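The CESA hits are on the queue-processing side: the engine dequeues a generic crypto_async_request, and skcipher_request_cast() (a container_of() wrapper) recovers the skcipher request and, from it, the driver's per-request state before deciding between the DMA and PIO paths. A hedged sketch with invented example_* names:

#include <crypto/internal/skcipher.h>
#include <linux/errno.h>

/* Hypothetical per-request driver state kept in the request context. */
struct example_cesa_req {
	bool use_dma;
};

static int example_process(struct crypto_async_request *req, u32 status)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct example_cesa_req *creq = skcipher_request_ctx(skreq);

	if (creq->use_dma)
		return status ? -EIO : 0;	/* DMA chain already wrote the result */

	/* PIO path: read the result back from the engine's SRAM here. */
	return 0;
}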
/kernel/linux/patches/linux-5.10/imx8mm_patch/patches/
  0003_linux_crypto.patch
    861  + SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
    863  + skcipher_request_set_sync_tfm(skreq, ctx->null);
    864  + skcipher_request_set_callback(skreq, aead_request_flags(req),
    866  + skcipher_request_set_crypt(skreq, src, dst, len, NULL);
    868  + return crypto_skcipher_encrypt(skreq);
    876  + struct skcipher_request *skreq;
    959  + skreq = (void *)(treq_ctx->tail + ctx->reqoff);
    960  + skcipher_request_set_tfm(skreq, ctx->enc);
    961  + skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
    962  + skcipher_request_set_callback(skreq, aead_request_flags(req),
    [all …]