/kernel/linux/linux-5.10/drivers/crypto/caam/

D | caamhash.c |
    535   int src_nents;   member
    548   if (edesc->src_nents)   in ahash_unmap()
    549   dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);   in ahash_unmap()
    824   int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index;   in ahash_update_ctx() local
    846   src_nents = sg_nents_for_len(req->src, src_len);   in ahash_update_ctx()
    847   if (src_nents < 0) {   in ahash_update_ctx()
    849   return src_nents;   in ahash_update_ctx()
    852   if (src_nents) {   in ahash_update_ctx()
    853   mapped_nents = dma_map_sg(jrdev, req->src, src_nents,   in ahash_update_ctx()
    874   dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);   in ahash_update_ctx()
    [all …]

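The caamhash.c matches above trace the lifecycle this whole listing keeps returning to: sg_nents_for_len() counts the scatterlist entries covering the request length, dma_map_sg() maps them, and the error path unmaps with the same count. A minimal sketch of that pattern follows, assuming a job-ring device pointer as in the snippets; my_prepare_src() is a hypothetical helper, not a CAAM function:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_prepare_src(struct device *jrdev, struct scatterlist *src,
			  unsigned int src_len, int *mapped_nents)
{
	int src_nents;

	/* Entries needed to cover src_len bytes; negative errno if the list is too short. */
	src_nents = sg_nents_for_len(src, src_len);
	if (src_nents < 0) {
		dev_err(jrdev, "Invalid number of src SG.\n");
		return src_nents;
	}

	if (src_nents) {
		/* dma_map_sg() returns the mapped entry count, or 0 on failure. */
		*mapped_nents = dma_map_sg(jrdev, src, src_nents,
					   DMA_TO_DEVICE);
		if (!*mapped_nents) {
			dev_err(jrdev, "unable to map source for DMA\n");
			return -ENOMEM;
		}
	} else {
		*mapped_nents = 0;
	}

	return src_nents;	/* keep for dma_unmap_sg() at completion */
}

Note that the unmaps at lines 549 and 874 pass the original src_nents, not the mapped count: the DMA API requires dma_unmap_sg() to receive the same nents that was passed to dma_map_sg().
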
D | caamalg_qi2.c |
    146   struct scatterlist *dst, int src_nents,   in caam_unmap() argument
    152   if (src_nents)   in caam_unmap()
    153   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);   in caam_unmap()
    157   dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);   in caam_unmap()
    359   int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;   in aead_edesc_alloc() local
    380   src_nents = sg_nents_for_len(req->src, src_len);   in aead_edesc_alloc()
    381   if (unlikely(src_nents < 0)) {   in aead_edesc_alloc()
    385   return ERR_PTR(src_nents);   in aead_edesc_alloc()
    396   if (src_nents) {   in aead_edesc_alloc()
    397   mapped_src_nents = dma_map_sg(dev, req->src, src_nents,   in aead_edesc_alloc()
    [all …]

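caam_unmap() (lines 146 to 157) branches on whether the operation ran in place: a shared src/dst list was mapped DMA_BIDIRECTIONAL, while distinct lists use DMA_TO_DEVICE for the source. A sketch of just that branch, assuming the destination was mapped DMA_FROM_DEVICE as the caam drivers do:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch of the caam_unmap() direction logic visible above. */
static void my_unmap(struct device *dev, struct scatterlist *src,
		     struct scatterlist *dst, int src_nents, int dst_nents)
{
	if (dst != src) {
		/* Separate buffers: the source was mapped to-device only. */
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		/* In place: the one list was both read and written by the DMA. */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
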
D | caamalg_qi2.h |
    111   int src_nents;   member
    131   int src_nents;   member
    148   int src_nents;   member

D | caamalg_qi.c |
    798   int src_nents;   member
    820   int src_nents;   member
    868   struct scatterlist *dst, int src_nents,   in caam_unmap() argument
    874   if (src_nents)   in caam_unmap()
    875   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);   in caam_unmap()
    879   dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);   in caam_unmap()
    895   caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,   in aead_unmap()
    907   caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,   in skcipher_unmap()
    946   int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;   in aead_edesc_alloc() local
    972   src_nents = sg_nents_for_len(req->src, src_len);   in aead_edesc_alloc()
    [all …]

D | caamalg.c |
    888    int src_nents;   member
    914    int src_nents;   member
    927    struct scatterlist *dst, int src_nents,   in caam_unmap() argument
    933    if (src_nents)   in caam_unmap()
    934    dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);   in caam_unmap()
    938    dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);   in caam_unmap()
    953    edesc->src_nents, edesc->dst_nents, 0, 0,   in aead_unmap()
    964    edesc->src_nents, edesc->dst_nents,   in skcipher_unmap()
    1249   (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);   in init_skcipher_job()
    1253   edesc->src_nents > 1 ? 100 : req->cryptlen, 1);   in init_skcipher_job()
    [all …]

D | caampkc.h |
    135   int src_nents;   member

D | caampkc.c |
    49    dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);   in rsa_io_unmap()
    253   int src_nents, dst_nents;   in rsa_edesc_alloc() local
    281   src_nents = sg_nents_for_len(req_ctx->fixup_src,   in rsa_edesc_alloc()
    285   mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents,   in rsa_edesc_alloc()
    331   edesc->src_nents = src_nents;   in rsa_edesc_alloc()
    362   dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);   in rsa_edesc_alloc()

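caampkc.c line 331 shows why every driver in this listing keeps an src_nents member in its extended descriptor: the count used at submit time must be handed back, unchanged, to dma_unmap_sg() at completion time (line 49). A minimal sketch, with the hypothetical my_edesc standing in for the drivers' per-request descriptor structs:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical per-request descriptor, like the edesc structs above. */
struct my_edesc {
	int src_nents;	/* entry count as passed to dma_map_sg() */
};

static void my_submit(struct my_edesc *edesc, int src_nents)
{
	/* Stash the original count; the later unmap must use the same value. */
	edesc->src_nents = src_nents;
}

static void my_complete(struct device *dev, struct my_edesc *edesc,
			struct scatterlist *src)
{
	if (edesc->src_nents)
		dma_unmap_sg(dev, src, edesc->src_nents, DMA_TO_DEVICE);
}
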
/kernel/linux/linux-5.10/drivers/crypto/qce/

D | skcipher.c |
    49    dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);   in qce_skcipher_done()
    74    int dst_nents, src_nents, ret;   in qce_skcipher_async_req_handle() local
    84    rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);   in qce_skcipher_async_req_handle()
    88    rctx->dst_nents = rctx->src_nents;   in qce_skcipher_async_req_handle()
    89    if (rctx->src_nents < 0) {   in qce_skcipher_async_req_handle()
    91    return rctx->src_nents;   in qce_skcipher_async_req_handle()
    132   src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);   in qce_skcipher_async_req_handle()
    133   if (src_nents < 0) {   in qce_skcipher_async_req_handle()
    134   ret = src_nents;   in qce_skcipher_async_req_handle()
    140   src_nents = dst_nents - 1;   in qce_skcipher_async_req_handle()
    [all …]

D | sha.c |
    47    dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);   in qce_ahash_done()
    87    rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);   in qce_ahash_async_req_handle()
    88    if (rctx->src_nents < 0) {   in qce_ahash_async_req_handle()
    90    return rctx->src_nents;   in qce_ahash_async_req_handle()
    93    ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);   in qce_ahash_async_req_handle()
    103   ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,   in qce_ahash_async_req_handle()
    121   dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);   in qce_ahash_async_req_handle()

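Note the check at skcipher.c line 133: dma_map_sg() never returns a negative value. It returns 0 on failure and otherwise the number of mapped entries, which may be smaller than the count passed in if an IOMMU coalesced physically contiguous segments. A sketch of the conventional check:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_src(struct device *dev, struct scatterlist *src,
		      int src_nents, enum dma_data_direction dir)
{
	int mapped;

	/*
	 * 0 means failure; a positive return may be less than src_nents
	 * when adjacent entries were merged by the IOMMU.
	 */
	mapped = dma_map_sg(dev, src, src_nents, dir);
	if (!mapped)
		return -ENOMEM;

	return mapped;
}
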
D | cipher.h |
    38    int src_nents;   member

D | sha.h |
    49    int src_nents;   member

/kernel/linux/linux-5.10/drivers/crypto/virtio/

D | virtio_crypto_algs.c |
    349   int src_nents, dst_nents;   in __virtio_crypto_skcipher_do_req() local
    359   src_nents = sg_nents_for_len(req->src, req->cryptlen);   in __virtio_crypto_skcipher_do_req()
    360   if (src_nents < 0) {   in __virtio_crypto_skcipher_do_req()
    362   return src_nents;   in __virtio_crypto_skcipher_do_req()
    368   src_nents, dst_nents);   in __virtio_crypto_skcipher_do_req()
    371   sg_total = src_nents + dst_nents + 3;   in __virtio_crypto_skcipher_do_req()
    451   for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)   in __virtio_crypto_skcipher_do_req()

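The virtio driver sizes its virtqueue scatterlist table as src_nents + dst_nents + 3; the three extra slots carry the driver's own metadata buffers (request header, IV, and status) alongside the data entries. The loop at line 451 then walks exactly src_nents entries with sg_next(). A sketch of that walk; my_collect_src() is a hypothetical helper:

#include <linux/scatterlist.h>

/*
 * Collect exactly src_nents entries of a scatterlist into a table of
 * sg pointers, as done before virtqueue_add_sgs(). 'sgs' must have
 * room for src_nents pointers.
 */
static void my_collect_src(struct scatterlist *src, int src_nents,
			   struct scatterlist **sgs)
{
	struct scatterlist *sg;

	for (sg = src; src_nents; sg = sg_next(sg), src_nents--)
		*sgs++ = sg;
}
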
/kernel/linux/linux-5.10/drivers/crypto/marvell/cesa/

D | cipher.c |
    66    dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_skcipher_dma_cleanup()
    69    dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_skcipher_dma_cleanup()
    94    len = sg_pcopy_to_buffer(req->src, creq->src_nents,   in mv_cesa_skcipher_std_step()
    317   ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_skcipher_dma_req_init()
    329   ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_skcipher_dma_req_init()
    390   dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_skcipher_dma_req_init()
    423   creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);   in mv_cesa_skcipher_req_init()
    424   if (creq->src_nents < 0) {   in mv_cesa_skcipher_req_init()
    426   return creq->src_nents;   in mv_cesa_skcipher_req_init()

D | hash.c |
    104   dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);   in mv_cesa_ahash_dma_cleanup()
    192   sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents,   in mv_cesa_ahash_std_step()
    403   sg_pcopy_to_buffer(ahashreq->src, creq->src_nents,   in mv_cesa_ahash_req_cleanup()
    457   sg_pcopy_to_buffer(req->src, creq->src_nents,   in mv_cesa_ahash_cache_req()
    622   if (creq->src_nents) {   in mv_cesa_ahash_dma_req_init()
    623   ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,   in mv_cesa_ahash_dma_req_init()
    729   dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);   in mv_cesa_ahash_dma_req_init()
    741   creq->src_nents = sg_nents_for_len(req->src, req->nbytes);   in mv_cesa_ahash_req_init()
    742   if (creq->src_nents < 0) {   in mv_cesa_ahash_req_init()
    744   return creq->src_nents;   in mv_cesa_ahash_req_init()

D | cesa.h |
    569   int src_nents;   member
    615   int src_nents;   member

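Unlike the DMA paths, the CESA "std" (PIO) steps above pass src_nents to sg_pcopy_to_buffer(), copying request data into a bounce buffer and resuming at a running offset on each step. A minimal sketch of draining a scatterlist incrementally this way; the helper name and chunk handling are illustrative:

#include <linux/scatterlist.h>

/*
 * Copy the next chunk of an SG list into a bounce buffer, as the
 * CESA std steps do. '*offset' advances across calls.
 */
static size_t my_drain_chunk(struct scatterlist *src, int src_nents,
			     void *buf, size_t chunk, size_t *offset)
{
	size_t len;

	/* Copies up to 'chunk' bytes starting '*offset' bytes into src. */
	len = sg_pcopy_to_buffer(src, src_nents, buf, chunk, *offset);
	*offset += len;

	return len;
}
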
/kernel/linux/linux-5.10/drivers/crypto/

D | talitos.c |
    966    unsigned int src_nents = edesc->src_nents ? : 1;   in talitos_sg_unmap() local
    976    if (src_nents == 1 || !is_sec1)   in talitos_sg_unmap()
    977    dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);   in talitos_sg_unmap()
    981    } else if (src_nents == 1 || !is_sec1) {   in talitos_sg_unmap()
    982    dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);   in talitos_sg_unmap()
    1219   sg_count = edesc->src_nents ?: 1;   in ipsec_esp()
    1331   int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;   in talitos_edesc_alloc() local
    1346   src_nents = sg_nents_for_len(src, src_len);   in talitos_edesc_alloc()
    1347   if (src_nents < 0) {   in talitos_edesc_alloc()
    1351   src_nents = (src_nents == 1) ? 0 : src_nents;   in talitos_edesc_alloc()
    [all …]

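talitos encodes src_nents differently from the caam drivers: line 1351 collapses a single-entry list to 0, meaning "no link table needed, point the descriptor at the buffer directly", and the ?: 1 at lines 966 and 1219 reverses that on the way back out. A sketch of the two directions, with hypothetical helper names:

/* talitos-style encoding: 0 stands for "one contiguous segment". */
static inline int my_encode_nents(int sg_nents)
{
	return (sg_nents == 1) ? 0 : sg_nents;
}

static inline unsigned int my_decode_nents(int encoded)
{
	return encoded ?: 1;	/* 0 still means one real entry to unmap */
}
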
D | picoxcell_crypto.c |
    318   int src_nents, dst_nents;   in spacc_aead_make_ddts() local
    326   src_nents = sg_nents_for_len(areq->src, total);   in spacc_aead_make_ddts()
    327   if (src_nents < 0) {   in spacc_aead_make_ddts()
    329   return src_nents;   in spacc_aead_make_ddts()
    331   if (src_nents + 1 > MAX_DDT_LEN)   in spacc_aead_make_ddts()
    341   if (src_nents + 1 > MAX_DDT_LEN)   in spacc_aead_make_ddts()
    357   src_ents = dma_map_sg(engine->dev, areq->src, src_nents,   in spacc_aead_make_ddts()
    366   dma_unmap_sg(engine->dev, areq->src, src_nents,   in spacc_aead_make_ddts()
    371   src_ents = dma_map_sg(engine->dev, areq->src, src_nents,   in spacc_aead_make_ddts()

D | talitos.h |
    61    int src_nents;   member

D | sa2ul.c |
    1062   int sg_nents, src_nents, dst_nents;   in sa_run() local
    1133   src_nents = 1;   in sa_run()
    1139   mapped_sg->sgt.orig_nents = src_nents;   in sa_run()
    1161   &split_size, &src, &src_nents, gfp_flags);   in sa_run()
    1163   src_nents = mapped_sg->sgt.nents;   in sa_run()
    1173   dst_nents = src_nents;   in sa_run()
    1235   src_nents, DMA_MEM_TO_DEV,   in sa_run()

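Line 1161 is the tail of an sg_split() call: sa_run() carves the mapped source list into a sub-list covering only the bytes the hardware should see, receiving a fresh scatterlist and its entry count through out-parameters. A hedged sketch of a single split (one split starting at byte 0; my_split_src() is illustrative):

#include <linux/scatterlist.h>

/*
 * Split off the first 'len' bytes of an already-mapped SG list into a
 * newly allocated list. On success *out and *out_nents are filled in.
 */
static int my_split_src(struct scatterlist *in, int in_mapped_nents,
			size_t len, struct scatterlist **out,
			int *out_nents, gfp_t gfp)
{
	size_t split_size = len;

	/* One split, skip 0: keep just the leading 'len' bytes. */
	return sg_split(in, in_mapped_nents, 0, 1, &split_size,
			out, out_nents, gfp);
}
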
/kernel/linux/linux-5.10/drivers/crypto/ccp/

D | ccp-dmaengine.c |
    357   unsigned int src_nents,   in ccp_create_desc() argument
    376   if (!dst_nents || !src_nents)   in ccp_create_desc()
    393   src_nents--;   in ccp_create_desc()
    394   if (!src_nents)   in ccp_create_desc()

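ccp_create_desc() uses src_nents as a countdown while pairing source and destination entries of possibly different lengths. A generic sketch of that dual-cursor walk, with descriptor emission replaced by a hypothetical emit() callback:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/*
 * Pair off src/dst SG entries a chunk at a time, advancing whichever
 * side is exhausted, in the style of ccp_create_desc().
 */
static void my_pair_sgs(struct scatterlist *src, unsigned int src_nents,
			struct scatterlist *dst, unsigned int dst_nents,
			void (*emit)(dma_addr_t s, dma_addr_t d, size_t len))
{
	size_t src_off = 0, dst_off = 0;

	if (!src_nents || !dst_nents)
		return;

	while (true) {
		size_t len = min(sg_dma_len(src) - src_off,
				 sg_dma_len(dst) - dst_off);

		emit(sg_dma_address(src) + src_off,
		     sg_dma_address(dst) + dst_off, len);

		src_off += len;
		dst_off += len;

		if (src_off == sg_dma_len(src)) {
			if (!--src_nents)
				break;	/* source list fully consumed */
			src = sg_next(src);
			src_off = 0;
		}
		if (dst_off == sg_dma_len(dst)) {
			if (!--dst_nents)
				break;	/* destination list fully consumed */
			dst = sg_next(dst);
			dst_off = 0;
		}
	}
}
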
/kernel/linux/linux-5.10/drivers/crypto/bcm/

D | cipher.c |
    225    rctx->src_nents, chunksize);   in spu_skcipher_tx_sg_create()
    347    rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);   in handle_skcipher_req()
    445    tx_frag_num += rctx->src_nents;   in handle_skcipher_req()
    608    rctx->src_nents, new_data_len);   in spu_ahash_tx_sg_create()
    770    rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip,   in handle_ahash_req()
    873    tx_frag_num += rctx->src_nents;   in handle_ahash_req()
    1220   rctx->src_nents, datalen);   in spu_aead_tx_sg_create()
    1346   rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);   in handle_aead_req()
    1505   tx_frag_num += rctx->src_nents;   in handle_aead_req()
    1737   rctx->src_nents = 0;   in skcipher_enqueue()
    [all …]

D | cipher.h |
    286   int src_nents;   /* Number of src entries with data */   member

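The bcm driver does not call sg_nents_for_len() directly; spu_sg_count() counts how many entries hold data for a chunk starting src_skip bytes into the list. A hypothetical equivalent that only mirrors that contract (the real helper lives in the driver's util code):

#include <linux/scatterlist.h>

/*
 * Count the SG entries carrying 'length' bytes that begin 'skip'
 * bytes into the list.
 */
static int my_sg_count(struct scatterlist *sg, unsigned int skip,
		       int length)
{
	int nents = 0;

	/* Skip whole entries that lie before the region of interest. */
	while (sg && skip >= sg->length) {
		skip -= sg->length;
		sg = sg_next(sg);
	}

	while (sg && length > 0) {
		nents++;
		length -= sg->length - skip;	/* first entry may be partial */
		skip = 0;
		sg = sg_next(sg);
	}

	return nents;
}
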
/kernel/linux/patches/linux-5.10/imx8mm_patch/patches/drivers/

D | 0016_linux_drivers_crypto.patch |
    1032   + * @src_nents: number of segments in input scatterlist
    1042   + int src_nents;
    1055   * @src_nents: number of segments in input scatterlist
    1067   + caam_unmap(dev, req->src, edesc->dst, edesc->src_nents,
    1114   + int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
    1150   + src_nents = sg_nents_for_len(req->src, src_len);
    1151   + if (unlikely(src_nents < 0)) {
    1155   + return ERR_PTR(src_nents);
    1158   + mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
    1169   + src_nents = sg_nents_for_len(req->src, src_len);
    [all …]