/kernel/linux/linux-5.10/drivers/crypto/caam/ |
D | caampkc.c |
      43  static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,  in rsa_io_unmap() argument
      48  dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);  in rsa_io_unmap()
      49  dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);  in rsa_io_unmap()
      51  if (edesc->sec4_sg_bytes)  in rsa_io_unmap()
      52  dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,  in rsa_io_unmap()
      56  static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,  in rsa_pub_unmap() argument
      62  struct rsa_pub_pdb *pdb = &edesc->pdb.pub;  in rsa_pub_unmap()
      68  static void rsa_priv_f1_unmap(struct device *dev, struct rsa_edesc *edesc,  in rsa_priv_f1_unmap() argument
      74  struct rsa_priv_f1_pdb *pdb = &edesc->pdb.priv_f1;  in rsa_priv_f1_unmap()
      80  static void rsa_priv_f2_unmap(struct device *dev, struct rsa_edesc *edesc,  in rsa_priv_f2_unmap() argument
      [all …]
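
The four unmap helpers above share one teardown shape: the request's scatterlists are released with dma_unmap_sg(), while the sec4 hardware S/G table, which was mapped as a single contiguous buffer, is released with dma_unmap_single(). Below is a minimal sketch of rsa_io_unmap() reconstructed from the fragments listed; the lines the listing elides (the request-context lookup and the function close) are assumptions, not quoted 5.10 source.

    static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
                             struct akcipher_request *req)
    {
            struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);

            /* Release the caller's destination and (fixed-up) source lists. */
            dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
            dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);

            /* The sec4 S/G table was mapped as one buffer, so it takes the
             * matching single-buffer unmap. */
            if (edesc->sec4_sg_bytes)
                    dma_unmap_single(dev, edesc->sec4_sg_dma,
                                     edesc->sec4_sg_bytes, DMA_TO_DEVICE);
    }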
|
D | caamhash.c |
     119  struct ahash_edesc *edesc;  member
     543  struct ahash_edesc *edesc,  in ahash_unmap() argument
     548  if (edesc->src_nents)  in ahash_unmap()
     549  dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);  in ahash_unmap()
     551  if (edesc->sec4_sg_bytes)  in ahash_unmap()
     552  dma_unmap_single(dev, edesc->sec4_sg_dma,  in ahash_unmap()
     553  edesc->sec4_sg_bytes, DMA_TO_DEVICE);  in ahash_unmap()
     563  struct ahash_edesc *edesc,  in ahash_unmap_ctx() argument
     572  ahash_unmap(dev, edesc, req, dst_len);  in ahash_unmap_ctx()
     580  struct ahash_edesc *edesc;  in ahash_done_cpy() local
     [all …]
|
D | caamalg.c |
     124  struct skcipher_edesc *edesc;  member
     129  struct aead_edesc *edesc;  member
     949  struct aead_edesc *edesc,  in aead_unmap() argument
     953  edesc->src_nents, edesc->dst_nents, 0, 0,  in aead_unmap()
     954  edesc->sec4_sg_dma, edesc->sec4_sg_bytes);  in aead_unmap()
     957  static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,  in skcipher_unmap() argument
     964  edesc->src_nents, edesc->dst_nents,  in skcipher_unmap()
     965  edesc->iv_dma, ivsize,  in skcipher_unmap()
     966  edesc->sec4_sg_dma, edesc->sec4_sg_bytes);  in skcipher_unmap()
     975  struct aead_edesc *edesc;  in aead_crypt_done() local
     [all …]
|
D | caamalg_qi2.c |
     361  struct aead_edesc *edesc;  in aead_edesc_alloc() local
     370  edesc = qi_cache_zalloc(GFP_DMA | flags);  in aead_edesc_alloc()
     371  if (unlikely(!edesc)) {  in aead_edesc_alloc()
     384  qi_cache_free(edesc);  in aead_edesc_alloc()
     392  qi_cache_free(edesc);  in aead_edesc_alloc()
     401  qi_cache_free(edesc);  in aead_edesc_alloc()
     415  qi_cache_free(edesc);  in aead_edesc_alloc()
     429  qi_cache_free(edesc);  in aead_edesc_alloc()
     437  qi_cache_free(edesc);  in aead_edesc_alloc()
     467  sg_table = &edesc->sgt[0];  in aead_edesc_alloc()
     [all …]
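
The caamalg_qi2.c hits show the allocate-then-unwind discipline around the per-request descriptor: it comes out of a DMA-able cache via qi_cache_zalloc(), and every failing step between lines 371 and 467 hands it back with qi_cache_free(). A condensed sketch of that shape; the single mapping step here is a hypothetical stand-in for the several real ones the listing elides.

    static struct aead_edesc *aead_edesc_alloc_sketch(struct aead_request *req)
    {
            gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                          GFP_KERNEL : GFP_ATOMIC;
            struct aead_edesc *edesc;
            int src_nents;

            /* Zeroed allocation from a DMA-capable slab cache. */
            edesc = qi_cache_zalloc(GFP_DMA | flags);
            if (unlikely(!edesc))
                    return ERR_PTR(-ENOMEM);

            /* Stand-in for the real mapping steps: on any failure the
             * descriptor goes straight back to the cache. */
            src_nents = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
            if (unlikely(src_nents < 0)) {
                    qi_cache_free(edesc);
                    return ERR_PTR(src_nents);
            }

            return edesc;
    }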
|
D | caamalg_qi.c |
     889  struct aead_edesc *edesc,  in aead_unmap() argument
     895  caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,  in aead_unmap()
     896  edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,  in aead_unmap()
     897  edesc->qm_sg_bytes);  in aead_unmap()
     898  dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);  in aead_unmap()
     901  static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,  in skcipher_unmap() argument
     907  caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,  in skcipher_unmap()
     908  edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,  in skcipher_unmap()
     909  edesc->qm_sg_bytes);  in skcipher_unmap()
     915  struct aead_edesc *edesc;  in aead_done() local
     [all …]
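
Both wrappers above funnel into caam_unmap(); the telling differences are the IV direction (DMA_TO_DEVICE for AEAD, DMA_BIDIRECTIONAL for skcipher, where the engine writes the chaining IV back) and the extra 4-byte assoclen_dma unmap that only AEAD needs. A sketch of skcipher_unmap() with the local-variable lines the listing drops filled in from the standard crypto API; reconstructed, not quoted.

    static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
                               struct skcipher_request *req)
    {
            struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
            int ivsize = crypto_skcipher_ivsize(skcipher);

            /* Bidirectional IV: the engine reads it and writes back the IV
             * that chains the next request. */
            caam_unmap(dev, req->src, req->dst, edesc->src_nents,
                       edesc->dst_nents, edesc->iv_dma, ivsize,
                       DMA_BIDIRECTIONAL, edesc->qm_sg_dma, edesc->qm_sg_bytes);
    }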
|
D | caampkc.h |
     116  struct rsa_edesc *edesc;  member
|
D | caamalg_qi2.h |
     189  void *edesc;  member
|
/kernel/linux/linux-5.10/drivers/dma/ti/ |
D | edma.c |
     227  struct edma_desc *edesc;  member
     779  struct edma_desc *edesc;  in edma_execute() local
     783  if (!echan->edesc) {  in edma_execute()
     789  echan->edesc = to_edma_desc(&vdesc->tx);  in edma_execute()
     792  edesc = echan->edesc;  in edma_execute()
     795  left = edesc->pset_nr - edesc->processed;  in edma_execute()
     797  edesc->sg_len = 0;  in edma_execute()
     801  j = i + edesc->processed;  in edma_execute()
     802  edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param);  in edma_execute()
     803  edesc->sg_len += edesc->pset[j].len;  in edma_execute()
     [all …]
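
edma_execute() programs a descriptor in windows: the channel owns a fixed number of PaRAM slots, so at most MAX_NR_SG parameter sets are written per pass, and edesc->processed records how far into the pset array the hardware has been taken. The core of that loop, lifted from the fragments above (echan, ecc and the loop variables come from the surrounding function; locking and slot linking are elided):

    /* Parameter sets still waiting to be programmed. */
    left = edesc->pset_nr - edesc->processed;
    nslots = min(MAX_NR_SG, left);
    edesc->sg_len = 0;

    for (i = 0; i < nslots; i++) {
            j = i + edesc->processed;       /* absolute pset index */
            edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param);
            edesc->sg_len += edesc->pset[j].len;
    }
    edesc->processed += nslots;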
|
/kernel/linux/linux-5.10/drivers/crypto/ |
D | talitos.c |
     325  struct talitos_edesc *edesc;  in get_request_hdr() local
     333  edesc = container_of(request->desc, struct talitos_edesc, desc);  in get_request_hdr()
     335  return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;  in get_request_hdr()
     490  struct talitos_edesc *edesc;  local
     492  edesc = container_of(priv->chan[ch].fifo[iter].desc,
     495  (edesc->buf + edesc->dma_len))->hdr;
     959  struct talitos_edesc *edesc,  in talitos_sg_unmap() argument
     966  unsigned int src_nents = edesc->src_nents ? : 1;  in talitos_sg_unmap()
     967  unsigned int dst_nents = edesc->dst_nents ? : 1;  in talitos_sg_unmap()
     970  dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,  in talitos_sg_unmap()
     [all …]
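
talitos.c leans on the hardware descriptor being embedded inside the software talitos_edesc: an in-flight desc pointer is walked back to its parent with container_of(), and the saved copy of the request header sits just past the DMA link table in edesc->buf. A sketch of that recovery, mirroring get_request_hdr() above (the helper name is ours):

    static u32 request_hdr_sketch(struct talitos_request *request)
    {
            /* desc is a member of talitos_edesc, so recover the parent. */
            struct talitos_edesc *edesc =
                    container_of(request->desc, struct talitos_edesc, desc);

            /* The shadow descriptor lives right after the DMA link table
             * inside edesc->buf. */
            return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
    }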
|
/kernel/linux/linux-5.10/drivers/dma/ |
D | fsl-edma-common.c |
     168  fsl_chan->edesc = NULL;  in fsl_edma_terminate_all()
     183  if (fsl_chan->edesc) {  in fsl_edma_pause()
     199  if (fsl_chan->edesc) {  in fsl_edma_resume()
     273  struct fsl_edma_desc *edesc = fsl_chan->edesc;  in fsl_edma_desc_residue() local
     276  enum dma_transfer_direction dir = edesc->dirn;  in fsl_edma_desc_residue()
     282  for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++)  in fsl_edma_desc_residue()
     283  len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes)  in fsl_edma_desc_residue()
     284  * le16_to_cpu(edesc->tcd[i].vtcd->biter);  in fsl_edma_desc_residue()
     295  for (i = 0; i < fsl_chan->edesc->n_tcds; i++) {  in fsl_edma_desc_residue()
     296  size = le32_to_cpu(edesc->tcd[i].vtcd->nbytes)  in fsl_edma_desc_residue()
     [all …]
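
fsl_edma_desc_residue() first totals the programmed length: for every TCD of the descriptor, nbytes (bytes per minor loop) times biter (the beginning major-iteration count), both read little-endian from the hardware TCD; the second loop at line 295 then walks the TCDs against the channel's current address to subtract what has already completed. The summation as a standalone fragment (edesc as in the listing):

    size_t len = 0;
    int i;

    /* Total bytes programmed: minor-loop size times major iterations,
     * accumulated across all TCDs of the descriptor. */
    for (i = 0; i < edesc->n_tcds; i++)
            len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes) *
                   le16_to_cpu(edesc->tcd[i].vtcd->biter);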
|
D | mcf-edma.c |
      39  if (!mcf_chan->edesc) {  in mcf_edma_tx_handler()
      45  if (!mcf_chan->edesc->iscyclic) {  in mcf_edma_tx_handler()
      46  list_del(&mcf_chan->edesc->vdesc.node);  in mcf_edma_tx_handler()
      47  vchan_cookie_complete(&mcf_chan->edesc->vdesc);  in mcf_edma_tx_handler()
      48  mcf_chan->edesc = NULL;  in mcf_edma_tx_handler()
      52  vchan_cyclic_callback(&mcf_chan->edesc->vdesc);  in mcf_edma_tx_handler()
      55  if (!mcf_chan->edesc)  in mcf_edma_tx_handler()
|
D | fsl-edma.c |
      49  if (!fsl_chan->edesc) {  in fsl_edma_tx_handler()
      55  if (!fsl_chan->edesc->iscyclic) {  in fsl_edma_tx_handler()
      56  list_del(&fsl_chan->edesc->vdesc.node);  in fsl_edma_tx_handler()
      57  vchan_cookie_complete(&fsl_chan->edesc->vdesc);  in fsl_edma_tx_handler()
      58  fsl_chan->edesc = NULL;  in fsl_edma_tx_handler()
      62  vchan_cyclic_callback(&fsl_chan->edesc->vdesc);  in fsl_edma_tx_handler()
      65  if (!fsl_chan->edesc)  in fsl_edma_tx_handler()
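
mcf-edma.c and fsl-edma.c share the same completion dispatch: a one-shot descriptor is unlinked and retired through vchan_cookie_complete(), clearing chan->edesc so the next queued descriptor can start, while a cyclic descriptor stays installed and only fires the period callback. Condensed from the fragments above (interrupt-status bookkeeping and the channel-idle flags are elided):

    spin_lock(&fsl_chan->vchan.lock);
    if (!fsl_chan->edesc->iscyclic) {
            /* One-shot transfer: retire the virt-dma descriptor. */
            list_del(&fsl_chan->edesc->vdesc.node);
            vchan_cookie_complete(&fsl_chan->edesc->vdesc);
            fsl_chan->edesc = NULL;
    } else {
            /* Cyclic transfer: report the period, keep the descriptor. */
            vchan_cyclic_callback(&fsl_chan->edesc->vdesc);
    }

    /* Channel idle again: kick the next queued descriptor, if any. */
    if (!fsl_chan->edesc)
            fsl_edma_xfer_desc(fsl_chan);
    spin_unlock(&fsl_chan->vchan.lock);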
|
D | fsl-edma-common.h |
     121  struct fsl_edma_desc *edesc;  member
|
/kernel/linux/patches/linux-5.10/imx8mm_patch/patches/drivers/ |
D | 0017_linux_drivers_dma_dmabuf.patch |
      97  fsl_chan->edesc = NULL;
     351  + struct fsl_edma3_desc *edesc;
     523  + fsl_chan->edesc = NULL;
     542  + if (fsl_chan->edesc) {
     558  + if (fsl_chan->edesc) {
     602  + struct fsl_edma3_desc *edesc = fsl_chan->edesc;
     610  + for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++)
     611  + len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes)
     612  + * le16_to_cpu(edesc->tcd[i].vtcd->biter);
     623  + if (edesc->iscyclic)
     [all …]
|
D | 0016_linux_drivers_crypto.patch |
    1057  dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
    1061  + struct tls_edesc *edesc,
    1067  + caam_unmap(dev, req->src, edesc->dst, edesc->src_nents,
    1068  + edesc->dst_nents, edesc->iv_dma, ivsize, DMA_TO_DEVICE,
    1069  + edesc->qm_sg_dma, edesc->qm_sg_bytes);
    1072  static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
    1082  + struct tls_edesc *edesc;
    1093  + edesc = container_of(drv_req, typeof(*edesc), drv_req);
    1094  + tls_unmap(qidev, edesc, aead_req);
    1097  + qi_cache_free(edesc);
    [all …]
|