Searched refs:dst_sg (Results 1 – 15 of 15) sorted by relevance

/kernel/linux/linux-5.10/drivers/crypto/amlogic/
amlogic-gxl-cipher.c
28 struct scatterlist *dst_sg = areq->dst; in meson_cipher_need_fallback() local
33 if (sg_nents(src_sg) != sg_nents(dst_sg)) in meson_cipher_need_fallback()
37 if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3) in meson_cipher_need_fallback()
40 while (src_sg && dst_sg) { in meson_cipher_need_fallback()
43 if ((dst_sg->length % 16) != 0) in meson_cipher_need_fallback()
45 if (src_sg->length != dst_sg->length) in meson_cipher_need_fallback()
49 if (!IS_ALIGNED(dst_sg->offset, sizeof(u32))) in meson_cipher_need_fallback()
52 dst_sg = sg_next(dst_sg); in meson_cipher_need_fallback()
95 struct scatterlist *dst_sg = areq->dst; in meson_cipher() local
204 dst_sg = areq->dst; in meson_cipher()
[all …]
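
The amlogic-gxl-cipher.c hits above show a common constraint check before offloading: the driver walks the source and destination scatterlists in lockstep and falls back to software when entry counts, per-entry lengths, or alignment do not suit the hardware. A minimal sketch of that walk, assuming a made-up MAXDESC limit and an illustrative function name (not the driver's actual code):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

#define MAXDESC 64      /* illustrative descriptor limit, not the real one */

/* Return true when the request should go to the software fallback. */
static bool sg_pair_needs_fallback(struct scatterlist *src_sg,
                                   struct scatterlist *dst_sg)
{
        if (sg_nents(src_sg) != sg_nents(dst_sg))
                return true;
        if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3)
                return true;

        while (src_sg && dst_sg) {
                /* the engine consumes whole 16-byte blocks per entry */
                if ((src_sg->length % 16) || (dst_sg->length % 16))
                        return true;
                /* entries must pair up one-to-one with equal lengths */
                if (src_sg->length != dst_sg->length)
                        return true;
                /* offsets must be 32-bit aligned for the DMA engine */
                if (!IS_ALIGNED(src_sg->offset, sizeof(u32)) ||
                    !IS_ALIGNED(dst_sg->offset, sizeof(u32)))
                        return true;
                src_sg = sg_next(src_sg);
                dst_sg = sg_next(dst_sg);
        }
        return false;
}
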
/kernel/linux/linux-5.10/drivers/crypto/ccp/
ccp-dmaengine.c
354 struct scatterlist *dst_sg, in ccp_create_desc() argument
373 if (!dst_sg || !src_sg) in ccp_create_desc()
388 dst_len = sg_dma_len(dst_sg); in ccp_create_desc()
411 dst_sg = sg_next(dst_sg); in ccp_create_desc()
412 if (!dst_sg) in ccp_create_desc()
415 dst_len = sg_dma_len(dst_sg); in ccp_create_desc()
435 ccp_pt->dst_dma = sg_dma_address(dst_sg) + dst_offset; in ccp_create_desc()
486 struct scatterlist dst_sg, src_sg; in ccp_prep_dma_memcpy() local
492 sg_init_table(&dst_sg, 1); in ccp_prep_dma_memcpy()
493 sg_dma_address(&dst_sg) = dst; in ccp_prep_dma_memcpy()
[all …]
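
ccp_create_desc() above consumes the two lists at independent rates: each descriptor moves as much as both current entries allow, and whichever side runs dry advances via sg_next() and reloads its sg_dma_len(). A reduced, hedged sketch that only counts the descriptors such a walk would produce; the name is illustrative and both lists are assumed to be DMA-mapped already:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Count how many copy descriptors a src/dst scatterlist pair would need. */
static unsigned int count_copy_descs(struct scatterlist *src_sg,
                                     struct scatterlist *dst_sg)
{
        unsigned int src_len, dst_len, len, n = 0;

        if (!src_sg || !dst_sg)
                return 0;

        src_len = sg_dma_len(src_sg);
        dst_len = sg_dma_len(dst_sg);

        while (src_sg && dst_sg) {
                /* one descriptor covers what both current entries can hold */
                len = min(src_len, dst_len);
                n++;

                src_len -= len;
                if (!src_len) {
                        src_sg = sg_next(src_sg);
                        if (src_sg)
                                src_len = sg_dma_len(src_sg);
                }

                dst_len -= len;
                if (!dst_len) {
                        dst_sg = sg_next(dst_sg);
                        if (dst_sg)
                                dst_len = sg_dma_len(dst_sg);
                }
        }
        return n;
}
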
/kernel/linux/linux-5.10/drivers/mailbox/
bcm-flexrm-mailbox.c
606 struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; in flexrm_spu_estimate_nonheader_desc_count() local
608 while (src_sg || dst_sg) { in flexrm_spu_estimate_nonheader_desc_count()
616 while (dst_target && dst_sg) { in flexrm_spu_estimate_nonheader_desc_count()
618 if (dst_sg->length < dst_target) in flexrm_spu_estimate_nonheader_desc_count()
619 dst_target -= dst_sg->length; in flexrm_spu_estimate_nonheader_desc_count()
622 dst_sg = sg_next(dst_sg); in flexrm_spu_estimate_nonheader_desc_count()
665 struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; in flexrm_spu_write_descs() local
667 while (src_sg || dst_sg) { in flexrm_spu_write_descs()
684 while (dst_target && dst_sg) { in flexrm_spu_write_descs()
685 if (sg_dma_len(dst_sg) & 0xf) in flexrm_spu_write_descs()
[all …]
bcm-pdc-mailbox.c
274 struct scatterlist *dst_sg; member
627 dma_unmap_sg(dev, rx_ctx->dst_sg, sg_nents(rx_ctx->dst_sg), in pdc_receive_one()
814 static int pdc_rx_list_init(struct pdc_state *pdcs, struct scatterlist *dst_sg, in pdc_rx_list_init() argument
853 rx_ctx->dst_sg = dst_sg; in pdc_rx_list_init()
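
bcm-pdc-mailbox.c stores the caller's destination list in its per-request context (rx_ctx->dst_sg) at submit time so that the receive path can unmap exactly what was mapped. A hedged sketch of that teardown step; the context struct and the DMA direction here are assumptions for illustration:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct rx_ctx {                         /* illustrative, not the driver's struct */
        struct scatterlist *dst_sg;     /* where the reply payload lands */
};

static void rx_complete(struct device *dev, struct rx_ctx *rx_ctx)
{
        /* undo the dma_map_sg() done at submit, with the original nents */
        dma_unmap_sg(dev, rx_ctx->dst_sg, sg_nents(rx_ctx->dst_sg),
                     DMA_FROM_DEVICE);
}
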
/kernel/linux/linux-5.10/drivers/crypto/qce/
skcipher.c
50 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_done()
123 rctx->dst_sg = rctx->dst_tbl.sgl; in qce_skcipher_async_req_handle()
125 dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_async_req_handle()
139 rctx->src_sg = rctx->dst_sg; in qce_skcipher_async_req_handle()
144 rctx->dst_sg, dst_nents, in qce_skcipher_async_req_handle()
163 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_async_req_handle()
cipher.h
42 struct scatterlist *dst_sg; member
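
The qce code above brackets the hardware operation with dma_map_sg() on submission and dma_unmap_sg() on both the completion and the error path, always with the original rctx->dst_nents. A minimal sketch of that bracket, assuming hypothetical helper names and leaving the engine programming as a placeholder:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int map_dst(struct device *dev, struct scatterlist *dst_sg, int dst_nents)
{
        int mapped;

        /* dma_map_sg() may coalesce entries; 0 means the mapping failed */
        mapped = dma_map_sg(dev, dst_sg, dst_nents, DMA_FROM_DEVICE);
        if (!mapped)
                return -EIO;

        /* ... program the DMA engine with the 'mapped' entries here ... */

        return 0;
}

/* completion or error path: unmap with the entry count used for mapping */
static void unmap_dst(struct device *dev, struct scatterlist *dst_sg, int dst_nents)
{
        dma_unmap_sg(dev, dst_sg, dst_nents, DMA_FROM_DEVICE);
}
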
/kernel/linux/linux-5.10/fs/crypto/
keysetup_v1.c
54 struct scatterlist src_sg, dst_sg; in derive_key_aes() local
76 sg_init_one(&dst_sg, derived_key, derived_keysize); in derive_key_aes()
77 skcipher_request_set_crypt(req, &src_sg, &dst_sg, derived_keysize, in derive_key_aes()
fname.c
146 struct scatterlist src_sg, dst_sg; in fname_decrypt() local
165 sg_init_one(&dst_sg, oname->name, oname->len); in fname_decrypt()
166 skcipher_request_set_crypt(req, &src_sg, &dst_sg, iname->len, &iv); in fname_decrypt()
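
Both fscrypt call sites above follow the same single-buffer pattern: wrap contiguous kernel buffers in one-entry scatterlists with sg_init_one() and hand them to skcipher_request_set_crypt(). A hedged sketch of that flow with a synchronous wait; the function name, flags, and buffer handling are illustrative, not fscrypt's code:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int encrypt_buffer(struct crypto_skcipher *tfm, const u8 *src, u8 *dst,
                          unsigned int len, u8 *iv)
{
        struct scatterlist src_sg, dst_sg;
        struct skcipher_request *req;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req)
                return -ENOMEM;

        /* one-entry scatterlists over the two contiguous buffers */
        sg_init_one(&src_sg, src, len);
        sg_init_one(&dst_sg, dst, len);

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &src_sg, &dst_sg, len, iv);

        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
        skcipher_request_free(req);
        return err;
}
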
/kernel/linux/linux-5.10/drivers/dma/
nbpfaxi.c
921 struct scatterlist *src_sg, struct scatterlist *dst_sg, in nbpf_prep_sg() argument
934 mem_sg = dst_sg; in nbpf_prep_sg()
967 sg_dma_address(dst_sg), in nbpf_prep_sg()
978 dst_sg = sg_next(dst_sg); in nbpf_prep_sg()
979 mem_sg = direction == DMA_DEV_TO_MEM ? dst_sg : src_sg; in nbpf_prep_sg()
994 struct scatterlist dst_sg; in nbpf_prep_memcpy() local
997 sg_init_table(&dst_sg, 1); in nbpf_prep_memcpy()
1000 sg_dma_address(&dst_sg) = dst; in nbpf_prep_memcpy()
1003 sg_dma_len(&dst_sg) = len; in nbpf_prep_memcpy()
1009 return nbpf_prep_sg(chan, &src_sg, &dst_sg, 1, in nbpf_prep_memcpy()
ste_dma40.c
2481 struct scatterlist dst_sg; in d40_prep_memcpy() local
2484 sg_init_table(&dst_sg, 1); in d40_prep_memcpy()
2487 sg_dma_address(&dst_sg) = dst; in d40_prep_memcpy()
2490 sg_dma_len(&dst_sg) = size; in d40_prep_memcpy()
2493 return d40_prep_sg(chan, &src_sg, &dst_sg, 1, in d40_prep_memcpy()
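
nbpf_prep_memcpy() and d40_prep_memcpy() both reuse their scatter-gather prep path for plain memcpy by wrapping the already-DMA-mapped addresses in throwaway one-entry scatterlists. A condensed sketch of that wrapper, with the sg-based prep routine reduced to a stub:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* stand-in for the driver's real sg-based prep routine (e.g. nbpf_prep_sg) */
static int prep_sg(struct scatterlist *src_sg, struct scatterlist *dst_sg,
                   unsigned int sg_len)
{
        return 0;       /* the real driver builds hardware descriptors here */
}

static int prep_memcpy(dma_addr_t dst, dma_addr_t src, size_t len)
{
        struct scatterlist dst_sg, src_sg;

        sg_init_table(&dst_sg, 1);
        sg_init_table(&src_sg, 1);

        /* dst and src are already bus addresses, so fill the DMA fields
         * directly instead of going through dma_map_sg() */
        sg_dma_address(&dst_sg) = dst;
        sg_dma_address(&src_sg) = src;
        sg_dma_len(&dst_sg) = len;
        sg_dma_len(&src_sg) = len;

        /* safe only because prep_sg() consumes the entries before returning */
        return prep_sg(&src_sg, &dst_sg, 1);
}
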
/kernel/linux/linux-5.10/fs/ecryptfs/
keystore.c
597 struct scatterlist dst_sg[2]; member
801 s->dst_sg, 2); in ecryptfs_write_tag_70_packet()
827 skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg, in ecryptfs_write_tag_70_packet()
867 struct scatterlist dst_sg[2]; member
996 s->block_aligned_filename_size, s->dst_sg, 2); in ecryptfs_parse_tag_70_packet()
1043 skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg, in ecryptfs_parse_tag_70_packet()
1648 struct scatterlist dst_sg[2]; in decrypt_passphrase_encrypted_session_key() local
1686 dst_sg, 2); in decrypt_passphrase_encrypted_session_key()
1715 skcipher_request_set_crypt(req, src_sg, dst_sg, in decrypt_passphrase_encrypted_session_key()
2181 struct scatterlist dst_sg[2]; in write_tag_3_packet() local
[all …]
crypto.c
291 struct scatterlist *dst_sg, in crypt_scatterlist() argument
336 skcipher_request_set_crypt(req, src_sg, dst_sg, size, iv); in crypt_scatterlist()
384 struct scatterlist src_sg, dst_sg; in crypt_extent() local
399 sg_init_table(&dst_sg, 1); in crypt_extent()
403 sg_set_page(&dst_sg, dst_page, extent_size, in crypt_extent()
406 rc = crypt_scatterlist(crypt_stat, &dst_sg, &src_sg, extent_size, in crypt_extent()
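
crypt_extent() above points one-entry scatterlists straight at page structs with sg_set_page(), so the skcipher works on page cache pages without a bounce buffer, and then hands both lists to crypt_scatterlist(). A small hedged sketch of just that setup step; the helper name and parameters are illustrative, and the caller must keep both scatterlists alive until the cipher request completes:

#include <linux/scatterlist.h>

static void set_extent_sgs(struct scatterlist *src_sg, struct scatterlist *dst_sg,
                           struct page *src_page, struct page *dst_page,
                           unsigned int extent_size, unsigned int offset)
{
        sg_init_table(src_sg, 1);
        sg_init_table(dst_sg, 1);
        /* each list is a single entry covering one extent within its page */
        sg_set_page(src_sg, src_page, extent_size, offset);
        sg_set_page(dst_sg, dst_page, extent_size, offset);
}
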
/kernel/linux/linux-5.10/drivers/crypto/bcm/
cipher.h
293 struct scatterlist *dst_sg; member
cipher.c
160 datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip, in spu_skcipher_rx_sg_create()
348 rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize); in handle_skcipher_req()
1107 datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip, in spu_aead_rx_sg_create()
1347 rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize); in handle_aead_req()
1739 rctx->dst_sg = req->dst; in skcipher_enqueue()
1926 rctx->dst_sg = NULL; in ahash_enqueue()
2684 rctx->dst_sg = rctx->src_sg; in aead_enqueue()
2692 if (spu_sg_at_offset(req->dst, req->assoclen, &rctx->dst_sg, in aead_enqueue()
2722 rctx->dst_sg, rctx->dst_skip); in aead_enqueue()
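
The bcm driver repeatedly needs to know how many scatterlist entries cover a byte range that starts part-way into the list, which is what its spu_sg_count(sg, skip, len) calls above compute. A hedged re-implementation of that idea, written from the call sites rather than the driver source, with an illustrative name:

#include <linux/scatterlist.h>

/* Count the entries needed to cover 'len' bytes starting 'skip' bytes into
 * the list; returns 0 if the list is too short. */
static unsigned int sg_span_count(struct scatterlist *sg, unsigned int skip,
                                  unsigned int len)
{
        unsigned int n = 0;

        /* burn off the leading 'skip' bytes without counting entries */
        while (sg && skip >= sg->length) {
                skip -= sg->length;
                sg = sg_next(sg);
        }

        while (sg && len) {
                unsigned int avail = sg->length - skip;

                n++;
                if (avail >= len)
                        return n;
                len -= avail;
                skip = 0;
                sg = sg_next(sg);
        }

        return len ? 0 : n;
}
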
/kernel/linux/linux-5.10/arch/x86/crypto/
aesni-intel_glue.c
702 struct scatterlist *dst_sg; in gcmaes_crypt_by_sg() local
735 dst_sg = scatterwalk_ffwd(dst_start, req->dst, in gcmaes_crypt_by_sg()
737 scatterwalk_start(&dst_sg_walk, dst_sg); in gcmaes_crypt_by_sg()
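
gcmaes_crypt_by_sg() uses scatterwalk_ffwd() to skip the req->assoclen bytes of AAD and obtain a scatterlist that begins at the payload, then starts a scatter_walk over it. A short sketch of that step; the wrapper name is illustrative, and the caller is assumed to supply the two-entry dst_start array that scatterwalk_ffwd() needs as scratch space:

#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

static void start_payload_walk(struct aead_request *req,
                               struct scatter_walk *dst_sg_walk,
                               struct scatterlist dst_start[2])
{
        struct scatterlist *dst_sg;

        /* fast-forward past the AAD; dst_start provides the scratch entries
         * scatterwalk_ffwd() may use to split the entry it lands in */
        dst_sg = scatterwalk_ffwd(dst_start, req->dst, req->assoclen);
        scatterwalk_start(dst_sg_walk, dst_sg);
}
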