
Searched refs: in_sg (Results 1 – 14 of 14) sorted by relevance

/drivers/crypto/nx/
nx-aes-xcbc.c
75 struct nx_sg *in_sg, *out_sg; in nx_xcbc_empty() local
93 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
105 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
121 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
134 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
186 struct nx_sg *in_sg; in nx_xcbc_update() local
209 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
244 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
255 in_sg = nx_build_sg_list(in_sg, in nx_xcbc_update()
265 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
[all …]
nx-sha256.c
116 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update() local
120 in_sg = nx_build_sg_list(in_sg, in nx_sha256_update()
129 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
144 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha256_update()
147 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
194 struct nx_sg *in_sg, *out_sg; in nx_sha256_final() local
223 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
239 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
nx-sha512.c
116 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha512_update() local
120 in_sg = nx_build_sg_list(in_sg, in nx_sha512_update()
128 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha512_update()
143 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha512_update()
146 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_update()
197 struct nx_sg *in_sg, *out_sg; in nx_sha512_final() local
232 in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len, in nx_sha512_final()
244 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_final()
nx-aes-gcm.c
118 struct nx_sg *nx_sg = nx_ctx->in_sg; in nx_gca()
148 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in nx_gca()
156 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) in nx_gca()
216 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in gmac()
224 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) in gmac()
262 struct nx_sg *in_sg, *out_sg; in gcm_empty() local
282 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info, in gcm_empty()
295 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in gcm_empty()
nx.c
281 struct nx_sg *nx_insg = nx_ctx->in_sg; in nx_build_sg_lists()
306 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); in nx_build_sg_lists()
326 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
334 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
690 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); in nx_crypto_ctx_init()
691 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
766 nx_ctx->in_sg = NULL; in nx_crypto_ctx_exit()
nx-aes-ccm.c
176 struct nx_sg *nx_insg = nx_ctx->in_sg; in generate_pat()
264 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
297 nx_insg = nx_walk_and_build(nx_ctx->in_sg, in generate_pat()
311 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
nx.h
136 struct nx_sg *in_sg; /* aligned pointer into kmem to an sg list */ member
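
Across the nx files above the same idiom repeats: nx_build_sg_list() appends entries for a flat buffer onto the per-context page anchored at nx_ctx->in_sg and returns a cursor just past the last entry written, and op.inlen is then derived from the pointer difference between cursor and list head. Below is a minimal userspace model of that cursor pattern; the types and names (sg_entry, build_sg_list, SG_PAGE_SIZE) are stand-ins for illustration, not the driver's own definitions.

#include <stdint.h>
#include <stdio.h>

#define SG_PAGE_SIZE 4096u	/* stand-in for NX_PAGE_SIZE */

/* Hypothetical stand-in for struct nx_sg: one address/length pair. */
struct sg_entry {
	uint64_t addr;
	uint32_t len;
};

/*
 * Model of the nx_build_sg_list() contract: chop [buf, buf + *len)
 * into page-sized entries starting at cursor `sg`, stop after at most
 * `sgmax` entries, write back how many bytes were actually covered,
 * and return a cursor just past the last entry written.
 */
static struct sg_entry *build_sg_list(struct sg_entry *sg, uint8_t *buf,
				      unsigned int *len, unsigned int sgmax)
{
	unsigned int covered = 0;

	while (covered < *len && sgmax) {
		unsigned int chunk = *len - covered;

		if (chunk > SG_PAGE_SIZE)
			chunk = SG_PAGE_SIZE;
		sg->addr = (uintptr_t)(buf + covered);
		sg->len = chunk;
		covered += chunk;
		sgmax--;
		sg++;
	}
	*len = covered;
	return sg;		/* one past the last used entry */
}

int main(void)
{
	static uint8_t data[3 * SG_PAGE_SIZE + 100];
	struct sg_entry sgl[32];
	unsigned int len = sizeof(data);
	struct sg_entry *end = build_sg_list(sgl, data, &len, 32);
	size_t used = (size_t)(end - sgl);

	/* The driver derives op.inlen the same way: entries used times
	 * the entry size. */
	printf("entries=%zu inlen=%zu bytes_covered=%u\n",
	       used, used * sizeof(*sgl), len);
	return 0;
}

Returning the cursor rather than a count lets callers chain several build calls (as nx_xcbc_update() does at lines 244 and 255) and still obtain the total entry count with a single subtraction at the end.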
/drivers/crypto/
omap-des.c
150 struct scatterlist *in_sg; member
401 struct scatterlist *in_sg, struct scatterlist *out_sg, in omap_des_crypt_dma() argument
411 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_des_crypt_dma()
420 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma()
439 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_des_crypt_dma()
490 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
505 err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
508 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma_start()
576 sg_copy_buf(buf_in, dd->in_sg, 0, dd->total, 0); in omap_des_copy_sgs()
580 dd->in_sg = &dd->in_sgl; in omap_des_copy_sgs()
[all …]
omap-aes.c
170 struct scatterlist *in_sg; member
414 struct scatterlist *in_sg, struct scatterlist *out_sg, in omap_aes_crypt_dma() argument
424 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_aes_crypt_dma()
433 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma()
452 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_aes_crypt_dma()
503 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
518 err = omap_aes_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
521 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma_start()
595 sg_copy_buf(buf_in, dd->in_sg, 0, dd->total, 0); in omap_aes_copy_sgs()
599 dd->in_sg = &dd->in_sgl; in omap_aes_copy_sgs()
[all …]
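
omap-des.c and omap-aes.c above share one DMA sequence: dma_map_sg() on dd->in_sg, dmaengine_prep_slave_sg() on the mapped list, and dma_unmap_sg() on the error path. A sketch of that sequence follows; the helper name start_tx_dma and its parameter list are illustrative, while the DMA-API and dmaengine calls themselves are the real kernel interfaces.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Sketch of the map -> prep -> submit flow visible in
 * omap_aes_crypt_dma_start() and omap_des_crypt_dma_start().
 */
static int start_tx_dma(struct device *dev, struct dma_chan *chan,
			struct scatterlist *in_sg, int in_sg_len)
{
	struct dma_async_tx_descriptor *tx;
	int mapped;

	mapped = dma_map_sg(dev, in_sg, in_sg_len, DMA_TO_DEVICE);
	if (!mapped)
		return -EINVAL;

	tx = dmaengine_prep_slave_sg(chan, in_sg, mapped, DMA_MEM_TO_DEV,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		/* Balance the mapping on failure, as both drivers do. */
		dma_unmap_sg(dev, in_sg, in_sg_len, DMA_TO_DEVICE);
		return -EINVAL;
	}

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}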
sahara.c
191 struct scatterlist *in_sg; member
230 struct scatterlist *in_sg; member
479 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total); in sahara_hw_descriptor_create()
487 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
502 sg = dev->in_sg; in sahara_hw_descriptor_create()
545 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
566 dev->in_sg = req->src; in sahara_aes_process()
595 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
793 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
795 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total); in sahara_sha_hw_links_create()
[all …]
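
sahara.c sizes its mapping with sg_nents_for_len() before dma_map_sg(), and keeps the count in nb_in_sg so the completion and error paths can unmap the same number of entries. A short sketch of that idiom, with the helper name prepare_input invented for illustration:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int prepare_input(struct device *dev, struct scatterlist *in_sg,
			 unsigned int total, int *nb_in_sg)
{
	/* Count only the entries needed to cover `total` bytes. */
	int nents = sg_nents_for_len(in_sg, total);

	if (nents < 0)		/* list shorter than `total` bytes */
		return nents;

	if (!dma_map_sg(dev, in_sg, nents, DMA_TO_DEVICE))
		return -EFAULT;

	*nb_in_sg = nents;	/* reused by dma_unmap_sg() later */
	return 0;
}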
atmel-aes.c
122 struct scatterlist *in_sg; member
404 dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg); in atmel_aes_crypt_cpu_start()
412 dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg, in atmel_aes_crypt_cpu_start()
435 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_aes_crypt_dma_start()
436 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_aes_crypt_dma_start()
441 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_aes_crypt_dma_start()
447 count = min(dd->total, sg_dma_len(dd->in_sg)); in atmel_aes_crypt_dma_start()
450 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
460 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_aes_crypt_dma_start()
465 addr_in = sg_dma_address(dd->in_sg); in atmel_aes_crypt_dma_start()
[all …]
atmel-tdes.c
115 struct scatterlist *in_sg; member
329 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_pdc_stop()
516 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_tdes_crypt_start()
517 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
522 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
528 count = min(dd->total, sg_dma_len(dd->in_sg)); in atmel_tdes_crypt_start()
531 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
541 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_tdes_crypt_start()
546 addr_in = sg_dma_address(dd->in_sg); in atmel_tdes_crypt_start()
553 count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_tdes_crypt_start()
[all …]
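
Both atmel drivers gate the DMA path on the same test: the first in_sg entry must start word aligned and cover a whole number of cipher blocks, otherwise they fall back to a CPU copy. Reduced to a predicate (the helper name dma_capable_sg is illustrative):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static bool dma_capable_sg(struct scatterlist *sg, unsigned int block_size)
{
	return IS_ALIGNED(sg->offset, sizeof(u32)) &&
	       IS_ALIGNED(sg->length, block_size);
}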
/drivers/crypto/sunxi-ss/
sun4i-ss-cipher.c
132 struct scatterlist *in_sg = areq->src; in sun4i_ss_cipher_poll() local
172 while (in_sg && no_chunk == 1) { in sun4i_ss_cipher_poll()
173 if ((in_sg->length % 4) != 0) in sun4i_ss_cipher_poll()
175 in_sg = sg_next(in_sg); in sun4i_ss_cipher_poll()
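
The sun4i loop at lines 172-175 is effectively a whole-chain predicate: every entry length must be a multiple of 4 so the data can be fed to the device in full words. The same check as a standalone helper (name illustrative):

#include <linux/scatterlist.h>

static bool sg_all_word_multiple(struct scatterlist *sg)
{
	while (sg) {
		if (sg->length % 4)
			return false;
		sg = sg_next(sg);
	}
	return true;
}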
/drivers/usb/wusbcore/
wa-xfer.c
1019 static struct scatterlist *wa_xfer_create_subset_sg(struct scatterlist *in_sg, in wa_xfer_create_subset_sg() argument
1026 struct scatterlist *current_xfer_sg = in_sg; in wa_xfer_create_subset_sg()
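
wa_xfer_create_subset_sg() builds a new scatterlist covering a byte range of in_sg; its first step is walking to the entry that contains the starting offset. That step in isolation, as a hypothetical helper (sg_seek and its out-parameter are not the wusbcore function itself):

#include <linux/scatterlist.h>

static struct scatterlist *sg_seek(struct scatterlist *sg, size_t offset,
				   size_t *skip)
{
	/* Advance past whole entries that lie before `offset`. */
	while (sg && offset >= sg->length) {
		offset -= sg->length;
		sg = sg_next(sg);
	}
	*skip = offset;		/* bytes to skip inside the returned entry */
	return sg;
}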