/drivers/crypto/nx/ |
D | nx-aes-xcbc.c |
     64  struct nx_sg *out_sg;  in nx_xcbc_init() local
     76  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  in nx_xcbc_init()
     78  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_xcbc_init()
    169  struct nx_sg *in_sg, *out_sg;  in nx_xcbc_final() local
    194  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, AES_BLOCK_SIZE,  in nx_xcbc_final()
    198  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_xcbc_final()
|
D | nx-sha256.c |
     35  struct nx_sg *out_sg;  in nx_sha256_init() local
     44  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  in nx_sha256_init()
     46  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_sha256_init()
    131  struct nx_sg *in_sg, *out_sg;  in nx_sha256_final() local
    150  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, SHA256_DIGEST_SIZE,  in nx_sha256_final()
    153  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_sha256_final()
|
D | nx-sha512.c |
     35  struct nx_sg *out_sg;  in nx_sha512_init() local
     44  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  in nx_sha512_init()
     46  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_sha512_init()
    133  struct nx_sg *in_sg, *out_sg;  in nx_sha512_final() local
    156  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, SHA512_DIGEST_SIZE,  in nx_sha512_final()
    159  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  in nx_sha512_final()
|
D | nx.c |
    213  struct nx_sg *nx_outsg = nx_ctx->out_sg;  in nx_build_sg_lists()
    225  nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) * sizeof(struct nx_sg);  in nx_build_sg_lists()
    244  nx_ctx->op.out = __pa(nx_ctx->out_sg);  in nx_ctx_init()
    252  nx_ctx->op_aead.out = __pa(nx_ctx->out_sg);  in nx_ctx_init()
    536  nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);  in nx_crypto_ctx_init()
    540  (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg +  in nx_crypto_ctx_init()
    610  nx_ctx->out_sg = NULL;  in nx_crypto_ctx_exit()
|
D | nx-aes-ccm.c |
    180  struct nx_sg *nx_outsg = nx_ctx->out_sg;  in generate_pat()
    207  nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) *  in generate_pat()
|
D | nx.h | 128 struct nx_sg *out_sg; /* aligned pointer into kmem to an sg list */ member
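
All of the nx/ hits above follow one pattern: a cursor is advanced over the pre-allocated nx_ctx->out_sg array by nx_build_sg_list(), which returns the first unused slot, and op.outlen is then derived from the pointer difference. The subtraction is written head minus cursor on purpose, so the result comes out negative; the driver apparently uses a negative length to tell the firmware that the operand is a scatter/gather list rather than a flat buffer. Below is a minimal, self-contained C sketch of that cursor arithmetic; struct nx_sg and build_sg_list() here are simplified stand-ins for the driver's internals, not the real kernel definitions.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the driver's struct nx_sg (fields reduced). */
struct nx_sg {
	uint64_t addr;
	uint32_t len;
};

/*
 * Sketch of the nx_build_sg_list() pattern: fill descriptor entries starting
 * at sg_head and return the first unused slot, which is how the hits above
 * use the return value.
 */
static struct nx_sg *build_sg_list(struct nx_sg *sg_head, uint8_t *buf,
				   unsigned int len, unsigned int sgmax)
{
	struct nx_sg *sg = sg_head;
	unsigned int chunk = 4096;	/* pretend one page per entry */

	while (len && (unsigned int)(sg - sg_head) < sgmax) {
		sg->addr = (uint64_t)(uintptr_t)buf;
		sg->len = len < chunk ? len : chunk;
		buf += sg->len;
		len -= sg->len;
		sg++;
	}
	return sg;			/* first unused slot */
}

int main(void)
{
	struct nx_sg out_list[8];	/* plays the role of nx_ctx->out_sg */
	uint8_t state[32];		/* e.g. a SHA-256 state buffer */
	struct nx_sg *out_sg;
	long outlen;

	out_sg = build_sg_list(out_list, state, sizeof(state), 8);

	/*
	 * Same arithmetic as the hits above:
	 *     nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
	 * Head minus cursor is negative: one entry was consumed here, so the
	 * result is minus one descriptor's worth of bytes.
	 */
	outlen = (out_list - out_sg) * (long)sizeof(struct nx_sg);
	printf("outlen = %ld\n", outlen);
	return 0;
}

Running the sketch prints a negative outlen because exactly one descriptor entry was consumed for the 32-byte state buffer.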
|
/drivers/crypto/ |
D | omap-aes.c |
    153  struct scatterlist *out_sg;  member
    461  struct scatterlist *in_sg, struct scatterlist *out_sg)  in omap_aes_crypt_dma() argument
    514  tx_out = dmaengine_prep_slave_sg(dd->dma_lch_out, out_sg, 1,  in omap_aes_crypt_dma()
    544  struct scatterlist *in_sg, *out_sg;  in omap_aes_crypt_dma_start() local
    549  if (sg_is_last(dd->in_sg) && sg_is_last(dd->out_sg)) {  in omap_aes_crypt_dma_start()
    552  out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32));  in omap_aes_crypt_dma_start()
    559  count = min(count, sg_dma_len(dd->out_sg));  in omap_aes_crypt_dma_start()
    574  err = dma_map_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);  in omap_aes_crypt_dma_start()
    582  addr_out = sg_dma_address(dd->out_sg);  in omap_aes_crypt_dma_start()
    585  out_sg = dd->out_sg;  in omap_aes_crypt_dma_start()
    [all …]
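
The omap-aes.c hits show the dmaengine side of the same bookkeeping: dd->out_sg is the destination scatterlist, mapped with dma_map_sg(..., DMA_FROM_DEVICE) and handed to dmaengine_prep_slave_sg() on the output channel. The kernel-context sketch below isolates that prepare/submit sequence; the function and callback names are hypothetical, only the dmaengine and DMA-mapping calls are real APIs, and error handling is reduced to the essentials.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Hypothetical callback; the real driver completes the crypto request here. */
static void out_dma_done(void *data)
{
	/* unmap the output scatterlist and finish the request */
}

/*
 * Sketch of the omap-aes "prep slave sg" pattern for the output side: map one
 * scatterlist entry for device-to-memory DMA, prepare a slave transfer on the
 * output channel, then submit it and kick the engine.
 */
static int start_out_dma(struct device *dev, struct dma_chan *out_chan,
			 struct scatterlist *out_sg, void *cb_data)
{
	struct dma_async_tx_descriptor *tx;

	if (!dma_map_sg(dev, out_sg, 1, DMA_FROM_DEVICE))
		return -EINVAL;

	tx = dmaengine_prep_slave_sg(out_chan, out_sg, 1, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		dma_unmap_sg(dev, out_sg, 1, DMA_FROM_DEVICE);
		return -EINVAL;
	}

	tx->callback = out_dma_done;
	tx->callback_param = cb_data;

	dmaengine_submit(tx);
	dma_async_issue_pending(out_chan);
	return 0;
}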
|
D | atmel-aes.c |
    123  struct scatterlist *out_sg;  member
    397  dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);  in atmel_aes_crypt_cpu_start()
    426  out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) &&  in atmel_aes_crypt_dma_start()
    427  IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);  in atmel_aes_crypt_dma_start()
    430  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))  in atmel_aes_crypt_dma_start()
    437  count = min(count, sg_dma_len(dd->out_sg));  in atmel_aes_crypt_dma_start()
    445  err = dma_map_sg(dd->dev, dd->out_sg, 1,  in atmel_aes_crypt_dma_start()
    455  addr_out = sg_dma_address(dd->out_sg);  in atmel_aes_crypt_dma_start()
    476  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE);  in atmel_aes_crypt_dma_start()
    584  dd->out_sg = req->dst;  in atmel_aes_handle_queue()
    [all …]
|
D | atmel-tdes.c |
    117  struct scatterlist *out_sg;  member
    323  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);  in atmel_tdes_crypt_pdc_stop()
    330  count = atmel_tdes_sg_copy(&dd->out_sg, &dd->out_offset,  in atmel_tdes_crypt_pdc_stop()
    513  out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) &&  in atmel_tdes_crypt_start()
    514  IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);  in atmel_tdes_crypt_start()
    517  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))  in atmel_tdes_crypt_start()
    524  count = min(count, sg_dma_len(dd->out_sg));  in atmel_tdes_crypt_start()
    532  err = dma_map_sg(dd->dev, dd->out_sg, 1,  in atmel_tdes_crypt_start()
    542  addr_out = sg_dma_address(dd->out_sg);  in atmel_tdes_crypt_start()
    566  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE);  in atmel_tdes_crypt_start()
    [all …]
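
atmel-aes.c and atmel-tdes.c gate their fast DMA path on the same two checks visible in the hits above: each scatterlist entry must start word-aligned and cover a whole number of cipher blocks, and the amount transferred per round is capped by the shorter of the two mapped entries. A minimal sketch of that decision follows; the helper names are hypothetical and the block size is passed in explicitly.

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/*
 * Sketch of the atmel-style "can we DMA this directly?" check: both
 * scatterlist entries must start word-aligned and cover a whole number of
 * cipher blocks, otherwise the driver falls back to a CPU/bounce-buffer path.
 */
static bool out_sg_dma_ok(struct scatterlist *in_sg,
			  struct scatterlist *out_sg,
			  unsigned int block_size)
{
	bool in_ok = IS_ALIGNED(in_sg->offset, sizeof(u32)) &&
		     IS_ALIGNED(in_sg->length, block_size);
	bool out_ok = IS_ALIGNED(out_sg->offset, sizeof(u32)) &&
		      IS_ALIGNED(out_sg->length, block_size);

	return in_ok && out_ok;
}

/*
 * Transfer size for one round: never more than either mapped entry provides.
 * Valid only after dma_map_sg(), since sg_dma_len() reads the mapped length.
 */
static unsigned int out_sg_dma_count(struct scatterlist *in_sg,
				     struct scatterlist *out_sg,
				     unsigned int total)
{
	unsigned int count = min(total, sg_dma_len(in_sg));

	return min(count, sg_dma_len(out_sg));
}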
|
D | tegra-aes.c |
    174  struct scatterlist *out_sg;  member
    399  struct scatterlist *in_sg, *out_sg;  in tegra_aes_handle_req() local
    433  dd->out_sg = req->dst;  in tegra_aes_handle_req()
    436  out_sg = dd->out_sg;  in tegra_aes_handle_req()
    484  ret = dma_map_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE);  in tegra_aes_handle_req()
    493  addr_out = sg_dma_address(out_sg);  in tegra_aes_handle_req()
    496  WARN_ON(sg_dma_len(in_sg) != sg_dma_len(out_sg));  in tegra_aes_handle_req()
    502  dma_unmap_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE);  in tegra_aes_handle_req()
    514  out_sg = sg_next(out_sg);  in tegra_aes_handle_req()
    515  WARN_ON(((total != 0) && (!in_sg || !out_sg)));  in tegra_aes_handle_req()
|
D | sahara.c |
    161  struct scatterlist *out_sg;  member
    408  dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,  in sahara_aes_done_task()
    458  dev->nb_out_sg = sahara_sg_length(dev->out_sg, dev->total);  in sahara_hw_descriptor_create()
    471  ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg,  in sahara_hw_descriptor_create()
    494  sg = dev->out_sg;  in sahara_hw_descriptor_create()
    523  dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,  in sahara_hw_descriptor_create()
    565  dev->out_sg = req->dst;  in sahara_aes_queue_task()
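
tegra-aes.c walks the source and destination chains in lockstep: map one entry from each side, expect equal lengths, process, unmap, then advance both cursors with sg_next() until the requested total is consumed (sahara.c instead maps the whole list at once, nb_out_sg entries at a time). Below is a reduced kernel-context sketch of that walk; the per-chunk hardware work is stubbed out as a hypothetical process_one(), and only the DMA-mapping and scatterlist calls are real APIs.

#include <linux/bug.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Hypothetical stand-in for programming the engine with one mapped pair. */
static int process_one(dma_addr_t in, dma_addr_t out, size_t len)
{
	return 0;
}

/*
 * Sketch of the tegra-aes style walk: one in_sg/out_sg entry at a time is
 * mapped, processed and unmapped, then both cursors advance together until
 * the requested total has been consumed.
 */
static int walk_sg_pairs(struct device *dev, struct scatterlist *in_sg,
			 struct scatterlist *out_sg, size_t total)
{
	int ret = 0;

	while (total && in_sg && out_sg) {
		size_t len;

		if (!dma_map_sg(dev, in_sg, 1, DMA_TO_DEVICE))
			return -EINVAL;
		if (!dma_map_sg(dev, out_sg, 1, DMA_FROM_DEVICE)) {
			dma_unmap_sg(dev, in_sg, 1, DMA_TO_DEVICE);
			return -EINVAL;
		}

		/* mirrors the WARN_ON() in the tegra hits above */
		WARN_ON(sg_dma_len(in_sg) != sg_dma_len(out_sg));
		len = min_t(size_t, total, sg_dma_len(in_sg));

		ret = process_one(sg_dma_address(in_sg),
				  sg_dma_address(out_sg), len);

		dma_unmap_sg(dev, out_sg, 1, DMA_FROM_DEVICE);
		dma_unmap_sg(dev, in_sg, 1, DMA_TO_DEVICE);
		if (ret)
			break;

		total -= len;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	return ret;
}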
|