
Lines Matching refs:sg_table

370 struct dpaa2_sg_entry *sg_table; in aead_edesc_alloc() local
470 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
471 qm_sg_bytes = qm_sg_nents * sizeof(*sg_table); in aead_edesc_alloc()
483 u8 *iv = (u8 *)(sg_table + qm_sg_nents); in aead_edesc_alloc()
521 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
524 dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0); in aead_edesc_alloc()
527 sg_to_qm_sg_last(req->src, src_len, sg_table + qm_sg_index, 0); in aead_edesc_alloc()
531 sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0); in aead_edesc_alloc()
533 qm_sg_dma = dma_map_single(dev, sg_table, qm_sg_bytes, DMA_TO_DEVICE); in aead_edesc_alloc()
563 (1 + !!ivsize) * sizeof(*sg_table)); in aead_edesc_alloc()
579 sizeof(*sg_table)); in aead_edesc_alloc()
1126 struct dpaa2_sg_entry *sg_table; in skcipher_edesc_alloc() local
1202 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1203 iv = (u8 *)(sg_table + qm_sg_ents); in skcipher_edesc_alloc()
1220 dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0); in skcipher_edesc_alloc()
1221 sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0); in skcipher_edesc_alloc()
1224 sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0); in skcipher_edesc_alloc()
1226 dma_to_qm_sg_one(sg_table + dst_sg_idx + mapped_dst_nents, iv_dma, in skcipher_edesc_alloc()
1229 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1251 sizeof(*sg_table)); in skcipher_edesc_alloc()
1254 sizeof(*sg_table)); in skcipher_edesc_alloc()
3541 struct dpaa2_sg_entry *sg_table; in ahash_update_ctx() local
3572 sizeof(*sg_table); in ahash_update_ctx()
3573 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3575 ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table, in ahash_update_ctx()
3580 ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state); in ahash_update_ctx()
3586 sg_table + qm_sg_src_index, 0); in ahash_update_ctx()
3588 dpaa2_sg_set_final(sg_table + qm_sg_src_index - 1, in ahash_update_ctx()
3592 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3652 struct dpaa2_sg_entry *sg_table; in ahash_final_ctx() local
3660 qm_sg_bytes = pad_sg_nents(1 + (buflen ? 1 : 0)) * sizeof(*sg_table); in ahash_final_ctx()
3661 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3663 ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table, in ahash_final_ctx()
3668 ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state); in ahash_final_ctx()
3672 dpaa2_sg_set_final(sg_table + (buflen ? 1 : 0), true); in ahash_final_ctx()
3674 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3724 struct dpaa2_sg_entry *sg_table; in ahash_finup_ctx() local
3754 sizeof(*sg_table); in ahash_finup_ctx()
3755 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3757 ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table, in ahash_finup_ctx()
3762 ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state); in ahash_finup_ctx()
3766 sg_to_qm_sg_last(req->src, req->nbytes, sg_table + qm_sg_src_index, 0); in ahash_finup_ctx()
3768 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3849 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest() local
3851 qm_sg_bytes = pad_sg_nents(mapped_nents) * sizeof(*sg_table); in ahash_digest()
3852 sg_to_qm_sg_last(req->src, req->nbytes, sg_table, 0); in ahash_digest()
3853 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3993 struct dpaa2_sg_entry *sg_table; in ahash_update_no_ctx() local
4023 sizeof(*sg_table); in ahash_update_no_ctx()
4024 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
4026 ret = buf_map_to_qm_sg(ctx->dev, sg_table, state); in ahash_update_no_ctx()
4030 sg_to_qm_sg_last(req->src, src_len, sg_table + 1, 0); in ahash_update_no_ctx()
4032 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
4106 struct dpaa2_sg_entry *sg_table; in ahash_finup_no_ctx() local
4134 qm_sg_bytes = pad_sg_nents(2 + mapped_nents) * sizeof(*sg_table); in ahash_finup_no_ctx()
4135 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4137 ret = buf_map_to_qm_sg(ctx->dev, sg_table, state); in ahash_finup_no_ctx()
4141 sg_to_qm_sg_last(req->src, req->nbytes, sg_table + 1, 0); in ahash_finup_no_ctx()
4143 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4211 struct dpaa2_sg_entry *sg_table; in ahash_update_first() local
4240 sg_table = &edesc->sgt[0]; in ahash_update_first()
4249 sg_to_qm_sg_last(req->src, src_len, sg_table, 0); in ahash_update_first()
4251 sizeof(*sg_table); in ahash_update_first()
4252 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
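Taken together, the hits above follow the same pattern in every edesc_alloc/ahash path: sg_table is carved out of the tail of the extended descriptor (&edesc->sgt[0]), single buffers such as the IV or running context are turned into entries with dma_to_qm_sg_one() (or ctx_map_to_qm_sg()/buf_map_to_qm_sg() in the hash paths), the request scatterlists are converted with sg_to_qm_sg()/sg_to_qm_sg_last(), and the whole table is then mapped in one shot with dma_map_single(). A condensed sketch of that sequence, based on the skcipher_edesc_alloc() hits above (size computations and cleanup omitted; illustrative only, not the full driver code):

	/* QMan SG table lives inline at the end of the extended descriptor */
	sg_table = &edesc->sgt[0];
	/* the IV buffer is placed right after the (padded) table */
	iv = (u8 *)(sg_table + qm_sg_ents);

	/* entry 0: the IV, already DMA-mapped to iv_dma */
	dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
	/* following entries: the source scatterlist */
	sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0);

	/* map the populated table as a single DMA region for the descriptor */
	edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->qm_sg_dma))
		/* unmap previously mapped buffers and free edesc */;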