Lines Matching refs:jrdev (drivers/crypto/caam/caamhash.c)

Each entry gives the source line number, the matching code fragment, and the enclosing function; a trailing tag notes whether jrdev occurs as a struct member, a function argument, or a local variable.

102 	struct device *jrdev;  member
120 void (*ahash_op_done)(struct device *jrdev, u32 *desc, u32 err,
141 static inline int map_seq_out_ptr_ctx(u32 *desc, struct device *jrdev, in map_seq_out_ptr_ctx() argument
146 state->ctx_dma = dma_map_single(jrdev, state->caam_ctx, in map_seq_out_ptr_ctx()
148 if (dma_mapping_error(jrdev, state->ctx_dma)) { in map_seq_out_ptr_ctx()
149 dev_err(jrdev, "unable to map ctx\n"); in map_seq_out_ptr_ctx()
160 static inline int buf_map_to_sec4_sg(struct device *jrdev, in buf_map_to_sec4_sg() argument
169 state->buf_dma = dma_map_single(jrdev, state->buf, buflen, in buf_map_to_sec4_sg()
171 if (dma_mapping_error(jrdev, state->buf_dma)) { in buf_map_to_sec4_sg()
172 dev_err(jrdev, "unable to map buf\n"); in buf_map_to_sec4_sg()
183 static inline int ctx_map_to_sec4_sg(struct device *jrdev, in ctx_map_to_sec4_sg() argument
188 state->ctx_dma = dma_map_single(jrdev, state->caam_ctx, ctx_len, flag); in ctx_map_to_sec4_sg()
189 if (dma_mapping_error(jrdev, state->ctx_dma)) { in ctx_map_to_sec4_sg()
190 dev_err(jrdev, "unable to map ctx\n"); in ctx_map_to_sec4_sg()
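
The three helpers above share one idiom: map a CPU buffer for the device behind the job ring with dma_map_single(), test the handle with dma_mapping_error(), and log through dev_err() on failure. A minimal self-contained sketch of that idiom, assuming illustrative function and field names (not the driver's own):

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    /* Map a hash context buffer for the job ring's device. Returns 0 on
     * success; on failure the handle is zeroed so later unmap paths can
     * skip it. */
    static int map_ctx_sketch(struct device *jrdev, void *caam_ctx,
                              size_t ctx_len, dma_addr_t *ctx_dma)
    {
            *ctx_dma = dma_map_single(jrdev, caam_ctx, ctx_len,
                                      DMA_BIDIRECTIONAL);
            if (dma_mapping_error(jrdev, *ctx_dma)) {
                    dev_err(jrdev, "unable to map ctx\n");
                    *ctx_dma = 0;
                    return -ENOMEM;
            }
            return 0;
    }
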
204 struct device *jrdev = ctx->jrdev; in ahash_set_sh_desc() local
205 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in ahash_set_sh_desc()
214 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in ahash_set_sh_desc()
225 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in ahash_set_sh_desc()
235 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in ahash_set_sh_desc()
246 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in ahash_set_sh_desc()
260 struct device *jrdev = ctx->jrdev; in axcbc_set_sh_desc() local
267 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in axcbc_set_sh_desc()
277 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in axcbc_set_sh_desc()
290 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in axcbc_set_sh_desc()
300 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in axcbc_set_sh_desc()
312 struct device *jrdev = ctx->jrdev; in acmac_set_sh_desc() local
319 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in acmac_set_sh_desc()
329 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in acmac_set_sh_desc()
339 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in acmac_set_sh_desc()
349 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in acmac_set_sh_desc()
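
All three *_set_sh_desc() variants follow the same sequence: the CPU rewrites a shared descriptor in a buffer that was mapped once at init time (see the dma_map_single_attrs() calls in caam_hash_cra_init() further down), then publishes it with dma_sync_single_for_device(). A hedged sketch of one such step, with illustrative names:

    /* Illustrative only: after the CPU rewrites the descriptor words in
     * sh_desc[], hand them back to the device. Without this sync the
     * CAAM could fetch stale descriptor contents. */
    static void publish_sh_desc_sketch(struct device *jrdev, u32 *sh_desc,
                                       dma_addr_t sh_desc_dma, size_t len)
    {
            /* step 1 (elided): rebuild the shared descriptor in sh_desc[] */

            /* step 2: make the rewritten bytes visible to the device */
            dma_sync_single_for_device(jrdev, sh_desc_dma, len,
                                       DMA_TO_DEVICE);
    }
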
362 struct device *jrdev = ctx->jrdev; in hash_digest_key() local
370 dev_err(jrdev, "unable to allocate key input memory\n"); in hash_digest_key()
376 key_dma = dma_map_single(jrdev, key, *keylen, DMA_BIDIRECTIONAL); in hash_digest_key()
377 if (dma_mapping_error(jrdev, key_dma)) { in hash_digest_key()
378 dev_err(jrdev, "unable to map key memory\n"); in hash_digest_key()
402 ret = caam_jr_enqueue(jrdev, desc, split_key_done, &result); in hash_digest_key()
412 dma_unmap_single(jrdev, key_dma, *keylen, DMA_BIDIRECTIONAL); in hash_digest_key()
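
hash_digest_key() is a synchronous user of the job ring: it submits a one-off descriptor via caam_jr_enqueue() and blocks on a completion that its split_key_done callback fires. A hedged sketch of that pattern, with illustrative struct and function names standing in for the driver's:

    #include <linux/completion.h>

    /* Illustrative stand-in for the driver's result carrier. */
    struct jr_sync_result {
            struct completion completion;
            int err;
    };

    static void jr_sync_done(struct device *jrdev, u32 *desc, u32 err,
                             void *context)
    {
            struct jr_sync_result *res = context;

            if (err)
                    res->err = caam_jr_strstatus(jrdev, err); /* decode + log */
            complete(&res->completion);
    }

    /* Submit and wait: a successful submission returns -EINPROGRESS,
     * after which the caller blocks until the callback runs. */
    static int enqueue_and_wait_sketch(struct device *jrdev, u32 *desc)
    {
            struct jr_sync_result result = { .err = 0 };
            int ret;

            init_completion(&result.completion);
            ret = caam_jr_enqueue(jrdev, desc, jr_sync_done, &result);
            if (ret == -EINPROGRESS) {
                    wait_for_completion(&result.completion);
                    ret = result.err;
            }
            return ret;
    }
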
425 struct device *jrdev = ctx->jrdev; in ahash_setkey() local
428 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in ahash_setkey()
432 dev_dbg(jrdev, "keylen %d\n", keylen); in ahash_setkey()
466 dma_sync_single_for_device(ctx->jrdev, in ahash_setkey()
471 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, key, in ahash_setkey()
488 struct device *jrdev = ctx->jrdev; in axcbc_setkey() local
494 dma_sync_single_for_device(jrdev, ctx->adata.key_dma, keylen, in axcbc_setkey()
575 static inline void ahash_done_cpy(struct device *jrdev, u32 *desc, u32 err, in ahash_done_cpy() argument
579 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in ahash_done_cpy()
588 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ahash_done_cpy()
594 ecode = caam_jr_strstatus(jrdev, err); in ahash_done_cpy()
596 ahash_unmap_ctx(jrdev, edesc, req, digestsize, dir); in ahash_done_cpy()
614 static void ahash_done(struct device *jrdev, u32 *desc, u32 err, in ahash_done() argument
617 ahash_done_cpy(jrdev, desc, err, context, DMA_FROM_DEVICE); in ahash_done()
620 static void ahash_done_ctx_src(struct device *jrdev, u32 *desc, u32 err, in ahash_done_ctx_src() argument
623 ahash_done_cpy(jrdev, desc, err, context, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
626 static inline void ahash_done_switch(struct device *jrdev, u32 *desc, u32 err, in ahash_done_switch() argument
630 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in ahash_done_switch()
639 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ahash_done_switch()
644 ecode = caam_jr_strstatus(jrdev, err); in ahash_done_switch()
646 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, dir); in ahash_done_switch()
677 static void ahash_done_bi(struct device *jrdev, u32 *desc, u32 err, in ahash_done_bi() argument
680 ahash_done_switch(jrdev, desc, err, context, DMA_BIDIRECTIONAL); in ahash_done_bi()
683 static void ahash_done_ctx_dst(struct device *jrdev, u32 *desc, u32 err, in ahash_done_ctx_dst() argument
686 ahash_done_switch(jrdev, desc, err, context, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
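
The entries above show that the four completion callbacks are thin wrappers: ahash_done() and ahash_done_ctx_src() feed ahash_done_cpy(), ahash_done_bi() and ahash_done_ctx_dst() feed ahash_done_switch(), and the only varying detail is the DMA direction used for unmapping. A hedged skeleton of what such a callback does, assuming the generic crypto-API completion helper:

    #include <crypto/internal/hash.h>
    #include <linux/dma-mapping.h>

    static void ahash_done_skeleton(struct device *jrdev, u32 *desc, u32 err,
                                    void *context, enum dma_data_direction dir)
    {
            struct ahash_request *req = context;
            int ecode = 0;

            if (err)
                    ecode = caam_jr_strstatus(jrdev, err); /* HW status -> errno */

            /* elided: unmap the extended descriptor and context using
             * 'dir', copy out the digest or running state, free edesc */

            ahash_request_complete(req, ecode);
    }
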
707 dev_err(ctx->jrdev, "could not allocate extended descriptor\n"); in ahash_edesc_alloc()
735 src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE); in ahash_edesc_add_src()
736 if (dma_mapping_error(ctx->jrdev, src_dma)) { in ahash_edesc_add_src()
737 dev_err(ctx->jrdev, "unable to map S/G table\n"); in ahash_edesc_add_src()
760 struct device *jrdev = ctx->jrdev; in ahash_do_one_req() local
766 ret = caam_jr_enqueue(jrdev, desc, state->ahash_op_done, req); in ahash_do_one_req()
772 ahash_unmap(jrdev, state->edesc, req, 0); in ahash_do_one_req()
781 static int ahash_enqueue_req(struct device *jrdev, in ahash_enqueue_req() argument
782 void (*cbk)(struct device *jrdev, u32 *desc, in ahash_enqueue_req() argument
787 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in ahash_enqueue_req()
804 ret = caam_jr_enqueue(jrdev, desc, cbk, req); in ahash_enqueue_req()
807 ahash_unmap_ctx(jrdev, edesc, req, dst_len, dir); in ahash_enqueue_req()
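
ahash_enqueue_req() centralizes submission for the asynchronous paths: on a successful enqueue the return value is -EINPROGRESS (or -EBUSY for a backlogged request) and ownership of every DMA mapping transfers to the callback; on any other outcome the function must unmap and free the extended descriptor itself. A condensed sketch of that tail; the real function also diverts backloggable requests through the crypto engine, elided here:

    ret = caam_jr_enqueue(jrdev, desc, cbk, req);
    if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
            /* submission failed: the callback will never run, so undo
             * the mappings and free the extended descriptor now */
            ahash_unmap_ctx(jrdev, edesc, req, dst_len, dir);
            kfree(edesc);
    }
    return ret;
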
820 struct device *jrdev = ctx->jrdev; in ahash_update_ctx() local
851 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_ctx()
856 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
859 dev_err(jrdev, "unable to DMA map source\n"); in ahash_update_ctx()
877 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
884 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_update_ctx()
889 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_update_ctx()
903 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_update_ctx()
906 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_ctx()
907 dev_err(jrdev, "unable to map S/G table\n"); in ahash_update_ctx()
921 ret = ahash_enqueue_req(jrdev, ahash_done_bi, req, in ahash_update_ctx()
935 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL); in ahash_update_ctx()
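
ahash_update_ctx() opens with the scatterlist handling that every source-consuming path below repeats: size the list with sg_nents_for_len(), map it with dma_map_sg(), and unwind with dma_unmap_sg() if anything later fails. A self-contained sketch of that front half, with illustrative parameter names:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Returns the number of SG entries covering 'to_hash' bytes, with
     * *mapped_nents set to how many were DMA-mapped; negative on error. */
    static int map_src_sketch(struct device *jrdev, struct scatterlist *src,
                              unsigned int to_hash, int *mapped_nents)
    {
            int src_nents = sg_nents_for_len(src, to_hash);

            if (src_nents < 0) {
                    dev_err(jrdev, "Invalid number of src SG.\n");
                    return src_nents;
            }

            if (src_nents) {
                    *mapped_nents = dma_map_sg(jrdev, src, src_nents,
                                               DMA_TO_DEVICE);
                    if (!*mapped_nents) {
                            dev_err(jrdev, "unable to DMA map source\n");
                            return -ENOMEM;
                    }
            } else {
                    *mapped_nents = 0;
            }

            return src_nents;
    }
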
945 struct device *jrdev = ctx->jrdev; in ahash_final_ctx() local
966 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_final_ctx()
971 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_final_ctx()
977 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_final_ctx()
979 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_final_ctx()
980 dev_err(jrdev, "unable to map S/G table\n"); in ahash_final_ctx()
993 return ahash_enqueue_req(jrdev, ahash_done_ctx_src, req, in ahash_final_ctx()
996 ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL); in ahash_final_ctx()
1006 struct device *jrdev = ctx->jrdev; in ahash_finup_ctx() local
1017 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_finup_ctx()
1022 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_finup_ctx()
1025 dev_err(jrdev, "unable to DMA map source\n"); in ahash_finup_ctx()
1038 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_ctx()
1046 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_finup_ctx()
1051 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_finup_ctx()
1067 return ahash_enqueue_req(jrdev, ahash_done_ctx_src, req, in ahash_finup_ctx()
1070 ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
1080 struct device *jrdev = ctx->jrdev; in ahash_digest() local
1091 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_digest()
1096 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_digest()
1099 dev_err(jrdev, "unable to map source for DMA\n"); in ahash_digest()
1110 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_digest()
1119 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_digest()
1126 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_digest()
1128 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_digest()
1137 return ahash_enqueue_req(jrdev, ahash_done, req, digestsize, in ahash_digest()
1147 struct device *jrdev = ctx->jrdev; in ahash_final_no_ctx() local
1164 state->buf_dma = dma_map_single(jrdev, buf, buflen, in ahash_final_no_ctx()
1166 if (dma_mapping_error(jrdev, state->buf_dma)) { in ahash_final_no_ctx()
1167 dev_err(jrdev, "unable to map src\n"); in ahash_final_no_ctx()
1174 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_final_no_ctx()
1182 return ahash_enqueue_req(jrdev, ahash_done, req, in ahash_final_no_ctx()
1185 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_final_no_ctx()
1196 struct device *jrdev = ctx->jrdev; in ahash_update_no_ctx() local
1227 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_no_ctx()
1232 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_no_ctx()
1235 dev_err(jrdev, "unable to DMA map source\n"); in ahash_update_no_ctx()
1253 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_no_ctx()
1260 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state); in ahash_update_no_ctx()
1268 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_update_no_ctx()
1271 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_no_ctx()
1272 dev_err(jrdev, "unable to map S/G table\n"); in ahash_update_no_ctx()
1279 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_no_ctx()
1287 ret = ahash_enqueue_req(jrdev, ahash_done_ctx_dst, req, in ahash_update_no_ctx()
1306 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE); in ahash_update_no_ctx()
1317 struct device *jrdev = ctx->jrdev; in ahash_finup_no_ctx() local
1327 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_finup_no_ctx()
1332 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_finup_no_ctx()
1335 dev_err(jrdev, "unable to DMA map source\n"); in ahash_finup_no_ctx()
1350 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_no_ctx()
1359 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state); in ahash_finup_no_ctx()
1366 dev_err(jrdev, "unable to map S/G table\n"); in ahash_finup_no_ctx()
1370 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_finup_no_ctx()
1378 return ahash_enqueue_req(jrdev, ahash_done, req, in ahash_finup_no_ctx()
1381 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_finup_no_ctx()
1393 struct device *jrdev = ctx->jrdev; in ahash_update_first() local
1422 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_first()
1427 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_first()
1430 dev_err(jrdev, "unable to map source for DMA\n"); in ahash_update_first()
1446 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_first()
1459 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_first()
1467 ret = ahash_enqueue_req(jrdev, ahash_done_ctx_dst, req, in ahash_update_first()
1489 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE); in ahash_update_first()
1782 ctx->jrdev = caam_jr_alloc(); in caam_hash_cra_init()
1783 if (IS_ERR(ctx->jrdev)) { in caam_hash_cra_init()
1785 return PTR_ERR(ctx->jrdev); in caam_hash_cra_init()
1788 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_hash_cra_init()
1815 ctx->adata.key_dma = dma_map_single_attrs(ctx->jrdev, ctx->key, in caam_hash_cra_init()
1819 if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) { in caam_hash_cra_init()
1820 dev_err(ctx->jrdev, "unable to map key\n"); in caam_hash_cra_init()
1821 caam_jr_free(ctx->jrdev); in caam_hash_cra_init()
1826 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_update, in caam_hash_cra_init()
1830 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_hash_cra_init()
1831 dev_err(ctx->jrdev, "unable to map shared descriptors\n"); in caam_hash_cra_init()
1834 dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma, in caam_hash_cra_init()
1839 caam_jr_free(ctx->jrdev); in caam_hash_cra_init()
1871 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_update_dma, in caam_hash_cra_exit()
1876 dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma, in caam_hash_cra_exit()
1879 caam_jr_free(ctx->jrdev); in caam_hash_cra_exit()
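
Finally, caam_hash_cra_init() and caam_hash_cra_exit() bracket the per-transform job-ring lifecycle: reserve a ring with caam_jr_alloc(), map per-tfm resources against it, and on exit unmap in reverse order before releasing the ring. A hedged outline with an illustrative context type; the driver itself uses dma_map_single_attrs() with sync-skipping attributes, simplified to dma_map_single() here:

    #include <linux/dma-mapping.h>
    #include <linux/err.h>

    /* Illustrative per-tfm context; the real caam_hash_ctx also carries
     * the shared descriptors and key storage inline. */
    struct hash_ctx_sketch {
            struct device *jrdev;
            void *sh_desc;
            size_t sh_desc_len;
            dma_addr_t sh_desc_dma;
    };

    static int cra_init_sketch(struct hash_ctx_sketch *ctx)
    {
            ctx->jrdev = caam_jr_alloc();   /* reserve a job ring */
            if (IS_ERR(ctx->jrdev))
                    return PTR_ERR(ctx->jrdev);

            ctx->sh_desc_dma = dma_map_single(ctx->jrdev, ctx->sh_desc,
                                              ctx->sh_desc_len,
                                              DMA_TO_DEVICE);
            if (dma_mapping_error(ctx->jrdev, ctx->sh_desc_dma)) {
                    dev_err(ctx->jrdev, "unable to map shared descriptors\n");
                    caam_jr_free(ctx->jrdev);   /* undo the reservation */
                    return -ENOMEM;
            }
            return 0;
    }

    static void cra_exit_sketch(struct hash_ctx_sketch *ctx)
    {
            /* undo in reverse order: unmap first, then release the ring */
            dma_unmap_single(ctx->jrdev, ctx->sh_desc_dma,
                             ctx->sh_desc_len, DMA_TO_DEVICE);
            caam_jr_free(ctx->jrdev);
    }
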