Lines Matching refs:rsa

161 struct qat_rsa_input_params rsa; member
165 struct qat_rsa_output_params rsa; member
174 struct qat_rsa_ctx *rsa; member
178 struct akcipher_request *rsa; member
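
These four members appear to come from the Linux kernel's Intel QAT driver (qat_common/qat_asym_algs.c): the first two are the rsa arms of the in/out parameter unions embedded in the request structure, the last two are the rsa arms of its ctx and areq unions. Below is a minimal sketch of the layout these accesses imply. The field names are taken from this listing; the ordering, the omitted non-RSA (dh) union arms, and the attribute markup are reconstructed from memory of the mainline driver and should be treated as assumptions.

/* Kernel context: <linux/types.h> supplies u64 and dma_addr_t,
 * <crypto/akcipher.h> supplies struct akcipher_request;
 * struct qat_rsa_ctx is driver-private.
 */
struct qat_rsa_input_params {
	union {
		struct {
			dma_addr_t m;	/* message */
			dma_addr_t e;	/* public exponent */
			dma_addr_t n;	/* modulus */
		} enc;
		struct {
			dma_addr_t c;	/* ciphertext */
			dma_addr_t d;	/* private exponent */
			dma_addr_t n;
		} dec;
		struct {
			dma_addr_t c;
			dma_addr_t p;	/* CRT form: primes, exponents, inverse */
			dma_addr_t q;
			dma_addr_t dp;
			dma_addr_t dq;
			dma_addr_t qinv;
		} dec_crt;
		u64 in_tab[8];	/* flat view, zero-terminated before submit */
	};
} __packed __aligned(64);

struct qat_rsa_output_params {
	union {
		struct {
			dma_addr_t c;	/* encrypt output */
		} enc;
		struct {
			dma_addr_t m;	/* decrypt output */
		} dec;
		u64 out_tab[8];
	};
} __packed __aligned(64);

struct qat_asym_request {
	union {
		struct qat_rsa_input_params rsa;	/* line 161 */
	} in;						/* dh arm omitted */
	union {
		struct qat_rsa_output_params rsa;	/* line 165 */
	} out;
	dma_addr_t phy_in;	/* device address of in_tab */
	dma_addr_t phy_out;	/* device address of out_tab */
	char *src_align;	/* bounce buffers; see the callback below */
	char *dst_align;
	union {
		struct qat_rsa_ctx *rsa;		/* line 174 */
	} ctx;
	union {
		struct akcipher_request *rsa;		/* line 178 */
	} areq;
} __aligned(64);

The union-of-views design lets the same block be filled through named enc/dec/dec_crt fields and then handed to the firmware as a flat, zero-terminated table (in_tab/out_tab).
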
556 struct akcipher_request *areq = req->areq.rsa; in qat_rsa_cb()
557 struct device *dev = &GET_DEV(req->ctx.rsa->inst->accel_dev); in qat_rsa_cb()
564 dma_free_coherent(dev, req->ctx.rsa->key_sz, req->src_align, in qat_rsa_cb()
565 req->in.rsa.enc.m); in qat_rsa_cb()
567 dma_unmap_single(dev, req->in.rsa.enc.m, req->ctx.rsa->key_sz, in qat_rsa_cb()
570 areq->dst_len = req->ctx.rsa->key_sz; in qat_rsa_cb()
575 dma_free_coherent(dev, req->ctx.rsa->key_sz, req->dst_align, in qat_rsa_cb()
576 req->out.rsa.enc.c); in qat_rsa_cb()
578 dma_unmap_single(dev, req->out.rsa.enc.c, req->ctx.rsa->key_sz, in qat_rsa_cb()
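
Lines 556-578 are the DMA cleanup in the completion callback. The pattern, sketched below on the assumption that src_align/dst_align are non-NULL exactly when a coherent bounce buffer had to be allocated: free the bounce buffer at the device address recorded in the parameter union, otherwise undo the streaming mapping. Response-status handling and the copy back into the caller's scatterlist are elided.

	struct akcipher_request *areq = req->areq.rsa;			/* 556 */
	struct device *dev = &GET_DEV(req->ctx.rsa->inst->accel_dev);	/* 557 */

	if (req->src_align)
		dma_free_coherent(dev, req->ctx.rsa->key_sz, req->src_align,
				  req->in.rsa.enc.m);			/* 564-565 */
	else
		dma_unmap_single(dev, req->in.rsa.enc.m,
				 req->ctx.rsa->key_sz, DMA_TO_DEVICE);	/* 567 */

	areq->dst_len = req->ctx.rsa->key_sz;				/* 570 */
	if (req->dst_align)
		dma_free_coherent(dev, req->ctx.rsa->key_sz, req->dst_align,
				  req->out.rsa.enc.c);			/* 575-576 */
	else
		dma_unmap_single(dev, req->out.rsa.enc.c,
				 req->ctx.rsa->key_sz, DMA_FROM_DEVICE); /* 578 */

Note that the callback always uses the enc view, even for decrypt requests: every arm of each union starts with the same dma_addr_t slot, so enc.m aliases dec.c and enc.c aliases dec.m.
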
712 qat_req->ctx.rsa = ctx; in qat_rsa_enc()
713 qat_req->areq.rsa = req; in qat_rsa_enc()
719 qat_req->in.rsa.enc.e = ctx->dma_e; in qat_rsa_enc()
720 qat_req->in.rsa.enc.n = ctx->dma_n; in qat_rsa_enc()
732 qat_req->in.rsa.enc.m = dma_map_single(dev, sg_virt(req->src), in qat_rsa_enc()
734 if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m))) in qat_rsa_enc()
741 &qat_req->in.rsa.enc.m, in qat_rsa_enc()
751 qat_req->out.rsa.enc.c = dma_map_single(dev, sg_virt(req->dst), in qat_rsa_enc()
755 if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.enc.c))) in qat_rsa_enc()
760 &qat_req->out.rsa.enc.c, in qat_rsa_enc()
766 qat_req->in.rsa.in_tab[3] = 0; in qat_rsa_enc()
767 qat_req->out.rsa.out_tab[1] = 0; in qat_rsa_enc()
768 qat_req->phy_in = dma_map_single(dev, &qat_req->in.rsa.enc.m, in qat_rsa_enc()
774 qat_req->phy_out = dma_map_single(dev, &qat_req->out.rsa.enc.c, in qat_rsa_enc()
804 qat_req->out.rsa.enc.c); in qat_rsa_enc()
806 if (!dma_mapping_error(dev, qat_req->out.rsa.enc.c)) in qat_rsa_enc()
807 dma_unmap_single(dev, qat_req->out.rsa.enc.c, in qat_rsa_enc()
812 qat_req->in.rsa.enc.m); in qat_rsa_enc()
814 if (!dma_mapping_error(dev, qat_req->in.rsa.enc.m)) in qat_rsa_enc()
815 dma_unmap_single(dev, qat_req->in.rsa.enc.m, in qat_rsa_enc()
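
The encrypt path (lines 712-815) fills the enc view and wires up the flat tables. A condensed sketch of the happy path follows, assuming ctx->dma_e/ctx->dma_n were mapped when the key was set; the short-source bounce-buffer branch and the firmware submission are elided.

	qat_req->ctx.rsa = ctx;						/* 712 */
	qat_req->areq.rsa = req;					/* 713 */
	qat_req->in.rsa.enc.e = ctx->dma_e;				/* 719 */
	qat_req->in.rsa.enc.n = ctx->dma_n;				/* 720 */

	qat_req->in.rsa.enc.m = dma_map_single(dev, sg_virt(req->src),
					       ctx->key_sz,
					       DMA_TO_DEVICE);		/* 732 */
	if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m)))
		return -ENOMEM;						/* 734 */

	qat_req->out.rsa.enc.c = dma_map_single(dev, sg_virt(req->dst),
						ctx->key_sz,
						DMA_FROM_DEVICE);	/* 751 */
	/* mapping-error check and dst bounce-buffer branch elided */

	qat_req->in.rsa.in_tab[3] = 0;		/* terminate after m, e, n  766 */
	qat_req->out.rsa.out_tab[1] = 0;	/* terminate after c        767 */
	qat_req->phy_in = dma_map_single(dev, &qat_req->in.rsa.enc.m,
					 sizeof(struct qat_rsa_input_params),
					 DMA_TO_DEVICE);		/* 768 */
	qat_req->phy_out = dma_map_single(dev, &qat_req->out.rsa.enc.c,
					  sizeof(struct qat_rsa_output_params),
					  DMA_TO_DEVICE);		/* 774 */

The unwind at lines 804-815 tears this down in reverse, guarding each dma_unmap_single() with !dma_mapping_error() so a request that failed partway through can be cleaned up from any point.
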
848 qat_req->ctx.rsa = ctx; in qat_rsa_dec()
849 qat_req->areq.rsa = req; in qat_rsa_dec()
856 qat_req->in.rsa.dec_crt.p = ctx->dma_p; in qat_rsa_dec()
857 qat_req->in.rsa.dec_crt.q = ctx->dma_q; in qat_rsa_dec()
858 qat_req->in.rsa.dec_crt.dp = ctx->dma_dp; in qat_rsa_dec()
859 qat_req->in.rsa.dec_crt.dq = ctx->dma_dq; in qat_rsa_dec()
860 qat_req->in.rsa.dec_crt.qinv = ctx->dma_qinv; in qat_rsa_dec()
862 qat_req->in.rsa.dec.d = ctx->dma_d; in qat_rsa_dec()
863 qat_req->in.rsa.dec.n = ctx->dma_n; in qat_rsa_dec()
876 qat_req->in.rsa.dec.c = dma_map_single(dev, sg_virt(req->src), in qat_rsa_dec()
878 if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.dec.c))) in qat_rsa_dec()
885 &qat_req->in.rsa.dec.c, in qat_rsa_dec()
895 qat_req->out.rsa.dec.m = dma_map_single(dev, sg_virt(req->dst), in qat_rsa_dec()
899 if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.dec.m))) in qat_rsa_dec()
904 &qat_req->out.rsa.dec.m, in qat_rsa_dec()
912 qat_req->in.rsa.in_tab[6] = 0; in qat_rsa_dec()
914 qat_req->in.rsa.in_tab[3] = 0; in qat_rsa_dec()
915 qat_req->out.rsa.out_tab[1] = 0; in qat_rsa_dec()
916 qat_req->phy_in = dma_map_single(dev, &qat_req->in.rsa.dec.c, in qat_rsa_dec()
922 qat_req->phy_out = dma_map_single(dev, &qat_req->out.rsa.dec.m, in qat_rsa_dec()
956 qat_req->out.rsa.dec.m); in qat_rsa_dec()
958 if (!dma_mapping_error(dev, qat_req->out.rsa.dec.m)) in qat_rsa_dec()
959 dma_unmap_single(dev, qat_req->out.rsa.dec.m, in qat_rsa_dec()
964 qat_req->in.rsa.dec.c); in qat_rsa_dec()
966 if (!dma_mapping_error(dev, qat_req->in.rsa.dec.c)) in qat_rsa_dec()
967 dma_unmap_single(dev, qat_req->in.rsa.dec.c, in qat_rsa_dec()
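
The decrypt path mirrors the encrypt path, with one extra decision: when the private key carries CRT components, the dec_crt view is filled and the input table holds six entries; otherwise the plain dec view holds three (c, d, n). A sketch of just that branch, assuming a ctx->crt_mode flag (the listing shows both branches but not the condition that selects them):

	if (ctx->crt_mode) {
		qat_req->in.rsa.dec_crt.p = ctx->dma_p;		/* 856 */
		qat_req->in.rsa.dec_crt.q = ctx->dma_q;		/* 857 */
		qat_req->in.rsa.dec_crt.dp = ctx->dma_dp;	/* 858 */
		qat_req->in.rsa.dec_crt.dq = ctx->dma_dq;	/* 859 */
		qat_req->in.rsa.dec_crt.qinv = ctx->dma_qinv;	/* 860 */
	} else {
		qat_req->in.rsa.dec.d = ctx->dma_d;		/* 862 */
		qat_req->in.rsa.dec.n = ctx->dma_n;		/* 863 */
	}

	/* ciphertext in, message out: mapped exactly as in the encrypt path */

	if (ctx->crt_mode)
		qat_req->in.rsa.in_tab[6] = 0;	/* c, p, q, dp, dq, qinv  912 */
	else
		qat_req->in.rsa.in_tab[3] = 0;	/* c, d, n                914 */
	qat_req->out.rsa.out_tab[1] = 0;	/* m                      915 */

Because dec.c and dec_crt.c share offset 0 of the union, the phy_in mapping at line 916 can start from &qat_req->in.rsa.dec.c in both modes.
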
1300 static struct akcipher_alg rsa = { variable
1341 rsa.base.cra_flags = 0; in qat_asym_algs_register()
1342 ret = crypto_register_akcipher(&rsa); in qat_asym_algs_register()
1356 crypto_unregister_akcipher(&rsa); in qat_asym_algs_unregister()
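
Registration (lines 1300-1356) follows the standard crypto-API pattern: one static struct akcipher_alg named rsa, registered once and unregistered on teardown. A minimal sketch; everything except the lines quoted above (the cra_flags reset, the register and unregister calls) is reconstructed from memory of the mainline driver, which additionally serializes these paths behind a mutex and an active-device count.

static struct akcipher_alg rsa = {			/* 1300 */
	.encrypt = qat_rsa_enc,
	.decrypt = qat_rsa_dec,
	/* set_pub_key/set_priv_key/max_size/init/exit omitted */
	.base = {
		.cra_name = "rsa",
		.cra_driver_name = "qat-rsa",
		.cra_module = THIS_MODULE,
		.cra_ctxsize = sizeof(struct qat_rsa_ctx),
	},
};

int qat_asym_algs_register(void)
{
	int ret;

	rsa.base.cra_flags = 0;				/* 1341 */
	ret = crypto_register_akcipher(&rsa);		/* 1342 */
	return ret;
}

void qat_asym_algs_unregister(void)
{
	crypto_unregister_akcipher(&rsa);		/* 1356 */
}
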