Searched refs:rsa (Results 1 – 13 of 13) sorted by relevance

/drivers/crypto/ccp/
ccp-crypto-rsa.c
52 req->dst_len = rctx->cmd.u.rsa.key_size >> 3; in ccp_rsa_complete()
61 return ctx->u.rsa.n_len; in ccp_rsa_maxsize()
75 rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */ in ccp_rsa_crypt()
77 rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg; in ccp_rsa_crypt()
78 rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len; in ccp_rsa_crypt()
80 rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg; in ccp_rsa_crypt()
81 rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len; in ccp_rsa_crypt()
83 rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg; in ccp_rsa_crypt()
84 rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len; in ccp_rsa_crypt()
85 rctx->cmd.u.rsa.src = req->src; in ccp_rsa_crypt()
[all …]
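
The ccp glue code above shows two idioms worth noting: the destination length is the key size in bits shifted right by 3 (bits to bytes), and the exponent handed to the engine is the public e for encryption but the private d for decryption. A minimal user-space sketch of that selection, with a hypothetical flat struct standing in for the driver's scatterlist-backed fields (e_sg, d_sg):

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical flat view of the key material the driver keeps
     * in scatterlists; not the driver's real types. */
    struct rsa_key_parts {
        const uint8_t *e; size_t e_len;   /* public exponent */
        const uint8_t *d; size_t d_len;   /* private exponent */
        unsigned int key_bits;            /* ctx->u.rsa.key_len, in bits */
    };

    /* Mirrors the shape of ccp_rsa_crypt(): choose the exponent by
     * direction and size the destination as key_bits >> 3 bytes. */
    static size_t rsa_pick_exp(const struct rsa_key_parts *k, int encrypt,
                               const uint8_t **exp, size_t *exp_len)
    {
        *exp     = encrypt ? k->e     : k->d;
        *exp_len = encrypt ? k->e_len : k->d_len;
        return (size_t)k->key_bits >> 3;  /* dst_len, as in ccp_rsa_complete() */
    }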
ccp-ops.c
1837 struct ccp_rsa_engine *rsa = &cmd->u.rsa; in ccp_run_rsa_cmd() local
1844 if (rsa->key_size > cmd_q->ccp->vdata->rsamax) in ccp_run_rsa_cmd()
1847 if (!rsa->exp || !rsa->mod || !rsa->src || !rsa->dst) in ccp_run_rsa_cmd()
1862 o_len = 32 * ((rsa->key_size + 255) / 256); in ccp_run_rsa_cmd()
1890 ret = ccp_reverse_set_dm_area(&exp, 0, rsa->exp, 0, rsa->exp_len); in ccp_run_rsa_cmd()
1919 ret = ccp_reverse_set_dm_area(&src, 0, rsa->mod, 0, rsa->mod_len); in ccp_run_rsa_cmd()
1922 ret = ccp_reverse_set_dm_area(&src, o_len, rsa->src, 0, rsa->src_len); in ccp_run_rsa_cmd()
1939 op.u.rsa.mod_size = rsa->key_size; in ccp_run_rsa_cmd()
1940 op.u.rsa.input_len = i_len; in ccp_run_rsa_cmd()
1942 ret = cmd_q->ccp->vdata->perform->rsa(&op); in ccp_run_rsa_cmd()
[all …]
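
ccp_run_rsa_cmd() validates the key size against the hardware maximum (rsamax), requires all four buffers, and rounds the operand length up to whole 256-bit units: o_len = 32 * ((key_size + 255) / 256). A standalone check of that arithmetic:

    #include <stdio.h>

    /* Operand buffer length as computed in ccp_run_rsa_cmd(): the key
     * size in bits, rounded up to whole 256-bit (32-byte) units. */
    static unsigned int ccp_o_len(unsigned int key_bits)
    {
        return 32 * ((key_bits + 255) / 256);
    }

    int main(void)
    {
        printf("%u\n", ccp_o_len(2048));  /* 256 bytes, exact fit */
        printf("%u\n", ccp_o_len(2049));  /* 288 bytes, rounded up */
        return 0;
    }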
Makefile
22 ccp-crypto-rsa.o \
ccp-dev-v3.c
238 | (op->u.rsa.mod_size << REQ1_RSA_MOD_SIZE_SHIFT) in ccp_perform_rsa()
241 cr[1] = op->u.rsa.input_len - 1; in ccp_perform_rsa()
572 .rsa = ccp_perform_rsa,
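
The v3 path programs the engine through command-register words: the modulus size is shifted into its field of the first word, and the input length is stored in the next word encoded as N-1, a common hardware length convention. A hedged sketch (the shift value is an assumption, not taken from the header):

    #include <stdint.h>

    #define REQ1_RSA_MOD_SIZE_SHIFT 16      /* assumed field position */

    /* Shape of the setup in ccp_perform_rsa(): pack the modulus size
     * into cr[0] (other REQ1 fields omitted) and store the input
     * length minus one in cr[1]. */
    static void pack_rsa_cr(uint32_t cr[2], uint32_t mod_size,
                            uint32_t input_len)
    {
        cr[0] = mod_size << REQ1_RSA_MOD_SIZE_SHIFT;
        cr[1] = input_len - 1;
    }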
ccp-dev.h
551 struct ccp_rsa_op rsa; member
657 int (*rsa)(struct ccp_op *); member
ccp-dev-v5.c
119 } rsa; member
149 #define CCP_RSA_SIZE(p) ((p)->rsa.size)
468 CCP_RSA_SIZE(&function) = (op->u.rsa.mod_size + 7) >> 3; in ccp5_perform_rsa()
471 CCP5_CMD_LEN(&desc) = op->u.rsa.input_len; in ccp5_perform_rsa()
1103 .rsa = ccp5_perform_rsa,
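
Unlike v3, the v5 path sizes the RSA function field in bytes, using the usual bits-to-bytes round-up: (mod_size + 7) >> 3. For reference:

    /* Bits-to-bytes ceiling as used in ccp5_perform_rsa():
     * e.g. 2048 bits -> 256 bytes, 2049 bits -> 257 bytes. */
    static inline unsigned int bits_to_bytes(unsigned int bits)
    {
        return (bits + 7) >> 3;
    }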
ccp-crypto.h
267 struct ccp_rsa_ctx rsa; member
/drivers/net/ethernet/intel/ixgbe/
ixgbe_ipsec.c
396 struct rx_sa *rsa; in ixgbe_ipsec_find_rx_state() local
400 hash_for_each_possible_rcu(ipsec->rx_sa_list, rsa, hlist, in ixgbe_ipsec_find_rx_state()
402 if (rsa->mode & IXGBE_RXTXMOD_VF) in ixgbe_ipsec_find_rx_state()
404 if (spi == rsa->xs->id.spi && in ixgbe_ipsec_find_rx_state()
405 ((ip4 && *daddr == rsa->xs->id.daddr.a4) || in ixgbe_ipsec_find_rx_state()
406 (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6, in ixgbe_ipsec_find_rx_state()
407 sizeof(rsa->xs->id.daddr.a6)))) && in ixgbe_ipsec_find_rx_state()
408 proto == rsa->xs->id.proto) { in ixgbe_ipsec_find_rx_state()
409 ret = rsa->xs; in ixgbe_ipsec_find_rx_state()
589 struct rx_sa rsa; in ixgbe_ipsec_add_sa() local
[all …]
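
ixgbe_ipsec_find_rx_state() walks a hash bucket under RCU, skips VF-owned entries, and accepts a candidate only when the SPI, the protocol, and the IPv4 or IPv6 destination address all agree. A simplified user-space version of the match predicate (the struct is a hypothetical flat view of the rsa->xs->id fields compared):

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Hypothetical flat view of the identity fields the driver compares. */
    struct sa_id {
        uint32_t spi;
        uint8_t  proto;
        uint32_t daddr4;
        uint8_t  daddr6[16];
    };

    /* Mirrors the match in ixgbe_ipsec_find_rx_state(). */
    static bool rx_sa_matches(const struct sa_id *id, uint32_t spi,
                              const void *daddr, bool ip4, uint8_t proto)
    {
        if (spi != id->spi || proto != id->proto)
            return false;
        if (ip4)
            return memcmp(daddr, &id->daddr4, sizeof(id->daddr4)) == 0;
        return memcmp(daddr, id->daddr6, sizeof(id->daddr6)) == 0;
    }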
/drivers/net/ethernet/intel/ixgbevf/
ipsec.c
183 struct rx_sa *rsa; in ixgbevf_ipsec_find_rx_state() local
186 hash_for_each_possible_rcu(ipsec->rx_sa_list, rsa, hlist, in ixgbevf_ipsec_find_rx_state()
188 if (spi == rsa->xs->id.spi && in ixgbevf_ipsec_find_rx_state()
189 ((ip4 && *daddr == rsa->xs->id.daddr.a4) || in ixgbevf_ipsec_find_rx_state()
190 (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6, in ixgbevf_ipsec_find_rx_state()
191 sizeof(rsa->xs->id.daddr.a6)))) && in ixgbevf_ipsec_find_rx_state()
192 proto == rsa->xs->id.proto) { in ixgbevf_ipsec_find_rx_state()
193 ret = rsa->xs; in ixgbevf_ipsec_find_rx_state()
284 struct rx_sa rsa; in ixgbevf_ipsec_add_sa() local
299 memset(&rsa, 0, sizeof(rsa)); in ixgbevf_ipsec_add_sa()
[all …]
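
The VF driver's lookup is the same predicate as the PF version above, minus the VF-ownership check. Its add path stages the new entry in a zeroed stack variable before installing it, so fields it never sets cannot carry stack garbage into the SA table. The shape of that idiom, with a hypothetical cut-down struct:

    #include <string.h>

    struct rx_sa_example { unsigned int spi; unsigned char key[16]; };

    /* As in ixgbevf_ipsec_add_sa(): zero the staging struct first,
     * then fill only the fields that apply before storing it. */
    static void stage_rx_sa(struct rx_sa_example *out, unsigned int spi)
    {
        struct rx_sa_example rsa;

        memset(&rsa, 0, sizeof(rsa));
        rsa.spi = spi;
        *out = rsa;
    }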
/drivers/crypto/qat/qat_common/
qat_asym_algs.c
117 struct qat_rsa_input_params rsa; member
121 struct qat_rsa_output_params rsa; member
130 struct qat_rsa_ctx *rsa; member
134 struct akcipher_request *rsa; member
516 struct akcipher_request *areq = req->areq.rsa; in qat_rsa_cb()
517 struct device *dev = &GET_DEV(req->ctx.rsa->inst->accel_dev); in qat_rsa_cb()
523 dma_unmap_single(dev, req->in.rsa.enc.m, req->ctx.rsa->key_sz, in qat_rsa_cb()
528 areq->dst_len = req->ctx.rsa->key_sz; in qat_rsa_cb()
529 dma_unmap_single(dev, req->out.rsa.enc.c, req->ctx.rsa->key_sz, in qat_rsa_cb()
677 qat_req->ctx.rsa = ctx; in qat_rsa_enc()
[all …]
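
qat_rsa_cb() is the completion side of an asynchronous request: it unmaps the per-request input and output DMA buffers and reports the full key size as the output length. A user-space analogue of that ownership pattern, with free() standing in for dma_unmap_single():

    #include <stdlib.h>

    struct rsa_req_example {
        void  *in_buf, *out_buf;   /* stand-ins for the DMA mappings */
        size_t key_sz;
        size_t dst_len;
    };

    /* Shape of qat_rsa_cb(): the callback tears down the mappings it
     * owns and sets dst_len to the key size before completing. */
    static void rsa_cb(struct rsa_req_example *req)
    {
        free(req->in_buf);         /* dma_unmap_single(..., enc.m, ...) */
        free(req->out_buf);        /* dma_unmap_single(..., enc.c, ...) */
        req->dst_len = req->key_sz;
    }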
/drivers/crypto/hisilicon/hpre/
hpre_crypto.c
124 struct hpre_rsa_ctx rsa; member
139 struct akcipher_request *rsa; member
457 areq = req->areq.rsa; in hpre_rsa_cb()
522 h_req->areq.rsa = akreq; in hpre_msg_request_set()
793 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_enc()
799 if (unlikely(!ctx->rsa.pubkey)) in hpre_rsa_enc()
807 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey); in hpre_rsa_enc()
841 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_dec()
847 if (unlikely(!ctx->rsa.prikey)) in hpre_rsa_dec()
855 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey); in hpre_rsa_dec()
[all …]
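
hpre falls back to a software akcipher transform when the hardware context has no usable key: both hpre_rsa_enc() and hpre_rsa_dec() re-target the request at ctx->rsa.soft_tfm in that case. The dispatch shape, with hypothetical function-pointer stand-ins for the two transforms:

    typedef int (*rsa_op_fn)(void *req);

    struct rsa_ctx_example {
        rsa_op_fn    hw_op, soft_op;   /* soft_op: software fallback */
        const void  *pubkey;           /* NULL: key not loaded in HW */
    };

    /* Shape of hpre_rsa_enc(): use hardware when the key is present,
     * otherwise hand the request to the software transform. */
    static int rsa_enc(struct rsa_ctx_example *ctx, void *req)
    {
        if (!ctx->pubkey)
            return ctx->soft_op(req);
        return ctx->hw_op(req);
    }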
/drivers/gpu/drm/i915/gt/uc/
intel_guc_fw.c
49 u32 rsa[UOS_RSA_SCRATCH_COUNT]; in guc_xfer_rsa_mmio() local
53 copied = intel_uc_fw_copy_rsa(guc_fw, rsa, sizeof(rsa)); in guc_xfer_rsa_mmio()
54 if (copied < sizeof(rsa)) in guc_xfer_rsa_mmio()
58 intel_uncore_write(uncore, UOS_RSA_SCRATCH(i), rsa[i]); in guc_xfer_rsa_mmio()
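
guc_xfer_rsa_mmio() stages the firmware's RSA signature in a fixed-size local array, rejects a short copy, and then writes one 32-bit word per scratch register. A sketch of that flow (the register count and the writer callback are assumptions, not the i915 API):

    #include <stdint.h>
    #include <string.h>

    #define RSA_SCRATCH_WORDS 64            /* assumed register count */

    /* Mirrors guc_xfer_rsa_mmio(): copy, verify length, write words. */
    static int xfer_rsa(const void *sig, size_t sig_len,
                        void (*write_reg)(unsigned int idx, uint32_t val))
    {
        uint32_t rsa[RSA_SCRATCH_WORDS];
        unsigned int i;

        if (sig_len < sizeof(rsa))
            return -1;                      /* truncated signature */
        memcpy(rsa, sig, sizeof(rsa));
        for (i = 0; i < RSA_SCRATCH_WORDS; i++)
            write_reg(i, rsa[i]);
        return 0;
    }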
/drivers/crypto/virtio/
virtio_crypto_akcipher_algs.c
423 para.u.rsa.padding_algo = cpu_to_le32(padding_algo); in virtio_crypto_rsa_set_key()
424 para.u.rsa.hash_algo = cpu_to_le32(hash_algo); in virtio_crypto_rsa_set_key()
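
virtio control structures are little-endian on the wire regardless of host byte order, hence the cpu_to_le32() conversions on the padding and hash algorithm fields. A user-space stand-in for that conversion:

    #include <stdint.h>

    /* Equivalent of cpu_to_le32() for this sketch: byte-swap only on
     * big-endian hosts so the stored value is always little-endian. */
    static uint32_t to_le32(uint32_t v)
    {
    #if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
        return __builtin_bswap32(v);
    #else
        return v;
    #endif
    }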