Lines Matching refs:nx_ctx

27 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); in nx_crypto_ctx_sha256_init() local
34 nx_ctx_init(nx_ctx, HCOP_FC_SHA); in nx_crypto_ctx_sha256_init()
36 nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256]; in nx_crypto_ctx_sha256_init()
38 NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256); in nx_crypto_ctx_sha256_init()
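
The four matches above (file lines 27-38) all come from the context-setup path; the file being indexed appears to be the Linux NX (Nest accelerator) SHA-256 driver. A minimal sketch of how they might fit together follows. The function signature, return value, and anything between the matches are assumptions, since this listing only shows the lines that reference nx_ctx.

/* Sketch only (reconstructed around the matches at file lines 27-38,
 * not verbatim kernel source): setting up an NX crypto context for SHA-256. */
static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
{
	/* line 27: the NX context lives in the tfm's private area */
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);

	/* line 34: bind the context to the SHA coprocessor function code */
	nx_ctx_init(nx_ctx, HCOP_FC_SHA);

	/* line 36: point the active properties at the SHA-256 set */
	nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256];

	/* line 38: record the digest size in the CSB/CPB control block */
	NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);

	return 0;
}
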
65 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_sha256_update() local
66 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; in nx_sha256_update()
75 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_sha256_update()
92 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in nx_sha256_update()
95 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_sha256_update()
98 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_sha256_update()
100 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_update()
109 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update()
122 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
140 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
153 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { in nx_sha256_update()
158 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_sha256_update()
162 atomic_inc(&(nx_ctx->stats->sha256_ops)); in nx_sha256_update()
177 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_sha256_update()
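
The update-path matches (file lines 65-177) show the per-call pattern around nx_ctx: take the context lock, cap the scatter/gather list size, build the output and input lists, issue the synchronous hcall, bump the stats counter, and drop the lock. A hedged skeleton is sketched below; the scatter/gather building loop, error handling, and all locals not visible in the matches are assumptions, and the clamp shown at lines 92-95 is reproduced in simplified, single-call form.

/* Sketch only (skeleton reconstructed around the matches at file lines
 * 65-177): one nx_sha256_update() call. Elided pieces are marked. */
static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); /* line 65 */
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;   /* line 66 */
	unsigned long irq_flags;
	u64 max_sg_len;
	int rc = 0;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);		/* line 75 */

	/* lines 92-95: bound the SG list by entry count and by byte length
	 * (simplified to a single clamp here) */
	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
			   nx_ctx->ap->databytelen / NX_PAGE_SIZE);

	/* lines 98-140: build the output SG over sctx->state, walk the input
	 * data into nx_ctx->in_sg, and store both lengths into
	 * nx_ctx->op.inlen / nx_ctx->op.outlen (loop elided) */

	/* line 153: nothing to hand to the coprocessor if either list is empty */
	if (!nx_ctx->op.inlen || !nx_ctx->op.outlen)
		goto out;

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);		/* line 158 */
	if (!rc)
		atomic_inc(&(nx_ctx->stats->sha256_ops));	/* line 162 */
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);	/* line 177 */
	return rc;
}
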
184 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_sha256_final() local
185 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; in nx_sha256_final()
192 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_sha256_final()
194 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in nx_sha256_final()
197 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_sha256_final()
215 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
224 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len); in nx_sha256_final()
231 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
232 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_final()
233 if (!nx_ctx->op.outlen) { in nx_sha256_final()
238 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_sha256_final()
242 atomic_inc(&(nx_ctx->stats->sha256_ops)); in nx_sha256_final()
244 atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes)); in nx_sha256_final()
247 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_sha256_final()
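
The final-path matches (file lines 184-247) mirror the update path, except that the input SG list covers the buffered partial block in sctx->buf and the output SG list points straight at the caller's digest buffer. The sketch below keeps the listing's own expressions for the op lengths; the buffered-length calculation, local declarations, and error handling are assumptions.

/* Sketch only (skeleton reconstructed around the matches at file lines
 * 184-247): finishing the hash and emitting the digest. */
static int nx_sha256_final(struct shash_desc *desc, u8 *out)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); /* line 184 */
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;   /* line 185 */
	struct sha256_state *sctx = shash_desc_ctx(desc);	/* assumed */
	struct nx_sg *in_sg, *out_sg;
	unsigned long irq_flags;
	u64 max_sg_len;
	u32 buf_len, len;
	int rc = 0;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);		/* line 192 */

	/* lines 194-197: same SG-list clamp as the update path (simplified) */
	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
			   nx_ctx->ap->databytelen / NX_PAGE_SIZE);

	/* line 215: feed the buffered partial block as input; buf_len is an
	 * assumed "bytes still buffered" value, not visible in the listing */
	buf_len = sctx->count % SHA256_BLOCK_SIZE;
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,
				 &buf_len, max_sg_len);

	/* line 224: the digest is written straight into the caller's buffer */
	len = SHA256_DIGEST_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);

	/* lines 231-232: op lengths exactly as the listing shows them */
	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
	if (!nx_ctx->op.outlen)				/* line 233 */
		goto out;

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);		/* line 238 */
	if (!rc) {
		atomic_inc(&(nx_ctx->stats->sha256_ops));	/* line 242 */
		atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes)); /* line 244 */
	}
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);	/* line 247 */
	return rc;
}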