Lines Matching refs:nx_ctx (cross-reference hits for the nx_ctx context pointer; the enclosing functions nx_crypto_ctx_sha256_init(), nx_sha256_update(), and nx_sha256_final() place these in the Linux kernel's NX coprocessor SHA-256 driver, drivers/crypto/nx/nx-sha256.c)
22 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); in nx_crypto_ctx_sha256_init() local
29 nx_ctx_init(nx_ctx, HCOP_FC_SHA); in nx_crypto_ctx_sha256_init()
31 nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256]; in nx_crypto_ctx_sha256_init()
33 NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256); in nx_crypto_ctx_sha256_init()
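Read together, the three hits above cover the whole context setup: nx_ctx_init() binds the context to the SHA coprocessor function code, nx_ctx->ap selects the SHA-256 property set consulted later for scatter/gather and byte-length limits, and NX_CPB_SET_DIGEST_SIZE() records the digest size in the coprocessor control block. A minimal reconstruction assembled from just these matching lines follows; non-matching lines of the real function are omitted, and the include list is a best guess for this driver:

#include <crypto/internal/hash.h>  /* struct shash_desc, shash_desc_ctx() */
#include <crypto/sha2.h>           /* sha256_state, SHA256_* (crypto/sha.h on older trees) */
#include "nx_csbcpb.h"             /* csbcpb layout, NX_CPB_SET_DIGEST_SIZE() */
#include "nx.h"                    /* nx_crypto_ctx, nx_build_sg_list(), nx_hcall_sync() */

static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);

        /* bind this context to the SHA coprocessor function code */
        nx_ctx_init(nx_ctx, HCOP_FC_SHA);

        /* select the SHA-256 property set; update/final read their
         * sg and byte-length limits from nx_ctx->ap */
        nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256];

        /* record the digest size in the coprocessor control block */
        NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);

        return 0;
}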
60 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_sha256_update() local
61 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; in nx_sha256_update()
70 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_sha256_update()
87 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in nx_sha256_update()
90 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_sha256_update()
93 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_sha256_update()
95 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_update()
104 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update()
117 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
135 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
148 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { in nx_sha256_update()
153 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_sha256_update()
157 atomic_inc(&(nx_ctx->stats->sha256_ops)); in nx_sha256_update()
172 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_sha256_update()
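The update hits read as one pattern: serialize on nx_ctx->lock, clamp the scatter/gather list length to both the per-algorithm limits in nx_ctx->ap and the device limits, point the output sg list at the running digest in sctx->state, point the input sg list at the caller's data, then issue a synchronous hypervisor call and bump the op counter. The sketch below condenses that flow to a single pass, and is a reading aid, not the verbatim function: the nx_driver.of.max_sg_len term sits on continuation lines that did not match refs:nx_ctx and is reconstructed from the upstream driver, and the real function loops block by block, folding in any buffered partial block first (the hits at lines 104-135).

static int nx_sha256_update_sketch(struct shash_desc *desc, const u8 *data,
                                   unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
        struct nx_sg *in_sg, *out_sg;
        unsigned long irq_flags;
        unsigned int data_len;
        u32 max_sg_len;
        int rc = 0;

        spin_lock_irqsave(&nx_ctx->lock, irq_flags);

        /* clamp to the algorithm's sg limit and the device's data limit */
        max_sg_len = min_t(u64, nx_ctx->ap->sglen,
                           nx_driver.of.max_sg_len / sizeof(struct nx_sg));
        max_sg_len = min_t(u64, max_sg_len,
                           nx_ctx->ap->databytelen / NX_PAGE_SIZE);

        /* the running digest in sctx->state is the operation's output */
        data_len = SHA256_DIGEST_SIZE;
        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
                                  &data_len, max_sg_len);
        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

        /* single pass shown; intermediate passes must cover whole
         * SHA256_BLOCK_SIZE multiples, which the real loop enforces */
        data_len = len;
        in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)data,
                                 &data_len, max_sg_len);
        /* start - end is negative on purpose: this driver flags sg-list
         * (as opposed to linear) operands to the hypervisor by passing
         * negative lengths */
        nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);

        if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
                rc = -EINVAL;
                goto out;
        }

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
        if (rc)
                goto out;

        atomic_inc(&(nx_ctx->stats->sha256_ops));
out:
        spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
        return rc;
}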
179 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_sha256_final() local
180 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; in nx_sha256_final()
187 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_sha256_final()
189 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in nx_sha256_final()
192 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_sha256_final()
210 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
219 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len); in nx_sha256_final()
226 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
227 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_final()
228 if (!nx_ctx->op.outlen) { in nx_sha256_final()
233 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_sha256_final()
237 atomic_inc(&(nx_ctx->stats->sha256_ops)); in nx_sha256_final()
239 atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes)); in nx_sha256_final()
242 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_sha256_final()
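The final hits mirror the update pattern, with two differences visible in the lines above: the input sg list covers only the buffered tail in sctx->buf, and the output sg list points at the caller's digest buffer, so the finished hash lands directly in out; on success both the operation counter and the byte counter are bumped. A sketch under the same assumptions as before (the _sketch name is illustrative, and the tail-length computation sits on a non-matching line and is reconstructed):

static int nx_sha256_final_sketch(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
        struct nx_sg *in_sg, *out_sg;
        unsigned long irq_flags;
        unsigned int len;
        u32 max_sg_len;
        int rc = 0;

        spin_lock_irqsave(&nx_ctx->lock, irq_flags);

        max_sg_len = min_t(u64, nx_ctx->ap->sglen,
                           nx_driver.of.max_sg_len / sizeof(struct nx_sg));
        max_sg_len = min_t(u64, max_sg_len,
                           nx_ctx->ap->databytelen / NX_PAGE_SIZE);

        /* hash the buffered partial block as the last piece of input */
        len = sctx->count & (SHA256_BLOCK_SIZE - 1);
        in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buf,
                                 &len, max_sg_len);

        /* the finished digest goes straight into the caller's buffer */
        len = SHA256_DIGEST_SIZE;
        out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);

        /* negative on purpose, as in update: sg-list operands are
         * signalled to the hypervisor via negative lengths */
        nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
        if (!nx_ctx->op.outlen) {
                rc = -EINVAL;
                goto out;
        }

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
        if (rc)
                goto out;

        atomic_inc(&(nx_ctx->stats->sha256_ops));
        atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes));
out:
        spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
        return rc;
}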