Lines Matching refs:nx_ctx
32 struct nx_crypto_ctx *nx_ctx = crypto_shash_ctx(desc); in nx_xcbc_set_key() local
33 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_set_key()
37 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; in nx_xcbc_set_key()
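Read together, the nx_xcbc_set_key() matches above show the driver context being pulled out of the shash handle and the AES-128 properties slot being selected. A minimal sketch of how those lines plausibly fit together; the key-length guard and the key copy into the coprocessor parameter block are assumptions, not part of the matches:

        struct nx_crypto_ctx *nx_ctx = crypto_shash_ctx(desc);
        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;

        if (key_len != AES_KEYSIZE_128)         /* assumed guard */
                return -EINVAL;

        /* point the per-algorithm limits at the AES-128 entry */
        nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];

        /* assumed: the key lands in the CSB/CPB for the coprocessor */
        memcpy(csbcpb->cpb.aes_xcbc.key, in_key, key_len);
        return 0;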
61 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_empty() local
62 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_empty()
81 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
82 nx_ctx->ap->sglen); in nx_xcbc_empty()
87 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len, in nx_xcbc_empty()
88 nx_ctx->ap->sglen); in nx_xcbc_empty()
93 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
94 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
96 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_empty()
99 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_empty()
108 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
109 nx_ctx->ap->sglen); in nx_xcbc_empty()
115 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_empty()
116 nx_ctx->ap->sglen); in nx_xcbc_empty()
121 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
122 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
124 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_empty()
127 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_empty()
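The nx_xcbc_empty() matches repeat the driver's core request sequence twice: build an NX scatter/gather list for the input and the output, record the consumed list sizes in nx_ctx->op, issue the synchronous hypervisor call, and bump the AES op counter. A hedged sketch of one pass; the work buffer and error label are illustrative, and the subtraction order is kept exactly as in the matches, where the resulting negative byte counts are the driver's way of flagging scatter/gather arguments to the hypervisor:

        u8 keys[2][AES_BLOCK_SIZE];             /* illustrative work buffer */
        struct nx_sg *in_sg, *out_sg;
        unsigned int len = sizeof(keys);
        int rc;

        in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len,
                                 nx_ctx->ap->sglen);

        /* reset: nx_build_sg_list() updates *len to the bytes it mapped */
        len = sizeof(keys);
        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len,
                                  nx_ctx->ap->sglen);

        /* negative lengths mark the operands as sg lists, not flat buffers */
        nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
        if (rc)
                goto out;                       /* illustrative label */
        atomic_inc(&(nx_ctx->stats->aes_ops));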
140 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); in nx_crypto_ctx_aes_xcbc_init2() local
141 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_crypto_ctx_aes_xcbc_init2()
148 nx_ctx_init(nx_ctx, HCOP_FC_AES); in nx_crypto_ctx_aes_xcbc_init2()
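The nx_crypto_ctx_aes_xcbc_init2() matches show the one-time context setup: the driver context comes from the tfm and nx_ctx_init() is called with the AES function code. A short sketch; the mode selection in the coprocessor block is an assumption added for context:

        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;

        /* reset the shared NX request state for AES-class operations */
        nx_ctx_init(nx_ctx, HCOP_FC_AES);

        /* assumed: select XCBC-MAC mode in the coprocessor block header */
        csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC;
        return 0;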
170 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_update() local
171 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_update()
180 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_xcbc_update()
195 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
197 nx_ctx->ap->sglen); in nx_xcbc_update()
199 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_xcbc_update()
202 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_xcbc_update()
203 &len, nx_ctx->ap->sglen); in nx_xcbc_update()
210 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_update()
230 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
251 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
263 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { in nx_xcbc_update()
268 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_update()
272 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_update()
280 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
288 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_xcbc_update()
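The nx_xcbc_update() matches outline the per-update skeleton: everything runs under the context spinlock, the scatter/gather lists are rebuilt for each chunk, the computed op lengths are sanity-checked before the hypervisor call, and each pass restarts from the head of the in_sg area. A hedged skeleton with the chunking arithmetic elided:

        struct nx_sg *in_sg = nx_ctx->in_sg;
        unsigned long irq_flags;
        int rc;

        spin_lock_irqsave(&nx_ctx->lock, irq_flags);

        do {
                /* ... rebuild in_sg/out_sg for the next block-sized chunk ... */

                /* never call the hypervisor with an empty sg list */
                if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
                        rc = -EINVAL;
                        goto out;
                }

                rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
                if (rc)
                        goto out;

                atomic_inc(&(nx_ctx->stats->aes_ops));

                /* next chunk starts over at the head of the sg area */
                in_sg = nx_ctx->in_sg;
        } while (0 /* assumed: while full blocks remain */);

out:
        spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
        return rc;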
295 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_final() local
296 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_final()
302 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_xcbc_final()
324 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer, in nx_xcbc_final()
325 &len, nx_ctx->ap->sglen); in nx_xcbc_final()
333 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_final()
334 nx_ctx->ap->sglen); in nx_xcbc_final()
341 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_final()
342 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_final()
344 if (!nx_ctx->op.outlen) { in nx_xcbc_final()
349 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_final()
353 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_final()
357 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_xcbc_final()
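The nx_xcbc_final() matches follow the same locked sequence, with one detail worth noting: the output scatter/gather list is built directly over the caller's digest buffer ('out' in the shash final signature), and the hypervisor call is refused if that list came out empty. A compressed sketch of the tail end; handling of the buffered data before this point is elided:

        len = AES_BLOCK_SIZE;
        out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,
                                  nx_ctx->ap->sglen);

        nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

        /* refuse to start the operation if no output entries were built */
        if (!nx_ctx->op.outlen) {
                rc = -EINVAL;
                goto out;
        }

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
        if (rc)
                goto out;
        atomic_inc(&(nx_ctx->stats->aes_ops));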