Lines matching refs: nx_ctx
40 int nx_hcall_sync(struct nx_crypto_ctx *nx_ctx, in nx_hcall_sync() argument
47 atomic_inc(&(nx_ctx->stats->sync_ops)); in nx_hcall_sync()
56 atomic_inc(&(nx_ctx->stats->errors)); in nx_hcall_sync()
57 atomic_set(&(nx_ctx->stats->last_error), op->hcall_err); in nx_hcall_sync()
58 atomic_set(&(nx_ctx->stats->last_error_pid), current->pid); in nx_hcall_sync()
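The first group of matches is in nx_hcall_sync(), which issues a synchronous co-processor hypervisor call and updates the per-context statistics on success and failure. These lines appear to come from the NX (Nest Accelerator) driver in the Linux kernel (drivers/crypto/nx/nx.c). Below is a minimal sketch of how the matched lines likely fit together; the struct vio_pfo_op and may_sleep parameters, the nx_driver.viodev device, the retry budget, and the vio_h_cop_sync() busy-retry loop are assumptions, and only the nx_ctx references are taken from the listing.

int nx_hcall_sync(struct nx_crypto_ctx *nx_ctx,
                  struct vio_pfo_op *op, u32 may_sleep)   /* assumed params */
{
        int rc;
        int retries = 5;                                  /* assumed budget */
        struct vio_dev *viodev = nx_driver.viodev;        /* assumed */

        atomic_inc(&(nx_ctx->stats->sync_ops));

        /* assumed: retry the hypervisor call while the unit reports busy */
        do {
                rc = vio_h_cop_sync(viodev, op);
        } while (rc == -EBUSY && !may_sleep && retries--);

        if (rc) {
                atomic_inc(&(nx_ctx->stats->errors));
                atomic_set(&(nx_ctx->stats->last_error), op->hcall_err);
                atomic_set(&(nx_ctx->stats->last_error_pid), current->pid);
        }

        return rc;
}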
258 int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx, in nx_build_sg_lists() argument
268 struct nx_sg *nx_insg = nx_ctx->in_sg; in nx_build_sg_lists()
269 struct nx_sg *nx_outsg = nx_ctx->out_sg; in nx_build_sg_lists()
272 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in nx_build_sg_lists()
275 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_build_sg_lists()
280 *nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen); in nx_build_sg_lists()
293 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); in nx_build_sg_lists()
294 nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes); in nx_build_sg_lists()
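nx_build_sg_lists() converts the caller's source and destination scatterlists into the hardware's nx_sg format and stores the resulting lengths in the operation block. A sketch under stated assumptions: the iv/dst/src/nbytes/offset/oiv parameters, the nx_driver.of.max_sg_len clamp, the nx_walk_and_build() calls, and the AES_BLOCK_SIZE delta handling are not visible in the matches and are filled in here as assumptions.

int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx,
                      const u8 *iv,                       /* assumed params */
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int *nbytes,
                      unsigned int offset,
                      u8 *oiv)
{
        unsigned int delta = 0;
        unsigned int total = *nbytes;
        struct nx_sg *nx_insg = nx_ctx->in_sg;
        struct nx_sg *nx_outsg = nx_ctx->out_sg;
        unsigned int max_sg_len;

        /* clamp the scatter/gather length to what the accelerator accepts */
        max_sg_len = min_t(u64, nx_ctx->ap->sglen,
                           nx_driver.of.max_sg_len / sizeof(struct nx_sg));
        max_sg_len = min_t(u64, max_sg_len,
                           nx_ctx->ap->databytelen / NX_PAGE_SIZE);

        if (oiv)
                memcpy(oiv, iv, AES_BLOCK_SIZE);          /* assumed */

        *nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen);

        /* assumed: walk the kernel scatterlists into hardware nx_sg lists */
        nx_outsg = nx_walk_and_build(nx_outsg, max_sg_len, dst, offset, nbytes);
        nx_insg = nx_walk_and_build(nx_insg, max_sg_len, src, offset, nbytes);

        if (*nbytes < total)
                delta = *nbytes - (*nbytes & ~(AES_BLOCK_SIZE - 1));

        /* the trimmed lengths tell the hypervisor these are scatterlists */
        nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes);
        nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes);

        return 0;
}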
305 void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function) in nx_ctx_init() argument
307 spin_lock_init(&nx_ctx->lock); in nx_ctx_init()
308 memset(nx_ctx->kmem, 0, nx_ctx->kmem_len); in nx_ctx_init()
309 nx_ctx->csbcpb->csb.valid |= NX_CSB_VALID_BIT; in nx_ctx_init()
311 nx_ctx->op.flags = function; in nx_ctx_init()
312 nx_ctx->op.csbcpb = __pa(nx_ctx->csbcpb); in nx_ctx_init()
313 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
314 nx_ctx->op.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
316 if (nx_ctx->csbcpb_aead) { in nx_ctx_init()
317 nx_ctx->csbcpb_aead->csb.valid |= NX_CSB_VALID_BIT; in nx_ctx_init()
319 nx_ctx->op_aead.flags = function; in nx_ctx_init()
320 nx_ctx->op_aead.csbcpb = __pa(nx_ctx->csbcpb_aead); in nx_ctx_init()
321 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
322 nx_ctx->op_aead.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
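nx_ctx_init() is matched almost line for line, so it can be reassembled directly; only the braces, blank lines, and comments are added here. It resets the per-context state and points the operation blocks at the physical addresses of the CSB/CPB and scatter/gather areas.

void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function)
{
        spin_lock_init(&nx_ctx->lock);
        memset(nx_ctx->kmem, 0, nx_ctx->kmem_len);
        nx_ctx->csbcpb->csb.valid |= NX_CSB_VALID_BIT;

        nx_ctx->op.flags = function;
        nx_ctx->op.csbcpb = __pa(nx_ctx->csbcpb);
        nx_ctx->op.in = __pa(nx_ctx->in_sg);
        nx_ctx->op.out = __pa(nx_ctx->out_sg);

        /* AEAD modes carry a second CSB/CPB and operation block */
        if (nx_ctx->csbcpb_aead) {
                nx_ctx->csbcpb_aead->csb.valid |= NX_CSB_VALID_BIT;

                nx_ctx->op_aead.flags = function;
                nx_ctx->op_aead.csbcpb = __pa(nx_ctx->csbcpb_aead);
                nx_ctx->op_aead.in = __pa(nx_ctx->in_sg);
                nx_ctx->op_aead.out = __pa(nx_ctx->out_sg);
        }
}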
653 static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode) in nx_crypto_ctx_init() argument
663 nx_ctx->kmem_len = (5 * NX_PAGE_SIZE) + in nx_crypto_ctx_init()
666 nx_ctx->kmem_len = (4 * NX_PAGE_SIZE) + in nx_crypto_ctx_init()
669 nx_ctx->kmem = kmalloc(nx_ctx->kmem_len, GFP_KERNEL); in nx_crypto_ctx_init()
670 if (!nx_ctx->kmem) in nx_crypto_ctx_init()
674 nx_ctx->csbcpb = (struct nx_csbcpb *)(round_up((u64)nx_ctx->kmem, in nx_crypto_ctx_init()
676 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); in nx_crypto_ctx_init()
677 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
680 nx_ctx->csbcpb_aead = in nx_crypto_ctx_init()
681 (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg + in nx_crypto_ctx_init()
686 nx_ctx->stats = &nx_driver.stats; in nx_crypto_ctx_init()
687 memcpy(nx_ctx->props, nx_driver.of.ap[fc][mode], in nx_crypto_ctx_init()
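nx_crypto_ctx_init() sizes and allocates the context's kernel memory, carves page-aligned regions out of it for the CSB/CPB and the in/out scatter/gather lists, and wires up the shared stats and device-tree properties. A sketch assuming the mode check that selects the 5-page (AEAD) versus 4-page layout, the sizeof() addends, the memcpy length, and the return values, none of which are visible in the matches.

static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode)
{
        /* assumed: AEAD modes need an extra page for csbcpb_aead */
        if (mode == NX_MODE_AES_GCM || mode == NX_MODE_AES_CCM)
                nx_ctx->kmem_len = (5 * NX_PAGE_SIZE) +
                                   sizeof(struct nx_csbcpb);   /* assumed addend */
        else
                nx_ctx->kmem_len = (4 * NX_PAGE_SIZE) +
                                   sizeof(struct nx_csbcpb);   /* assumed addend */

        nx_ctx->kmem = kmalloc(nx_ctx->kmem_len, GFP_KERNEL);
        if (!nx_ctx->kmem)
                return -ENOMEM;

        /* the CSB/CPB and the SG lists each get a page-aligned region */
        nx_ctx->csbcpb = (struct nx_csbcpb *)(round_up((u64)nx_ctx->kmem,
                                                       (u64)NX_PAGE_SIZE));
        nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE);
        nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);

        if (mode == NX_MODE_AES_GCM || mode == NX_MODE_AES_CCM)  /* assumed */
                nx_ctx->csbcpb_aead =
                        (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg +
                                             NX_PAGE_SIZE);

        /* every context shares the driver-wide stats and copies its
         * algorithm properties out of the device-tree table */
        nx_ctx->stats = &nx_driver.stats;
        memcpy(nx_ctx->props, nx_driver.of.ap[fc][mode],
               sizeof(struct alg_props) * 3);                  /* assumed length */

        return 0;
}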
747 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); in nx_crypto_ctx_exit() local
749 kfree_sensitive(nx_ctx->kmem); in nx_crypto_ctx_exit()
750 nx_ctx->csbcpb = NULL; in nx_crypto_ctx_exit()
751 nx_ctx->csbcpb_aead = NULL; in nx_crypto_ctx_exit()
752 nx_ctx->in_sg = NULL; in nx_crypto_ctx_exit()
753 nx_ctx->out_sg = NULL; in nx_crypto_ctx_exit()
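nx_crypto_ctx_exit() tears the context back down: the single kmem allocation is freed with kfree_sensitive() so any key material is zeroed, and the pointers derived from it are cleared. Only the function signature is assumed here; the crypto_tfm_ctx() call implies a struct crypto_tfm parameter.

void nx_crypto_ctx_exit(struct crypto_tfm *tfm)       /* signature assumed */
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);

        /* zero and free the backing allocation; the remaining pointers
         * all pointed into it, so they are simply cleared */
        kfree_sensitive(nx_ctx->kmem);
        nx_ctx->csbcpb = NULL;
        nx_ctx->csbcpb_aead = NULL;
        nx_ctx->in_sg = NULL;
        nx_ctx->out_sg = NULL;
}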
763 struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm); in nx_crypto_ctx_aead_exit() local
765 kfree_sensitive(nx_ctx->kmem); in nx_crypto_ctx_aead_exit()
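The AEAD variant does the same cleanup for contexts reached through the AEAD API; crypto_aead_ctx() implies a struct crypto_aead parameter, so the signature below is an assumption.

void nx_crypto_ctx_aead_exit(struct crypto_aead *tfm) /* signature assumed */
{
        struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);

        kfree_sensitive(nx_ctx->kmem);
}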