Lines matching refs: nx_ctx (all hits fall in the Linux NX AES-CCM driver, drivers/crypto/nx/nx-aes-ccm.c)
27 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in ccm_aes_nx_set_key() local
28 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_aes_nx_set_key()
29 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; in ccm_aes_nx_set_key()
31 nx_ctx_init(nx_ctx, HCOP_FC_AES); in ccm_aes_nx_set_key()
37 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; in ccm_aes_nx_set_key()
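The five hits above (file lines 27-37) are the key-setup path: fetch the per-transform context, grab both co-processor request blocks (CCM for the cipher pass, CCA for the associated data), initialize the context for the AES function code, and pin the properties to AES-128. A minimal sketch of how they fit together, assuming the usual shape of an NX set_key handler; everything outside the quoted hits is reconstruction, not a quote:

    static int ccm_aes_nx_set_key(struct crypto_aead *tfm, const u8 *in_key,
                                  unsigned int key_len)
    {
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;           /* CCM block */
        struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; /* CCA block */

        nx_ctx_init(nx_ctx, HCOP_FC_AES);

        switch (key_len) {
        case AES_KEYSIZE_128:
            nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
            break;
        default:
            return -EINVAL;  /* assumed: NX CCM handles AES-128 only */
        }

        /* assumed: the key is loaded into both request blocks */
        memcpy(csbcpb->cpb.aes_ccm.key, in_key, key_len);
        memcpy(csbcpb_aead->cpb.aes_cca.key, in_key, key_len);

        return 0;
    }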
57 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in ccm4309_aes_nx_set_key() local
64 memcpy(nx_ctx->priv.ccm.nonce, in_key + key_len, 3); in ccm4309_aes_nx_set_key()
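Line 64 peels the trailing 3 bytes of the supplied key material off as the RFC 4309 salt (stored in nx_ctx->priv.ccm.nonce) before the remainder is handed on to the plain CCM set_key. A standalone illustration of that split; rfc4309_split_key and its buffers are hypothetical names, not the driver's:

    #include <stdint.h>
    #include <string.h>

    /* RFC 4309: the last 3 bytes of the keying material are a salt that
     * is stored aside (here: nonce), not part of the AES key itself. */
    static int rfc4309_split_key(const uint8_t *in_key, size_t key_len,
                                 uint8_t *aes_key, uint8_t nonce[3])
    {
        if (key_len < 3)
            return -1;
        key_len -= 3;                        /* drop the salt from the key */
        memcpy(nonce, in_key + key_len, 3);  /* mirrors the hit at line 64 */
        memcpy(aes_key, in_key, key_len);
        return 0;
    }

For AES-128 the caller therefore passes 19 bytes: a 16-byte key followed by the 3-byte salt.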
155 struct nx_crypto_ctx *nx_ctx, in generate_pat() argument
161 struct nx_sg *nx_insg = nx_ctx->in_sg; in generate_pat()
162 struct nx_sg *nx_outsg = nx_ctx->out_sg; in generate_pat()
186 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
191 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
192 b1 = nx_ctx->priv.ccm.iauth_tag; in generate_pat()
198 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
199 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; in generate_pat()
202 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
203 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; in generate_pat()
236 nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen); in generate_pat()
242 nx_ctx->ap->sglen); in generate_pat()
249 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
251 nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) * in generate_pat()
254 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_ENDE_ENCRYPT; in generate_pat()
255 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_INTERMEDIATE; in generate_pat()
257 result = nx_ctx->csbcpb->cpb.aes_ccm.out_pat_or_mac; in generate_pat()
259 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in generate_pat()
264 atomic_inc(&(nx_ctx->stats->aes_ops)); in generate_pat()
265 atomic64_add(assoclen, &nx_ctx->stats->aes_bytes); in generate_pat()
273 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in generate_pat()
276 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in generate_pat()
280 nx_ctx->ap->databytelen); in generate_pat()
282 nx_insg = nx_walk_and_build(nx_ctx->in_sg, in generate_pat()
283 nx_ctx->ap->sglen, in generate_pat()
288 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= in generate_pat()
291 NX_CPB_FDM(nx_ctx->csbcpb_aead) &= in generate_pat()
296 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
299 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; in generate_pat()
301 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead, in generate_pat()
306 memcpy(nx_ctx->csbcpb_aead->cpb.aes_cca.b0, in generate_pat()
307 nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0, in generate_pat()
310 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION; in generate_pat()
312 atomic_inc(&(nx_ctx->stats->aes_ops)); in generate_pat()
313 atomic64_add(assoclen, &nx_ctx->stats->aes_bytes); in generate_pat()
318 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; in generate_pat()
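generate_pat() prepares the CCM authentication prologue: with no associated data it writes B0 directly into in_pat_or_b0 (line 186); with a small amount of AAD it pairs B0 with an inline B1 held in priv.ccm.iauth_tag (lines 191-192); larger AAD goes through the separate CCA operation, walking the source scatterlist in databytelen-sized chunks with CONTINUATION set between hcalls (lines 273-313), and the resulting partial authentication tag lands in out_pat_or_b0. The B0 block itself is the standard RFC 3610 layout; a self-contained sketch of building it (ccm_build_b0 is a hypothetical helper, not from the driver):

    #include <stdint.h>
    #include <string.h>

    /* RFC 3610 B0: flags = 64*Adata + 8*M' + L', where M' = (M-2)/2
     * (M = tag length) and L' = L-1 (L = width of the length field). */
    static void ccm_build_b0(uint8_t b0[16], const uint8_t *nonce,
                             size_t nonce_len, size_t tag_len,
                             size_t assoc_len, uint64_t msg_len)
    {
        size_t L = 15 - nonce_len;  /* length-field width in bytes, 2..8 */
        size_t i;

        b0[0] = (uint8_t)((assoc_len ? 0x40 : 0x00) |  /* Adata flag */
                          (((tag_len - 2) / 2) << 3) | /* M' field   */
                          (L - 1));                    /* L' field   */
        memcpy(b0 + 1, nonce, nonce_len);

        for (i = 0; i < L; i++)     /* big-endian message length */
            b0[15 - i] = (uint8_t)(msg_len >> (8 * i));
    }

With the rfc4309 parameters this driver uses (an 11-byte nonce, hence a 4-byte length field), nonce_len is 11 and L is 4.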
330 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm_nx_decrypt() local
331 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_nx_decrypt()
334 struct nx_ccm_priv *priv = &nx_ctx->priv.ccm; in ccm_nx_decrypt()
339 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in ccm_nx_decrypt()
348 rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen, in ccm_nx_decrypt()
365 NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT; in ccm_nx_decrypt()
367 rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src, in ccm_nx_decrypt()
373 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in ccm_nx_decrypt()
390 atomic_inc(&(nx_ctx->stats->aes_ops)); in ccm_nx_decrypt()
392 &(nx_ctx->stats->aes_bytes)); in ccm_nx_decrypt()
400 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in ccm_nx_decrypt()
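ccm_nx_decrypt() serializes on the context lock, lets generate_pat() authenticate the AAD, clears the ENDE_ENCRYPT direction bit (line 365), and then drives the co-processor in a loop: each pass rebuilds the in/out scatter-gather lists for the next chunk and issues a synchronous hcall until all of nbytes is consumed. A hedged sketch of that loop, reconstructed around the quoted hits; the processed/to_process bookkeeping is an assumption:

    /* sketch only: chunked decrypt loop around the hits at 367-392 */
    do {
        u32 to_process = nbytes - processed;

        rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,
                               &to_process, processed + req->assoclen,
                               csbcpb->cpb.aes_ccm.iv_or_ctr);
        if (rc)
            goto out;

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
                           req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
        if (rc)
            goto out;

        /* per-chunk accounting, as in the hits at lines 390-392 */
        atomic_inc(&(nx_ctx->stats->aes_ops));
        atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
                     &(nx_ctx->stats->aes_bytes));

        processed += to_process;
    } while (processed < nbytes);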
408 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm_nx_encrypt() local
409 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_nx_encrypt()
416 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in ccm_nx_encrypt()
418 rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen, in ccm_nx_encrypt()
436 rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src, in ccm_nx_encrypt()
442 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in ccm_nx_encrypt()
459 atomic_inc(&(nx_ctx->stats->aes_ops)); in ccm_nx_encrypt()
461 &(nx_ctx->stats->aes_bytes)); in ccm_nx_encrypt()
473 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in ccm_nx_encrypt()
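ccm_nx_encrypt() (lines 408-473) mirrors the decrypt path nearly hit for hit; the differences are the direction bit and what happens to the MAC once the loop finishes. A sketch of those two endpoints, reconstructed rather than quoted:

    /* encrypt: set the direction bit (decrypt clears it, line 365) */
    NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;

    /* ... same chunked nx_hcall_sync() loop as in ccm_nx_decrypt() ... */

    /* encrypt: append the computed MAC after the ciphertext in req->dst
     * (scatterwalk_map_and_copy() is the kernel's SG copy helper) */
    scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac,
                             req->dst, nbytes + req->assoclen, authsize,
                             SCATTERWALK_TO_SG);

Decrypt, by contrast, compares out_pat_or_mac against the tag saved from req->src and returns -EBADMSG on a mismatch.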
479 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm4309_aes_nx_encrypt() local
484 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); in ccm4309_aes_nx_encrypt()
503 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm4309_aes_nx_decrypt() local
508 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); in ccm4309_aes_nx_decrypt()
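Both rfc4309 entry points assemble the same 16-byte counter-block prefix before delegating to the common encrypt/decrypt path: a flags byte of 3 (L' = 3, i.e. a 4-byte length field and 11-byte nonce), the 3-byte salt captured at set_key time, and the caller's 8-byte per-packet IV (lines 484 and 508). A standalone illustration; rfc4309_assemble_iv is a hypothetical name:

    #include <stdint.h>
    #include <string.h>

    /* RFC 4309 nonce assembly: flags | salt(3) | per-packet IV(8),
     * leaving 4 trailing bytes for the counter / message length. */
    static void rfc4309_assemble_iv(uint8_t iv[16], const uint8_t salt[3],
                                    const uint8_t packet_iv[8])
    {
        iv[0] = 3;                 /* L' = 3: 4-byte length field */
        memcpy(iv + 1, salt, 3);   /* mirrors lines 484 and 508 */
        memcpy(iv + 4, packet_iv, 8);
        memset(iv + 12, 0, 4);     /* counter bytes start zeroed */
    }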