Lines matching refs:nx_ctx in drivers/crypto/nx/nx-aes-ccm.c
27 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in ccm_aes_nx_set_key() local
28 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_aes_nx_set_key()
29 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; in ccm_aes_nx_set_key()
31 nx_ctx_init(nx_ctx, HCOP_FC_AES); in ccm_aes_nx_set_key()
37 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; in ccm_aes_nx_set_key()
57 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in ccm4309_aes_nx_set_key() local
64 memcpy(nx_ctx->priv.ccm.nonce, in_key + key_len, 3); in ccm4309_aes_nx_set_key()
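The two set_key entries above cache key material in the per-tfm context; the rfc4309 variant (line 64) peels the trailing 3-byte salt off the key blob, as RFC 4309 requires. A standalone sketch of that split, with illustrative names (my_ccm_ctx, my_ccm_setkey) rather than the driver's API:

	/* Illustrative sketch of the RFC 4309 key split at line 64 above:
	 * the last 3 bytes of the key blob are the implicit nonce (salt),
	 * the rest is the AES key. Not the kernel API. */
	#include <stdint.h>
	#include <string.h>

	struct my_ccm_ctx {
		uint8_t key[32];
		unsigned int key_len;
		uint8_t nonce[3];	/* RFC 4309 salt, carried in the key blob */
	};

	static int my_ccm_setkey(struct my_ccm_ctx *ctx,
				 const uint8_t *in_key, unsigned int key_len)
	{
		if (key_len < 16 + 3 || key_len > sizeof(ctx->key) + 3)
			return -1;	/* need AES key + 3-byte salt */

		key_len -= 3;		/* strip the trailing salt */
		memcpy(ctx->nonce, in_key + key_len, 3);
		memcpy(ctx->key, in_key, key_len);
		ctx->key_len = key_len;
		return 0;
	}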
158 struct nx_crypto_ctx *nx_ctx, in generate_pat() argument
164 struct nx_sg *nx_insg = nx_ctx->in_sg; in generate_pat()
165 struct nx_sg *nx_outsg = nx_ctx->out_sg; in generate_pat()
189 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
194 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
195 b1 = nx_ctx->priv.ccm.iauth_tag; in generate_pat()
201 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
202 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; in generate_pat()
205 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
206 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; in generate_pat()
239 nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen); in generate_pat()
245 nx_ctx->ap->sglen); in generate_pat()
252 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
254 nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) * in generate_pat()
257 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_ENDE_ENCRYPT; in generate_pat()
258 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_INTERMEDIATE; in generate_pat()
260 result = nx_ctx->csbcpb->cpb.aes_ccm.out_pat_or_mac; in generate_pat()
262 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in generate_pat()
267 atomic_inc(&(nx_ctx->stats->aes_ops)); in generate_pat()
268 atomic64_add(assoclen, &nx_ctx->stats->aes_bytes); in generate_pat()
276 max_sg_len = min_t(u64, nx_ctx->ap->sglen, in generate_pat()
279 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in generate_pat()
283 nx_ctx->ap->databytelen); in generate_pat()
285 nx_insg = nx_walk_and_build(nx_ctx->in_sg, in generate_pat()
286 nx_ctx->ap->sglen, in generate_pat()
291 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= in generate_pat()
294 NX_CPB_FDM(nx_ctx->csbcpb_aead) &= in generate_pat()
299 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
302 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; in generate_pat()
304 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead, in generate_pat()
309 memcpy(nx_ctx->csbcpb_aead->cpb.aes_cca.b0, in generate_pat()
310 nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0, in generate_pat()
313 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION; in generate_pat()
315 atomic_inc(&(nx_ctx->stats->aes_ops)); in generate_pat()
316 atomic64_add(assoclen, &nx_ctx->stats->aes_bytes); in generate_pat()
321 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; in generate_pat()
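generate_pat() assembles the CCM B0 block (flags, nonce, message length) and, when there is associated data, a B1 block, then runs them through the coprocessor to produce the partial authentication tag; the subtractions at lines 252-254 deliberately yield negative lengths, the driver's convention for telling the hypervisor that a parameter is a scatter/gather list rather than a linear buffer. The B0 layout itself comes from RFC 3610 section 2.2; a sketch of it, taken from the RFC rather than from the helper (which this listing does not show):

	/* CCM B0-block construction per RFC 3610, section 2.2. Layout from
	 * the RFC, not read off the driver. */
	#include <stdint.h>
	#include <string.h>

	static void build_b0(const uint8_t *nonce, unsigned int nonce_len,
			     unsigned int authsize, int have_adata,
			     uint64_t cryptlen, uint8_t b0[16])
	{
		unsigned int L = 15 - nonce_len;	/* width of the length field */
		unsigned int i;

		b0[0] = (have_adata ? 64 : 0)		/* Adata flag */
		      | 8 * ((authsize - 2) / 2)	/* M' = (M - 2) / 2 */
		      | (L - 1);			/* L' = L - 1 */
		memcpy(b0 + 1, nonce, nonce_len);

		for (i = 0; i < L; i++)			/* big-endian l(m) */
			b0[15 - i] = (uint8_t)(cryptlen >> (8 * i));
	}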
333 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm_nx_decrypt() local
334 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_nx_decrypt()
337 struct nx_ccm_priv *priv = &nx_ctx->priv.ccm; in ccm_nx_decrypt()
342 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in ccm_nx_decrypt()
351 rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen, in ccm_nx_decrypt()
368 NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT; in ccm_nx_decrypt()
370 rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src, in ccm_nx_decrypt()
376 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in ccm_nx_decrypt()
393 atomic_inc(&(nx_ctx->stats->aes_ops)); in ccm_nx_decrypt()
395 &(nx_ctx->stats->aes_bytes)); in ccm_nx_decrypt()
403 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in ccm_nx_decrypt()
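ccm_nx_decrypt() clears NX_FDM_ENDE_ENCRYPT to select decryption (line 368), builds the scatter/gather lists, and issues the hcall under the context spinlock, bumping the aes_ops/aes_bytes counters on success. The final step of any CCM decrypt, not visible in this listing, is comparing the recomputed MAC against the received one; a constant-time form of that check (in-kernel code would reach for crypto_memneq()):

	/* Constant-time tag check for the tail of a CCM decrypt. The listing
	 * does not show which comparison the driver uses; this is the shape
	 * such a check should take. */
	#include <stddef.h>
	#include <stdint.h>

	static int tag_matches(const uint8_t *computed, const uint8_t *received,
			       size_t authsize)
	{
		uint8_t diff = 0;
		size_t i;

		for (i = 0; i < authsize; i++)
			diff |= computed[i] ^ received[i];

		return diff == 0;	/* 1 on match, timing independent of contents */
	}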
411 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm_nx_encrypt() local
412 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in ccm_nx_encrypt()
419 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in ccm_nx_encrypt()
421 rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen, in ccm_nx_encrypt()
439 rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src, in ccm_nx_encrypt()
445 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in ccm_nx_encrypt()
462 atomic_inc(&(nx_ctx->stats->aes_ops)); in ccm_nx_encrypt()
464 &(nx_ctx->stats->aes_bytes)); in ccm_nx_encrypt()
476 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in ccm_nx_encrypt()
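ccm_nx_encrypt() mirrors the decrypt path. The NX_FDM_INTERMEDIATE and NX_FDM_CONTINUATION flags seen earlier exist because one coprocessor operation is capped at nx_ctx->ap->databytelen bytes, so larger requests are processed chunk by chunk and chained across hcalls. A compilable sketch of that loop shape, with stand-in names throughout (CHUNK_MAX, fake_hcall, and friends are not the driver's API):

	/* Shape of the chunked processing loop implied by the INTERMEDIATE and
	 * CONTINUATION flags in the listing. All names are stand-ins. */
	#include <stdint.h>

	#define CHUNK_MAX 4096		/* stand-in for ap->databytelen */

	struct fake_op { int continuation; };

	/* stand-in for one nx_hcall_sync() round trip */
	static int fake_hcall(struct fake_op *op, const uint8_t *buf,
			      unsigned int len)
	{
		(void)op; (void)buf; (void)len;
		return 0;
	}

	static int process_request(const uint8_t *buf, unsigned int nbytes)
	{
		struct fake_op op = { 0 };
		unsigned int processed = 0;

		while (processed < nbytes) {
			unsigned int to_process = nbytes - processed;
			int rc;

			if (to_process > CHUNK_MAX)
				to_process = CHUNK_MAX;

			rc = fake_hcall(&op, buf + processed, to_process);
			if (rc)
				return rc;

			op.continuation = 1;	/* chain the next chunk */
			processed += to_process;
		}
		return 0;
	}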
482 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm4309_aes_nx_encrypt() local
487 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); in ccm4309_aes_nx_encrypt()
506 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in ccm4309_aes_nx_decrypt() local
511 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); in ccm4309_aes_nx_decrypt()
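The rfc4309 wrappers rebuild the full CCM IV per request: byte 0 carries L' = 3 (a 4-byte length field, hence an 11-byte nonce), bytes 1-3 the salt cached at setkey time (the memcpy at lines 487 and 511), and bytes 4-11 the 8-byte per-request IV. Only the salt copy appears in this listing; the surrounding steps are inferred from RFC 4309, not read off the driver:

	/* RFC 4309 IV assembly around the memcpy at lines 487/511. The iv[0]
	 * and iv + 4 steps are inferred from the RFC, not from the listing. */
	#include <stdint.h>
	#include <string.h>

	static void rfc4309_build_iv(uint8_t iv[16], const uint8_t salt[3],
				     const uint8_t req_iv[8])
	{
		memset(iv, 0, 16);
		iv[0] = 3;			/* L' = L - 1 with L = 4 */
		memcpy(iv + 1, salt, 3);	/* implicit nonce cached at setkey */
		memcpy(iv + 4, req_iv, 8);	/* explicit per-request IV */
	}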