/drivers/crypto/nx/
  nx-aes-ccm.c
    133: static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize,   [in generate_b0(), argument]
    147: if (assoclen)   [in generate_b0()]
    158: unsigned int assoclen,   [in generate_pat(), argument]
    185: if (!assoclen) {   [in generate_pat()]
    187: } else if (assoclen <= 14) {   [in generate_pat()]
    193: iauth_len = assoclen;   [in generate_pat()]
    194: } else if (assoclen <= 65280) {   [in generate_pat()]
    208: rc = generate_b0(iv, assoclen, authsize, nbytes, b0);   [in generate_pat()]
    218: if (assoclen <= 65280) {   [in generate_pat()]
    219: *(u16 *)b1 = assoclen;   [in generate_pat()]
    [all …]
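The assoclen <= 65280 checks in generate_pat() mirror RFC 3610 (CCM): an associated-data length below 2^16 - 2^8 (65280) is encoded as a plain 2-byte length at the front of the first AAD block, and the Adata bit of the B0 flags byte is set whenever AAD is present. A minimal standalone sketch of that encoding (portable C, not the nx driver's code; ccm_b0_flags/ccm_encode_alen are illustrative names):

#include <stddef.h>
#include <stdint.h>

/* B0 flags byte per RFC 3610: Adata bit, M' = (M - 2) / 2, L' = L - 1,
 * where M is the tag size and L the length-field size in bytes. */
static uint8_t ccm_b0_flags(unsigned int assoclen, unsigned int M, unsigned int L)
{
    uint8_t flags = 0;

    if (assoclen)
        flags |= 1u << 6;               /* Adata: AAD follows B0 */
    flags |= (uint8_t)(((M - 2) / 2) << 3);
    flags |= (uint8_t)(L - 1);
    return flags;
}

/* Short-form AAD length encoding; returns bytes written to hdr. */
static size_t ccm_encode_alen(uint8_t hdr[2], unsigned int assoclen)
{
    if (assoclen == 0)
        return 0;                       /* no length field at all */
    if (assoclen < 0xff00) {            /* below 2^16 - 2^8: 2-byte form */
        hdr[0] = (uint8_t)(assoclen >> 8);
        hdr[1] = (uint8_t)assoclen;
        return 2;
    }
    return 0;  /* 0xfffe/0xffff-prefixed long forms omitted here */
}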
  nx-aes-gcm.c
    102: unsigned int assoclen)   [in nx_gca(), argument]
    108: unsigned int nbytes = assoclen;   [in nx_gca()]
    159: atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));   [in nx_gca()]
    169: static int gmac(struct aead_request *req, const u8 *iv, unsigned int assoclen)   [in gmac(), argument]
    176: unsigned int nbytes = assoclen;   [in gmac()]
    231: atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));   [in gmac()]
    308: unsigned int assoclen)   [in gcm_aes_nx_crypt(), argument]
    325: if (assoclen == 0)   [in gcm_aes_nx_crypt()]
    328: rc = gmac(req, rctx->iv, assoclen);   [in gcm_aes_nx_crypt()]
    336: csbcpb->cpb.aes_gcm.bit_length_aad = assoclen * 8;   [in gcm_aes_nx_crypt()]
    [all …]
/drivers/crypto/
  omap-aes-gcm.c
    64: dd->aead_req->assoclen, dd->total,   [in omap_aes_gcm_done_task()]
    70: dd->total + dd->aead_req->assoclen,   [in omap_aes_gcm_done_task()]
    94: int alen, clen, cryptlen, assoclen, ret;   [in omap_aes_gcm_copy_buffers(), local]
    101: assoclen = req->assoclen;   [in omap_aes_gcm_copy_buffers()]
    105: assoclen -= 8;   [in omap_aes_gcm_copy_buffers()]
    110: alen = ALIGN(assoclen, AES_BLOCK_SIZE);   [in omap_aes_gcm_copy_buffers()]
    113: nsg = !!(assoclen && cryptlen);   [in omap_aes_gcm_copy_buffers()]
    118: if (assoclen) {   [in omap_aes_gcm_copy_buffers()]
    120: ret = omap_crypto_align_sg(&tmp, assoclen,   [in omap_aes_gcm_copy_buffers()]
    132: tmp = scatterwalk_ffwd(sg_arr, req->src, req->assoclen);   [in omap_aes_gcm_copy_buffers()]
    [all …]
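The scatterwalk_ffwd(sg_arr, req->src, req->assoclen) match above is a pattern that recurs throughout this listing (atmel, qce, crypto4xx): for an AEAD request the associated data occupies the first assoclen bytes of req->src/req->dst, so drivers fast-forward past it to reach the cipher payload. A hedged kernel-style sketch (payload_sg is an illustrative name):

#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

/* Return a scatterlist positioned at the start of the cipher payload,
 * i.e. assoclen bytes into the request's source scatterlist. */
static struct scatterlist *payload_sg(struct aead_request *req,
                                      struct scatterlist tmp[2])
{
    return scatterwalk_ffwd(tmp, req->src, req->assoclen);
}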
  atmel-aes.c
    1557: if (likely(req->assoclen != 0 || ctx->textlen != 0))   [in atmel_aes_gcm_process()]
    1578: atmel_aes_write(dd, AES_AADLENR, req->assoclen);   [in atmel_aes_gcm_length()]
    1582: if (unlikely(req->assoclen == 0)) {   [in atmel_aes_gcm_length()]
    1588: padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);   [in atmel_aes_gcm_length()]
    1589: if (unlikely(req->assoclen + padlen > dd->buflen))   [in atmel_aes_gcm_length()]
    1591: sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);   [in atmel_aes_gcm_length()]
    1595: dd->datalen = req->assoclen + padlen;   [in atmel_aes_gcm_length()]
    1626: src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);   [in atmel_aes_gcm_data()]
    1628: scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));   [in atmel_aes_gcm_data()]
    1666: data[0] = cpu_to_be64(req->assoclen * 8);   [in atmel_aes_gcm_tag_init()]
    [all …]
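atmel_aes_gcm_tag_init()'s data[0] = cpu_to_be64(req->assoclen * 8) builds the standard GCM final GHASH block, len(A) || len(C) as 64-bit big-endian bit counts; the nx driver's bit_length_aad = assoclen * 8 above is the same quantity handed to hardware. A self-contained sketch (gcm_length_block is an illustrative name):

#include <stdint.h>

/* Final GHASH block for GCM: len(AAD) || len(ciphertext), both in
 * bits, big-endian (NIST SP 800-38D). */
static void gcm_length_block(uint8_t block[16],
                             uint64_t assoclen, uint64_t cryptlen)
{
    uint64_t abits = assoclen * 8, cbits = cryptlen * 8;

    for (int i = 0; i < 8; i++) {
        block[i]     = (uint8_t)(abits >> (56 - 8 * i));
        block[8 + i] = (uint8_t)(cbits >> (56 - 8 * i));
    }
}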
  atmel-authenc.h
    41: struct scatterlist *assoc, unsigned int assoclen,
/drivers/crypto/cavium/nitrox/
  nitrox_aead.c
    166: creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);   [in nitrox_set_creq()]
    167: creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);   [in nitrox_set_creq()]
    205: static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)   [in nitrox_aes_gcm_assoclen_supported(), argument]
    207: if (assoclen <= 512)   [in nitrox_aes_gcm_assoclen_supported()]
    222: if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))   [in nitrox_aes_gcm_enc()]
    228: rctx->assoclen = areq->assoclen;   [in nitrox_aes_gcm_enc()]
    229: rctx->srclen = areq->assoclen + areq->cryptlen;   [in nitrox_aes_gcm_enc()]
    256: if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))   [in nitrox_aes_gcm_dec()]
    262: rctx->assoclen = areq->assoclen;   [in nitrox_aes_gcm_dec()]
    263: rctx->srclen = areq->cryptlen + areq->assoclen;   [in nitrox_aes_gcm_dec()]
    [all …]
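nitrox_aes_gcm_assoclen_supported() caps the AAD at 512 bytes, and rctx->srclen = areq->assoclen + areq->cryptlen reflects the convention that the engine consumes AAD and payload as one contiguous input. Engines with such limits either reject the request or hand it to a software fallback; a hedged sketch of the gate (the 512-byte limit is this engine's, the helper name is illustrative):

#include <linux/errno.h>
#include <crypto/aead.h>

/* Reject AEAD requests whose AAD exceeds the engine limit; a real
 * driver would typically punt to a software fallback instead. */
static int my_check_assoclen(struct aead_request *req)
{
    return req->assoclen <= 512 ? 0 : -EINVAL;
}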
/drivers/crypto/qce/
  aead.c
    54: if (req->assoclen) {   [in qce_aead_done()]
    71: totallen = req->cryptlen + req->assoclen;   [in qce_aead_done()]
    80: totallen = req->cryptlen + req->assoclen - ctx->authsize;   [in qce_aead_done()]
    120: unsigned int assoclen = req->assoclen;   [in qce_aead_prepare_dst_buf(), local]
    124: totallen = rctx->cryptlen + assoclen;   [in qce_aead_prepare_dst_buf()]
    141: if (IS_CCM(rctx->flags) && assoclen) {   [in qce_aead_prepare_dst_buf()]
    143: msg_sg = scatterwalk_ffwd(__sg, req->dst, assoclen);   [in qce_aead_prepare_dst_buf()]
    146: rctx->assoclen);   [in qce_aead_prepare_dst_buf()]
    157: totallen = rctx->cryptlen + rctx->assoclen;   [in qce_aead_prepare_dst_buf()]
    191: unsigned int assoclen = rctx->assoclen;   [in qce_aead_ccm_prepare_buf_assoclen(), local]
    [all …]
  aead.h
    39: unsigned int assoclen;   [member]
/drivers/crypto/chelsio/
  chcr_algo.c
    186: authsize, req->assoclen +   [in chcr_verify_tag()]
    2411: (req->assoclen > aadmax) ||   [in chcr_aead_need_fallback()]
    2428: aead_request_set_ad(subreq, req->assoclen);   [in chcr_aead_fallback()]
    2472: dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +   [in create_authenc_wr()]
    2475: snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,   [in create_authenc_wr()]
    2481: reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <   [in create_authenc_wr()]
    2483: temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16)   [in create_authenc_wr()]
    2511: chcr_req->sec_cpl.pldlen = htonl(req->assoclen + IV + req->cryptlen);   [in create_authenc_wr()]
    2514: null ? 0 : IV + req->assoclen,   [in create_authenc_wr()]
    2515: req->assoclen + IV + 1,   [in create_authenc_wr()]
    [all …]
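chcr_aead_need_fallback()/chcr_aead_fallback() show the usual shape of that software fallback: clone the request parameters, including assoclen via aead_request_set_ad(), onto a sub-request for a fallback tfm. A hedged kernel-style sketch, assuming the driver allocated a fallback crypto_aead at init time and reserved room for the sub-request with crypto_aead_set_reqsize():

#include <linux/types.h>
#include <crypto/aead.h>

/* Forward an AEAD request to a software fallback transform. */
static int my_aead_fallback(struct aead_request *req,
                            struct crypto_aead *fallback, bool encrypt)
{
    struct aead_request *subreq = aead_request_ctx(req);

    aead_request_set_tfm(subreq, fallback);
    aead_request_set_callback(subreq, req->base.flags,
                              req->base.complete, req->base.data);
    aead_request_set_crypt(subreq, req->src, req->dst,
                           req->cryptlen, req->iv);
    aead_request_set_ad(subreq, req->assoclen);

    return encrypt ? crypto_aead_encrypt(subreq)
                   : crypto_aead_decrypt(subreq);
}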
/drivers/crypto/ccree/
  cc_aead.c
    236: areq->cryptlen, areq->assoclen);   [in cc_aead_complete()]
    736: areq_ctx->assoclen, NS_BIT);   [in cc_set_assoc_desc()]
    1089: if (areq_ctx->assoclen > 0)   [in cc_proc_header_desc()]
    1317: unsigned int assoclen = areq_ctx->assoclen;   [in validate_data_size(), local]
    1340: if (!IS_ALIGNED(assoclen, sizeof(u32)))   [in validate_data_size()]
    1351: if (!IS_ALIGNED(assoclen, DES_BLOCK_SIZE))   [in validate_data_size()]
    1476: if (req_ctx->assoclen > 0) {   [in cc_ccm()]
    1568: if (req_ctx->assoclen > 0)   [in config_ccm_adata()]
    1579: req_ctx->ccm_hdr_size = format_ccm_a0(a0, req_ctx->assoclen);   [in config_ccm_adata()]
    1818: if (req_ctx->assoclen > 0)   [in cc_gcm()]
    [all …]
  cc_buffer_mgr.c
    56: u32 skip = req->assoclen + req->cryptlen;   [in cc_copy_mac()]
    291: unsigned int assoclen)   [in cc_set_aead_conf_buf(), argument]
    307: if (assoclen > 0) {   [in cc_set_aead_conf_buf()]
    519: areq_ctx->assoclen, req->cryptlen);   [in cc_unmap_aead_request()]
    599: if (areq_ctx->assoclen == 0) {   [in cc_aead_chain_assoc()]
    609: mapped_nents = sg_nents_for_len(req->src, areq_ctx->assoclen);   [in cc_aead_chain_assoc()]
    643: areq_ctx->assoclen, 0, is_last,   [in cc_aead_chain_assoc()]
    802: unsigned int size_for_map = req->assoclen + req->cryptlen;   [in cc_aead_chain_data()]
    804: u32 size_to_skip = req->assoclen;   [in cc_aead_chain_data()]
    841: size_for_map = req->assoclen + req->cryptlen;   [in cc_aead_chain_data()]
    [all …]
  cc_aead.h
    69: u32 assoclen; /* size of AAD buffer to authenticate */   [member]
/drivers/crypto/starfive/
  jh7110-aes.c
    120: writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);   [in starfive_aes_set_alen()]
    121: writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);   [in starfive_aes_set_alen()]
    221: if (cryp->assoclen)   [in starfive_aes_ccm_init()]
    403: total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);   [in starfive_aes_gcm_write_adata()]
    432: total_len = cryp->assoclen;   [in starfive_aes_ccm_write_adata()]
    481: cryp->assoclen = 0;   [in starfive_aes_prepare_req()]
    485: cryp->assoclen = areq->assoclen;   [in starfive_aes_prepare_req()]
    502: if (cryp->assoclen) {   [in starfive_aes_prepare_req()]
    503: rctx->adata = kzalloc(ALIGN(cryp->assoclen, AES_BLOCK_SIZE), GFP_KERNEL);   [in starfive_aes_prepare_req()]
    508: scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);   [in starfive_aes_prepare_req()]
    [all …]
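starfive_aes_prepare_req() linearizes the AAD into a zeroed, block-aligned bounce buffer (kzalloc(ALIGN(assoclen, AES_BLOCK_SIZE)) then scatterwalk_copychunks()), which suits engines that take AAD by register writes rather than DMA. A hedged sketch of the same idea using the simpler sg_copy_to_buffer() helper (copy_aad_padded is an illustrative name):

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Copy the AAD out of the request scatterlist into a buffer zero-
 * padded to a full AES block. Caller kfree()s the result. */
static void *copy_aad_padded(struct aead_request *req, gfp_t gfp)
{
    void *buf = kzalloc(ALIGN(req->assoclen, AES_BLOCK_SIZE), gfp);

    if (buf)
        sg_copy_to_buffer(req->src, sg_nents(req->src),
                          buf, req->assoclen);
    return buf;
}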
/drivers/crypto/intel/keembay/
  keembay-ocs-aes-core.c
    653: aead_request_set_ad(subreq, req->assoclen);   [in kmb_ocs_aead_common()]
    731: req->assoclen + req->cryptlen);   [in kmb_ocs_aead_dma_prepare()]
    751: dst_size = req->assoclen + out_size;   [in kmb_ocs_aead_dma_prepare()]
    759: tag_size, req->assoclen + in_size);   [in kmb_ocs_aead_dma_prepare()]
    778: dst_size = req->assoclen + in_size + tag_size;   [in kmb_ocs_aead_dma_prepare()]
    802: &rctx->aad_dst_dll, req->assoclen,   [in kmb_ocs_aead_dma_prepare()]
    810: out_size, req->assoclen);   [in kmb_ocs_aead_dma_prepare()]
    836: req->assoclen, 0);   [in kmb_ocs_aead_dma_prepare()]
    842: req->assoclen);   [in kmb_ocs_aead_dma_prepare()]
    862: req->assoclen, 0);   [in kmb_ocs_aead_dma_prepare()]
    [all …]
/drivers/crypto/marvell/octeontx2/
  otx2_cptvf_algs.c
    1024: rctx->ctrl_word.e.enc_data_offset = req->assoclen;   [in create_aead_ctx_hdr()]
    1028: if (req->assoclen > 248 || !IS_ALIGNED(req->assoclen, 8))   [in create_aead_ctx_hdr()]
    1048: if (crypto_ipsec_check_assoclen(req->assoclen))   [in create_aead_ctx_hdr()]
    1058: rctx->ctrl_word.e.iv_offset = req->assoclen - AES_GCM_IV_OFFSET;   [in create_aead_ctx_hdr()]
    1074: req_info->req.param2 = req->cryptlen + req->assoclen;   [in create_aead_ctx_hdr()]
    1078: req_info->req.param2 = req->cryptlen + req->assoclen - mac_len;   [in create_aead_ctx_hdr()]
    1133: u32 inputlen = req->cryptlen + req->assoclen;   [in create_aead_input_list()]
    1153: outputlen = req->cryptlen + req->assoclen + mac_len;   [in create_aead_output_list()]
    1155: outputlen = req->cryptlen + req->assoclen - mac_len;   [in create_aead_output_list()]
    1169: inputlen = req->cryptlen + req->assoclen;   [in create_aead_null_input_list()]
    [all …]
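The otx2 matches condense the length bookkeeping every driver here repeats: engine input is always assoclen + cryptlen, and the output gains the MAC on encrypt (+ mac_len) but shrinks on decrypt (- mac_len), because for AEAD decryption req->cryptlen already includes the tag. As a one-liner (illustrative helper):

#include <linux/types.h>

/* AEAD output length: on decrypt, cryptlen counts the ciphertext
 * plus the tag, so the written output is mac_len shorter. */
static unsigned int aead_out_len(unsigned int assoclen, unsigned int cryptlen,
                                 unsigned int mac_len, bool encrypt)
{
    return encrypt ? assoclen + cryptlen + mac_len
                   : assoclen + cryptlen - mac_len;
}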
/drivers/crypto/hisilicon/sec2/
  sec_crypto.c
    907: copy_size = aead_req->cryptlen + aead_req->assoclen;   [in GEN_SEC_SETKEY_FUNC()]
    940: copy_size = c_req->c_len + aead_req->assoclen;   [in sec_cipher_pbuf_unmap()]
    961: skip_size = aead_req->assoclen + aead_req->cryptlen - authsize;   [in sec_aead_mac_init()]
    1493: if (aead_req->assoclen)   [in set_aead_auth_iv()]
    1560: sec_sqe->type2.alen_ivllen = cpu_to_le32(aq->assoclen);   [in sec_auth_bd_fill_xcm()]
    1562: sec_sqe->type2.cipher_src_offset = cpu_to_le16((u16)aq->assoclen);   [in sec_auth_bd_fill_xcm()]
    1586: sqe3->a_len_key = cpu_to_le32(aq->assoclen);   [in sec_auth_bd_fill_xcm_v3()]
    1588: sqe3->cipher_src_offset = cpu_to_le16((u16)aq->assoclen);   [in sec_auth_bd_fill_xcm_v3()]
    1618: sec_sqe->type2.alen_ivllen = cpu_to_le32(c_req->c_len + aq->assoclen);   [in sec_auth_bd_fill_ex()]
    1620: sec_sqe->type2.cipher_src_offset = cpu_to_le16((u16)aq->assoclen);   [in sec_auth_bd_fill_ex()]
    [all …]
/drivers/crypto/inside-secure/
  safexcel_cipher.c
    164: u32 cryptlen, u32 assoclen, u32 digestsize)   [in safexcel_aead_token(), argument]
    168: int assocadj = assoclen - ctx->aadskip, aadalign;   [in safexcel_aead_token()]
    676: unsigned int cryptlen, unsigned int assoclen,   [in safexcel_send_req(), argument]
    689: unsigned int totlen_src = cryptlen + assoclen;   [in safexcel_send_req()]
    826: assoclen, digestsize);   [in safexcel_send_req()]
    844: if (assoclen) {   [in safexcel_send_req()]
    845: if (assoclen >= len) {   [in safexcel_send_req()]
    846: assoclen -= len;   [in safexcel_send_req()]
    851: assoclen,   [in safexcel_send_req()]
    852: len - assoclen);   [in safexcel_send_req()]
    [all …]
/drivers/crypto/marvell/octeontx/
  otx_cptvf_algs.c
    977: rctx->ctrl_word.e.enc_data_offset = req->assoclen;   [in create_aead_ctx_hdr()]
    1005: rctx->ctrl_word.e.iv_offset = req->assoclen - AES_GCM_IV_OFFSET;   [in create_aead_ctx_hdr()]
    1021: req_info->req.param2 = req->cryptlen + req->assoclen;   [in create_aead_ctx_hdr()]
    1025: req_info->req.param2 = req->cryptlen + req->assoclen - mac_len;   [in create_aead_ctx_hdr()]
    1082: u32 inputlen = req->cryptlen + req->assoclen;   [in create_aead_input_list()]
    1102: outputlen = req->cryptlen + req->assoclen + mac_len;   [in create_aead_output_list()]
    1104: outputlen = req->cryptlen + req->assoclen - mac_len;   [in create_aead_output_list()]
    1120: inputlen = req->cryptlen + req->assoclen;   [in create_aead_null_input_list()]
    1122: inputlen = req->cryptlen + req->assoclen - mac_len;   [in create_aead_null_input_list()]
    1142: inputlen = req->cryptlen + req->assoclen;   [in create_aead_null_output_list()]
    [all …]
/drivers/crypto/amcc/
  crypto4xx_alg.c
    341: if (req->assoclen & 0x3 || req->assoclen > 1020)   [in crypto4xx_aead_need_fallback()]
    361: aead_request_set_ad(subreq, req->assoclen);   [in crypto4xx_aead_fallback()]
    470: sa, ctx->sa_len, req->assoclen, rctx->dst);   [in crypto4xx_crypt_aes_ccm()]
    607: ctx->sa_len, req->assoclen, rctx->dst);   [in crypto4xx_crypt_aes_gcm()]
  crypto4xx_core.c
    600: aead_req->assoclen + aead_req->cryptlen -   [in crypto4xx_aead_done()]
    685: const unsigned int assoclen,   [in crypto4xx_build_pd(), argument]
    721: tmp = sg_nents_for_len(src, assoclen + datalen);   [in crypto4xx_build_pd()]
    730: if (assoclen) {   [in crypto4xx_build_pd()]
    731: nbytes += assoclen;   [in crypto4xx_build_pd()]
    732: dst = scatterwalk_ffwd(_dst, dst, assoclen);   [in crypto4xx_build_pd()]
    822: sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2);   [in crypto4xx_build_pd()]
    921: pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen);   [in crypto4xx_build_pd()]
/drivers/crypto/bcm/
  cipher.c
    1319: aead_parms.assoc_size = req->assoclen;   [in handle_aead_req()]
    1392: spu->spu_ccm_update_iv(digestsize, &cipher_parms, req->assoclen,   [in handle_aead_req()]
    1421: req->assoclen + rctx->total_sent -   [in handle_aead_req()]
    1546: if (req->assoclen)   [in handle_aead_resp()]
    1548: req->assoclen);   [in handle_aead_resp()]
    1557: icv_offset = req->assoclen + rctx->total_sent;   [in handle_aead_resp()]
    1566: dump_sg(req->dst, req->assoclen, result_len);   [in handle_aead_resp()]
    2518: (req->assoclen == 0)) {   [in aead_need_fallback()]
    2542: (req->assoclen == 0)) {   [in aead_need_fallback()]
    2555: req->assoclen != 16 && req->assoclen != 20) {   [in aead_need_fallback()]
    [all …]
/drivers/crypto/caam/
  caamalg.c
    1101: append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,   [in init_aead_job()]
    1124: req->assoclen + req->cryptlen + authsize,   [in init_aead_job()]
    1128: req->assoclen + req->cryptlen - authsize,   [in init_aead_job()]
    1144: append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);   [in init_gcm_job()]
    1148: if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))   [in init_gcm_job()]
    1168: unsigned int assoclen = req->assoclen;   [in init_chachapoly_job(), local]
    1182: assoclen -= ivsize;   [in init_chachapoly_job()]
    1185: append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);   [in init_chachapoly_job()]
    1235: append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);   [in init_authenc_job()]
    1237: append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);   [in init_authenc_job()]
    [all …]
  caamalg_qi2.h
    114: unsigned int assoclen;   [member]
  caamalg_qi.c
    808: unsigned int assoclen;   [member]
    974: src_len = req->assoclen + req->cryptlen +   [in aead_edesc_alloc()]
    993: src_len = req->assoclen + req->cryptlen;   [in aead_edesc_alloc()]
    1099: edesc->assoclen = cpu_to_caam32(req->assoclen);   [in aead_edesc_alloc()]
    1100: edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,   [in aead_edesc_alloc()]
    1135: out_len = req->assoclen + req->cryptlen +   [in aead_edesc_alloc()]
    1137: in_len = 4 + ivsize + req->assoclen + req->cryptlen;   [in aead_edesc_alloc()]
    1200: return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,   [in ipsec_gcm_encrypt()]
    1206: return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,   [in ipsec_gcm_decrypt()]
/drivers/crypto/stm32/
  stm32-cryp.c
    553: if (cryp->areq->assoclen) {   [in stm32_cryp_gcm_init()]
    603: u32 alen = cryp->areq->assoclen;   [in stm32_cryp_write_ccm_first_header()]
    654: if (cryp->areq->assoclen)   [in stm32_cryp_ccm_init()]
    685: if (cryp->areq->assoclen) {   [in stm32_cryp_ccm_init()]
    1207: cryp->header_in = areq->assoclen;   [in stm32_cryp_prepare_req()]
    1211: cryp->header_in = areq->assoclen;   [in stm32_cryp_prepare_req()]
    1224: scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);   [in stm32_cryp_prepare_req()]
    1292: size_bit = cryp->areq->assoclen * 8;   [in stm32_cryp_read_auth_tag()]
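All of the assoclen plumbing in this listing implements one caller-side contract of the kernel AEAD API: src/dst scatterlists are laid out as AAD || payload, the AAD length is declared once with aead_request_set_ad(), and on encrypt the tag lands after the ciphertext in dst. A hedged caller sketch (aead_encrypt_once is an illustrative name):

#include <crypto/aead.h>

/* Minimal caller: declare where the payload starts (assoclen) and
 * how long it is (cryptlen), then fire the request. */
static int aead_encrypt_once(struct crypto_aead *tfm,
                             struct aead_request *req,
                             struct scatterlist *src,
                             struct scatterlist *dst,
                             unsigned int assoclen, unsigned int ptlen,
                             u8 *iv)
{
    aead_request_set_tfm(req, tfm);
    aead_request_set_crypt(req, src, dst, ptlen, iv);
    aead_request_set_ad(req, assoclen);  /* first assoclen bytes are AAD */
    return crypto_aead_encrypt(req);
}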