/crypto/ |
D | testmgr.h |
    66    const char *iv;    member
    101   const char *iv;    member
    7359  .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
    7371  .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef",
    7379  .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c",
    7387  .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f",
    7395  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    7467  .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
    7535  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    7767  .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
    [all …]
|
D | cfb.c |
    50  u8 *iv = walk->iv;    in crypto_cfb_final() local
    53  crypto_cfb_encrypt_one(tfm, iv, stream);    in crypto_cfb_final()
    64  u8 *iv = walk->iv;    in crypto_cfb_encrypt_segment() local
    67  crypto_cfb_encrypt_one(tfm, iv, dst);    in crypto_cfb_encrypt_segment()
    69  iv = dst;    in crypto_cfb_encrypt_segment()
    75  memcpy(walk->iv, iv, bsize);    in crypto_cfb_encrypt_segment()
    86  u8 *iv = walk->iv;    in crypto_cfb_encrypt_inplace() local
    90  crypto_cfb_encrypt_one(tfm, iv, tmp);    in crypto_cfb_encrypt_inplace()
    92  iv = src;    in crypto_cfb_encrypt_inplace()
    97  memcpy(walk->iv, iv, bsize);    in crypto_cfb_encrypt_inplace()
    [all …]
|
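The cfb.c hits above trace CFB's chaining rule: the keystream is the block-cipher encryption of the IV, the ciphertext is keystream XOR plaintext, and that ciphertext becomes the IV for the next block (iv = dst out of place, iv = src in place), with the final IV copied back into walk->iv. A minimal userspace sketch of the same loop; toy_encrypt_block() is a hypothetical stand-in for crypto_cfb_encrypt_one():

#include <stdint.h>
#include <string.h>

#define BSIZE 16

/* Placeholder for crypto_cfb_encrypt_one(); any block cipher fits here. */
static void toy_encrypt_block(uint8_t out[BSIZE], const uint8_t in[BSIZE])
{
        for (int i = 0; i < BSIZE; i++)
                out[i] = in[i] ^ 0xA5;          /* not a real cipher */
}

/* Full-block CFB encryption, shaped like crypto_cfb_encrypt_segment(). */
static void cfb_encrypt(uint8_t *dst, const uint8_t *src, size_t nblocks,
                        uint8_t iv[BSIZE])
{
        while (nblocks--) {
                toy_encrypt_block(dst, iv);     /* keystream = E(iv) */
                for (int i = 0; i < BSIZE; i++)
                        dst[i] ^= src[i];       /* ct = keystream ^ pt */
                memcpy(iv, dst, BSIZE);         /* ciphertext feeds back as next IV */
                src += BSIZE;
                dst += BSIZE;
        }
}

Decryption computes the same E(iv) keystream with ciphertext fed back, which is why CFB never needs the block cipher's decrypt routine.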
D | pcbc.c |
    27  u8 * const iv = walk->iv;    in crypto_pcbc_encrypt_segment() local
    30  crypto_xor(iv, src, bsize);    in crypto_pcbc_encrypt_segment()
    31  crypto_cipher_encrypt_one(tfm, dst, iv);    in crypto_pcbc_encrypt_segment()
    32  crypto_xor_cpy(iv, dst, src, bsize);    in crypto_pcbc_encrypt_segment()
    48  u8 * const iv = walk->iv;    in crypto_pcbc_encrypt_inplace() local
    53  crypto_xor(iv, src, bsize);    in crypto_pcbc_encrypt_inplace()
    54  crypto_cipher_encrypt_one(tfm, src, iv);    in crypto_pcbc_encrypt_inplace()
    55  crypto_xor_cpy(iv, tmpbuf, src, bsize);    in crypto_pcbc_encrypt_inplace()
    94  u8 * const iv = walk->iv;    in crypto_pcbc_decrypt_segment() local
    98  crypto_xor(dst, iv, bsize);    in crypto_pcbc_decrypt_segment()
    [all …]
|
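pcbc.c shows the propagating variant: before each encryption the IV absorbs the plaintext (crypto_xor(iv, src, bsize)), and afterwards it is rebuilt as ciphertext XOR plaintext (crypto_xor_cpy(iv, dst, src, bsize)), so a corruption in any block propagates into all later blocks. The same dataflow in a standalone sketch, again with a hypothetical toy_encrypt_block():

#include <stdint.h>
#include <string.h>

#define BSIZE 16

/* Placeholder for crypto_cipher_encrypt_one(). */
static void toy_encrypt_block(uint8_t out[BSIZE], const uint8_t in[BSIZE])
{
        for (int i = 0; i < BSIZE; i++)
                out[i] = in[i] ^ 0xA5;                  /* not a real cipher */
}

/* PCBC encryption, shaped like crypto_pcbc_encrypt_segment(). */
static void pcbc_encrypt(uint8_t *dst, const uint8_t *src, size_t nblocks,
                         uint8_t iv[BSIZE])
{
        while (nblocks--) {
                for (int i = 0; i < BSIZE; i++)
                        iv[i] ^= src[i];                /* iv ^= plaintext */
                toy_encrypt_block(dst, iv);             /* ct = E(iv) */
                for (int i = 0; i < BSIZE; i++)
                        iv[i] = dst[i] ^ src[i];        /* next iv = ct ^ pt */
                src += BSIZE;
                dst += BSIZE;
        }
}

The in-place variant at line 54 encrypts over src, so line 55 rebuilds the IV from the saved tmpbuf copy of the plaintext rather than from src.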
D | ofb.c |
    30  u8 * const iv = walk.iv;    in crypto_ofb_crypt() local
    34  crypto_cipher_encrypt_one(cipher, iv, iv);    in crypto_ofb_crypt()
    35  crypto_xor_cpy(dst, src, iv, bsize);    in crypto_ofb_crypt()
    44  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);    in crypto_ofb_crypt()
    45  crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,    in crypto_ofb_crypt()
|
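ofb.c is the shortest of the feedback modes: the IV is repeatedly encrypted in place (crypto_cipher_encrypt_one(cipher, iv, iv)) to produce a keystream that never depends on the data, then XORed into the output. A sketch under the same toy-cipher assumption as above:

#include <stdint.h>
#include <string.h>

#define BSIZE 16

/* Placeholder for crypto_cipher_encrypt_one(). */
static void toy_encrypt_block(uint8_t buf[BSIZE])
{
        for (int i = 0; i < BSIZE; i++)
                buf[i] ^= 0xA5;                 /* not a real cipher */
}

/* OFB en/decryption (they are identical), as in crypto_ofb_crypt(). */
static void ofb_crypt(uint8_t *dst, const uint8_t *src, size_t nblocks,
                      uint8_t iv[BSIZE])
{
        while (nblocks--) {
                toy_encrypt_block(iv);          /* iv = E(iv): next keystream block */
                for (int i = 0; i < BSIZE; i++)
                        dst[i] = src[i] ^ iv[i];
                src += BSIZE;
                dst += BSIZE;
        }
}

Because the keystream is data-independent, encryption and decryption are the same function, matching the single crypto_ofb_crypt() entry point above.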
D | chacha_generic.c |
    35   const struct chacha_ctx *ctx, const u8 *iv)    in chacha_stream_xor() argument
    43   crypto_chacha_init(state, ctx, iv);    in chacha_stream_xor()
    59   void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)    in crypto_chacha_init() argument
    73   state[12] = get_unaligned_le32(iv + 0);    in crypto_chacha_init()
    74   state[13] = get_unaligned_le32(iv + 4);    in crypto_chacha_init()
    75   state[14] = get_unaligned_le32(iv + 8);    in crypto_chacha_init()
    76   state[15] = get_unaligned_le32(iv + 12);    in crypto_chacha_init()
    115  return chacha_stream_xor(req, ctx, req->iv);    in crypto_chacha_crypt()
    128  crypto_chacha_init(state, ctx, req->iv);    in crypto_xchacha_crypt()
    133  memcpy(&real_iv[0], req->iv + 24, 8); /* stream position */    in crypto_xchacha_crypt()
    [all …]
|
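crypto_chacha_init() loads the 16-byte IV little-endian into the last four state words; for plain ChaCha the first of those words doubles as the block counter. crypto_xchacha_crypt() reuses this init to derive a subkey, then rebuilds a fresh IV whose first 8 bytes are the stream position taken from req->iv + 24. A sketch of the state layout; the le32 helper mirrors the kernel's get_unaligned_le32():

#include <stdint.h>
#include <string.h>

/* Little-endian load, as get_unaligned_le32() in the kernel. */
static uint32_t le32(const uint8_t *p)
{
        return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
               (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

/*
 * ChaCha state layout as in crypto_chacha_init(): the four "expand 32-byte k"
 * constants, eight key words, then the 16-byte IV (block counter + nonce)
 * in words 12..15.
 */
static void chacha_init_state(uint32_t state[16], const uint32_t key[8],
                              const uint8_t iv[16])
{
        static const uint32_t c[4] = {
                0x61707865, 0x3320646e, 0x79622d32, 0x6b206574
        };

        memcpy(state, c, sizeof(c));
        memcpy(state + 4, key, 8 * sizeof(uint32_t));
        state[12] = le32(iv + 0);       /* block counter */
        state[13] = le32(iv + 4);
        state[14] = le32(iv + 8);
        state[15] = le32(iv + 12);
}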
D | lrw.c |
    150  __be32 *iv;    in xor_tweak() local
    164  iv = (__be32 *)w.iv;    in xor_tweak()
    165  counter[0] = be32_to_cpu(iv[3]);    in xor_tweak()
    166  counter[1] = be32_to_cpu(iv[2]);    in xor_tweak()
    167  counter[2] = be32_to_cpu(iv[1]);    in xor_tweak()
    168  counter[3] = be32_to_cpu(iv[0]);    in xor_tweak()
    187  iv[0] = cpu_to_be32(counter[3]);    in xor_tweak()
    188  iv[1] = cpu_to_be32(counter[2]);    in xor_tweak()
    189  iv[2] = cpu_to_be32(counter[1]);    in xor_tweak()
    190  iv[3] = cpu_to_be32(counter[0]);    in xor_tweak()
    [all …]
|
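xor_tweak() in lrw.c treats the 16-byte tweak as one 128-bit big-endian integer: it is unpacked least-significant-word first into host-order counter[], stepped per block, and packed back in reverse order. The elided middle of the function uses the counter to select precomputed tweak multiples, so the carry-propagating increment below is only an illustrative assumption about the stepping, not a copy of the kernel loop:

#include <stdint.h>
#include <arpa/inet.h>  /* htonl/ntohl as stand-ins for cpu_to_be32/be32_to_cpu */

/* iv[] holds the tweak as four big-endian 32-bit words, as in xor_tweak(). */
static void tweak_increment(uint32_t iv[4])
{
        uint32_t counter[4];

        counter[0] = ntohl(iv[3]);      /* least significant word */
        counter[1] = ntohl(iv[2]);
        counter[2] = ntohl(iv[1]);
        counter[3] = ntohl(iv[0]);      /* most significant word */

        if (++counter[0] == 0)          /* propagate the carry upward */
                if (++counter[1] == 0)
                        if (++counter[2] == 0)
                                ++counter[3];

        iv[0] = htonl(counter[3]);
        iv[1] = htonl(counter[2]);
        iv[2] = htonl(counter[1]);
        iv[3] = htonl(counter[0]);
}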
D | ablkcipher.c |
    130  if (walk->iv != req->info)    in ablkcipher_walk_done()
    131  memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);    in ablkcipher_walk_done()
    186  u8 *iv;    in ablkcipher_copy_iv() local
    193  iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1);    in ablkcipher_copy_iv()
    194  iv = ablkcipher_get_spot(iv, bs) + aligned_bs;    in ablkcipher_copy_iv()
    195  iv = ablkcipher_get_spot(iv, bs) + aligned_bs;    in ablkcipher_copy_iv()
    196  iv = ablkcipher_get_spot(iv, ivsize);    in ablkcipher_copy_iv()
    198  walk->iv = memcpy(iv, walk->iv, ivsize);    in ablkcipher_copy_iv()
    268  walk->iv = req->info;    in ablkcipher_walk_first()
    274  if (unlikely(((unsigned long)walk->iv & alignmask))) {    in ablkcipher_walk_first()
|
D | seqiv.c |
    35   memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));    in seqiv_aead_encrypt_complete2()
    38   kzfree(subreq->iv);    in seqiv_aead_encrypt_complete2()
    68   info = req->iv;    in seqiv_aead_encrypt()
    87   info = kmemdup(req->iv, ivsize, req->base.flags &    in seqiv_aead_encrypt()
    106  if (unlikely(info != req->iv))    in seqiv_aead_encrypt()
    130  req->cryptlen - ivsize, req->iv);    in seqiv_aead_decrypt()
    133  scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);    in seqiv_aead_decrypt()
|
D | ccm.c |
    139  unsigned int lp = req->iv[0];    in format_input()
    145  memcpy(info, req->iv, 16);    in format_input()
    249  static inline int crypto_ccm_check_iv(const u8 *iv)    in crypto_ccm_check_iv() argument
    252  if (1 > iv[0] || iv[0] > 7)    in crypto_ccm_check_iv()
    262  u8 *iv = req->iv;    in crypto_ccm_init_crypt() local
    265  err = crypto_ccm_check_iv(iv);    in crypto_ccm_init_crypt()
    274  memset(iv + 15 - iv[0], 0, iv[0] + 1);    in crypto_ccm_init_crypt()
    302  u8 *iv = req->iv;    in crypto_ccm_encrypt() local
    320  skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);    in crypto_ccm_encrypt()
    364  u8 *iv = pctx->idata;    in crypto_ccm_decrypt() local
    [all …]
|
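In ccm.c the first IV byte carries the CCM length parameter: iv[0] holds L' = L - 1, where L is the byte width of the trailing counter field. crypto_ccm_check_iv() therefore accepts only values 1..7, and crypto_ccm_init_crypt() zeroes the last iv[0] + 1 bytes so the per-message block counter starts at zero. The two fragments combined into one standalone helper:

#include <stdint.h>
#include <string.h>

/* Validate iv[0] and format the CCM counter block, as in ccm.c above. */
static int ccm_init_counter_block(uint8_t iv[16])
{
        if (iv[0] < 1 || iv[0] > 7)     /* 2 <= L <= 8, i.e. 1 <= L' <= 7 */
                return -1;

        /* Zero the trailing L = iv[0] + 1 bytes: the counter field. */
        memset(iv + 15 - iv[0], 0, iv[0] + 1);
        return 0;
}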
D | blkcipher.c |
    129  if (walk->iv != desc->info)    in blkcipher_walk_done()
    130  memcpy(desc->info, walk->iv, walk->ivsize);    in blkcipher_walk_done()
    276  u8 *iv;    in blkcipher_copy_iv() local
    283  iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);    in blkcipher_copy_iv()
    284  iv = blkcipher_get_spot(iv, bs) + aligned_bs;    in blkcipher_copy_iv()
    285  iv = blkcipher_get_spot(iv, bs) + aligned_bs;    in blkcipher_copy_iv()
    286  iv = blkcipher_get_spot(iv, walk->ivsize);    in blkcipher_copy_iv()
    288  walk->iv = memcpy(iv, walk->iv, walk->ivsize);    in blkcipher_copy_iv()
    322  walk->iv = desc->info;    in blkcipher_walk_first()
    328  if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {    in blkcipher_walk_first()
    [all …]
|
D | ctr.c |
    23   u8 iv[CTR_RFC3686_BLOCK_SIZE];    member
    32   u8 *ctrblk = walk->iv;    in crypto_ctr_crypt_final()
    51   u8 *ctrblk = walk->iv;    in crypto_ctr_crypt_segment()
    79   u8 *ctrblk = walk->iv;    in crypto_ctr_crypt_inplace()
    204  u8 *iv = rctx->iv;    in crypto_rfc3686_crypt() local
    207  memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);    in crypto_rfc3686_crypt()
    208  memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);    in crypto_rfc3686_crypt()
    211  *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =    in crypto_rfc3686_crypt()
    218  req->cryptlen, iv);    in crypto_rfc3686_crypt()
|
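crypto_rfc3686_crypt() assembles the 16-byte counter block as nonce (4 bytes, taken from the key) || per-request IV (8 bytes) || 32-bit big-endian block counter. The value assigned at line 211 is truncated above, but RFC 3686 specifies an initial counter of 1. A standalone sketch of the same layout:

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>  /* htonl as a stand-in for cpu_to_be32 */

#define NONCE_SIZE 4
#define IV_SIZE    8

/*
 * Build the RFC 3686 counter block: nonce || per-request IV || be32 counter,
 * mirroring the crypto_rfc3686_crypt() fragment above.
 */
static void rfc3686_build_ctrblk(uint8_t ctrblk[16],
                                 const uint8_t nonce[NONCE_SIZE],
                                 const uint8_t req_iv[IV_SIZE])
{
        uint32_t one = htonl(1);        /* RFC 3686: counter starts at 1 */

        memcpy(ctrblk, nonce, NONCE_SIZE);
        memcpy(ctrblk + NONCE_SIZE, req_iv, IV_SIZE);
        memcpy(ctrblk + NONCE_SIZE + IV_SIZE, &one, sizeof(one));
}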
D | skcipher.c |
    169  if (walk->iv != walk->oiv)    in skcipher_walk_done()
    170  memcpy(walk->oiv, walk->iv, walk->ivsize);    in skcipher_walk_done()
    208  if (!err && walk->iv != walk->oiv)    in skcipher_walk_complete()
    209  memcpy(walk->oiv, walk->iv, walk->ivsize);    in skcipher_walk_complete()
    404  u8 *iv;    in skcipher_copy_iv() local
    424  iv = PTR_ALIGN(walk->buffer, alignmask + 1);    in skcipher_copy_iv()
    425  iv = skcipher_get_spot(iv, bs) + aligned_bs;    in skcipher_copy_iv()
    427  walk->iv = memcpy(iv, walk->iv, walk->ivsize);    in skcipher_copy_iv()
    437  if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {    in skcipher_walk_first()
    455  walk->iv = req->iv;    in skcipher_walk_skcipher()
    [all …]
|
D | essiv.c |
    164  crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);    in essiv_skcipher_crypt()
    168  req->iv);    in essiv_skcipher_crypt()
    209  crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);    in essiv_aead_crypt()
    218  scatterwalk_map_and_copy(req->iv, req->dst,    in essiv_aead_crypt()
    222  u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;    in essiv_aead_crypt() local
    235  memcpy(iv, req->iv, ivsize);    in essiv_aead_crypt()
    255  sg_set_buf(rctx->sg + 1, iv, ivsize);    in essiv_aead_crypt()
    267  aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);    in essiv_aead_crypt()
|
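essiv.c encrypts req->iv in place with a cipher keyed by a hash of the data key, turning a predictable sector number into an unpredictable per-sector IV. A conceptual sketch; toy_encrypt_block() stands in for the keyed essiv_cipher, and the host-endian sector layout is an assumption for illustration:

#include <stdint.h>
#include <string.h>

/* Placeholder for the ESSIV cipher, keyed with H(data key) in the kernel. */
static void toy_encrypt_block(uint8_t buf[16])
{
        for (int i = 0; i < 16; i++)
                buf[i] ^= 0x5A;                 /* not a real cipher */
}

/* iv = E_{H(key)}(sector), as in the in-place encryption of req->iv above. */
static void essiv_make_iv(uint8_t iv[16], uint64_t sector)
{
        memset(iv, 0, 16);
        memcpy(iv, &sector, sizeof(sector));    /* sector number, host-endian */
        toy_encrypt_block(iv);
}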
D | chacha20poly1305.c |
    48   u8 iv[CHACHA_IV_SIZE];    member
    86   static void chacha_iv(u8 *iv, struct aead_request *req, u32 icb)    in chacha_iv() argument
    91   memcpy(iv, &leicb, sizeof(leicb));    in chacha_iv()
    92   memcpy(iv + sizeof(leicb), ctx->salt, ctx->saltlen);    in chacha_iv()
    93   memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv,    in chacha_iv()
    136  chacha_iv(creq->iv, req, 1);    in chacha_decrypt()
    147  rctx->cryptlen, creq->iv);    in chacha_decrypt()
    381  chacha_iv(creq->iv, req, 0);    in poly_genkey()
    387  POLY1305_KEY_SIZE, creq->iv);    in poly_genkey()
    412  chacha_iv(creq->iv, req, 1);    in chacha_encrypt()
    [all …]
|
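chacha_iv() concatenates a little-endian initial block counter, the salt carried in the key, and the request nonce into the 16-byte ChaCha IV; the call sites above pass icb = 0 to generate the one-time Poly1305 key and icb = 1 for the payload, so the two keystreams never overlap. A standalone version, assuming a little-endian host where the kernel would use cpu_to_le32(), and saltlen <= 12:

#include <stdint.h>
#include <string.h>

#define CHACHA_IV_SIZE 16

/* Build the ChaCha IV: le32 initial block counter || salt || request nonce. */
static void chacha_iv(uint8_t iv[CHACHA_IV_SIZE], uint32_t icb,
                      const uint8_t *salt, size_t saltlen,
                      const uint8_t *req_iv)
{
        uint32_t leicb = icb;   /* little-endian host assumed */

        memcpy(iv, &leicb, sizeof(leicb));
        memcpy(iv + sizeof(leicb), salt, saltlen);
        memcpy(iv + sizeof(leicb) + saltlen, req_iv,
               CHACHA_IV_SIZE - sizeof(leicb) - saltlen);
}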
D | gcm.c |
    65   u8 iv[16];    member
    101  u8 iv[16];    in crypto_gcm_setkey() member
    132  sizeof(data->hash), data->iv);    in crypto_gcm_setkey()
    165  memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);    in crypto_gcm_init_common()
    166  memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);    in crypto_gcm_init_common()
    197  pctx->iv);    in crypto_gcm_init_crypt()
    769  u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),    in crypto_rfc4106_crypt() local
    772  scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);    in crypto_rfc4106_crypt()
    774  memcpy(iv, ctx->nonce, 4);    in crypto_rfc4106_crypt()
    775  memcpy(iv + 4, req->iv, 8);    in crypto_rfc4106_crypt()
    [all …]
|
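Two IV constructions meet in gcm.c. crypto_gcm_init_common() extends the 12-byte GCM nonce with a 4-byte big-endian counter (for a 96-bit IV, GCM's initial counter block J0 is IV || 0x00000001), and crypto_rfc4106_crypt() builds that 12-byte nonce from 4 fixed bytes of the key plus the 8-byte per-request IV. A sketch of both steps together:

#include <stdint.h>
#include <string.h>

#define GCM_AES_IV_SIZE 12

/*
 * Build J0 for rfc4106(gcm(aes)): nonce || per-request IV forms the 96-bit
 * GCM IV, and the trailing 32-bit counter starts at 1.
 */
static void rfc4106_build_j0(uint8_t j0[16], const uint8_t nonce[4],
                             const uint8_t req_iv[8])
{
        static const uint8_t one_be[4] = { 0, 0, 0, 1 };

        memcpy(j0, nonce, 4);                    /* fixed field from the key */
        memcpy(j0 + 4, req_iv, 8);               /* invocation field */
        memcpy(j0 + GCM_AES_IV_SIZE, one_be, 4); /* be32 counter = 1 */
}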
D | salsa20_generic.c |
    104  const u8 *iv)    in salsa20_init() argument
    107  state[6] = get_unaligned_le32(iv + 0);    in salsa20_init()
    108  state[7] = get_unaligned_le32(iv + 4);    in salsa20_init()
    163  salsa20_init(state, ctx, req->iv);    in salsa20_crypt()
|
D | cts.c |
    128  skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);    in cts_cbc_encrypt()
    167  req->iv);    in crypto_cts_encrypt()
    177  offset, req->iv);    in crypto_cts_encrypt()
    262  req->iv);    in crypto_cts_decrypt()
    275  memcpy(space, req->iv, bsize);    in crypto_cts_decrypt()
    281  offset, req->iv);    in crypto_cts_decrypt()
|
D | algif_skcipher.c |
    108  areq->first_rsgl.sgl.sg, len, ctx->iv);    in _skcipher_recvmsg()
    321  sock_kzfree_s(sk, ctx->iv, crypto_skcipher_ivsize(tfm));    in skcipher_sock_destruct()
    338  ctx->iv = sock_kmalloc(sk, crypto_skcipher_ivsize(tfm),    in skcipher_accept_parent_nokey()
    340  if (!ctx->iv) {    in skcipher_accept_parent_nokey()
    344  memset(ctx->iv, 0, crypto_skcipher_ivsize(tfm));    in skcipher_accept_parent_nokey()
|
D | echainiv.c |
    42   info = req->iv;    in echainiv_encrypt()
    104  req->cryptlen - ivsize, req->iv);    in echainiv_decrypt()
    107  scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);    in echainiv_decrypt()
|
D | testmgr.c |
    1852  u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +    in test_aead_vec_cfg() local
    1896  if (vec->iv)    in test_aead_vec_cfg()
    1897  memcpy(iv, vec->iv, ivsize);    in test_aead_vec_cfg()
    1899  memset(iv, 0, ivsize);    in test_aead_vec_cfg()
    1922  enc ? vec->plen : vec->clen, iv);    in test_aead_vec_cfg()
    1934  req->iv != iv ||    in test_aead_vec_cfg()
    1947  if (req->iv != iv)    in test_aead_vec_cfg()
    2067  u8 iv[MAX_IVLEN];    in generate_random_aead_testvec() local
    2078  generate_random_bytes((u8 *)vec->iv, ivsize);    in generate_random_aead_testvec()
    2117  memcpy(iv, vec->iv, ivsize);    in generate_random_aead_testvec()
    [all …]
|
D | af_alg.c |
    453  if (cmsg->cmsg_len < CMSG_LEN(sizeof(*con->iv)))    in af_alg_cmsg_send()
    455  con->iv = (void *)CMSG_DATA(cmsg);    in af_alg_cmsg_send()
    456  if (cmsg->cmsg_len < CMSG_LEN(con->iv->ivlen +    in af_alg_cmsg_send()
    457  sizeof(*con->iv)))    in af_alg_cmsg_send()
    850  if (con.iv && con.iv->ivlen != ivsize)    in af_alg_sendmsg()
    869  if (con.iv)    in af_alg_sendmsg()
    870  memcpy(ctx->iv, con.iv->iv, ivsize);    in af_alg_sendmsg()
|
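af_alg_cmsg_send() reads the IV from an ALG_SET_IV control message whose payload is struct af_alg_iv { __u32 ivlen; __u8 iv[]; } from linux/if_alg.h, and af_alg_sendmsg() rejects it unless ivlen matches the transform's IV size. The userspace side of that handshake, a sketch assuming ivlen <= 16, an already accept()ed op socket, and with error handling omitted:

#include <stdint.h>
#include <string.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef SOL_ALG
#define SOL_ALG 279
#endif

/* Send data with a per-request IV attached as an ALG_SET_IV cmsg. */
static ssize_t send_with_iv(int opfd, const void *data, size_t len,
                            const uint8_t *iv, uint32_t ivlen)
{
        char cbuf[CMSG_SPACE(sizeof(struct af_alg_iv) + 16)] = { 0 };
        struct iovec vec = { .iov_base = (void *)data, .iov_len = len };
        struct msghdr msg = {
                .msg_iov = &vec,
                .msg_iovlen = 1,
                .msg_control = cbuf,
                .msg_controllen = sizeof(cbuf),
        };
        struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
        struct af_alg_iv *alg_iv;

        cmsg->cmsg_level = SOL_ALG;
        cmsg->cmsg_type = ALG_SET_IV;
        cmsg->cmsg_len = CMSG_LEN(sizeof(*alg_iv) + ivlen);
        alg_iv = (struct af_alg_iv *)CMSG_DATA(cmsg);
        alg_iv->ivlen = ivlen;                  /* checked against ivsize in af_alg_sendmsg() */
        memcpy(alg_iv->iv, iv, ivlen);

        return sendmsg(opfd, &msg, 0);
}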
D | algif_aead.c |
    281  areq->first_rsgl.sgl.sg, used, ctx->iv);    in _aead_recvmsg()
    534  sock_kzfree_s(sk, ctx->iv, ivlen);    in aead_sock_destruct()
    553  ctx->iv = sock_kmalloc(sk, ivlen, GFP_KERNEL);    in aead_accept_parent_nokey()
    554  if (!ctx->iv) {    in aead_accept_parent_nokey()
    558  memset(ctx->iv, 0, ivlen);    in aead_accept_parent_nokey()
|
D | tcrypt.c |
    262  char *iv;    in test_mb_aead_speed() local
    271  iv = kzalloc(MAX_IVLEN, GFP_KERNEL);    in test_mb_aead_speed()
    272  if (!iv)    in test_mb_aead_speed()
    371  memset(iv, 0xff, iv_len);    in test_mb_aead_speed()
    396  *b_size, iv);    in test_mb_aead_speed()
    410  iv);    in test_mb_aead_speed()
    450  kfree(iv);    in test_mb_aead_speed()
    532  char *iv;    in test_aead_speed() local
    540  iv = kzalloc(MAX_IVLEN, GFP_KERNEL);    in test_aead_speed()
    541  if (!iv)    in test_aead_speed()
    [all …]
|
D | keywrap.c |
    141  memcpy(&block.A, req->iv, SEMIBSIZE);    in crypto_kw_decrypt()
    256  memcpy(req->iv, &block.A, SEMIBSIZE);    in crypto_kw_encrypt()
|
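keywrap.c only touches req->iv at the edges of the algorithm: crypto_kw_encrypt() ends by writing the final 8-byte semiblock A out through req->iv, and crypto_kw_decrypt() starts by seeding A from it. Under SP 800-38F it is the caller who supplies the default IV and, after unwrapping, compares the recovered A against it; a sketch of that caller-side check:

#include <stdint.h>
#include <string.h>

#define SEMIBSIZE 8

/* SP 800-38F default key-wrap IV: eight 0xA6 bytes. */
static const uint8_t kw_default_iv[SEMIBSIZE] = {
        0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6
};

/* Integrity check after unwrapping: the recovered A must equal the IV. */
static int kw_check_default_iv(const uint8_t A[SEMIBSIZE])
{
        return memcmp(A, kw_default_iv, SEMIBSIZE) == 0 ? 0 : -1;
}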
D | adiantum.c |
    122  u8 iv[XCHACHA_IV_SIZE];    in adiantum_setkey() member
    148  data->iv[0] = 1;    in adiantum_setkey()
    156  sizeof(data->derived_keys), data->iv);    in adiantum_setkey()
    248  poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv,    in adiantum_hash_header()
|
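adiantum_setkey() derives its subkeys by running the XChaCha stream over a zeroed buffer with a fixed IV whose first byte is 1 (the data->iv[0] = 1 line above, remaining bytes zero); req->iv itself is later folded into the Poly1305 header hash in adiantum_hash_header(). A sketch of the derivation shape; toy_xchacha() is a placeholder, not the real stream cipher:

#include <stdint.h>
#include <string.h>

#define XCHACHA_IV_SIZE 32

/* Placeholder keystream generator standing in for XChaCha. */
static void toy_xchacha(uint8_t *buf, size_t len,
                        const uint8_t iv[XCHACHA_IV_SIZE])
{
        for (size_t i = 0; i < len; i++)
                buf[i] ^= (uint8_t)(iv[0] + i); /* not a real cipher */
}

/* Derive subkey material from a zeroed buffer, as adiantum_setkey() does. */
static void derive_subkeys(uint8_t *derived, size_t derived_len)
{
        uint8_t iv[XCHACHA_IV_SIZE] = { 1 };    /* iv[0] = 1, rest zero */

        memset(derived, 0, derived_len);
        toy_xchacha(derived, derived_len, iv);
}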