/kernel/linux/linux-5.10/drivers/s390/cio/ |
D | airq.c |
    130  struct airq_iv *iv;  in airq_iv_create() local
    133  iv = kzalloc(sizeof(*iv), GFP_KERNEL);  in airq_iv_create()
    134  if (!iv)  in airq_iv_create()
    136  iv->bits = bits;  in airq_iv_create()
    137  iv->flags = flags;  in airq_iv_create()
    145  iv->vector = dma_pool_zalloc(airq_iv_cache, GFP_KERNEL,  in airq_iv_create()
    146  	&iv->vector_dma);  in airq_iv_create()
    147  if (!iv->vector)  in airq_iv_create()
    150  iv->vector = cio_dma_zalloc(size);  in airq_iv_create()
    151  if (!iv->vector)  in airq_iv_create()
    [all …]
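The matches above show airq_iv_create() building an s390 adapter-interrupt vector: a kzalloc'd descriptor whose bitmap comes from a DMA pool for small vectors and from cio_dma_zalloc() otherwise. Below is a minimal user-space sketch of the same allocate-descriptor-then-allocate-bitmap pattern; iv_sketch_create() and its fields are illustrative names, and plain calloc() stands in for the kernel's DMA-aware allocators.

#include <stdlib.h>

struct iv_sketch {
        unsigned long bits;     /* number of interrupt bits */
        unsigned long flags;
        unsigned long *vector;  /* bitmap backing store */
};

/* Allocate a descriptor plus a zeroed bitmap large enough for 'bits'. */
static struct iv_sketch *iv_sketch_create(unsigned long bits, unsigned long flags)
{
        size_t bpl = 8 * sizeof(unsigned long);
        size_t words = (bits + bpl - 1) / bpl;
        struct iv_sketch *iv = calloc(1, sizeof(*iv));

        if (!iv)
                return NULL;
        iv->bits = bits;
        iv->flags = flags;
        iv->vector = calloc(words, sizeof(unsigned long)); /* kernel uses DMA-capable memory here */
        if (!iv->vector) {
                free(iv);               /* unwind on failure, like the kernel's error path */
                return NULL;
        }
        return iv;
}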
|
/kernel/linux/linux-5.10/arch/s390/include/asm/ |
D | airq.h |
    51  void airq_iv_release(struct airq_iv *iv);
    52  unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
    53  void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
    54  unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
    57  static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv)  in airq_iv_alloc_bit() argument
    59  return airq_iv_alloc(iv, 1);  in airq_iv_alloc_bit()
    62  static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit)  in airq_iv_free_bit() argument
    64  airq_iv_free(iv, bit, 1);  in airq_iv_free_bit()
    67  static inline unsigned long airq_iv_end(struct airq_iv *iv)  in airq_iv_end() argument
    69  return iv->end;  in airq_iv_end()
    [all …]
|
/kernel/linux/linux-5.10/crypto/ |
D | testmgr.h |
    66  const char *iv;  member
    107  const char *iv;  member
    8705  .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
    8717  .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef",
    8725  .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c",
    8733  .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f",
    8741  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    8813  .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
    8881  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    9113  .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
    [all …]
|
D | cfb.c |
    50  u8 *iv = walk->iv;  in crypto_cfb_final() local
    53  crypto_cfb_encrypt_one(tfm, iv, stream);  in crypto_cfb_final()
    64  u8 *iv = walk->iv;  in crypto_cfb_encrypt_segment() local
    67  crypto_cfb_encrypt_one(tfm, iv, dst);  in crypto_cfb_encrypt_segment()
    69  iv = dst;  in crypto_cfb_encrypt_segment()
    75  memcpy(walk->iv, iv, bsize);  in crypto_cfb_encrypt_segment()
    86  u8 *iv = walk->iv;  in crypto_cfb_encrypt_inplace() local
    90  crypto_cfb_encrypt_one(tfm, iv, tmp);  in crypto_cfb_encrypt_inplace()
    92  iv = src;  in crypto_cfb_encrypt_inplace()
    97  memcpy(walk->iv, iv, bsize);  in crypto_cfb_encrypt_inplace()
    [all …]
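crypto_cfb_encrypt_segment() encrypts the running IV, XORs the keystream into the plaintext, and then lets the freshly produced ciphertext become the next IV (the "iv = dst;" line and the final copy back into walk->iv). A minimal sketch of that recurrence, assuming 16-byte blocks; the byte-wise encrypt_one() below is a toy stand-in for crypto_cfb_encrypt_one(), not a real cipher.

#include <stdint.h>
#include <string.h>

#define BSIZE 16

/* Toy single-block "cipher"; stands in for crypto_cfb_encrypt_one(). NOT secure. */
static void encrypt_one(const uint8_t key[BSIZE], const uint8_t in[BSIZE], uint8_t out[BSIZE])
{
        for (int i = 0; i < BSIZE; i++)
                out[i] = in[i] ^ key[i];
}

/* CFB: C[i] = E(prev) ^ P[i], where prev starts as the IV and becomes C[i-1]. */
static void cfb_encrypt(const uint8_t key[BSIZE], uint8_t iv[BSIZE],
                        const uint8_t *src, uint8_t *dst, size_t nblocks)
{
        uint8_t stream[BSIZE];

        while (nblocks--) {
                encrypt_one(key, iv, stream);
                for (int i = 0; i < BSIZE; i++)
                        dst[i] = src[i] ^ stream[i];
                memcpy(iv, dst, BSIZE);   /* ciphertext feeds back, as walk->iv does */
                src += BSIZE;
                dst += BSIZE;
        }
}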
|
D | pcbc.c |
    27  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_segment() local
    30  crypto_xor(iv, src, bsize);  in crypto_pcbc_encrypt_segment()
    31  crypto_cipher_encrypt_one(tfm, dst, iv);  in crypto_pcbc_encrypt_segment()
    32  crypto_xor_cpy(iv, dst, src, bsize);  in crypto_pcbc_encrypt_segment()
    48  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_inplace() local
    53  crypto_xor(iv, src, bsize);  in crypto_pcbc_encrypt_inplace()
    54  crypto_cipher_encrypt_one(tfm, src, iv);  in crypto_pcbc_encrypt_inplace()
    55  crypto_xor_cpy(iv, tmpbuf, src, bsize);  in crypto_pcbc_encrypt_inplace()
    94  u8 * const iv = walk->iv;  in crypto_pcbc_decrypt_segment() local
    98  crypto_xor(dst, iv, bsize);  in crypto_pcbc_decrypt_segment()
    [all …]
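PCBC differs from CFB and CBC only in how the chaining value is updated: the block is encrypted as E(P ^ V), and the next chaining value is P ^ C (the crypto_xor_cpy(iv, dst, src, bsize) line). A sketch continuing the CFB example above (same file, same BSIZE and toy encrypt_one()):

/* PCBC: C[i] = E(P[i] ^ V), next V = P[i] ^ C[i]. */
static void pcbc_encrypt(const uint8_t key[BSIZE], uint8_t iv[BSIZE],
                         const uint8_t *src, uint8_t *dst, size_t nblocks)
{
        uint8_t tmp[BSIZE];

        while (nblocks--) {
                for (int i = 0; i < BSIZE; i++)
                        tmp[i] = src[i] ^ iv[i];   /* crypto_xor(iv, src, bsize) */
                encrypt_one(key, tmp, dst);        /* crypto_cipher_encrypt_one(tfm, dst, iv) */
                for (int i = 0; i < BSIZE; i++)
                        iv[i] = dst[i] ^ src[i];   /* crypto_xor_cpy(iv, dst, src, bsize) */
                src += BSIZE;
                dst += BSIZE;
        }
}

The decrypt path in the matches mirrors this: after decryption, crypto_xor(dst, iv, bsize) recovers P, and the same P ^ C value feeds the next block.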
|
D | cbc.c |
    26  u8 *iv = walk->iv;  in crypto_cbc_encrypt_segment() local
    33  crypto_xor(iv, src, bsize);  in crypto_cbc_encrypt_segment()
    34  fn(tfm, dst, iv);  in crypto_cbc_encrypt_segment()
    35  memcpy(iv, dst, bsize);  in crypto_cbc_encrypt_segment()
    53  u8 *iv = walk->iv;  in crypto_cbc_encrypt_inplace() local
    60  crypto_xor(src, iv, bsize);  in crypto_cbc_encrypt_inplace()
    62  iv = src;  in crypto_cbc_encrypt_inplace()
    67  memcpy(walk->iv, iv, bsize);  in crypto_cbc_encrypt_inplace()
    101  u8 *iv = walk->iv;  in crypto_cbc_decrypt_segment() local
    109  crypto_xor(dst, iv, bsize);  in crypto_cbc_decrypt_segment()
    [all …]
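CBC is the simplest of the three chainings shown here: C = E(P ^ V), and the ciphertext itself becomes the next chaining value (memcpy(iv, dst, bsize)). Sketch under the same assumptions and toy primitives as the CFB example above:

/* CBC: C[i] = E(P[i] ^ V), next V = C[i]. */
static void cbc_encrypt(const uint8_t key[BSIZE], uint8_t iv[BSIZE],
                        const uint8_t *src, uint8_t *dst, size_t nblocks)
{
        while (nblocks--) {
                for (int i = 0; i < BSIZE; i++)
                        iv[i] ^= src[i];           /* crypto_xor(iv, src, bsize) */
                encrypt_one(key, iv, dst);         /* fn(tfm, dst, iv) */
                memcpy(iv, dst, BSIZE);            /* memcpy(iv, dst, bsize) */
                src += BSIZE;
                dst += BSIZE;
        }
}

The in-place variant in the matches avoids the copy by XORing into src and simply repointing iv at the block just written, copying back to walk->iv only once at the end.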
|
D | ofb.c |
    30  u8 * const iv = walk.iv;  in crypto_ofb_crypt() local
    34  crypto_cipher_encrypt_one(cipher, iv, iv);  in crypto_ofb_crypt()
    35  crypto_xor_cpy(dst, src, iv, bsize);  in crypto_ofb_crypt()
    44  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);  in crypto_ofb_crypt()
    45  crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,  in crypto_ofb_crypt()
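OFB never feeds data through the cipher at all: the IV is repeatedly encrypted in place to produce a keystream (crypto_cipher_encrypt_one(cipher, iv, iv)) which is then XORed with the data. A sketch with the same toy primitives as above:

/* OFB: the keystream is E applied repeatedly to the IV; data never enters the cipher. */
static void ofb_crypt(const uint8_t key[BSIZE], uint8_t iv[BSIZE],
                      const uint8_t *src, uint8_t *dst, size_t nblocks)
{
        while (nblocks--) {
                encrypt_one(key, iv, iv);          /* crypto_cipher_encrypt_one(cipher, iv, iv) */
                for (int i = 0; i < BSIZE; i++)
                        dst[i] = src[i] ^ iv[i];   /* crypto_xor_cpy(dst, src, iv, bsize) */
                src += BSIZE;
                dst += BSIZE;
        }
}

Because only the keystream depends on the key and IV, OFB encryption and decryption are the same operation.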
|
/kernel/linux/linux-5.10/drivers/crypto/nx/ |
D | nx-aes-ccm.c |
    123  static inline int crypto_ccm_check_iv(const u8 *iv)  in crypto_ccm_check_iv() argument
    126  if (1 > iv[0] || iv[0] > 7)  in crypto_ccm_check_iv()
    133  static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize,  in generate_b0() argument
    139  memcpy(b0, iv, 16);  in generate_b0()
    156  static int generate_pat(u8 *iv,  in generate_pat() argument
    172  memset(iv + 15 - iv[0], 0, iv[0] + 1);  in generate_pat()
    211  rc = generate_b0(iv, assoclen, authsize, nbytes, b0);  in generate_pat()
    330  u8 *iv,  in ccm_nx_decrypt() argument
    351  rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen,  in ccm_nx_decrypt()
    370  rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,  in ccm_nx_decrypt()
    [all …]
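crypto_ccm_check_iv() validates the CCM length-field size and generate_pat() clears the low bytes of the caller's IV to form counter block A0. In CCM, byte 0 of the block carries L' = L - 1, where L is the number of length octets (2..8), so only values 1..7 are valid, and A0 is the nonce block with its L counter octets zeroed. A plain-C restatement of those two steps (function names here are illustrative):

#include <stdint.h>
#include <string.h>

/* CCM stores L' = L - 1 in byte 0 of the block, where L is the number of
 * length octets (2..8), so only values 1..7 are valid. */
static int ccm_check_iv(const uint8_t iv[16])
{
        if (iv[0] < 1 || iv[0] > 7)
                return -1;              /* the kernel returns -EINVAL here */
        return 0;
}

/* Turn the caller's IV into counter block A0: clear the low L = iv[0] + 1 bytes. */
static void ccm_make_a0(uint8_t iv[16])
{
        memset(iv + 15 - iv[0], 0, (size_t)iv[0] + 1);
}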
|
D | nx-aes-ctr.c |
    72  static int ctr_aes_nx_crypt(struct skcipher_request *req, u8 *iv)  in ctr_aes_nx_crypt() argument
    86  rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,  in ctr_aes_nx_crypt()
    88  csbcpb->cpb.aes_ctr.iv);  in ctr_aes_nx_crypt()
    102  memcpy(iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);  in ctr_aes_nx_crypt()
    119  u8 iv[16];  in ctr3686_aes_nx_crypt() local
    121  memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_NONCE_SIZE);  in ctr3686_aes_nx_crypt()
    122  memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);  in ctr3686_aes_nx_crypt()
    123  iv[12] = iv[13] = iv[14] = 0;  in ctr3686_aes_nx_crypt()
    124  iv[15] = 1;  in ctr3686_aes_nx_crypt()
    126  return ctr_aes_nx_crypt(req, iv);  in ctr3686_aes_nx_crypt()
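ctr3686_aes_nx_crypt() assembles the RFC 3686 counter block: the 4-byte key-derived nonce, the 8-byte per-request IV, and a 32-bit big-endian counter that starts at 1. A self-contained sketch of that layout (the helper name is illustrative):

#include <stdint.h>
#include <string.h>

#define RFC3686_NONCE_SIZE 4
#define RFC3686_IV_SIZE    8

/* RFC 3686 counter block: nonce(4) || per-request IV(8) || counter(4), counter = 1. */
static void rfc3686_build_ctrblk(uint8_t ctrblk[16],
                                 const uint8_t nonce[RFC3686_NONCE_SIZE],
                                 const uint8_t iv[RFC3686_IV_SIZE])
{
        memcpy(ctrblk, nonce, RFC3686_NONCE_SIZE);
        memcpy(ctrblk + RFC3686_NONCE_SIZE, iv, RFC3686_IV_SIZE);
        ctrblk[12] = ctrblk[13] = ctrblk[14] = 0;
        ctrblk[15] = 1;                 /* 32-bit big-endian counter, initial value 1 */
}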
|
D | nx-aes-gcm.c |
    169  static int gmac(struct aead_request *req, const u8 *iv, unsigned int assoclen)  in gmac() argument
    192  memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, iv, AES_BLOCK_SIZE);  in gmac()
    242  static int gcm_empty(struct aead_request *req, const u8 *iv, int enc)  in gcm_empty() argument
    269  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv,  in gcm_empty()
    322  *(u32 *)&rctx->iv[NX_GCM_CTR_OFFSET] = 1;  in gcm_aes_nx_crypt()
    326  rc = gcm_empty(req, rctx->iv, enc);  in gcm_aes_nx_crypt()
    328  rc = gmac(req, rctx->iv, assoclen);  in gcm_aes_nx_crypt()
    357  rc = nx_build_sg_lists(nx_ctx, rctx->iv, req->dst,  in gcm_aes_nx_crypt()
    376  memcpy(rctx->iv, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);  in gcm_aes_nx_crypt()
    419  char *iv = rctx->iv;  in gcm_aes_nx_encrypt() local
    [all …]
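gcm_aes_nx_crypt() seeds the counter field of rctx->iv before the first pass. For reference, GCM with a 96-bit IV defines the initial counter block J0 as IV || 0x00000001 (32-bit big-endian counter); J0 itself is reserved for the tag and counter value 2 encrypts the first data block. A sketch of that layout, independent of the NX driver's internal representation:

#include <stdint.h>
#include <string.h>

/* GCM, 96-bit IV: J0 = IV || 0x00000001 (32-bit big-endian counter). */
static void gcm_build_j0(uint8_t j0[16], const uint8_t iv[12])
{
        memcpy(j0, iv, 12);
        j0[12] = j0[13] = j0[14] = 0;
        j0[15] = 1;
}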
|
/kernel/linux/linux-5.10/drivers/staging/wlan-ng/ |
D | p80211wep.c |
    91  u8 *iv, u8 *icv)  in wep_decrypt() argument
    102  key[0] = iv[0];  in wep_decrypt()
    103  key[1] = iv[1];  in wep_decrypt()
    104  key[2] = iv[2];  in wep_decrypt()
    105  keyidx = WEP_KEY(iv[3]);  in wep_decrypt()
    162  u8 *dst, u32 len, int keynum, u8 *iv, u8 *icv)  in wep_encrypt() argument
    179  get_random_bytes(iv, 3);  in wep_encrypt()
    180  while ((iv[1] == 0xff) && (iv[0] >= 3) && (iv[0] < keylen))  in wep_encrypt()
    181  get_random_bytes(iv, 3);  in wep_encrypt()
    183  iv[3] = (keynum & 0x03) << 6;  in wep_encrypt()
    [all …]
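wep_encrypt() draws a random 3-byte IV, re-rolling while it matches the driver's weak-IV pattern, and stores the key number in bits 6-7 of the fourth header byte; wep_decrypt() pulls the key index back out with WEP_KEY(). A user-space sketch of that header handling; rand() stands in for get_random_bytes() and is not cryptographically sound, and the assumption that WEP_KEY() extracts bits 6-7 is inferred from the encrypt side rather than shown in the matches.

#include <stdint.h>
#include <stdlib.h>

/* Build the 4-byte WEP header: three random IV bytes, re-rolled while they hit
 * the driver's weak-IV pattern, plus the key number in bits 6-7 of byte 3. */
static void wep_build_iv(uint8_t iv[4], int keynum, int keylen)
{
        do {
                iv[0] = (uint8_t)rand();
                iv[1] = (uint8_t)rand();
                iv[2] = (uint8_t)rand();
        } while (iv[1] == 0xff && iv[0] >= 3 && iv[0] < keylen);

        iv[3] = (uint8_t)((keynum & 0x03) << 6);    /* low 6 bits stay zero */
}

/* Receive side: the key index comes back out of the same byte (assumed WEP_KEY()). */
static int wep_key_index(const uint8_t iv[4])
{
        return (iv[3] >> 6) & 0x03;
}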
|
/kernel/linux/linux-5.10/arch/x86/crypto/ |
D | aesni-intel_glue.c |
    93  const u8 *in, unsigned int len, u8 *iv);
    95  const u8 *in, unsigned int len, u8 *iv);
    101  const u8 *in, unsigned int len, u8 *iv);
    104  const u8 *in, unsigned int len, u8 *iv);
    109  const u8 *in, unsigned int len, u8 *iv);
    111  const u8 *in, unsigned int len, u8 *iv);
    130  const u8 *in, unsigned long plaintext_len, u8 *iv,
    152  const u8 *in, unsigned long ciphertext_len, u8 *iv,
    159  u8 *iv,
    174  void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
    [all …]
|
D | glue_helper-asm-avx2.S |
    55  #define load_ctr_16way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t0x, t1, \  argument
    62  vmovdqu (iv), t2x; \
    86  vmovdqu t2x, (iv);
    99  #define gf128mul_x_ble(iv, mask, tmp) \  argument
    100  vpsrad $31, iv, tmp; \
    101  vpaddq iv, iv, iv; \
    104  vpxor tmp, iv, iv;
    106  #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \  argument
    107  vpsrad $31, iv, tmp0; \
    108  vpaddq iv, iv, tmp1; \
    [all …]
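The gf128mul_x_ble macro multiplies the XTS tweak by x in GF(2^128) with vector instructions: the arithmetic shift builds a carry mask, vpaddq doubles each 64-bit lane, and vpxor folds the reduction back in. A byte-wise scalar equivalent of the same doubling, assuming the usual XTS convention of a little-endian 16-byte tweak and reduction constant 0x87:

#include <stdint.h>

/* Multiply a 16-byte XTS tweak (little-endian byte order) by x in GF(2^128):
 * shift the whole value left one bit; if bit 127 fell off, XOR the reduction
 * constant 0x87 into the low byte. */
static void gf128mul_x_ble_sketch(uint8_t tweak[16])
{
        uint8_t carry = 0;

        for (int i = 0; i < 16; i++) {
                uint8_t next = tweak[i] >> 7;

                tweak[i] = (uint8_t)(tweak[i] << 1) | carry;
                carry = next;
        }
        if (carry)
                tweak[0] ^= 0x87;
}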
|
D | glue_helper.c |
    80  u128 *iv = (u128 *)walk.iv;  in glue_cbc_encrypt_req_128bit() local
    83  u128_xor(dst, src, iv);  in glue_cbc_encrypt_req_128bit()
    85  iv = dst;  in glue_cbc_encrypt_req_128bit()
    91  *(u128 *)walk.iv = *iv;  in glue_cbc_encrypt_req_128bit()
    149  u128_xor(dst, dst, (u128 *)walk.iv);  in glue_cbc_decrypt_req_128bit()
    150  *(u128 *)walk.iv = last_iv;  in glue_cbc_decrypt_req_128bit()
    181  be128_to_le128(&ctrblk, (be128 *)walk.iv);  in glue_ctr_req_128bit()
    204  le128_to_be128((be128 *)walk.iv, &ctrblk);  in glue_ctr_req_128bit()
    214  be128_to_le128(&ctrblk, (be128 *)walk.iv);  in glue_ctr_req_128bit()
    220  le128_to_be128((be128 *)walk.iv, &ctrblk);  in glue_ctr_req_128bit()
    [all …]
|
D | glue_helper-asm-avx.S |
    44  #define load_ctr_8way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2) \  argument
    50  vmovdqu (iv), x7; \
    70  vmovdqu t2, (iv);
    83  #define gf128mul_x_ble(iv, mask, tmp) \  argument
    84  vpsrad $31, iv, tmp; \
    85  vpaddq iv, iv, iv; \
    88  vpxor tmp, iv, iv;
    90  #define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \  argument
    95  vmovdqu (iv), tiv; \
    129  vmovdqu tiv, (iv);
|
D | twofish_glue_3way.c |
    55  void twofish_enc_blk_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)  in twofish_enc_blk_ctr() argument
    64  le128_to_be128(&ctrblk, iv);  in twofish_enc_blk_ctr()
    65  le128_inc(iv);  in twofish_enc_blk_ctr()
    72  void twofish_enc_blk_ctr_3way(const void *ctx, u8 *d, const u8 *s, le128 *iv)  in twofish_enc_blk_ctr_3way() argument
    84  le128_to_be128(&ctrblks[0], iv);  in twofish_enc_blk_ctr_3way()
    85  le128_inc(iv);  in twofish_enc_blk_ctr_3way()
    86  le128_to_be128(&ctrblks[1], iv);  in twofish_enc_blk_ctr_3way()
    87  le128_inc(iv);  in twofish_enc_blk_ctr_3way()
    88  le128_to_be128(&ctrblks[2], iv);  in twofish_enc_blk_ctr_3way()
    89  le128_inc(iv);  in twofish_enc_blk_ctr_3way()
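twofish_enc_blk_ctr() keeps the CTR counter in little-endian form so that le128_inc() is a cheap add-with-carry, and byte-swaps it with le128_to_be128() only when a counter block is actually fed to the cipher, since CTR mode's on-the-wire counter is big-endian. A sketch of the 128-bit increment on an illustrative two-word struct:

#include <stdint.h>

struct ctr128 {
        uint64_t lo;    /* low 64 bits of the counter */
        uint64_t hi;    /* high 64 bits of the counter */
};

/* 128-bit increment with carry, a rough user-space analogue of le128_inc(). */
static void ctr128_inc(struct ctr128 *c)
{
        if (++c->lo == 0)       /* low half wrapped: carry into the high half */
                c->hi++;
}

The 3-way variant simply emits and increments three consecutive counter blocks per call so that three blocks can be encrypted in parallel.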
|
/kernel/linux/linux-5.10/include/crypto/ |
D | chacha.h |
    58  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
    59  static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_generic() argument
    70  state[12] = get_unaligned_le32(iv + 0);  in chacha_init_generic()
    71  state[13] = get_unaligned_le32(iv + 4);  in chacha_init_generic()
    72  state[14] = get_unaligned_le32(iv + 8);  in chacha_init_generic()
    73  state[15] = get_unaligned_le32(iv + 12);  in chacha_init_generic()
    76  static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)  in chacha_init() argument
    79  chacha_init_arch(state, key, iv);  in chacha_init()
    81  chacha_init_generic(state, key, iv);  in chacha_init()
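chacha_init_generic() lays the 16-word ChaCha state out as four constant words ("expand 32-byte k"), eight key words, and the 16-byte IV (block counter plus nonce) as four little-endian words in positions 12..15. A self-contained sketch of that layout; get_le32() is a portable stand-in for get_unaligned_le32(), and the key is taken here as raw bytes for simplicity, whereas the kernel helper receives it as 32-bit words.

#include <stdint.h>
#include <string.h>

static uint32_t get_le32(const uint8_t *p)
{
        return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
               (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

/* ChaCha state: constants, eight key words, then counter + nonce (words 12..15). */
static void chacha_init_sketch(uint32_t state[16], const uint8_t key[32],
                               const uint8_t iv[16])
{
        static const uint32_t consts[4] = {
                0x61707865, 0x3320646e, 0x79622d32, 0x6b206574,  /* "expand 32-byte k" */
        };

        memcpy(state, consts, sizeof(consts));
        for (int i = 0; i < 8; i++)
                state[4 + i] = get_le32(key + 4 * i);
        for (int i = 0; i < 4; i++)
                state[12 + i] = get_le32(iv + 4 * i);
}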
|
/kernel/linux/linux-5.10/lib/crypto/ |
D | chacha20poly1305.c |
    39  u8 iv[CHACHA_IV_SIZE];  in xchacha_init() local
    41  memset(iv, 0, 8);  in xchacha_init()
    42  memcpy(iv + 8, nonce + 16, 8);  in xchacha_init()
    50  chacha_init(chacha_state, k, iv);  in xchacha_init()
    53  memzero_explicit(iv, sizeof(iv));  in xchacha_init()
    97  __le64 iv[2];  in chacha20poly1305_encrypt() local
    101  iv[0] = 0;  in chacha20poly1305_encrypt()
    102  iv[1] = cpu_to_le64(nonce);  in chacha20poly1305_encrypt()
    104  chacha_init(chacha_state, k, (u8 *)iv);  in chacha20poly1305_encrypt()
    107  memzero_explicit(iv, sizeof(iv));  in chacha20poly1305_encrypt()
    [all …]
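The library builds the 16-byte ChaCha IV two ways: chacha20poly1305_encrypt() places the 64-bit nonce little-endian in the upper half with the counter (and the unused nonce word) zeroed, while xchacha_init() first runs HChaCha over the leading 16 bytes of the 24-byte nonce to derive a subkey and then uses the trailing 8 nonce bytes the same way. A sketch of both layouts (helper names are illustrative):

#include <stdint.h>
#include <string.h>

/* chacha20poly1305_*(): 64-bit nonce, little-endian, in the top 8 IV bytes. */
static void chachapoly_build_iv(uint8_t iv[16], uint64_t nonce)
{
        memset(iv, 0, 8);
        for (int i = 0; i < 8; i++)
                iv[8 + i] = (uint8_t)(nonce >> (8 * i));   /* cpu_to_le64(nonce) */
}

/* xchacha20poly1305_*(): HChaCha consumes nonce[0..15] to derive a subkey;
 * the remaining 8 nonce bytes become the tail of the IV. */
static void xchachapoly_build_iv(uint8_t iv[16], const uint8_t nonce[24])
{
        memset(iv, 0, 8);
        memcpy(iv + 8, nonce + 16, 8);
}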
|
/kernel/linux/linux-5.10/drivers/crypto/ccp/ |
D | ccp-crypto-aes.c |
    32  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
    83  if (!req->iv)  in ccp_aes_crypt()
    86  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);  in ccp_aes_crypt()
    89  sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_aes_crypt()
    101  rctx->cmd.u.aes.iv = iv_sg;  in ccp_aes_crypt()
    141  req->iv = rctx->rfc3686_info;  in ccp_aes_rfc3686_complete()
    165  u8 *iv;  in ccp_aes_rfc3686_crypt() local
    168  iv = rctx->rfc3686_iv;  in ccp_aes_rfc3686_crypt()
    169  memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);  in ccp_aes_rfc3686_crypt()
    171  iv += CTR_RFC3686_NONCE_SIZE;  in ccp_aes_rfc3686_crypt()
    [all …]
|
/kernel/linux/linux-5.10/net/mac80211/ |
D | wep.c |
    31  static inline bool ieee80211_wep_weak_iv(u32 iv, int keylen)  in ieee80211_wep_weak_iv() argument
    38  if ((iv & 0xff00) == 0xff00) {  in ieee80211_wep_weak_iv()
    39  u8 B = (iv >> 16) & 0xff;  in ieee80211_wep_weak_iv()
    48  int keylen, int keyidx, u8 *iv)  in ieee80211_wep_get_iv() argument
    54  if (!iv)  in ieee80211_wep_get_iv()
    57  *iv++ = (local->wep_iv >> 16) & 0xff;  in ieee80211_wep_get_iv()
    58  *iv++ = (local->wep_iv >> 8) & 0xff;  in ieee80211_wep_get_iv()
    59  *iv++ = local->wep_iv & 0xff;  in ieee80211_wep_get_iv()
    60  *iv++ = keyidx << 6;  in ieee80211_wep_get_iv()
    135  u8 *iv;  in ieee80211_wep_encrypt() local
    [all …]
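ieee80211_wep_weak_iv() screens IVs whose middle byte is 0xff and whose top byte falls in a key-dependent range (the pattern exploited by FMS-style key-recovery attacks), and ieee80211_wep_get_iv() serialises a running 24-bit counter plus the key index into the header. A sketch of the weak-IV predicate; the exact bounds (B >= 3 && B < 3 + keylen) are the classic FMS condition and are an assumption here, since the matched lines show only the 0xff00 mask and the extraction of B:

#include <stdbool.h>
#include <stdint.h>

/* FMS-style weak-IV screen as mac80211 structures it. */
static bool wep_weak_iv(uint32_t iv, int keylen)
{
        if ((iv & 0xff00) == 0xff00) {
                uint8_t B = (iv >> 16) & 0xff;

                if (B >= 3 && B < 3 + keylen)   /* assumed bounds, see lead-in */
                        return true;
        }
        return false;
}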
|
/kernel/linux/linux-5.10/drivers/crypto/amcc/ |
D | crypto4xx_alg.c |
    75  __le32 iv[AES_IV_SIZE];  in crypto4xx_crypt() local
    81  crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);  in crypto4xx_crypt()
    84  req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,  in crypto4xx_crypt()
    226  __le32 iv[AES_IV_SIZE / 4] = {  in crypto4xx_rfc3686_encrypt() local
    228  cpu_to_le32p((u32 *) req->iv),  in crypto4xx_rfc3686_encrypt()
    229  cpu_to_le32p((u32 *) (req->iv + 4)),  in crypto4xx_rfc3686_encrypt()
    233  req->cryptlen, iv, AES_IV_SIZE,  in crypto4xx_rfc3686_encrypt()
    241  __le32 iv[AES_IV_SIZE / 4] = {  in crypto4xx_rfc3686_decrypt() local
    243  cpu_to_le32p((u32 *) req->iv),  in crypto4xx_rfc3686_decrypt()
    244  cpu_to_le32p((u32 *) (req->iv + 4)),  in crypto4xx_rfc3686_decrypt()
    [all …]
|
/kernel/linux/linux-5.10/arch/x86/include/asm/crypto/ |
D | serpent-avx.h |
    26  le128 *iv);
    29  const u8 *src, le128 *iv);
    31  const u8 *src, le128 *iv);
    34  le128 *iv);
    36  extern void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv);
    37  extern void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv);
|
/kernel/linux/linux-5.10/block/ |
D | blk-integrity.c |
    29  struct bio_vec iv, ivprv = { NULL };  in blk_rq_count_integrity_sg() local
    35  bio_for_each_integrity_vec(iv, bio, iter) {  in blk_rq_count_integrity_sg()
    38  if (!biovec_phys_mergeable(q, &ivprv, &iv))  in blk_rq_count_integrity_sg()
    40  if (seg_size + iv.bv_len > queue_max_segment_size(q))  in blk_rq_count_integrity_sg()
    43  seg_size += iv.bv_len;  in blk_rq_count_integrity_sg()
    47  seg_size = iv.bv_len;  in blk_rq_count_integrity_sg()
    51  ivprv = iv;  in blk_rq_count_integrity_sg()
    71  struct bio_vec iv, ivprv = { NULL };  in blk_rq_map_integrity_sg() local
    77  bio_for_each_integrity_vec(iv, bio, iter) {  in blk_rq_map_integrity_sg()
    80  if (!biovec_phys_mergeable(q, &ivprv, &iv))  in blk_rq_map_integrity_sg()
    [all …]
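Here iv is a bio_vec, not a crypto IV: blk_rq_count_integrity_sg() walks the integrity vectors and merges adjacent ones into a single hardware segment while they remain physically contiguous and under the queue's segment-size limit. A user-space sketch of that counting loop over plain (addr, len) extents; simple address contiguity stands in for biovec_phys_mergeable()'s fuller boundary checks:

#include <stddef.h>
#include <stdint.h>

struct extent {
        uint64_t addr;
        uint32_t len;
};

/* An extent merges into the open segment while it is contiguous with the
 * previous one and the merged size stays within max_seg; otherwise it starts
 * a new segment. */
static unsigned int count_segments(const struct extent *v, size_t n, uint32_t max_seg)
{
        unsigned int segments = 0;
        uint32_t seg_size = 0;

        for (size_t i = 0; i < n; i++) {
                if (i > 0 &&
                    v[i - 1].addr + v[i - 1].len == v[i].addr &&
                    seg_size + v[i].len <= max_seg) {
                        seg_size += v[i].len;       /* merge into the open segment */
                } else {
                        segments++;                 /* start a new segment */
                        seg_size = v[i].len;
                }
        }
        return segments;
}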
|
/kernel/linux/linux-5.10/net/qrtr/ |
D | ns.c |
    109  struct kvec iv;  in service_announce_new() local
    114  iv.iov_base = &pkt;  in service_announce_new()
    115  iv.iov_len = sizeof(pkt);  in service_announce_new()
    127  return kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt));  in service_announce_new()
    135  struct kvec iv;  in service_announce_del() local
    141  iv.iov_base = &pkt;  in service_announce_del()
    142  iv.iov_len = sizeof(pkt);  in service_announce_del()
    154  ret = kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt));  in service_announce_del()
    166  struct kvec iv;  in lookup_notify() local
    169  iv.iov_base = &pkt;  in lookup_notify()
    [all …]
|
/kernel/linux/linux-5.10/net/rxrpc/ |
D | rxkad.c |
    106  struct rxrpc_crypt iv;  in rxkad_prime_packet_security() local
    126  memcpy(&iv, token->kad->session_key, sizeof(iv));  in rxkad_prime_packet_security()
    136  skcipher_request_set_crypt(req, &sg, &sg, tmpsize, iv.x);  in rxkad_prime_packet_security()
    186  struct rxrpc_crypt iv;  in rxkad_secure_packet_auth() local
    199  memset(&iv, 0, sizeof(iv));  in rxkad_secure_packet_auth()
    204  skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);  in rxkad_secure_packet_auth()
    224  struct rxrpc_crypt iv;  in rxkad_secure_packet_encrypt() local
    242  memcpy(&iv, token->kad->session_key, sizeof(iv));  in rxkad_secure_packet_encrypt()
    247  skcipher_request_set_crypt(req, &sg[0], &sg[0], sizeof(rxkhdr), iv.x);  in rxkad_secure_packet_encrypt()
    262  skcipher_request_set_crypt(req, sg, sg, len, iv.x);  in rxkad_secure_packet_encrypt()
    [all …]
|