/arch/s390/include/asm/

  airq.h
     57  static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv) in airq_iv_alloc_bit()
     62  static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit) in airq_iv_free_bit()
     67  static inline unsigned long airq_iv_end(struct airq_iv *iv) in airq_iv_end()
     72  static inline void airq_iv_lock(struct airq_iv *iv, unsigned long bit) in airq_iv_lock()
     78  static inline void airq_iv_unlock(struct airq_iv *iv, unsigned long bit) in airq_iv_unlock()
     84  static inline void airq_iv_set_data(struct airq_iv *iv, unsigned long bit, in airq_iv_set_data()
     90  static inline unsigned int airq_iv_get_data(struct airq_iv *iv, in airq_iv_get_data()
     96  static inline void airq_iv_set_ptr(struct airq_iv *iv, unsigned long bit, in airq_iv_set_ptr()
    102  static inline unsigned long airq_iv_get_ptr(struct airq_iv *iv, in airq_iv_get_ptr()
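The airq_iv_* helpers above manage a bit vector of adapter interrupt indicators with optional per-bit data. As a rough, userspace-only illustration of that idea (names, types and behaviour here are assumptions made for the sketch, not the s390 implementation):

#include <stdio.h>
#include <stdlib.h>

/* Toy model: a bit vector where each allocated bit carries a data word. */
struct toy_iv {
	unsigned long nr_bits;	/* capacity, cf. airq_iv_end() */
	unsigned char *used;	/* nonzero = bit currently allocated */
	unsigned int *data;	/* per-bit data, cf. airq_iv_set_data() */
};

/* Hand out the lowest free bit, or -1UL when the vector is full
 * (cf. airq_iv_alloc_bit()). */
static unsigned long toy_iv_alloc_bit(struct toy_iv *iv)
{
	for (unsigned long bit = 0; bit < iv->nr_bits; bit++) {
		if (!iv->used[bit]) {
			iv->used[bit] = 1;
			return bit;
		}
	}
	return -1UL;
}

/* Return a bit to the pool (cf. airq_iv_free_bit()). */
static void toy_iv_free_bit(struct toy_iv *iv, unsigned long bit)
{
	iv->used[bit] = 0;
}

int main(void)
{
	struct toy_iv iv = {
		.nr_bits = 64,
		.used = calloc(64, sizeof(unsigned char)),
		.data = calloc(64, sizeof(unsigned int)),
	};
	unsigned long bit = toy_iv_alloc_bit(&iv);

	iv.data[bit] = 0x1234;	/* cf. airq_iv_set_data()/airq_iv_get_data() */
	printf("bit %lu carries data %#x\n", bit, iv.data[bit]);
	toy_iv_free_bit(&iv, bit);
	free(iv.used);
	free(iv.data);
	return 0;
}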
/arch/x86/crypto/

  serpent_avx_glue.c
     47  void __serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv) in __serpent_crypt_ctr()
     61  void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in serpent_xts_enc()
     67  void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in serpent_xts_dec()

  aesni-intel_glue.c
    487  const u8 *in, unsigned int len, u8 *iv) in aesni_ctr_enc_avx_tfm()
    554  static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in aesni_xts_enc()
    559  static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in aesni_xts_dec()
    564  static void aesni_xts_enc32(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in aesni_xts_enc32()
    569  static void aesni_xts_dec32(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in aesni_xts_dec32()
    699  u8 *iv, void *aes_ctx) in gcmaes_crypt_by_sg()
    832  u8 *hash_subkey, u8 *iv, void *aes_ctx) in gcmaes_encrypt()
    839  u8 *hash_subkey, u8 *iv, void *aes_ctx) in gcmaes_decrypt()
    851  u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN); in helper_rfc4106_encrypt() local
    879  u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN); in helper_rfc4106_decrypt() local
    … (further matches omitted)

  cast6_avx_glue.c
     41  static void cast6_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in cast6_xts_enc()
     46  static void cast6_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in cast6_xts_dec()
     51  static void cast6_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv) in cast6_crypt_ctr()

  glue_helper-asm-avx2.S
     55  #define load_ctr_16way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t0x, t1, \ argument
     99  #define gf128mul_x_ble(iv, mask, tmp) \ argument
    106  #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \ argument
    118  #define load_xts_16way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, \ argument

  serpent_sse2_glue.c
     50  static void serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv) in serpent_crypt_ctr()
     64  le128 *iv) in serpent_crypt_ctr_xway()

  camellia_aesni_avx_glue.c
     42  void camellia_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in camellia_xts_enc()
     48  void camellia_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in camellia_xts_dec()

  glue_helper-asm-avx.S
     44  #define load_ctr_8way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2) \ argument
     83  #define gf128mul_x_ble(iv, mask, tmp) \ argument
     90  #define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \ argument

  twofish_glue_3way.c
     55  void twofish_enc_blk_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv) in twofish_enc_blk_ctr()
     72  void twofish_enc_blk_ctr_3way(const void *ctx, u8 *d, const u8 *s, le128 *iv) in twofish_enc_blk_ctr_3way()

  twofish_avx_glue.c
     48  static void twofish_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in twofish_xts_enc()
     53  static void twofish_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv) in twofish_xts_dec()

  camellia_glue.c
   1272  u128 iv = *src; in camellia_decrypt_cbc_2way() local
   1280  void camellia_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv) in camellia_crypt_ctr()
   1296  void camellia_crypt_ctr_2way(const void *ctx, u8 *d, const u8 *s, le128 *iv) in camellia_crypt_ctr_2way()

  glue_helper.c
     80  u128 *iv = (u128 *)walk.iv; in glue_cbc_encrypt_req_128bit() local
    363  le128 *iv, common_glue_func_t fn) in glue_xts_crypt_128bit_one()

  chacha_glue.c
    127  const struct chacha_ctx *ctx, const u8 *iv) in chacha_simd_stream_xor()

  cast5_avx_glue.c
    124  u64 *iv = (u64 *)walk.iv; in cbc_encrypt() local

  blowfish_glue.c
    135  u64 *iv = (u64 *)walk->iv; in __cbc_encrypt() local

  des3_ede_glue.c
    143  u64 *iv = (u64 *)walk->iv; in __cbc_encrypt() local

  camellia-aesni-avx2-asm_64.S
   1205  #define gf128mul_x_ble(iv, mask, tmp) \ argument
   1212  #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \ argument
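The repeated gf128mul_x_ble hits above are the XTS tweak update: the 128-bit tweak is multiplied by x in GF(2^128) using the little-endian block convention. A minimal standalone sketch of that operation follows; it is plain C written for illustration, not the kernel's SIMD macros, and the function names are assumptions.

#include <stdint.h>
#include <stdio.h>

/*
 * Multiply a 128-bit XTS tweak by x in GF(2^128) (IEEE P1619 convention):
 * treat the 16-byte tweak as a little-endian integer, shift left by one bit,
 * and on carry out of bit 127 reduce with x^128 + x^7 + x^2 + x + 1
 * (constant 0x87). Generic illustration only.
 */
static void xts_mul_x(uint8_t t[16])
{
	uint8_t carry = 0;

	for (int i = 0; i < 16; i++) {
		uint8_t next_carry = t[i] >> 7;

		t[i] = (uint8_t)((t[i] << 1) | carry);
		carry = next_carry;
	}
	if (carry)
		t[0] ^= 0x87;
}

int main(void)
{
	uint8_t tweak[16] = { 0 };

	tweak[15] = 0x80;	/* bit 127 set: forces a reduction */
	xts_mul_x(tweak);
	for (int i = 0; i < 16; i++)
		printf("%02x", tweak[i]);
	printf("\n");		/* expected: 0x87 followed by 15 zero bytes */
	return 0;
}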
/arch/arm/crypto/

  chacha-neon-glue.c
     66  const struct chacha_ctx *ctx, const u8 *iv) in chacha_neon_stream_xor()

  aes-neonbs-glue.c
    331  int rounds, int blocks, u8 iv[], int)) in __xts_crypt()
/arch/arm64/crypto/

  chacha-neon-glue.c
     63  const struct chacha_ctx *ctx, const u8 *iv) in chacha_neon_stream_xor()

  ghash-ce-glue.c
    424  u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt() local
    544  u8 iv[2 * AES_BLOCK_SIZE]; in gcm_decrypt() local

  aes-neonbs-glue.c
    319  int rounds, int blocks, u8 iv[])) in __xts_crypt()
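The gcm_encrypt()/gcm_decrypt() locals above hold the per-request counter block that GCM derives from its nonce. For the common 96-bit IV case, the initial counter block is IV || 0x00000001, and the low 32 bits are incremented big-endian per block. A standalone sketch of that derivation (illustrative only, not the arm64 driver code; helper names are assumptions):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define GCM_BLOCK_SIZE 16
#define GCM_IV_SIZE    12

/*
 * Build the initial GCM counter block J0 for a 96-bit IV:
 * J0 = IV || 0x00000001 (32-bit big-endian counter in the last four bytes).
 */
static void gcm_init_counter(uint8_t j0[GCM_BLOCK_SIZE],
			     const uint8_t iv[GCM_IV_SIZE])
{
	memcpy(j0, iv, GCM_IV_SIZE);
	j0[12] = 0;
	j0[13] = 0;
	j0[14] = 0;
	j0[15] = 1;
}

/* inc32(): big-endian increment of the low 32 bits, wrapping mod 2^32. */
static void gcm_inc32(uint8_t ctr[GCM_BLOCK_SIZE])
{
	for (int i = GCM_BLOCK_SIZE - 1; i >= GCM_IV_SIZE; i--) {
		if (++ctr[i] != 0)
			break;
	}
}

int main(void)
{
	uint8_t iv[GCM_IV_SIZE] = { 0 };
	uint8_t ctr[GCM_BLOCK_SIZE];

	gcm_init_counter(ctr, iv);
	gcm_inc32(ctr);		/* first plaintext block uses inc32(J0) */
	printf("counter low word: %02x%02x%02x%02x\n",
	       ctr[12], ctr[13], ctr[14], ctr[15]);	/* prints 00000002 */
	return 0;
}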
/arch/s390/crypto/

  des_s390.c
     30  u8 iv[DES_BLOCK_SIZE]; member
    107  u8 iv[DES_BLOCK_SIZE]; in cbc_desall_crypt() member
    352  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) in __ctrblk_init()

  paes_s390.c
    286  u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt() member
    576  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) in __ctrblk_init()
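Both __ctrblk_init() hits point at the same CTR pattern: a buffer is pre-filled with consecutive counter blocks derived from the current IV before the bulk operation runs. A standalone sketch of that prefill, assuming a 16-byte block and a big-endian counter; buffer sizes and helper names here are illustrative, not the s390 code:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16

/* Big-endian increment of a BLOCK_SIZE-byte counter (cf. crypto_inc()). */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
	for (int i = BLOCK_SIZE - 1; i >= 0; i--) {
		if (++ctr[i] != 0)
			break;
	}
}

/*
 * Fill ctrptr with consecutive counter blocks starting from *iv and leave
 * iv holding the value to use after the filled range; returns the number
 * of bytes filled (whole blocks only). Sketch of the idea behind the
 * __ctrblk_init() helpers listed above.
 */
static unsigned int ctrblk_init(uint8_t *ctrptr, uint8_t iv[BLOCK_SIZE],
				unsigned int nbytes)
{
	unsigned int n = nbytes & ~(BLOCK_SIZE - 1);

	for (unsigned int off = 0; off < n; off += BLOCK_SIZE) {
		memcpy(ctrptr + off, iv, BLOCK_SIZE);
		ctr_inc(iv);
	}
	return n;
}

int main(void)
{
	uint8_t iv[BLOCK_SIZE] = { 0 };
	uint8_t buf[4 * BLOCK_SIZE];
	unsigned int filled = ctrblk_init(buf, iv, sizeof(buf));

	printf("filled %u bytes, next counter byte: %u\n", filled, iv[15]);
	/* prints: filled 64 bytes, next counter byte: 4 */
	return 0;
}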
/arch/x86/kernel/cpu/mce/

  core.c
   1500  unsigned long iv; in mce_timer_fn() local
   1535  unsigned long iv = __this_cpu_read(mce_next_interval); in mce_timer_kick() local
   1892  unsigned long iv = check_interval * HZ; in mce_start_timer() local