/arch/s390/include/asm/ |
D | airq.h |
    51  void airq_iv_release(struct airq_iv *iv);
    52  unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
    53  void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
    54  unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
    57  static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv)  in airq_iv_alloc_bit() argument
    59  return airq_iv_alloc(iv, 1);  in airq_iv_alloc_bit()
    62  static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit)  in airq_iv_free_bit() argument
    64  airq_iv_free(iv, bit, 1);  in airq_iv_free_bit()
    67  static inline unsigned long airq_iv_end(struct airq_iv *iv)  in airq_iv_end() argument
    69  return iv->end;  in airq_iv_end()
    [all …]
|
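Note that this first hit is not a cipher IV at all: in airq.h, `iv` names an s390 adapter-interrupt bit vector, and the functions above form a small bit allocator. A hypothetical usage sketch (claim_irq_bit/release_irq_bit are illustrative names, not kernel APIs; it assumes the vector was created with the AIRQ_IV_ALLOC flag, and relies on airq_iv_alloc() returning -1UL when the vector is exhausted):

	/* Hypothetical wrappers around the allocator API listed above.
	 * Kernel context assumed: struct airq_iv comes from <asm/airq.h>,
	 * and the vector must have been created with AIRQ_IV_ALLOC. */
	static int claim_irq_bit(struct airq_iv *iv, unsigned long *out_bit)
	{
		unsigned long bit = airq_iv_alloc_bit(iv);  /* == airq_iv_alloc(iv, 1) */

		if (bit == -1UL)
			return -ENOSPC;                     /* no free bit left */
		*out_bit = bit;
		return 0;
	}

	static void release_irq_bit(struct airq_iv *iv, unsigned long bit)
	{
		airq_iv_free_bit(iv, bit);                  /* == airq_iv_free(iv, bit, 1) */
	}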
/arch/x86/crypto/ |
D | sm4_aesni_avx_glue.c |
    27  const u8 *src, u8 *iv);
    29  const u8 *src, u8 *iv);
    31  const u8 *src, u8 *iv);
   104  const u8 *iv = walk.iv;  in sm4_cbc_encrypt() local
   109  crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);  in sm4_cbc_encrypt()
   111  iv = dst;  in sm4_cbc_encrypt()
   116  if (iv != walk.iv)  in sm4_cbc_encrypt()
   117  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);  in sm4_cbc_encrypt()
   144  func(ctx->rkey_dec, dst, src, walk.iv);  in sm4_avx_cbc_decrypt()
   152  u8 iv[SM4_BLOCK_SIZE];  in sm4_avx_cbc_decrypt() local
    [all …]
|
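The sm4_cbc_encrypt() hits above trace the canonical serial CBC-encrypt walk: XOR each plaintext block with the running chaining value (initially walk.iv), encrypt, let the ciphertext become the next chaining value, and copy the final block back into walk.iv for the next walk step. A minimal self-contained sketch of that pattern, with sm4_encrypt_block() standing in for the real SM4 primitive:

	#include <stddef.h>
	#include <string.h>

	#define BLK 16	/* SM4_BLOCK_SIZE */

	/* Stand-in for the per-cipher single-block primitive. */
	void sm4_encrypt_block(const void *key, unsigned char *dst, const unsigned char *src);

	static void cbc_encrypt_sketch(const void *key, unsigned char *dst,
				       const unsigned char *src, size_t nblocks,
				       unsigned char *walk_iv)
	{
		const unsigned char *iv = walk_iv;	/* running chaining value */

		while (nblocks--) {
			for (int i = 0; i < BLK; i++)	/* the crypto_xor_cpy() step */
				dst[i] = src[i] ^ iv[i];
			sm4_encrypt_block(key, dst, dst); /* encrypt in place */
			iv = dst;			/* ciphertext chains forward */
			src += BLK;
			dst += BLK;
		}
		if (iv != walk_iv)			/* persist last ciphertext block */
			memcpy(walk_iv, iv, BLK);
	}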
D | aesni-intel_glue.c |
    92  const u8 *in, unsigned int len, u8 *iv);
    94  const u8 *in, unsigned int len, u8 *iv);
    96  const u8 *in, unsigned int len, u8 *iv);
    98  const u8 *in, unsigned int len, u8 *iv);
   104  const u8 *in, unsigned int len, u8 *iv);
   107  const u8 *in, unsigned int len, u8 *iv);
   112  const u8 *in, unsigned int len, u8 *iv);
   118  u8 *iv,
   132  asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
   134  asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
    [all …]
|
D | ecb_cbc_helpers.h |
    45  const u8 *__iv = walk.iv; \
    52  memcpy(walk.iv, __iv, __bsize); \
    61  crypto_xor(dst, walk.iv, __bsize); \
    62  memcpy(walk.iv, __iv, __bsize); \
|
D | blowfish_glue.c |
   133  u64 *iv = (u64 *)walk->iv;  in __cbc_encrypt() local
   136  *dst = *src ^ *iv;  in __cbc_encrypt()
   138  iv = dst;  in __cbc_encrypt()
   145  *(u64 *)walk->iv = *iv;  in __cbc_encrypt()
   224  *dst ^= *(u64 *)walk->iv;  in __cbc_decrypt()
   225  *(u64 *)walk->iv = last_iv;  in __cbc_decrypt()
|
D | des3_ede_glue.c |
   141  u64 *iv = (u64 *)walk->iv;  in __cbc_encrypt() local
   144  *dst = *src ^ *iv;  in __cbc_encrypt()
   146  iv = dst;  in __cbc_encrypt()
   153  *(u64 *)walk->iv = *iv;  in __cbc_encrypt()
   230  *dst ^= *(u64 *)walk->iv;  in __cbc_decrypt()
   231  *(u64 *)walk->iv = last_iv;  in __cbc_decrypt()
|
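The decrypt side, visible in ecb_cbc_helpers.h, blowfish_glue.c, and des3_ede_glue.c above, needs the mirror-image bookkeeping: because decryption may run in place, the last ciphertext block (the next IV) must be captured before it is overwritten, and each decrypted block is XORed with the previous ciphertext block. A sketch of the 64-bit-block variant those glue files open-code (bf_decrypt_block() is a stand-in):

	#include <stddef.h>
	#include <stdint.h>

	/* Stand-in for the per-cipher single-block primitive. */
	void bf_decrypt_block(const void *key, uint64_t *dst, const uint64_t *src);

	static void cbc_decrypt_sketch(const void *key, uint64_t *dst,
				       const uint64_t *src, size_t nblocks,
				       uint64_t *walk_iv)
	{
		uint64_t prev = *walk_iv;	/* chaining value for the first block */

		for (size_t n = 0; n < nblocks; n++) {
			uint64_t cur = src[n];	/* save: dst may alias src */

			bf_decrypt_block(key, &dst[n], &src[n]);
			dst[n] ^= prev;		/* undo the CBC chaining XOR */
			prev = cur;
		}
		*walk_iv = prev;		/* last ciphertext block == next IV */
	}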
D | chacha_glue.c |
   136  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_arch() argument
   138  chacha_init_generic(state, key, iv);  in chacha_init_arch()
   164  const struct chacha_ctx *ctx, const u8 *iv)  in chacha_simd_stream_xor() argument
   172  chacha_init_generic(state, ctx->key, iv);  in chacha_simd_stream_xor()
   203  return chacha_simd_stream_xor(req, ctx, req->iv);  in chacha_simd()
   214  chacha_init_generic(state, ctx->key, req->iv);  in xchacha_simd()
   225  memcpy(&real_iv[0], req->iv + 24, 8);  in xchacha_simd()
   226  memcpy(&real_iv[8], req->iv + 16, 8);  in xchacha_simd()
|
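The xchacha_simd() hits show how the kernel turns a 32-byte XChaCha IV into an ordinary ChaCha invocation: the first 16 IV bytes seed an HChaCha pass that derives a subkey, and the remaining bytes are reassembled so that bytes 24..31 land in the counter words and bytes 16..23 in the nonce words. The same three lines recur in the MIPS, arm64, and arm glue files below. A sketch, treating chacha_init_generic()/hchacha_block_generic() as opaque primitives with the kernel's signatures:

	#include <stdint.h>
	#include <string.h>

	void chacha_init_generic(uint32_t state[16], const uint32_t key[8], const uint8_t iv[16]);
	void hchacha_block_generic(const uint32_t state[16], uint32_t out[8], int nrounds);

	static void xchacha_setup_sketch(uint32_t state[16], const uint32_t key[8],
					 const uint8_t iv[32], int nrounds)
	{
		uint32_t subkey[8];
		uint8_t real_iv[16];

		chacha_init_generic(state, key, iv);		/* loads iv[0..15] */
		hchacha_block_generic(state, subkey, nrounds);	/* derive subkey */

		memcpy(&real_iv[0], iv + 24, 8);		/* counter words */
		memcpy(&real_iv[8], iv + 16, 8);		/* nonce words */
		chacha_init_generic(state, subkey, real_iv);	/* regular ChaCha from here */
	}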
D | sm4_aesni_avx2_glue.c |
    23  const u8 *src, u8 *iv);
    25  const u8 *src, u8 *iv);
    27  const u8 *src, u8 *iv);
|
D | sm4-avx.h |
     8  typedef void (*sm4_crypt_func)(const u32 *rk, u8 *dst, const u8 *src, u8 *iv);
|
/arch/mips/crypto/ |
D | chacha-glue.c |
    23  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_arch() argument
    25  chacha_init_generic(state, key, iv);  in chacha_init_arch()
    30  const struct chacha_ctx *ctx, const u8 *iv)  in chacha_mips_stream_xor() argument
    38  chacha_init_generic(state, ctx->key, iv);  in chacha_mips_stream_xor()
    59  return chacha_mips_stream_xor(req, ctx, req->iv);  in chacha_mips()
    70  chacha_init_generic(state, ctx->key, req->iv);  in xchacha_mips()
    75  memcpy(&real_iv[0], req->iv + 24, 8);  in xchacha_mips()
    76  memcpy(&real_iv[8], req->iv + 16, 8);  in xchacha_mips()
|
/arch/arm64/crypto/ |
D | ghash-ce-glue.c |
   348  u8 iv[AES_BLOCK_SIZE];  in gcm_encrypt() local
   360  memcpy(iv, req->iv, GCM_IV_SIZE);  in gcm_encrypt()
   361  put_unaligned_be32(2, iv + GCM_IV_SIZE);  in gcm_encrypt()
   383  dg, iv, ctx->aes_key.key_enc, nrounds,  in gcm_encrypt()
   404  aes_encrypt(&ctx->aes_key, buf, iv);  in gcm_encrypt()
   406  crypto_inc(iv, AES_BLOCK_SIZE);  in gcm_encrypt()
   421  aes_encrypt(&ctx->aes_key, buf, iv);  in gcm_encrypt()
   439  put_unaligned_be32(1, iv + GCM_IV_SIZE);  in gcm_encrypt()
   440  aes_encrypt(&ctx->aes_key, iv, iv);  in gcm_encrypt()
   441  crypto_xor(tag, iv, AES_BLOCK_SIZE);  in gcm_encrypt()
    [all …]
|
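In gcm_encrypt() the `iv` local is the 16-byte counter block: the 12-byte GCM nonce is copied in, the trailing 32-bit counter is set to 2 for the first payload block (crypto_inc() advances it per block), and at the end the counter is rewound to 1 so that E(K, J0) can mask the GHASH value into the final tag. A compact sketch of that counter discipline, with aes_encrypt_block() as a stand-in:

	#include <stdint.h>
	#include <string.h>

	#define GCM_IV_SIZE	12
	#define AES_BLOCK_SIZE	16

	void aes_encrypt_block(const void *key, uint8_t out[16], const uint8_t in[16]);

	static void put_be32(uint32_t v, uint8_t *p)
	{
		p[0] = v >> 24; p[1] = v >> 16; p[2] = v >> 8; p[3] = v;
	}

	static void gcm_tag_sketch(const void *key, const uint8_t nonce[GCM_IV_SIZE],
				   uint8_t ghash[AES_BLOCK_SIZE],	/* in: GHASH, out: tag */
				   uint8_t keystream0[AES_BLOCK_SIZE])
	{
		uint8_t iv[AES_BLOCK_SIZE];

		memcpy(iv, nonce, GCM_IV_SIZE);
		put_be32(2, iv + GCM_IV_SIZE);		/* payload starts at counter 2 */
		aes_encrypt_block(key, keystream0, iv);	/* keystream for the first block;
							 * a crypto_inc()-style bump follows
							 * for each further block */

		put_be32(1, iv + GCM_IV_SIZE);		/* J0 = nonce || 1 */
		aes_encrypt_block(key, iv, iv);
		for (int i = 0; i < AES_BLOCK_SIZE; i++)
			ghash[i] ^= iv[i];		/* tag = GHASH ^ E(K, J0) */
	}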
D | aes-neonbs-glue.c |
    34  int rounds, int blocks, u8 iv[]);
    37  int rounds, int blocks, u8 iv[], u8 final[]);
    40  int rounds, int blocks, u8 iv[]);
    42  int rounds, int blocks, u8 iv[]);
    48  int rounds, int blocks, u8 iv[]);
    51  u32 const rk2[], u8 iv[], int first);
    54  u32 const rk2[], u8 iv[], int first);
   170  walk.iv);  in cbc_encrypt()
   196  walk.iv);  in cbc_decrypt()
   227  ctx->rk, ctx->rounds, blocks, walk.iv, final);  in ctr_encrypt()
    [all …]
|
D | chacha-neon-glue.c |
    77  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_arch() argument
    79  chacha_init_generic(state, key, iv);  in chacha_init_arch()
   105  const struct chacha_ctx *ctx, const u8 *iv)  in chacha_neon_stream_xor() argument
   113  chacha_init_generic(state, ctx->key, iv);  in chacha_neon_stream_xor()
   143  return chacha_neon_stream_xor(req, ctx, req->iv);  in chacha_neon()
   154  chacha_init_generic(state, ctx->key, req->iv);  in xchacha_neon()
   158  memcpy(&real_iv[0], req->iv + 24, 8);  in xchacha_neon()
   159  memcpy(&real_iv[8], req->iv + 16, 8);  in xchacha_neon()
|
D | aes-glue.c |
    83  int rounds, int blocks, u8 iv[]);
    85  int rounds, int blocks, u8 iv[]);
    88  int rounds, int bytes, u8 const iv[]);
    90  int rounds, int bytes, u8 const iv[]);
    99  int rounds, int bytes, u32 const rk2[], u8 iv[],
   102  int rounds, int bytes, u32 const rk2[], u8 iv[],
   106  int rounds, int blocks, u8 iv[],
   109  int rounds, int blocks, u8 iv[],
   230  ctx->key_enc, rounds, blocks, walk->iv);  in cbc_encrypt_walk()
   259  ctx->key_dec, rounds, blocks, walk->iv);  in cbc_decrypt_walk()
    [all …]
|
D | aes-ce-ccm-glue.c |
    65  u32 l = req->iv[0] + 1;  in ccm_init_mac()
    82  memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);  in ccm_init_mac()
    95  memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);  in ccm_init_mac()
   198  crypto_inc(walk->iv, AES_BLOCK_SIZE);  in ccm_crypt_fallback()
   199  aes_encrypt(ctx, buf, walk->iv);  in ccm_crypt_fallback()
   240  memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_encrypt()
   255  num_rounds(ctx), mac, walk.iv);  in ccm_encrypt()
   298  memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_decrypt()
   313  num_rounds(ctx), mac, walk.iv);  in ccm_decrypt()
|
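ccm_init_mac() exposes CCM's nonce layout: byte 0 of req->iv encodes L' = L - 1, so l = iv[0] + 1 is the width of the trailing counter field; the MAC IV (B0) reuses the flags-and-nonce prefix, and the CTR IV (A0) is obtained by zeroing those trailing l bytes. A sketch of just that formatting step (B0's flag and message-length encoding is elided; function name is illustrative):

	#include <stdint.h>
	#include <string.h>

	#define AES_BLOCK_SIZE 16

	static int ccm_format_ivs_sketch(uint8_t maciv[AES_BLOCK_SIZE],
					 uint8_t ctriv[AES_BLOCK_SIZE],
					 const uint8_t req_iv[AES_BLOCK_SIZE])
	{
		uint32_t l = req_iv[0] + 1;	/* width of the counter field */

		if (l < 2 || l > 8)		/* RFC 3610 bounds on L */
			return -1;

		memcpy(maciv, req_iv, AES_BLOCK_SIZE - l);	/* flags + nonce -> B0 */
		/* ... the real code also folds the message length into B0 ... */

		memcpy(ctriv, req_iv, AES_BLOCK_SIZE);
		memset(&ctriv[AES_BLOCK_SIZE - l], 0, l);	/* A0: counter = 0 */
		return 0;
	}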
/arch/arm/crypto/ |
D | aes-ce-glue.c |
    35  int rounds, int blocks, u8 iv[]);
    37  int rounds, int blocks, u8 iv[]);
    39  int rounds, int bytes, u8 const iv[]);
    41  int rounds, int bytes, u8 const iv[]);
    47  int rounds, int bytes, u8 iv[],
    50  int rounds, int bytes, u8 iv[],
   219  walk->iv);  in cbc_encrypt_walk()
   249  walk->iv);  in cbc_decrypt_walk()
   291  req->iv);  in cts_cbc_encrypt()
   310  req->iv);  in cts_cbc_encrypt()
    [all …]
|
D | aes-neonbs-glue.c |
    37  int rounds, int blocks, u8 iv[]);
    43  int rounds, int blocks, u8 iv[], int);
    45  int rounds, int blocks, u8 iv[], int);
   159  req->cryptlen, req->iv);  in cbc_encrypt()
   183  walk.iv);  in cbc_decrypt()
   257  ctx->rk, ctx->rounds, blocks, walk.iv, final);  in ctr_encrypt()
   346  int rounds, int blocks, u8 iv[], int))  in __xts_crypt() argument
   365  req->cryptlen - tail, req->iv);  in __xts_crypt()
   373  crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);  in __xts_crypt()
   387  ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);  in __xts_crypt()
    [all …]
|
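Several of the AES glue files above share the XTS tweak handling seen in __xts_crypt() (and in the xts paths of aes-glue.c and aes-spe-glue.c): walk.iv is encrypted once under the second key to form the initial tweak (the crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv) line), each block is masked with the tweak before and after the block cipher, and the tweak is multiplied by x in GF(2^128) between blocks. A sketch with aes_enc_block() as a stand-in:

	#include <stddef.h>
	#include <stdint.h>

	#define BLK 16

	void aes_enc_block(const void *key, uint8_t out[BLK], const uint8_t in[BLK]);

	/* Multiply the tweak by x (little-endian convention, as in the
	 * kernel's gf128mul_x_ble()); 0x87 reduces x^128. */
	static void gf128_mul_x_ble(uint8_t t[BLK])
	{
		uint8_t carry = t[BLK - 1] >> 7;

		for (int i = BLK - 1; i > 0; i--)
			t[i] = (t[i] << 1) | (t[i - 1] >> 7);
		t[0] = (t[0] << 1) ^ (carry ? 0x87 : 0);
	}

	static void xts_encrypt_sketch(const void *k1, const void *k2, uint8_t *dst,
				       const uint8_t *src, size_t nblocks, uint8_t iv[BLK])
	{
		aes_enc_block(k2, iv, iv);		/* tweak = E_K2(IV) */

		for (size_t n = 0; n < nblocks; n++, src += BLK, dst += BLK) {
			uint8_t buf[BLK];

			for (int i = 0; i < BLK; i++)
				buf[i] = src[i] ^ iv[i];
			aes_enc_block(k1, buf, buf);	/* C = E_K1(P ^ T) ^ T */
			for (int i = 0; i < BLK; i++)
				dst[i] = buf[i] ^ iv[i];
			gf128_mul_x_ble(iv);		/* next tweak */
		}
	}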
D | chacha-glue.c |
    79  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)  in chacha_init_arch() argument
    81  chacha_init_generic(state, key, iv);  in chacha_init_arch()
   110  const struct chacha_ctx *ctx, const u8 *iv,  in chacha_stream_xor() argument
   119  chacha_init_generic(state, ctx->key, iv);  in chacha_stream_xor()
   148  return chacha_stream_xor(req, ctx, req->iv, neon);  in do_chacha()
   169  chacha_init_generic(state, ctx->key, req->iv);  in do_xchacha()
   180  memcpy(&real_iv[0], req->iv + 24, 8);  in do_xchacha()
   181  memcpy(&real_iv[8], req->iv + 16, 8);  in do_xchacha()
|
/arch/powerpc/crypto/ |
D | aes-spe-glue.c |
    61  u32 bytes, u8 *iv);
    63  u32 bytes, u8 *iv);
    65  u32 bytes, u8 *iv);
    67  u32 bytes, u8 *iv, u32 *key_twk);
    69  u32 bytes, u8 *iv, u32 *key_twk);
   237  walk.iv);  in ppc_cbc_crypt()
   241  walk.iv);  in ppc_cbc_crypt()
   277  ctx->key_enc, ctx->rounds, nbytes, walk.iv);  in ppc_ctr_crypt()
   306  walk.iv, twk);  in ppc_xts_crypt()
   310  walk.iv, twk);  in ppc_xts_crypt()
    [all …]
|
/arch/sparc/crypto/ |
D | aes_glue.c |
    45  unsigned int len, u64 *iv);
    47  unsigned int len, u64 *iv);
    49  unsigned int len, u64 *iv);
    97  u64 *iv);
   101  u64 *iv);
   105  u64 *iv);
   109  u64 *iv);
   113  u64 *iv);
   117  u64 *iv);
   121  u64 *iv);
    [all …]
|
D | des_glue.c |
   131  unsigned int len, u64 *iv);
   134  unsigned int len, u64 *iv);
   158  walk.iv);  in __cbc_crypt()
   164  walk.iv);  in __cbc_crypt()
   284  u64 *iv);
   288  u64 *iv);
   314  walk.iv);  in __cbc3_crypt()
   320  walk.iv);  in __cbc3_crypt()
|
/arch/s390/crypto/ |
D | des_s390.c |
    31  u8 iv[DES_BLOCK_SIZE];  member
   116  u8 iv[DES_BLOCK_SIZE];  in cbc_desall_crypt() member
   123  memcpy(param.iv, walk.iv, DES_BLOCK_SIZE);  in cbc_desall_crypt()
   130  memcpy(walk.iv, param.iv, DES_BLOCK_SIZE);  in cbc_desall_crypt()
   303  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)  in __ctrblk_init() argument
   309  memcpy(ctrptr, iv, DES_BLOCK_SIZE);  in __ctrblk_init()
   333  n = __ctrblk_init(ctrblk, walk.iv, nbytes);  in ctr_desall_crypt()
   334  ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk.iv;  in ctr_desall_crypt()
   338  memcpy(walk.iv, ctrptr + n - DES_BLOCK_SIZE,  in ctr_desall_crypt()
   340  crypto_inc(walk.iv, DES_BLOCK_SIZE);  in ctr_desall_crypt()
    [all …]
|
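__ctrblk_init(), which also appears in paes_s390.c and aes_s390.c below, exists because s390's CPACF instructions consume an array of counter values in one call: the driver pre-expands walk.iv into consecutive counters, one per full input block, then copies the last used counter back into walk.iv and increments it (lines 338..340 above). A sketch of the expansion, assuming nbytes covers at least one block, with ctr_inc_be() standing in for crypto_inc():

	#include <stdint.h>
	#include <string.h>

	#define BLK 8	/* DES_BLOCK_SIZE; the AES drivers use 16 */

	/* Stand-in for crypto_inc(): big-endian increment of a BLK-byte counter. */
	static void ctr_inc_be(uint8_t *ctr)
	{
		for (int i = BLK - 1; i >= 0 && ++ctr[i] == 0; i--)
			;
	}

	/* Pre-expand iv into one counter per full input block; returns the
	 * number of counter bytes written (the real helper also caps this
	 * at PAGE_SIZE). */
	static unsigned int ctrblk_init_sketch(uint8_t *ctrptr, const uint8_t *iv,
					       unsigned int nbytes)
	{
		unsigned int i, n = nbytes & ~(BLK - 1);

		memcpy(ctrptr, iv, BLK);
		for (i = BLK; i < n; i += BLK) {
			memcpy(ctrptr + i, ctrptr + i - BLK, BLK);
			ctr_inc_be(ctrptr + i);
		}
		return n;
	}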
D | paes_s390.c |
   333  u8 iv[AES_BLOCK_SIZE];  in cbc_paes_crypt() member
   341  memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);  in cbc_paes_crypt()
   352  memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);  in cbc_paes_crypt()
   515  memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));  in xts_paes_crypt()
   623  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)  in __ctrblk_init() argument
   628  memcpy(ctrptr, iv, AES_BLOCK_SIZE);  in __ctrblk_init()
   663  n = __ctrblk_init(ctrblk, walk.iv, nbytes);  in ctr_paes_crypt()
   664  ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;  in ctr_paes_crypt()
   669  memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,  in ctr_paes_crypt()
   671  crypto_inc(walk.iv, AES_BLOCK_SIZE);  in ctr_paes_crypt()
    [all …]
|
D | aes_s390.c |
   326  u8 iv[AES_BLOCK_SIZE];  in cbc_aes_crypt() member
   336  memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);  in cbc_aes_crypt()
   343  memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);  in cbc_aes_crypt()
   461  memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));  in xts_aes_crypt()
   554  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)  in __ctrblk_init() argument
   559  memcpy(ctrptr, iv, AES_BLOCK_SIZE);  in __ctrblk_init()
   588  n = __ctrblk_init(ctrblk, walk.iv, nbytes);  in ctr_aes_crypt()
   589  ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;  in ctr_aes_crypt()
   593  memcpy(walk.iv, ctrptr + n - AES_BLOCK_SIZE,  in ctr_aes_crypt()
   595  crypto_inc(walk.iv, AES_BLOCK_SIZE);  in ctr_aes_crypt()
    [all …]
|
/arch/x86/kernel/cpu/mce/ |
D | core.c |
  1535  unsigned long iv;  in mce_timer_fn() local
  1539  iv = __this_cpu_read(mce_next_interval);  in mce_timer_fn()
  1545  iv = mce_adjust_timer(iv);  in mce_timer_fn()
  1555  iv = max(iv / 2, (unsigned long) HZ/100);  in mce_timer_fn()
  1557  iv = min(iv * 2, round_jiffies_relative(check_interval * HZ));  in mce_timer_fn()
  1560  __this_cpu_write(mce_next_interval, iv);  in mce_timer_fn()
  1561  __start_timer(t, iv);  in mce_timer_fn()
  1570  unsigned long iv = __this_cpu_read(mce_next_interval);  in mce_timer_kick() local
  1574  if (interval < iv)  in mce_timer_kick()
  1971  unsigned long iv = check_interval * HZ;  in mce_start_timer() local
    [all …]
|
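This final entry is unrelated to crypto: in the x86 machine-check core, `iv` is a polling interval in jiffies. mce_timer_fn() implements exponential backoff, halving the interval (floored at HZ/100) while errors are being found and doubling it (capped at check_interval seconds) while the machine is quiet. A sketch of that adjustment, simplified to drop round_jiffies_relative(); found_errors stands in for the result of the machine-check poll:

	/* Returns the next polling interval in jiffies. hz and check_interval
	 * mirror the kernel's HZ and check_interval (default 5 * 60 seconds). */
	static unsigned long next_poll_interval(unsigned long iv, int found_errors,
						unsigned long hz,
						unsigned long check_interval)
	{
		unsigned long floor = hz / 100;			/* 10 ms at HZ=1000 */
		unsigned long ceiling = check_interval * hz;

		if (found_errors)
			iv = (iv / 2 > floor) ? iv / 2 : floor;		/* max(iv/2, HZ/100) */
		else
			iv = (iv * 2 < ceiling) ? iv * 2 : ceiling;	/* min(iv*2, cap) */
		return iv;
	}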