Searched refs:iv (Results 1 – 25 of 35) sorted by relevance

/arch/s390/include/asm/
airq.h
51 void airq_iv_release(struct airq_iv *iv);
52 unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
53 void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
54 unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
57 static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv) in airq_iv_alloc_bit() argument
59 return airq_iv_alloc(iv, 1); in airq_iv_alloc_bit()
62 static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit) in airq_iv_free_bit() argument
64 airq_iv_free(iv, bit, 1); in airq_iv_free_bit()
67 static inline unsigned long airq_iv_end(struct airq_iv *iv) in airq_iv_end() argument
69 return iv->end; in airq_iv_end()
[all …]
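
The airq_iv API above is s390's adapter-interrupt bit-vector allocator: allocate one or more contiguous bits, free them, scan for pending ones. A minimal userspace sketch of that allocate/free shape, with hypothetical names (bitvec_*), not the kernel's implementation:

#define BITS_PER_LONG (8 * sizeof(unsigned long))

struct bitvec {
    unsigned long *bits;   /* backing storage, one bit per vector entry */
    unsigned long end;     /* number of usable bits */
};

static int test_bit(const struct bitvec *v, unsigned long bit)
{
    return (v->bits[bit / BITS_PER_LONG] >> (bit % BITS_PER_LONG)) & 1;
}

static void assign_bit(struct bitvec *v, unsigned long bit, int on)
{
    if (on)
        v->bits[bit / BITS_PER_LONG] |= 1UL << (bit % BITS_PER_LONG);
    else
        v->bits[bit / BITS_PER_LONG] &= ~(1UL << (bit % BITS_PER_LONG));
}

/* First-fit allocation of num contiguous bits, mirroring the
 * airq_iv_alloc()/airq_iv_alloc_bit() pair above; -1UL on failure. */
unsigned long bitvec_alloc(struct bitvec *v, unsigned long num)
{
    for (unsigned long bit = 0; bit + num <= v->end; bit++) {
        unsigned long i;

        for (i = 0; i < num && !test_bit(v, bit + i); i++)
            ;
        if (i == num) {
            while (i--)
                assign_bit(v, bit + i, 1);
            return bit;
        }
    }
    return -1UL;
}

void bitvec_free(struct bitvec *v, unsigned long bit, unsigned long num)
{
    while (num--)
        assign_bit(v, bit + num, 0);
}
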
/arch/x86/crypto/
sm4_aesni_avx_glue.c
27 const u8 *src, u8 *iv);
29 const u8 *src, u8 *iv);
31 const u8 *src, u8 *iv);
104 const u8 *iv = walk.iv; in sm4_cbc_encrypt() local
109 crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE); in sm4_cbc_encrypt()
111 iv = dst; in sm4_cbc_encrypt()
116 if (iv != walk.iv) in sm4_cbc_encrypt()
117 memcpy(walk.iv, iv, SM4_BLOCK_SIZE); in sm4_cbc_encrypt()
144 func(ctx->rkey_dec, dst, src, walk.iv); in sm4_avx_cbc_decrypt()
152 u8 iv[SM4_BLOCK_SIZE]; in sm4_avx_cbc_decrypt() local
[all …]
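
The sm4_cbc_encrypt() fragments above show the standard CBC-encrypt chaining discipline: XOR each plaintext block with the previous ciphertext block (the IV for the first block), encrypt, and finally copy the last ciphertext block back into walk.iv so the next walk step continues the chain. A minimal userspace sketch, with a toy block cipher standing in for SM4:

#include <string.h>
#include <stddef.h>

#define BLOCK_SIZE 16

/* Toy stand-in for the real block primitive -- NOT SM4. */
static void block_encrypt(const unsigned char key[BLOCK_SIZE],
                          unsigned char *dst, const unsigned char *src)
{
    for (int i = 0; i < BLOCK_SIZE; i++)
        dst[i] = src[i] ^ key[i];
}

void cbc_encrypt(const unsigned char key[BLOCK_SIZE],
                 unsigned char *out, const unsigned char *in,
                 size_t nblocks, unsigned char iv[BLOCK_SIZE])
{
    const unsigned char *prev = iv;            /* current chain value */

    while (nblocks--) {
        unsigned char buf[BLOCK_SIZE];

        for (int i = 0; i < BLOCK_SIZE; i++)   /* crypto_xor_cpy() equivalent */
            buf[i] = in[i] ^ prev[i];
        block_encrypt(key, out, buf);
        prev = out;                            /* ciphertext chains forward */
        in += BLOCK_SIZE;
        out += BLOCK_SIZE;
    }
    if (prev != iv)                            /* as the iv != walk.iv check above */
        memcpy(iv, prev, BLOCK_SIZE);
}
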
aesni-intel_glue.c
92 const u8 *in, unsigned int len, u8 *iv);
94 const u8 *in, unsigned int len, u8 *iv);
96 const u8 *in, unsigned int len, u8 *iv);
98 const u8 *in, unsigned int len, u8 *iv);
104 const u8 *in, unsigned int len, u8 *iv);
107 const u8 *in, unsigned int len, u8 *iv);
112 const u8 *in, unsigned int len, u8 *iv);
118 u8 *iv,
132 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
134 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
[all …]
ecb_cbc_helpers.h
45 const u8 *__iv = walk.iv; \
52 memcpy(walk.iv, __iv, __bsize); \
61 crypto_xor(dst, walk.iv, __bsize); \
62 memcpy(walk.iv, __iv, __bsize); \
blowfish_glue.c
133 u64 *iv = (u64 *)walk->iv; in __cbc_encrypt() local
136 *dst = *src ^ *iv; in __cbc_encrypt()
138 iv = dst; in __cbc_encrypt()
145 *(u64 *)walk->iv = *iv; in __cbc_encrypt()
224 *dst ^= *(u64 *)walk->iv; in __cbc_decrypt()
225 *(u64 *)walk->iv = last_iv; in __cbc_decrypt()
des3_ede_glue.c
141 u64 *iv = (u64 *)walk->iv; in __cbc_encrypt() local
144 *dst = *src ^ *iv; in __cbc_encrypt()
146 iv = dst; in __cbc_encrypt()
153 *(u64 *)walk->iv = *iv; in __cbc_encrypt()
230 *dst ^= *(u64 *)walk->iv; in __cbc_decrypt()
231 *(u64 *)walk->iv = last_iv; in __cbc_decrypt()
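
blowfish_glue.c and des3_ede_glue.c share the same 64-bit-block CBC pattern: on encrypt, *dst = *src ^ *iv followed by the cipher, the ciphertext becoming the next IV; on decrypt, the code saves the last ciphertext block up front (last_iv) because in-place decryption overwrites it, then XORs each decrypted block with the preceding ciphertext. A sketch of the decrypt half (one u64 holds a whole 64-bit block), with a toy primitive standing in for the real cipher:

#include <stdint.h>
#include <stddef.h>

/* Toy 64-bit block primitive -- a stand-in, NOT Blowfish or DES3-EDE. */
static void decrypt_block(const uint64_t *key, uint64_t *dst, const uint64_t *src)
{
    *dst = *src ^ *key;
}

/* In-place CBC decrypt of nblocks (>= 1) 64-bit blocks. */
void cbc_decrypt_u64(const uint64_t *key, uint64_t *buf, size_t nblocks,
                     uint64_t *iv)
{
    uint64_t last_iv = buf[nblocks - 1];   /* saved before it is overwritten */

    /* Walk back to front so each block's XOR mask (the preceding
     * ciphertext block) is still intact when it is needed. */
    for (size_t i = nblocks - 1; i > 0; i--) {
        decrypt_block(key, &buf[i], &buf[i]);
        buf[i] ^= buf[i - 1];
    }
    decrypt_block(key, &buf[0], &buf[0]);
    buf[0] ^= *iv;                         /* first block uses the caller's IV */
    *iv = last_iv;                         /* becomes the IV for the next call */
}
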
chacha_glue.c
136 void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv) in chacha_init_arch() argument
138 chacha_init_generic(state, key, iv); in chacha_init_arch()
164 const struct chacha_ctx *ctx, const u8 *iv) in chacha_simd_stream_xor() argument
172 chacha_init_generic(state, ctx->key, iv); in chacha_simd_stream_xor()
203 return chacha_simd_stream_xor(req, ctx, req->iv); in chacha_simd()
214 chacha_init_generic(state, ctx->key, req->iv); in xchacha_simd()
225 memcpy(&real_iv[0], req->iv + 24, 8); in xchacha_simd()
226 memcpy(&real_iv[8], req->iv + 16, 8); in xchacha_simd()
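
The two memcpy() lines above assemble the 16-byte inner-ChaCha IV for XChaCha from the kernel's 32-byte XChaCha IV: bytes 0..15 have already been consumed by HChaCha to derive the subkey, and chacha_init_generic() expects the block counter in the IV's first 8 bytes and the nonce tail in the last 8, so bytes 24..31 and 16..23 of req->iv are swapped into place. A sketch of just that reshuffle (layout inferred from the snippets; illustrative only):

#include <string.h>
#include <stdint.h>

void xchacha_inner_iv(uint8_t real_iv[16], const uint8_t req_iv[32])
{
    memcpy(&real_iv[0], req_iv + 24, 8);  /* counter words -> state[12..13] */
    memcpy(&real_iv[8], req_iv + 16, 8);  /* nonce tail    -> state[14..15] */
}
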
sm4_aesni_avx2_glue.c
23 const u8 *src, u8 *iv);
25 const u8 *src, u8 *iv);
27 const u8 *src, u8 *iv);
sm4-avx.h
8 typedef void (*sm4_crypt_func)(const u32 *rk, u8 *dst, const u8 *src, u8 *iv);
/arch/mips/crypto/
chacha-glue.c
23 void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv) in chacha_init_arch() argument
25 chacha_init_generic(state, key, iv); in chacha_init_arch()
30 const struct chacha_ctx *ctx, const u8 *iv) in chacha_mips_stream_xor() argument
38 chacha_init_generic(state, ctx->key, iv); in chacha_mips_stream_xor()
59 return chacha_mips_stream_xor(req, ctx, req->iv); in chacha_mips()
70 chacha_init_generic(state, ctx->key, req->iv); in xchacha_mips()
75 memcpy(&real_iv[0], req->iv + 24, 8); in xchacha_mips()
76 memcpy(&real_iv[8], req->iv + 16, 8); in xchacha_mips()
/arch/arm64/crypto/
ghash-ce-glue.c
348 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt() local
360 memcpy(iv, req->iv, GCM_IV_SIZE); in gcm_encrypt()
361 put_unaligned_be32(2, iv + GCM_IV_SIZE); in gcm_encrypt()
383 dg, iv, ctx->aes_key.key_enc, nrounds, in gcm_encrypt()
404 aes_encrypt(&ctx->aes_key, buf, iv); in gcm_encrypt()
406 crypto_inc(iv, AES_BLOCK_SIZE); in gcm_encrypt()
421 aes_encrypt(&ctx->aes_key, buf, iv); in gcm_encrypt()
439 put_unaligned_be32(1, iv + GCM_IV_SIZE); in gcm_encrypt()
440 aes_encrypt(&ctx->aes_key, iv, iv); in gcm_encrypt()
441 crypto_xor(tag, iv, AES_BLOCK_SIZE); in gcm_encrypt()
[all …]
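
gcm_encrypt() above follows the GCM counter discipline for a 96-bit IV: counter value 1 is reserved for the block that encrypts the authentication tag (the put_unaligned_be32(1, ...) / crypto_xor(tag, iv, ...) lines), so bulk data encryption starts at counter 2. A minimal sketch of the counter-block construction, big-endian per the GCM spec:

#include <string.h>
#include <stdint.h>

#define GCM_IV_SIZE 12  /* 96-bit IV */

static void put_be32(uint8_t *p, uint32_t v)
{
    p[0] = (uint8_t)(v >> 24);
    p[1] = (uint8_t)(v >> 16);
    p[2] = (uint8_t)(v >> 8);
    p[3] = (uint8_t)v;
}

/* Build the 16-byte counter block: IV || 32-bit big-endian counter.
 * Pass counter = 2 for the first data block, counter = 1 for the tag. */
void gcm_counter_block(uint8_t ctr[16], const uint8_t iv[GCM_IV_SIZE],
                       uint32_t counter)
{
    memcpy(ctr, iv, GCM_IV_SIZE);
    put_be32(ctr + GCM_IV_SIZE, counter);
}
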
aes-neonbs-glue.c
34 int rounds, int blocks, u8 iv[]);
37 int rounds, int blocks, u8 iv[], u8 final[]);
40 int rounds, int blocks, u8 iv[]);
42 int rounds, int blocks, u8 iv[]);
48 int rounds, int blocks, u8 iv[]);
51 u32 const rk2[], u8 iv[], int first);
54 u32 const rk2[], u8 iv[], int first);
170 walk.iv); in cbc_encrypt()
196 walk.iv); in cbc_decrypt()
227 ctx->rk, ctx->rounds, blocks, walk.iv, final); in ctr_encrypt()
[all …]
chacha-neon-glue.c
77 void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv) in chacha_init_arch() argument
79 chacha_init_generic(state, key, iv); in chacha_init_arch()
105 const struct chacha_ctx *ctx, const u8 *iv) in chacha_neon_stream_xor() argument
113 chacha_init_generic(state, ctx->key, iv); in chacha_neon_stream_xor()
143 return chacha_neon_stream_xor(req, ctx, req->iv); in chacha_neon()
154 chacha_init_generic(state, ctx->key, req->iv); in xchacha_neon()
158 memcpy(&real_iv[0], req->iv + 24, 8); in xchacha_neon()
159 memcpy(&real_iv[8], req->iv + 16, 8); in xchacha_neon()
aes-glue.c
83 int rounds, int blocks, u8 iv[]);
85 int rounds, int blocks, u8 iv[]);
88 int rounds, int bytes, u8 const iv[]);
90 int rounds, int bytes, u8 const iv[]);
99 int rounds, int bytes, u32 const rk2[], u8 iv[],
102 int rounds, int bytes, u32 const rk2[], u8 iv[],
106 int rounds, int blocks, u8 iv[],
109 int rounds, int blocks, u8 iv[],
230 ctx->key_enc, rounds, blocks, walk->iv); in cbc_encrypt_walk()
259 ctx->key_dec, rounds, blocks, walk->iv); in cbc_decrypt_walk()
[all …]
aes-ce-ccm-glue.c
65 u32 l = req->iv[0] + 1; in ccm_init_mac()
82 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
95 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
198 crypto_inc(walk->iv, AES_BLOCK_SIZE); in ccm_crypt_fallback()
199 aes_encrypt(ctx, buf, walk->iv); in ccm_crypt_fallback()
240 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_encrypt()
255 num_rounds(ctx), mac, walk.iv); in ccm_encrypt()
298 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_decrypt()
313 num_rounds(ctx), mac, walk.iv); in ccm_decrypt()
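
ccm_init_mac() above reflects CCM's packed IV format from RFC 3610: byte 0 of req->iv encodes q - 1, where q is the size of the message-length field in octets, hence l = req->iv[0] + 1. The initial counter block A0 is the same IV with its trailing l length octets zeroed so the counter starts at zero, which is what the memset() line does. A sketch of that derivation (illustrative, not the kernel code):

#include <string.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* Derive CCM's initial counter block A0 from the caller's IV.
 * RFC 3610 requires 2 <= q <= 8 for the length field. */
int ccm_counter0(uint8_t a0[AES_BLOCK_SIZE], const uint8_t iv[AES_BLOCK_SIZE])
{
    unsigned int q = iv[0] + 1;            /* length-field size in octets */

    if (q < 2 || q > 8)
        return -1;                         /* invalid length-field size */
    memcpy(a0, iv, AES_BLOCK_SIZE);
    memset(&a0[AES_BLOCK_SIZE - q], 0, q); /* counter starts at zero */
    return 0;
}
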
/arch/arm/crypto/
aes-ce-glue.c
35 int rounds, int blocks, u8 iv[]);
37 int rounds, int blocks, u8 iv[]);
39 int rounds, int bytes, u8 const iv[]);
41 int rounds, int bytes, u8 const iv[]);
47 int rounds, int bytes, u8 iv[],
50 int rounds, int bytes, u8 iv[],
219 walk->iv); in cbc_encrypt_walk()
249 walk->iv); in cbc_decrypt_walk()
291 req->iv); in cts_cbc_encrypt()
310 req->iv); in cts_cbc_encrypt()
[all …]
aes-neonbs-glue.c
37 int rounds, int blocks, u8 iv[]);
43 int rounds, int blocks, u8 iv[], int);
45 int rounds, int blocks, u8 iv[], int);
159 req->cryptlen, req->iv); in cbc_encrypt()
183 walk.iv); in cbc_decrypt()
257 ctx->rk, ctx->rounds, blocks, walk.iv, final); in ctr_encrypt()
346 int rounds, int blocks, u8 iv[], int)) in __xts_crypt() argument
365 req->cryptlen - tail, req->iv); in __xts_crypt()
373 crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv); in __xts_crypt()
387 ctx->key.rounds, blocks, walk.iv, reorder_last_tweak); in __xts_crypt()
[all …]
chacha-glue.c
79 void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv) in chacha_init_arch() argument
81 chacha_init_generic(state, key, iv); in chacha_init_arch()
110 const struct chacha_ctx *ctx, const u8 *iv, in chacha_stream_xor() argument
119 chacha_init_generic(state, ctx->key, iv); in chacha_stream_xor()
148 return chacha_stream_xor(req, ctx, req->iv, neon); in do_chacha()
169 chacha_init_generic(state, ctx->key, req->iv); in do_xchacha()
180 memcpy(&real_iv[0], req->iv + 24, 8); in do_xchacha()
181 memcpy(&real_iv[8], req->iv + 16, 8); in do_xchacha()
/arch/powerpc/crypto/
aes-spe-glue.c
61 u32 bytes, u8 *iv);
63 u32 bytes, u8 *iv);
65 u32 bytes, u8 *iv);
67 u32 bytes, u8 *iv, u32 *key_twk);
69 u32 bytes, u8 *iv, u32 *key_twk);
237 walk.iv); in ppc_cbc_crypt()
241 walk.iv); in ppc_cbc_crypt()
277 ctx->key_enc, ctx->rounds, nbytes, walk.iv); in ppc_ctr_crypt()
306 walk.iv, twk); in ppc_xts_crypt()
310 walk.iv, twk); in ppc_xts_crypt()
[all …]
/arch/sparc/crypto/
aes_glue.c
45 unsigned int len, u64 *iv);
47 unsigned int len, u64 *iv);
49 unsigned int len, u64 *iv);
97 u64 *iv);
101 u64 *iv);
105 u64 *iv);
109 u64 *iv);
113 u64 *iv);
117 u64 *iv);
121 u64 *iv);
[all …]
des_glue.c
131 unsigned int len, u64 *iv);
134 unsigned int len, u64 *iv);
158 walk.iv); in __cbc_crypt()
164 walk.iv); in __cbc_crypt()
284 u64 *iv);
288 u64 *iv);
314 walk.iv); in __cbc3_crypt()
320 walk.iv); in __cbc3_crypt()
/arch/s390/crypto/
des_s390.c
31 u8 iv[DES_BLOCK_SIZE]; member
116 u8 iv[DES_BLOCK_SIZE]; in cbc_desall_crypt() member
123 memcpy(param.iv, walk.iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
130 memcpy(walk.iv, param.iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
303 static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) in __ctrblk_init() argument
309 memcpy(ctrptr, iv, DES_BLOCK_SIZE); in __ctrblk_init()
333 n = __ctrblk_init(ctrblk, walk.iv, nbytes); in ctr_desall_crypt()
334 ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk.iv; in ctr_desall_crypt()
338 memcpy(walk.iv, ctrptr + n - DES_BLOCK_SIZE, in ctr_desall_crypt()
340 crypto_inc(walk.iv, DES_BLOCK_SIZE); in ctr_desall_crypt()
[all …]
paes_s390.c
333 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt() member
341 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
352 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
515 memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak)); in xts_paes_crypt()
623 static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) in __ctrblk_init() argument
628 memcpy(ctrptr, iv, AES_BLOCK_SIZE); in __ctrblk_init()
663 n = __ctrblk_init(ctrblk, walk.iv, nbytes); in ctr_paes_crypt()
664 ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv; in ctr_paes_crypt()
669 memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE, in ctr_paes_crypt()
671 crypto_inc(walk.iv, AES_BLOCK_SIZE); in ctr_paes_crypt()
[all …]
aes_s390.c
326 u8 iv[AES_BLOCK_SIZE]; in cbc_aes_crypt() member
336 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
343 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
461 memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak)); in xts_aes_crypt()
554 static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) in __ctrblk_init() argument
559 memcpy(ctrptr, iv, AES_BLOCK_SIZE); in __ctrblk_init()
588 n = __ctrblk_init(ctrblk, walk.iv, nbytes); in ctr_aes_crypt()
589 ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv; in ctr_aes_crypt()
593 memcpy(walk.iv, ctrptr + n - AES_BLOCK_SIZE, in ctr_aes_crypt()
595 crypto_inc(walk.iv, AES_BLOCK_SIZE); in ctr_aes_crypt()
[all …]
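
All three s390 files above carry the same __ctrblk_init() helper: it expands the current IV into a run of consecutive counter blocks so a single CPACF call can encrypt many blocks at once, after which walk.iv is updated from the last counter used. A userspace sketch of the expansion; ctr_inc() plays the role of crypto_inc() (big-endian increment):

#include <string.h>
#include <stddef.h>
#include <stdint.h>

#define BLK 16  /* AES_BLOCK_SIZE */

/* Big-endian increment of a BLK-byte counter, like crypto_inc(). */
static void ctr_inc(uint8_t *ctr, size_t size)
{
    for (size_t i = size; i-- > 0; )
        if (++ctr[i] != 0)
            break;                  /* stop once a byte doesn't wrap */
}

/* Fill ctrptr with consecutive counter blocks starting from iv, one per
 * full block of input; returns the number of counter bytes prepared. */
size_t ctrblk_init(uint8_t *ctrptr, const uint8_t *iv, size_t nbytes)
{
    size_t n = BLK;

    memcpy(ctrptr, iv, BLK);
    while (nbytes >= n + BLK) {     /* append one more counter block */
        memcpy(ctrptr + n, ctrptr + n - BLK, BLK);
        ctr_inc(ctrptr + n, BLK);
        n += BLK;
    }
    return n;
}
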
/arch/x86/kernel/cpu/mce/
core.c
1535 unsigned long iv; in mce_timer_fn() local
1539 iv = __this_cpu_read(mce_next_interval); in mce_timer_fn()
1545 iv = mce_adjust_timer(iv); in mce_timer_fn()
1555 iv = max(iv / 2, (unsigned long) HZ/100); in mce_timer_fn()
1557 iv = min(iv * 2, round_jiffies_relative(check_interval * HZ)); in mce_timer_fn()
1560 __this_cpu_write(mce_next_interval, iv); in mce_timer_fn()
1561 __start_timer(t, iv); in mce_timer_fn()
1570 unsigned long iv = __this_cpu_read(mce_next_interval); in mce_timer_kick() local
1574 if (interval < iv) in mce_timer_kick()
1971 unsigned long iv = check_interval * HZ; in mce_start_timer() local
[all …]
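
mce_timer_fn() above implements multiplicative backoff for the machine-check polling interval: halve it (down to HZ/100) while polls keep finding errors, double it (up to check_interval * HZ) when they don't, and mce_timer_kick() can pull the next expiry forward. A sketch of that policy with hypothetical names:

/* Next poll interval, in jiffies: tighten while errors are being
 * logged, relax when quiet.  min_iv/max_iv correspond to HZ/100 and
 * check_interval * HZ in the snippet above. */
unsigned long next_poll_interval(unsigned long iv, int found_errors,
                                 unsigned long min_iv, unsigned long max_iv)
{
    if (found_errors)
        return iv / 2 > min_iv ? iv / 2 : min_iv;   /* poll sooner */
    return iv * 2 < max_iv ? iv * 2 : max_iv;       /* back off */
}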
