/arch/s390/include/asm/

airq.h
    51  void airq_iv_release(struct airq_iv *iv);
    52  unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
    53  void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
    54  unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
    57  static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv)
    59          return airq_iv_alloc(iv, 1);
    62  static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit)
    64          airq_iv_free(iv, bit, 1);
    67  static inline unsigned long airq_iv_end(struct airq_iv *iv)
    69          return iv->end;
    [all …]

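The inline wrappers above are thin conveniences over the bulk allocator. A hypothetical usage sketch, not taken from the tree (airq_iv_create()'s signature has varied across kernel versions; the (bits, flags) form is assumed here):

#include <linux/errno.h>
#include <asm/airq.h>

/* Hypothetical demo of the helpers shown above: carve a single bit out of
 * an adapter-interrupt vector and release everything again. */
static int airq_bit_demo(void)
{
	struct airq_iv *iv = airq_iv_create(64, AIRQ_IV_ALLOC);
	unsigned long bit;

	if (!iv)
		return -ENOMEM;

	bit = airq_iv_alloc_bit(iv);	/* equivalent to airq_iv_alloc(iv, 1) */
	if (bit == -1UL) {
		airq_iv_release(iv);
		return -ENOSPC;
	}

	airq_iv_free_bit(iv, bit);	/* equivalent to airq_iv_free(iv, bit, 1) */
	airq_iv_release(iv);
	return 0;
}
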
/arch/x86/crypto/

glue_helper-asm-avx2.S
    55  #define load_ctr_16way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t0x, t1, \
    62  vmovdqu (iv), t2x; \
    86  vmovdqu t2x, (iv);
    99  #define gf128mul_x_ble(iv, mask, tmp) \
   100  vpsrad $31, iv, tmp; \
   101  vpaddq iv, iv, iv; \
   104  vpxor tmp, iv, iv;
   106  #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \
   107  vpsrad $31, iv, tmp0; \
   108  vpaddq iv, iv, tmp1; \
    [all …]

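The gf128mul_x_ble macro above doubles an XTS tweak in GF(2^128): vpaddq shifts each 64-bit half left by one, vpsrad and the shuffle mask recover the carry bits, and vpxor folds in the reduction polynomial. A portable C sketch of the same operation on a little-endian 16-byte tweak (names are illustrative, not kernel API):

#include <stdint.h>

/* Double a 128-bit XTS tweak held as a little-endian byte array, reducing
 * by x^128 + x^7 + x^2 + x + 1 (the 0x87 constant). A minimal sketch of
 * what gf128mul_x_ble computes inside one XMM/YMM register. */
static void xts_double_sketch(uint8_t t[16])
{
	uint8_t carry = 0;

	for (int i = 0; i < 16; i++) {
		uint8_t msb = t[i] >> 7;

		t[i] = (uint8_t)(t[i] << 1) | carry;
		carry = msb;
	}
	if (carry)		/* bit 127 fell off: fold in the polynomial */
		t[0] ^= 0x87;
}
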
aesni-intel_glue.c
    93          const u8 *in, unsigned int len, u8 *iv);
    95          const u8 *in, unsigned int len, u8 *iv);
   101          const u8 *in, unsigned int len, u8 *iv);
   104          const u8 *in, unsigned int len, u8 *iv);
   109          const u8 *in, unsigned int len, u8 *iv);
   111          const u8 *in, unsigned int len, u8 *iv);
   130          const u8 *in, unsigned long plaintext_len, u8 *iv,
   152          const u8 *in, unsigned long ciphertext_len, u8 *iv,
   159          u8 *iv,
   174          void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
    [all …]

glue_helper.c
    80  u128 *iv = (u128 *)walk.iv;
    83  u128_xor(dst, src, iv);
    85  iv = dst;
    91  *(u128 *)walk.iv = *iv;
   149  u128_xor(dst, dst, (u128 *)walk.iv);
   150  *(u128 *)walk.iv = last_iv;
   181  be128_to_le128(&ctrblk, (be128 *)walk.iv);
   204  le128_to_be128((be128 *)walk.iv, &ctrblk);
   214  be128_to_le128(&ctrblk, (be128 *)walk.iv);
   220  le128_to_be128((be128 *)walk.iv, &ctrblk);
    [all …]

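The line 80-91 hits show the classic CBC-encrypt chaining loop in glue_cbc_encrypt_req_128bit(): each plaintext block is XORed with the previous ciphertext block (the IV for the first block) before encryption, and the final ciphertext block is written back through walk.iv. A minimal C sketch of that pattern, with encrypt_block() standing in for the per-cipher 128-bit block function:

#include <stdint.h>
#include <string.h>

typedef void (*block_fn)(const void *ctx, uint8_t dst[16], const uint8_t src[16]);

static void xor_block(uint8_t *d, const uint8_t *a, const uint8_t *b)
{
	for (int i = 0; i < 16; i++)
		d[i] = a[i] ^ b[i];
}

static void cbc_encrypt_sketch(const void *ctx, block_fn encrypt_block,
			       uint8_t *dst, const uint8_t *src,
			       size_t nblocks, uint8_t iv[16])
{
	const uint8_t *prev = iv;		/* chain starts at the IV */

	for (size_t i = 0; i < nblocks; i++) {
		uint8_t tmp[16];

		xor_block(tmp, src + 16 * i, prev);
		encrypt_block(ctx, dst + 16 * i, tmp);
		prev = dst + 16 * i;		/* ciphertext feeds the next block */
	}
	memcpy(iv, prev, 16);			/* hand the IV back, as *walk.iv = *iv */
}
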
glue_helper-asm-avx.S
    44  #define load_ctr_8way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2) \
    50  vmovdqu (iv), x7; \
    70  vmovdqu t2, (iv);
    83  #define gf128mul_x_ble(iv, mask, tmp) \
    84  vpsrad $31, iv, tmp; \
    85  vpaddq iv, iv, iv; \
    88  vpxor tmp, iv, iv;
    90  #define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \
    95  vmovdqu (iv), tiv; \
   129  vmovdqu tiv, (iv);

twofish_glue_3way.c
    55  void twofish_enc_blk_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
    64  le128_to_be128(&ctrblk, iv);
    65  le128_inc(iv);
    72  void twofish_enc_blk_ctr_3way(const void *ctx, u8 *d, const u8 *s, le128 *iv)
    84  le128_to_be128(&ctrblks[0], iv);
    85  le128_inc(iv);
    86  le128_to_be128(&ctrblks[1], iv);
    87  le128_inc(iv);
    88  le128_to_be128(&ctrblks[2], iv);
    89  le128_inc(iv);

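twofish_enc_blk_ctr_3way() serializes the little-endian counter into three consecutive big-endian counter blocks, incrementing between each, so one call keystreams three blocks at once. A sketch of that counter handling, with the le128 semantics modeled loosely on the kernel helpers (all names here are illustrative):

#include <stdint.h>

struct u128_pair { uint64_t lo, hi; };		/* illustrative 128-bit counter */

static void le128_inc_sketch(struct u128_pair *v)
{
	if (++v->lo == 0)			/* carry into the high word */
		v->hi++;
}

static void store_be128_sketch(uint8_t out[16], const struct u128_pair *v)
{
	for (int i = 0; i < 8; i++) {		/* big-endian byte order on the wire */
		out[i]     = (uint8_t)(v->hi >> (56 - 8 * i));
		out[8 + i] = (uint8_t)(v->lo >> (56 - 8 * i));
	}
}

/* Three consecutive counter blocks, as in twofish_enc_blk_ctr_3way(). */
static void make_ctrblks_sketch(uint8_t ctrblks[3][16], struct u128_pair *iv)
{
	for (int i = 0; i < 3; i++) {
		store_be128_sketch(ctrblks[i], iv);
		le128_inc_sketch(iv);
	}
}
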
cast5_avx_glue.c
    27          __be64 *iv);
   124  u64 *iv = (u64 *)walk.iv;
   127  *dst = *src ^ *iv;
   129  iv = dst;
   135  *(u64 *)walk.iv = *iv;
   190  *dst ^= *(u64 *)walk->iv;
   191  *(u64 *)walk->iv = last_iv;
   219  u8 *ctrblk = walk->iv;
   243          (__be64 *)walk->iv);
   261  ctrblk = *(u64 *)walk->iv;
    [all …]

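ctr_crypt_final() at line 219 handles a trailing partial block: the counter block is encrypted into a keystream buffer and only the remaining bytes are XORed, so no plaintext padding is needed. A sketch of the idea for an 8-byte-block cipher such as CAST5 (names are illustrative):

#include <stdint.h>

static void ctr_final_sketch(const void *ctx,
			     void (*encrypt_block)(const void *ctx, uint8_t *dst,
						   const uint8_t *src),
			     uint8_t *dst, const uint8_t *src,
			     unsigned int nbytes, uint8_t ctrblk[8])
{
	uint8_t keystream[8];

	encrypt_block(ctx, keystream, ctrblk);	/* one keystream block */
	for (unsigned int i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];	/* only the bytes that remain */
}
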
cast6_avx_glue.c
    28          le128 *iv);
    31          le128 *iv);
    33          le128 *iv);
    41  static void cast6_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    43  glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_encrypt);
    46  static void cast6_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    48  glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_decrypt);
    51  static void cast6_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
    57  le128_to_be128(&ctrblk, iv);
    58  le128_inc(iv);

serpent_avx_glue.c
    36          le128 *iv);
    40          const u8 *src, le128 *iv);
    44          const u8 *src, le128 *iv);
    47  void __serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
    53  le128_to_be128(&ctrblk, iv);
    54  le128_inc(iv);
    61  void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    63  glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_encrypt);
    67  void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    69  glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_decrypt);

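serpent_xts_enc()/serpent_xts_dec() delegate to glue_xts_crypt_128bit_one(), which wraps the raw block cipher in XTS's xor-encrypt-xor and steps the tweak by a GF(2^128) doubling. A self-contained C sketch of that per-block step (fn stands in for __serpent_encrypt and friends; all names are illustrative):

#include <stdint.h>

typedef void (*xts_block_fn)(const void *ctx, uint8_t *dst, const uint8_t *src);

static void xts_step_tweak(uint8_t t[16])	/* multiply T by x in GF(2^128) */
{
	uint8_t carry = 0;

	for (int i = 0; i < 16; i++) {
		uint8_t msb = t[i] >> 7;

		t[i] = (uint8_t)(t[i] << 1) | carry;
		carry = msb;
	}
	if (carry)
		t[0] ^= 0x87;
}

static void xts_one_block_sketch(const void *ctx, xts_block_fn fn,
				 uint8_t dst[16], const uint8_t src[16],
				 uint8_t tweak[16])
{
	for (int i = 0; i < 16; i++)
		dst[i] = src[i] ^ tweak[i];	/* CC = P xor T */
	fn(ctx, dst, dst);			/* raw single-block cipher */
	for (int i = 0; i < 16; i++)
		dst[i] ^= tweak[i];		/* C = E(CC) xor T */
	xts_step_tweak(tweak);			/* advance T for the next block */
}
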
serpent_sse2_glue.c
    50  static void serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
    56  le128_to_be128(&ctrblk, iv);
    57  le128_inc(iv);
    64          le128 *iv)
    75  le128_to_be128(&ctrblks[i], iv);
    76  le128_inc(iv);

blowfish_glue.c
   135  u64 *iv = (u64 *)walk->iv;
   138  *dst = *src ^ *iv;
   140  iv = dst;
   147  *(u64 *)walk->iv = *iv;
   226  *dst ^= *(u64 *)walk->iv;
   227  *(u64 *)walk->iv = last_iv;
   252  u8 *ctrblk = walk->iv;
   270  u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
   314  *(__be64 *)walk->iv = cpu_to_be64(ctrblk);

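The __cbc_decrypt() hits (lines 226-227) show the in-place-friendly decryption order: the last ciphertext block is saved up front as the next chaining value, blocks are processed from the end, each decrypted block is XORed with the preceding ciphertext block, and the saved block is finally written back through walk->iv. A sketch with a 64-bit block size matching Blowfish (decrypt_block() is a stand-in):

#include <stdint.h>

typedef void (*blk64_fn)(const void *ctx, uint64_t *dst, const uint64_t *src);

static void cbc_decrypt_sketch(const void *ctx, blk64_fn decrypt_block,
			       uint64_t *dst, const uint64_t *src,
			       unsigned int nblocks, uint64_t *iv)
{
	uint64_t last_iv = src[nblocks - 1];	/* becomes the next IV */

	for (unsigned int i = nblocks; i-- > 0; ) {
		decrypt_block(ctx, &dst[i], &src[i]);
		dst[i] ^= i ? src[i - 1] : *iv;	/* chain from the previous block */
	}
	*iv = last_iv;				/* as *(u64 *)walk->iv = last_iv */
}
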
twofish_avx_glue.c
    30          le128 *iv);
    33          le128 *iv);
    35          le128 *iv);
    48  static void twofish_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    50  glue_xts_crypt_128bit_one(ctx, dst, src, iv, twofish_enc_blk);
    53  static void twofish_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    55  glue_xts_crypt_128bit_one(ctx, dst, src, iv, twofish_dec_blk);

des3_ede_glue.c
   143  u64 *iv = (u64 *)walk->iv;
   146  *dst = *src ^ *iv;
   148  iv = dst;
   155  *(u64 *)walk->iv = *iv;
   232  *dst ^= *(u64 *)walk->iv;
   233  *(u64 *)walk->iv = last_iv;
   259  u8 *ctrblk = walk->iv;
   278  u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
   317  *(__be64 *)walk->iv = cpu_to_be64(ctrblk);

camellia_aesni_avx_glue.c
    31          le128 *iv);
    35          le128 *iv);
    39          le128 *iv);
    42  void camellia_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    44  glue_xts_crypt_128bit_one(ctx, dst, src, iv, camellia_enc_blk);
    48  void camellia_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
    50  glue_xts_crypt_128bit_one(ctx, dst, src, iv, camellia_dec_blk);

/arch/x86/include/asm/crypto/

serpent-avx.h
    26          le128 *iv);
    29          const u8 *src, le128 *iv);
    31          const u8 *src, le128 *iv);
    34          le128 *iv);
    36  extern void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv);
    37  extern void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv);

camellia.h
    50          le128 *iv);
    53          le128 *iv);
    55          le128 *iv);
    82          le128 *iv);
    84          le128 *iv);
    87          le128 *iv);
    89          le128 *iv);

twofish.h
    21          le128 *iv);
    23          le128 *iv);

/arch/arm64/crypto/

ghash-ce-glue.c
   424  u8 iv[AES_BLOCK_SIZE];
   434  memcpy(iv, req->iv, GCM_IV_SIZE);
   435  put_unaligned_be32(1, iv + GCM_IV_SIZE);
   443  pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
   444  put_unaligned_be32(2, iv + GCM_IV_SIZE);
   445  pmull_gcm_encrypt_block(ks, iv, NULL, nrounds);
   446  put_unaligned_be32(3, iv + GCM_IV_SIZE);
   447  pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds);
   448  put_unaligned_be32(4, iv + GCM_IV_SIZE);
   458          iv, rk, nrounds, ks);
    [all …]

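gcm_encrypt() builds its counter blocks by appending a 32-bit big-endian counter to the 12-byte nonce: counter value 1 forms J0 (encrypted into the tag), while 2, 3, 4, ... generate keystream blocks. A sketch of that layout, with put_be32_sketch() mimicking put_unaligned_be32():

#include <stdint.h>
#include <string.h>

#define GCM_IV_SIZE	12
#define AES_BLOCK_SIZE	16

static void put_be32_sketch(uint32_t v, uint8_t *p)
{
	p[0] = v >> 24; p[1] = v >> 16; p[2] = v >> 8; p[3] = (uint8_t)v;
}

/* Counter block = 12-byte nonce || 32-bit big-endian counter. */
static void gcm_ctr_block_sketch(uint8_t block[AES_BLOCK_SIZE],
				 const uint8_t nonce[GCM_IV_SIZE], uint32_t ctr)
{
	memcpy(block, nonce, GCM_IV_SIZE);
	put_be32_sketch(ctr, block + GCM_IV_SIZE);
}
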
aes-neonbs-glue.c
    34          int rounds, int blocks, u8 iv[]);
    37          int rounds, int blocks, u8 iv[], u8 final[]);
    40          int rounds, int blocks, u8 iv[]);
    42          int rounds, int blocks, u8 iv[]);
    48          int rounds, int blocks, u8 iv[]);
    51          u32 const rk2[], u8 iv[], int first);
    54          u32 const rk2[], u8 iv[], int first);
   174          walk.iv);
   200          walk.iv);
   250          ctx->rk, ctx->rounds, blocks, walk.iv, final);
    [all …]

chacha-neon-glue.c
    63          const struct chacha_ctx *ctx, const u8 *iv)
    71  crypto_chacha_init(state, ctx, iv);
    97  return chacha_neon_stream_xor(req, ctx, req->iv);
   111  crypto_chacha_init(state, ctx, req->iv);
   118  memcpy(&real_iv[0], req->iv + 24, 8);
   119  memcpy(&real_iv[8], req->iv + 16, 8);

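xchacha_neon() first runs HChaCha on the leading bytes of the 32-byte request IV to derive a subkey, then rebuilds the 16-byte IV for the inner ChaCha from the tail of the request IV, counter half first. A sketch of that reshuffle; the layout comments reflect a reading of the snippet rather than an authoritative spec:

#include <stdint.h>
#include <string.h>

/* Inner-ChaCha IV = iv[24..31] (counter words) || iv[16..23] (remaining
 * nonce bytes), exactly the two memcpy()s at lines 118-119 above. */
static void build_real_iv_sketch(uint8_t real_iv[16], const uint8_t iv[32])
{
	memcpy(&real_iv[0], iv + 24, 8);	/* counter half first */
	memcpy(&real_iv[8], iv + 16, 8);	/* then the trailing nonce half */
}
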
aes-glue.c
    80          int rounds, int blocks, u8 iv[]);
    82          int rounds, int blocks, u8 iv[]);
    85          int rounds, int bytes, u8 const iv[]);
    87          int rounds, int bytes, u8 const iv[]);
    93          int rounds, int bytes, u32 const rk2[], u8 iv[],
    96          int rounds, int bytes, u32 const rk2[], u8 iv[],
   100          int rounds, int blocks, u8 iv[],
   103          int rounds, int blocks, u8 iv[],
   242          ctx->key_enc, rounds, blocks, walk->iv);
   271          ctx->key_dec, rounds, blocks, walk->iv);
    [all …]

/arch/arm/crypto/

aes-ce-glue.c
    35          int rounds, int blocks, u8 iv[]);
    37          int rounds, int blocks, u8 iv[]);
    39          int rounds, int bytes, u8 const iv[]);
    41          int rounds, int bytes, u8 const iv[]);
    47          int rounds, int bytes, u8 iv[],
    50          int rounds, int bytes, u8 iv[],
   229          walk->iv);
   259          walk->iv);
   301          req->iv);
   320          req->iv);
    [all …]

chacha-neon-glue.c
    66          const struct chacha_ctx *ctx, const u8 *iv)
    74  crypto_chacha_init(state, ctx, iv);
   100  return chacha_neon_stream_xor(req, ctx, req->iv);
   114  crypto_chacha_init(state, ctx, req->iv);
   121  memcpy(&real_iv[0], req->iv + 24, 8);
   122  memcpy(&real_iv[8], req->iv + 16, 8);

/arch/sparc/crypto/

aes_glue.c
    44          unsigned int len, u64 *iv);
    46          unsigned int len, u64 *iv);
    48          unsigned int len, u64 *iv);
    96          u64 *iv);
   100          u64 *iv);
   104          u64 *iv);
   108          u64 *iv);
   112          u64 *iv);
   116          u64 *iv);
   120          u64 *iv);
    [all …]

/arch/s390/crypto/

des_s390.c
    30  u8 iv[DES_BLOCK_SIZE];
   107  u8 iv[DES_BLOCK_SIZE];
   112  memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
   121  memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);
   352  static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
   358  memcpy(ctrptr, iv, DES_BLOCK_SIZE);
   381  n = __ctrblk_init(ctrblk, walk->iv, nbytes);
   382  ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk->iv;
   386  memcpy(walk->iv, ctrptr + n - DES_BLOCK_SIZE,
   388  crypto_inc(walk->iv, DES_BLOCK_SIZE);
    [all …]

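__ctrblk_init() pre-fills a scratch buffer with consecutive counter blocks so the CPACF instruction can process many blocks per call; afterwards (lines 386-388) the last counter is copied back to walk->iv and incremented. A sketch of the fill loop, with be_inc_sketch() re-creating crypto_inc()'s big-endian increment:

#include <stdint.h>
#include <string.h>

#define DES_BLOCK_SIZE 8

static void be_inc_sketch(uint8_t *ctr, unsigned int size)
{
	for (int i = (int)size - 1; i >= 0; i--)
		if (++ctr[i])			/* stop once a byte doesn't wrap */
			break;
}

/* Fill ctrptr with whole-block counter values starting at iv; return the
 * number of bytes filled, mirroring the __ctrblk_init() pattern above. */
static unsigned int ctrblk_init_sketch(uint8_t *ctrptr, const uint8_t *iv,
				       unsigned int nbytes)
{
	unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);	/* whole blocks */

	if (n == 0)
		return 0;

	memcpy(ctrptr, iv, DES_BLOCK_SIZE);
	for (unsigned int i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - DES_BLOCK_SIZE, DES_BLOCK_SIZE);
		be_inc_sketch(ctrptr + i, DES_BLOCK_SIZE);
	}
	return n;
}
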