/crypto/ |
D | rmd160.c |
    49   static void rmd160_transform(u32 *state, const __le32 *in)   in rmd160_transform() argument
    68   ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11);   in rmd160_transform()
    69   ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14);   in rmd160_transform()
    70   ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15);   in rmd160_transform()
    71   ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12);   in rmd160_transform()
    72   ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5);   in rmd160_transform()
    73   ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8);   in rmd160_transform()
    74   ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7);   in rmd160_transform()
    75   ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9);   in rmd160_transform()
    76   ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11);   in rmd160_transform()
    [all …]
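The ROUND() invocations above only show how each message word feeds into the compression function. As a point of reference, here is a minimal standalone sketch of how a RIPEMD-160 left-lane round step is typically written; the rol32 helper and the F1/K1 choices (plain XOR, zero constant) follow the published RIPEMD-160 specification rather than being copied from rmd160.c, and the little-endian conversion of the message words is left out.

    #include <stdint.h>

    /* Rotate a 32-bit word left by s bits. */
    static inline uint32_t rol32(uint32_t w, unsigned int s)
    {
        return (w << s) | (w >> (32 - s));
    }

    /* Round function and constant used in the first 16 steps of RIPEMD-160. */
    #define F1(x, y, z)  ((x) ^ (y) ^ (z))
    #define K1           0x00000000U

    /*
     * One round step: mix message word x into chaining variable a, rotate by
     * s, add e, then rotate c by 10. The caller permutes the roles of a..e
     * between steps, which is what the rotating argument order in the
     * rmd160_transform() listing above expresses.
     */
    #define ROUND(a, b, c, d, e, f, k, x, s) do {        \
        (a) += f((b), (c), (d)) + (x) + (k);             \
        (a) = rol32((a), (s)) + (e);                     \
        (c) = rol32((c), 10);                            \
    } while (0)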
|
D | md5.c |
    37   #define MD5STEP(f, w, x, y, z, in, s) \   argument
    38   (w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
    40   static void md5_transform(__u32 *hash, __u32 const *in)   in md5_transform() argument
    49   MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);   in md5_transform()
    50   MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);   in md5_transform()
    51   MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17);   in md5_transform()
    52   MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22);   in md5_transform()
    53   MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7);   in md5_transform()
    54   MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12);   in md5_transform()
    55   MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17);   in md5_transform()
    [all …]
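The MD5STEP macro is shown in full at lines 37-38, but the round function F1 is not. The following sketch fills that in with the standard Colin Plumb formulation of F1 (md5.c's own definition is not visible in the excerpt) and writes out the first two steps of the transform so the data flow is easier to follow.

    #include <stdint.h>

    /* Round-1 boolean function; equivalent to (x & y) | (~x & z). */
    #define F1(x, y, z)  ((z) ^ ((x) & ((y) ^ (z))))

    /* One MD5 step: add f(x,y,z) plus the pre-added message word, rotate, add x. */
    #define MD5STEP(f, w, x, y, z, in, s) \
        ((w) += f(x, y, z) + (in), (w) = ((w) << (s) | (w) >> (32 - (s))) + (x))

    /* The first two of the 64 steps in md5_transform(), written out. */
    static void md5_first_steps(uint32_t hash[4], const uint32_t in[16])
    {
        uint32_t a = hash[0], b = hash[1], c = hash[2], d = hash[3];

        MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);
        MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);

        /* Illustration only: the real transform runs all 64 steps before
         * this feed-forward of the chaining variables. */
        hash[0] += a; hash[1] += b; hash[2] += c; hash[3] += d;
    }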
|
D | md4.c |
    67   static void md4_transform(u32 *hash, u32 const *in)   in md4_transform() argument
    76   ROUND1(a, b, c, d, in[0], 3);   in md4_transform()
    77   ROUND1(d, a, b, c, in[1], 7);   in md4_transform()
    78   ROUND1(c, d, a, b, in[2], 11);   in md4_transform()
    79   ROUND1(b, c, d, a, in[3], 19);   in md4_transform()
    80   ROUND1(a, b, c, d, in[4], 3);   in md4_transform()
    81   ROUND1(d, a, b, c, in[5], 7);   in md4_transform()
    82   ROUND1(c, d, a, b, in[6], 11);   in md4_transform()
    83   ROUND1(b, c, d, a, in[7], 19);   in md4_transform()
    84   ROUND1(a, b, c, d, in[8], 3);   in md4_transform()
    [all …]
|
D | tea.c |
    62   const __le32 *in = (const __le32 *)src;   in tea_encrypt() local
    65   y = le32_to_cpu(in[0]);   in tea_encrypt()
    66   z = le32_to_cpu(in[1]);   in tea_encrypt()
    90   const __le32 *in = (const __le32 *)src;   in tea_decrypt() local
    93   y = le32_to_cpu(in[0]);   in tea_decrypt()
    94   z = le32_to_cpu(in[1]);   in tea_decrypt()
    135  const __le32 *in = (const __le32 *)src;   in xtea_encrypt() local
    138  y = le32_to_cpu(in[0]);   in xtea_encrypt()
    139  z = le32_to_cpu(in[1]);   in xtea_encrypt()
    155  const __le32 *in = (const __le32 *)src;   in xtea_decrypt() local
    [all …]
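The tea.c lines above only show the two 32-bit halves being loaded little-endian from the source block. For context, here is a self-contained sketch of the classic TEA encryption rounds as published (32 rounds, delta 0x9e3779b9), not copied from tea.c, showing what then happens to y and z.

    #include <stdint.h>

    #define TEA_DELTA   0x9e3779b9U
    #define TEA_ROUNDS  32

    /*
     * Encrypt one 64-bit block held in (*y, *z) in place with the 128-bit
     * key k[0..3], following the textbook TEA round structure.
     */
    static void tea_encrypt_block(uint32_t *y, uint32_t *z, const uint32_t k[4])
    {
        uint32_t sum = 0;

        for (int n = 0; n < TEA_ROUNDS; n++) {
            sum += TEA_DELTA;
            *y += ((*z << 4) + k[0]) ^ (*z + sum) ^ ((*z >> 5) + k[1]);
            *z += ((*y << 4) + k[2]) ^ (*y + sum) ^ ((*y >> 5) + k[3]);
        }
    }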
|
D | aes_generic.c |
    1176  static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in crypto_aes_encrypt() argument
    1183  b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);   in crypto_aes_encrypt()
    1184  b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);   in crypto_aes_encrypt()
    1185  b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);   in crypto_aes_encrypt()
    1186  b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);   in crypto_aes_encrypt()
    1246  static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in crypto_aes_decrypt() argument
    1253  b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);   in crypto_aes_decrypt()
    1254  b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);   in crypto_aes_decrypt()
    1255  b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);   in crypto_aes_decrypt()
    1256  b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);   in crypto_aes_decrypt()
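The crypto_aes_encrypt()/crypto_aes_decrypt() lines show the initial round-key addition: each 32-bit column of the input block is loaded with an unaligned little-endian read and XORed with the first round key. A portable sketch of that step, with a hand-rolled byte-wise load standing in for the kernel's get_unaligned_le32():

    #include <stdint.h>
    #include <stddef.h>

    /* Portable equivalent of get_unaligned_le32(): read 4 bytes, little-endian. */
    static inline uint32_t load_le32(const uint8_t *p)
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }

    /*
     * Initial AES key whitening: XOR each 32-bit column of the 16-byte block
     * with the corresponding word of the first round key, mirroring the
     * b0[i] = key[i] ^ get_unaligned_le32(in + 4*i) pattern above.
     */
    static void aes_load_and_whiten(uint32_t b0[4], const uint32_t round_key[4],
                                    const uint8_t in[16])
    {
        for (size_t i = 0; i < 4; i++)
            b0[i] = round_key[i] ^ load_le32(in + 4 * i);
    }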
|
D | sm4_generic.c |
    41   static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in sm4_encrypt() argument
    45   sm4_crypt_block(ctx->rkey_enc, out, in);   in sm4_encrypt()
    50   static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in sm4_decrypt() argument
    54   sm4_crypt_block(ctx->rkey_dec, out, in);   in sm4_decrypt()
|
D | aes_ti.c |
    20   static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in aesti_encrypt() argument
    31   aes_encrypt(ctx, out, in);   in aesti_encrypt()
    36   static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in aesti_decrypt() argument
    47   aes_decrypt(ctx, out, in);   in aesti_decrypt()
|
D | Kconfig |
    32   required if you want the system to operate in a FIPS 200
    36   # CRYPTO_FIPS140 just enables the support in the kernel for loading fips140.ko.
    48   At load time, this module overrides the built-in implementations of
    53   This module is intended to be loaded at early boot time in order to
    58   bool "Enable evaluation testing features in FIPS 140 module"
    64   services. This option should not be enabled in production builds.
    210  algorithm that executes in kernel threads.
    220  into an asynchronous algorithm that executes in a kernel thread.
    359  with the Poly1305 authenticator. It is defined in RFC7539 for use in
    438  (rfc3962 includes errata information in its Appendix A) or
    [all …]
|
D | aegis128-neon-inner.c |
    246  const void *in = src;   in crypto_aegis128_encrypt_chunk_neon() local
    251  in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);   in crypto_aegis128_encrypt_chunk_neon()
    253  m = vqtbl1q_u8(vld1q_u8(in + size - AEGIS_BLOCK_SIZE),   in crypto_aegis128_encrypt_chunk_neon()
    292  const void *in = src;   in crypto_aegis128_decrypt_chunk_neon() local
    297  in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);   in crypto_aegis128_decrypt_chunk_neon()
    299  m = s ^ vqtbx1q_u8(s, vld1q_u8(in + size - AEGIS_BLOCK_SIZE),   in crypto_aegis128_decrypt_chunk_neon()
|
D | sm2.c |
    220  unsigned char *in;   in sm2_z_digest_update() local
    223  in = mpi_get_buffer(m, &inlen, NULL);   in sm2_z_digest_update()
    224  if (!in)   in sm2_z_digest_update()
    230  crypto_sm3_update(desc, in, inlen);   in sm2_z_digest_update()
    233  crypto_sm3_update(desc, in + inlen - pbytes, pbytes);   in sm2_z_digest_update()
    235  crypto_sm3_update(desc, in, inlen);   in sm2_z_digest_update()
    238  kfree(in);   in sm2_z_digest_update()
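sm2_z_digest_update() hashes a big integer (obtained from mpi_get_buffer()) into SM3 as a fixed-width, pbytes-long field: lines 233 and 235 show the "too long" and "exact" cases, and when the buffer is shorter the value is presumably zero-padded on the left before line 230 runs. A sketch of that fixed-width absorb logic, with a generic update callback standing in for crypto_sm3_update():

    #include <stddef.h>
    #include <stdint.h>

    /* Stand-in for crypto_sm3_update(desc, data, len) in the excerpt above. */
    typedef void (*hash_update_fn)(void *ctx, const uint8_t *data, size_t len);

    /*
     * Absorb an integer serialized as inlen big-endian bytes into a hash as a
     * fixed pbytes-wide field: left-pad with zeroes when it is short, keep
     * only the low-order pbytes bytes when it is long.
     */
    static void hash_fixed_width(void *ctx, hash_update_fn update,
                                 const uint8_t *in, size_t inlen, size_t pbytes)
    {
        static const uint8_t zeroes[64];    /* enough padding for <= 512-bit fields */

        if (inlen < pbytes) {
            update(ctx, zeroes, pbytes - inlen);
            update(ctx, in, inlen);
        } else if (inlen > pbytes) {
            update(ctx, in + inlen - pbytes, pbytes);
        } else {
            update(ctx, in, inlen);
        }
    }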
|
D | skcipher.c |
    61   walk->src.virt.addr = skcipher_map(&walk->in);   in skcipher_map_src()
    71   skcipher_unmap(&walk->in, walk->src.virt.addr);   in skcipher_unmap_src()
    151  scatterwalk_advance(&walk->in, n);   in skcipher_walk_done()
    153  scatterwalk_done(&walk->in, 0, nbytes);   in skcipher_walk_done()
    278  scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);   in skcipher_next_slow()
    322  walk->src.phys.page = scatterwalk_page(&walk->in);   in skcipher_next_fast()
    323  walk->src.phys.offset = offset_in_page(walk->in.offset);   in skcipher_next_fast()
    355  n = scatterwalk_clamp(&walk->in, n);   in skcipher_walk_next()
    367  if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {   in skcipher_walk_next()
    462  scatterwalk_start(&walk->in, req->src);   in skcipher_walk_skcipher()
    [all …]
|
D | vmac.c |
    435  u8 in[16] = { 0 };   in vmac_setkey() local
    447  in[0] = 0x80;   in vmac_setkey()
    449  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);   in vmac_setkey()
    452  in[15]++;   in vmac_setkey()
    456  in[0] = 0xC0;   in vmac_setkey()
    457  in[15] = 0;   in vmac_setkey()
    459  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);   in vmac_setkey()
    462  in[15]++;   in vmac_setkey()
    466  in[0] = 0xE0;   in vmac_setkey()
    467  in[15] = 0;   in vmac_setkey()
    [all …]
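vmac_setkey() derives its internal subkeys by encrypting a series of counter blocks with the underlying cipher: a tag byte in in[0] (0x80, 0xC0, 0xE0 for the different key classes) and a per-class counter in in[15]. A simplified sketch of that derivation loop, with a generic block-encrypt callback standing in for crypto_cipher_encrypt_one() and the byte-order fixups omitted:

    #include <stdint.h>

    /* Stand-in for crypto_cipher_encrypt_one(cipher, dst, src) in the excerpt. */
    typedef void (*block_encrypt_fn)(void *cipher, uint8_t dst[16],
                                     const uint8_t src[16]);

    /*
     * Derive nblocks 16-byte subkey blocks of one key class by encrypting
     * { tag, 0, ..., 0, counter } for counter = 0, 1, ..., mirroring the
     * in[0] = 0x80 / in[15]++ sequence in vmac_setkey().
     */
    static void derive_subkeys(void *cipher, block_encrypt_fn encrypt,
                               uint8_t tag, uint8_t *out, unsigned int nblocks)
    {
        uint8_t in[16] = { 0 };

        in[0] = tag;
        for (unsigned int i = 0; i < nblocks; i++) {
            encrypt(cipher, out + 16 * i, in);
            in[15]++;
        }
    }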
|
D | twofish_generic.c |
    86   x = get_unaligned_le32(in + (n) * 4) ^ ctx->w[m]
    95   static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in twofish_encrypt() argument
    130  static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in twofish_decrypt() argument
|
D | polyval-generic.c |
    108  void polyval_update_non4k(const u8 *key, const u8 *in,   in polyval_update_non4k() argument
    112  crypto_xor(accumulator, in, POLYVAL_BLOCK_SIZE);   in polyval_update_non4k()
    114  in += POLYVAL_BLOCK_SIZE;   in polyval_update_non4k()
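The polyval_update_non4k() loop XORs each 16-byte block into the accumulator and then multiplies the accumulator by the hash key in GF(2^128); only the XOR and the pointer advance are visible in the excerpt. A sketch of the loop shape, with a declared-but-not-defined gf128_mul() standing in for the field multiplication:

    #include <stddef.h>
    #include <stdint.h>

    #define POLYVAL_BLOCK_SIZE 16

    /* Placeholder for the GF(2^128) multiply of the accumulator by the key;
     * the real field arithmetic (table-driven or carry-less multiply) is the
     * part the excerpt does not show. */
    void gf128_mul(uint8_t acc[POLYVAL_BLOCK_SIZE],
                   const uint8_t key[POLYVAL_BLOCK_SIZE]);

    /* Absorb nblocks full blocks: acc = (acc ^ block) * H for each block. */
    static void polyval_update_blocks(uint8_t acc[POLYVAL_BLOCK_SIZE],
                                      const uint8_t key[POLYVAL_BLOCK_SIZE],
                                      const uint8_t *in, size_t nblocks)
    {
        while (nblocks--) {
            for (size_t i = 0; i < POLYVAL_BLOCK_SIZE; i++)
                acc[i] ^= in[i];            /* crypto_xor() in the excerpt */
            gf128_mul(acc, key);
            in += POLYVAL_BLOCK_SIZE;
        }
    }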
|
D | seed.c |
    364  static void seed_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in seed_encrypt() argument
    367  const __be32 *src = (const __be32 *)in;   in seed_encrypt()
    402  static void seed_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in seed_decrypt() argument
    405  const __be32 *src = (const __be32 *)in;   in seed_decrypt()
|
D | ecc.h |
    50   static inline void ecc_swap_digits(const void *in, u64 *out, unsigned int ndigits)   in ecc_swap_digits() argument
    52   const __be64 *src = (__force __be64 *)in;   in ecc_swap_digits()
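ecc_swap_digits() converts a big-endian byte string (as used in key encodings) into the little-endian array of 64-bit digits the ECC arithmetic works on, reversing both the digit order and the byte order inside each digit. A portable sketch of what such a conversion looks like, with a byte-wise big-endian load in place of the kernel's __be64 handling:

    #include <stdint.h>

    /* Read 8 bytes as a big-endian 64-bit value. */
    static inline uint64_t load_be64(const uint8_t *p)
    {
        uint64_t v = 0;

        for (int i = 0; i < 8; i++)
            v = (v << 8) | p[i];
        return v;
    }

    /*
     * Convert an ndigits * 8 byte big-endian integer into native u64 digits,
     * least significant digit first: output digit i comes from the
     * (ndigits - 1 - i)-th big-endian word of the input.
     */
    static void swap_digits(const uint8_t *in, uint64_t *out,
                            unsigned int ndigits)
    {
        for (unsigned int i = 0; i < ndigits; i++)
            out[i] = load_be64(in + 8 * (ndigits - 1 - i));
    }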
|
D | fips140-module.c |
    455  u8 *out, const u8 *in, int *hook_inuse)   in fips140_aes_encrypt() argument
    457  aes_encrypt(ctx, out, in);   in fips140_aes_encrypt()
    462  u8 *out, const u8 *in, int *hook_inuse)   in fips140_aes_decrypt() argument
    464  aes_decrypt(ctx, out, in);   in fips140_aes_decrypt()
|
D | camellia_generic.c |
    986   static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in camellia_encrypt() argument
    993   tmp[0] = get_unaligned_be32(in);   in camellia_encrypt()
    994   tmp[1] = get_unaligned_be32(in + 4);   in camellia_encrypt()
    995   tmp[2] = get_unaligned_be32(in + 8);   in camellia_encrypt()
    996   tmp[3] = get_unaligned_be32(in + 12);   in camellia_encrypt()
    1012  static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   in camellia_decrypt() argument
    1019  tmp[0] = get_unaligned_be32(in);   in camellia_decrypt()
    1020  tmp[1] = get_unaligned_be32(in + 4);   in camellia_decrypt()
    1021  tmp[2] = get_unaligned_be32(in + 8);   in camellia_decrypt()
    1022  tmp[3] = get_unaligned_be32(in + 12);   in camellia_decrypt()
|
D | blake2b_generic.c |
    126  const u8 *in, unsigned int inlen)   in crypto_blake2b_update_generic() argument
    128  return crypto_blake2b_update(desc, in, inlen, blake2b_compress_generic);   in crypto_blake2b_update_generic()
|
D | drbg.c |
    310   const struct drbg_string *in);
    321   struct list_head *in)   in drbg_ctr_bcc() argument
    332   list_for_each_entry(curr, in, list) {   in drbg_ctr_bcc()
    635   const struct list_head *in);
    1697  const struct list_head *in)   in drbg_kcapi_hash() argument
    1703  list_for_each_entry(input, in, list)   in drbg_kcapi_hash()
    1801  const struct drbg_string *in)   in drbg_kcapi_sym() argument
    1807  BUG_ON(in->len < drbg_blocklen(drbg));   in drbg_kcapi_sym()
    1808  crypto_cipher_encrypt_one(tfm, outval, in->buf);   in drbg_kcapi_sym()
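Both drbg_ctr_bcc() and drbg_kcapi_hash() take a linked list of drbg_string segments and feed them to the underlying primitive one entry at a time, so callers can concatenate seed material without copying it into a single buffer. A sketch of that gather pattern, with a plain singly linked list and a generic update callback replacing the kernel's list_head and shash API:

    #include <stddef.h>
    #include <stdint.h>

    /* Simplified stand-in for struct drbg_string: one segment of input. */
    struct input_seg {
        const uint8_t *buf;
        size_t len;
        struct input_seg *next;
    };

    typedef void (*update_fn)(void *ctx, const uint8_t *buf, size_t len);

    /* Feed every segment of a scattered input to the hash/MAC, in order. */
    static void absorb_segments(void *ctx, update_fn update,
                                const struct input_seg *in)
    {
        for (; in; in = in->next)
            update(ctx, in->buf, in->len);
    }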
|
D | shash.c |
    243  static int shash_default_import(struct shash_desc *desc, const void *in)   in shash_default_import() argument
    245  memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));   in shash_default_import()
    360  static int shash_async_import(struct ahash_request *req, const void *in)   in shash_async_import() argument
    367  return crypto_shash_import(desc, in);   in shash_async_import()
|
D | hmac.c |
    90   static int hmac_import(struct shash_desc *pdesc, const void *in)   in hmac_import() argument
    97   return crypto_shash_import(desc, in);   in hmac_import()
|
/crypto/asymmetric_keys/ |
D | asym_tpm.c |
    445  const void *in, void *out)   in tpm_key_encrypt() argument
    478  sg_init_one(&in_sg, in, params->in_len);   in tpm_key_encrypt()
    505  const void *in, void *out)   in tpm_key_decrypt() argument
    539  in, params->in_len, out, params->out_len);   in tpm_key_decrypt()
    629  const void *in, void *out)   in tpm_key_sign() argument
    658  memcpy(asn1_wrapped + asn1->size, in, in_len);   in tpm_key_sign()
    660  in = asn1_wrapped;   in tpm_key_sign()
    686  r = tpm_sign(&tb, keyhandle, keyauth, in, in_len, out, params->out_len);   in tpm_key_sign()
    705  const void *in, void *out)   in tpm_key_eds_op() argument
    713  ret = tpm_key_encrypt(tk, params, in, out);   in tpm_key_eds_op()
    [all …]
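In tpm_key_sign(), the caller's digest (in) is copied in behind a fixed DER-encoded DigestInfo prefix (asn1->size bytes) before the buffer is handed to the TPM's raw signing operation, which is the usual PKCS#1 v1.5 preparation. A minimal sketch of that wrapping step for SHA-256; the prefix bytes here are the standard RFC 8017 values, not taken from asym_tpm.c:

    #include <stdint.h>
    #include <string.h>

    /* DER DigestInfo header for SHA-256 (RFC 8017, section 9.2, note 1). */
    static const uint8_t sha256_digest_info[] = {
        0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86, 0x48, 0x01,
        0x65, 0x03, 0x04, 0x02, 0x01, 0x05, 0x00, 0x04, 0x20,
    };

    /*
     * Prepend the DigestInfo header to a 32-byte SHA-256 digest, producing
     * the buffer a raw PKCS#1 v1.5 signing primitive expects. out must hold
     * at least sizeof(sha256_digest_info) + 32 bytes; returns the length used.
     */
    static size_t wrap_sha256_digest(uint8_t *out, const uint8_t digest[32])
    {
        memcpy(out, sha256_digest_info, sizeof(sha256_digest_info));
        memcpy(out + sizeof(sha256_digest_info), digest, 32);
        return sizeof(sha256_digest_info) + 32;
    }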
|
D | asymmetric_keys.h |
    17   const void *in, void *out);
|
D | asymmetric_type.c |
    541  const void *in, void *out)   in asymmetric_key_eds_op() argument
    558  ret = subtype->eds_op(params, in, out);   in asymmetric_key_eds_op()
    565  const void *in, const void *in2)   in asymmetric_key_verify_signature() argument
    572  .digest = (void *)in,   in asymmetric_key_verify_signature()
|