/crypto/
D | rmd160.c
      49  static void rmd160_transform(u32 *state, const __le32 *in)   [argument]
      68          ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11);
      69          ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14);
      70          ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15);
      71          ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12);
      72          ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5);
      73          ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8);
      74          ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7);
      75          ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9);
      76          ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11);
      [all …]
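Each ROUND above is one RIPEMD-160 step; rotating the argument order from line to line takes the place of an explicit shuffle of the five state words. A minimal sketch of the step function, following the RIPEMD-160 specification (the exact macro body in rmd160.c may differ; rol32() is the kernel's 32-bit rotate-left, F1 is the first-round boolean function x ^ y ^ z, and K1 its additive constant):

    #define ROUND_SKETCH(a, b, c, d, e, f, k, x, s) do {        \
            (a) += f((b), (c), (d)) + le32_to_cpu(x) + (k);     \
            (a)  = rol32((a), (s)) + (e);                       \
            (c)  = rol32((c), 10);                              \
    } while (0)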
D | md5.c
      37  #define MD5STEP(f, w, x, y, z, in, s) \   [macro parameter]
      38          (w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
      40  static void md5_transform(__u32 *hash, __u32 const *in)   [argument]
      49          MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);
      50          MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);
      51          MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17);
      52          MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22);
      53          MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7);
      54          MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12);
      55          MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17);
      [all …]
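MD5STEP is shown whole at lines 37-38; only the mixing function f changes across the four 16-step rounds. For reference, the auxiliary functions used by the classic public-domain MD5 implementation this style of code descends from (the file may spell them differently; the z ^ (x & (y ^ z)) form is a branch-reduced rewrite of the textbook (x & y) | (~x & z)):

    #define F1(x, y, z) (z ^ (x & (y ^ z)))   /* steps  1-16 */
    #define F2(x, y, z) F1(z, x, y)           /* steps 17-32 */
    #define F3(x, y, z) (x ^ y ^ z)           /* steps 33-48 */
    #define F4(x, y, z) (y ^ (x | ~z))        /* steps 49-64 */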
D | md4.c
      67  static void md4_transform(u32 *hash, u32 const *in)   [argument]
      76          ROUND1(a, b, c, d, in[0], 3);
      77          ROUND1(d, a, b, c, in[1], 7);
      78          ROUND1(c, d, a, b, in[2], 11);
      79          ROUND1(b, c, d, a, in[3], 19);
      80          ROUND1(a, b, c, d, in[4], 3);
      81          ROUND1(d, a, b, c, in[5], 7);
      82          ROUND1(c, d, a, b, in[6], 11);
      83          ROUND1(b, c, d, a, in[7], 19);
      84          ROUND1(a, b, c, d, in[8], 3);
      [all …]
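ROUND1 covers only the first of MD4's three passes. A sketch of all three round operations per RFC 1320 (rol32() is the kernel rotate-left helper; the file's own macro spellings may differ):

    #define F(x, y, z)  (((x) & (y)) | (~(x) & (z)))
    #define G(x, y, z)  (((x) & (y)) | ((x) & (z)) | ((y) & (z)))
    #define H(x, y, z)  ((x) ^ (y) ^ (z))

    #define ROUND1(a, b, c, d, x, s) ((a) = rol32((a) + F((b), (c), (d)) + (x), (s)))
    #define ROUND2(a, b, c, d, x, s) ((a) = rol32((a) + G((b), (c), (d)) + (x) + 0x5a827999, (s)))
    #define ROUND3(a, b, c, d, x, s) ((a) = rol32((a) + H((b), (c), (d)) + (x) + 0x6ed9eba1, (s)))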
D | tea.c
      62          const __le32 *in = (const __le32 *)src;   [local, in tea_encrypt()]
      65          y = le32_to_cpu(in[0]);
      66          z = le32_to_cpu(in[1]);
      90          const __le32 *in = (const __le32 *)src;   [local, in tea_decrypt()]
      93          y = le32_to_cpu(in[0]);
      94          z = le32_to_cpu(in[1]);
     135          const __le32 *in = (const __le32 *)src;   [local, in xtea_encrypt()]
     138          y = le32_to_cpu(in[0]);
     139          z = le32_to_cpu(in[1]);
     155          const __le32 *in = (const __le32 *)src;   [local, in xtea_decrypt()]
      [all …]
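Only the little-endian loads of the two block halves match here; the rounds that consume y and z follow them in the source. A self-contained sketch of the TEA encryption core per the published algorithm (names are illustrative, not the file's; k holds the four 32-bit key words):

    #define TEA_DELTA  0x9e3779b9
    #define TEA_ROUNDS 32

    static void tea_encrypt_sketch(const u32 k[4], u32 *y, u32 *z)
    {
            u32 sum = 0;
            int i;

            for (i = 0; i < TEA_ROUNDS; i++) {
                    sum += TEA_DELTA;
                    *y += ((*z << 4) + k[0]) ^ (*z + sum) ^ ((*z >> 5) + k[1]);
                    *z += ((*y << 4) + k[2]) ^ (*y + sum) ^ ((*y >> 5) + k[3]);
            }
    }

XTEA (the matches from line 135 on) keeps the same shape but selects key words from bits of sum instead of using them in fixed positions.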
D | aes_generic.c
    1176  static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
    1183          b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
    1184          b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
    1185          b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
    1186          b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
    1246  static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
    1253          b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
    1254          b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
    1255          b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
    1256          b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
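Several generic ciphers in this list open the same way: read the 16-byte block as four 32-bit words with no alignment assumption, then XOR in round key 0 (AddRoundKey, in AES terms). Behaviorally the helper amounts to this sketch (the real get_unaligned_le32() comes from the kernel's unaligned-access headers and may compile down to a single load):

    static inline u32 get_unaligned_le32_sketch(const u8 *p)
    {
            return (u32)p[0] | ((u32)p[1] << 8) |
                   ((u32)p[2] << 16) | ((u32)p[3] << 24);
    }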
D | sm4_generic.c
      41  static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
      45          sm4_crypt_block(ctx->rkey_enc, out, in);
      50  static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
      54          sm4_crypt_block(ctx->rkey_dec, out, in);
D | aes_ti.c
      20  static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
      31          aes_encrypt(ctx, out, in);
      36  static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
      47          aes_decrypt(ctx, out, in);
D | aria_generic.c
     191  static void __aria_crypt(struct aria_ctx *ctx, u8 *out, const u8 *in,   [argument]
     194          const __be32 *src = (const __be32 *)in;
     246  void aria_encrypt(void *_ctx, u8 *out, const u8 *in)   [argument]
     250          __aria_crypt(ctx, out, in, ctx->enc_key);
     254  void aria_decrypt(void *_ctx, u8 *out, const u8 *in)   [argument]
     258          __aria_crypt(ctx, out, in, ctx->dec_key);
     262  static void __aria_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     266          __aria_crypt(ctx, out, in, ctx->enc_key);
     269  static void __aria_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     273          __aria_crypt(ctx, out, in, ctx->dec_key);
D | sm4.c
     160  void sm4_crypt_block(const u32 *rk, u8 *out, const u8 *in)   [argument]
     164          x[0] = get_unaligned_be32(in + 0 * 4);
     165          x[1] = get_unaligned_be32(in + 1 * 4);
     166          x[2] = get_unaligned_be32(in + 2 * 4);
     167          x[3] = get_unaligned_be32(in + 3 * 4);
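After these big-endian loads, SM4 runs 32 rounds of an unbalanced Feistel network. A sketch per the GB/T 32907 specification; sm4_t_sketch() is a hypothetical stand-in for the real S-box-plus-linear-layer helper, which the match does not show:

    /* One round: the new word is x0 ^ T(x1 ^ x2 ^ x3 ^ rk[i]), where T
     * applies the S-box bytewise and then the linear transform
     * L(b) = b ^ rol32(b, 2) ^ rol32(b, 10) ^ rol32(b, 18) ^ rol32(b, 24). */
    for (i = 0; i < 32; i++) {
            u32 nx = x[0] ^ sm4_t_sketch(x[1] ^ x[2] ^ x[3] ^ rk[i]);

            x[0] = x[1];
            x[1] = x[2];
            x[2] = x[3];
            x[3] = nx;
    }
    /* The ciphertext is x[3], x[2], x[1], x[0], stored big-endian. */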
D | aegis128-neon-inner.c
     246          const void *in = src;   [local, in crypto_aegis128_encrypt_chunk_neon()]
     251                  in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);
     253                  m = vqtbl1q_u8(vld1q_u8(in + size - AEGIS_BLOCK_SIZE),
     292          const void *in = src;   [local, in crypto_aegis128_decrypt_chunk_neon()]
     297                  in = out = memcpy(buf + AEGIS_BLOCK_SIZE - size, src, size);
     299                  m = s ^ vqtbx1q_u8(s, vld1q_u8(in + size - AEGIS_BLOCK_SIZE),
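The memcpy at lines 251 and 297 is a bounce-buffer trick: when fewer than AEGIS_BLOCK_SIZE (16) bytes remain, the tail is copied to the end of a stack block so that the full 16-byte vector load at (in + size - AEGIS_BLOCK_SIZE) lands entirely inside that block. In scalar form (illustrative; in the real code the vqtbl1q_u8()/vqtbx1q_u8() permute then discards the leading junk bytes):

    u8 buf[16];
    const void *in = src;

    if (size < 16) {
            /* in + size - 16 now equals &buf[0], so a 16-byte load
             * never reads past the real source buffer. */
            in = memcpy(buf + 16 - size, src, size);
    }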
D | sm2.c
     220          unsigned char *in;   [local, in sm2_z_digest_update()]
     223          in = mpi_get_buffer(m, &inlen, NULL);
     224          if (!in)
     230                  sm3_update(sctx, in, inlen);
     233                  sm3_update(sctx, in + inlen - pbytes, pbytes);
     235                  sm3_update(sctx, in, inlen);
     238          kfree(in);
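These matches are the heart of the SM2 Z-digest serialization: each value must be hashed as exactly pbytes big-endian bytes, while mpi_get_buffer() returns a minimal-length buffer. A plausible reconstruction of the pad-or-truncate branches around lines 230-235 (hedged: the branch order and the zeros buffer are assumptions, not taken from the file):

    if (inlen < pbytes) {
            sm3_update(sctx, zeros, pbytes - inlen);        /* left-pad short values */
            sm3_update(sctx, in, inlen);
    } else if (inlen > pbytes) {
            sm3_update(sctx, in + inlen - pbytes, pbytes);  /* drop the leading excess */
    } else {
            sm3_update(sctx, in, inlen);
    }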
D | Kconfig
      32          required if you want the system to operate in a FIPS 200
      65          At load time, this module overrides the built-in implementations of
      70          This module is intended to be loaded at early boot time in order to
      75        bool "Enable evaluation testing features in FIPS 140 module"
      81          services. This option should not be enabled in production builds.
      93        bool "Debug the integrity check in FIPS 140 module"
     104          This option must not be enabled in production builds.
     115          for f in {text,rodata}.{orig,checked}; do xxd -g1 $f > $f.xxd; done
     263          algorithm that executes in kernel threads.
     273          into an asynchronous algorithm that executes in a kernel thread.
      [all …]
D | skcipher.c
      61          walk->src.virt.addr = skcipher_map(&walk->in);   [in skcipher_map_src()]
      71          skcipher_unmap(&walk->in, walk->src.virt.addr);   [in skcipher_unmap_src()]
     151          scatterwalk_advance(&walk->in, n);   [in skcipher_walk_done()]
     153                  scatterwalk_done(&walk->in, 0, nbytes);   [in skcipher_walk_done()]
     278          scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);   [in skcipher_next_slow()]
     322          walk->src.phys.page = scatterwalk_page(&walk->in);   [in skcipher_next_fast()]
     323          walk->src.phys.offset = offset_in_page(walk->in.offset);   [in skcipher_next_fast()]
     355          n = scatterwalk_clamp(&walk->in, n);   [in skcipher_walk_next()]
     367          if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {   [in skcipher_walk_next()]
     462          scatterwalk_start(&walk->in, req->src);   [in skcipher_walk_skcipher()]
      [all …]
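walk->in is the source-side scatterwalk cursor that these functions map, advance, clamp, and finish. From a cipher implementation's point of view the machinery is driven like this (minimal sketch, error handling trimmed, per-chunk work left as a comment):

    static int my_skcipher_crypt_sketch(struct skcipher_request *req)
    {
            struct skcipher_walk walk;
            int err;

            err = skcipher_walk_virt(&walk, req, false);
            while (walk.nbytes) {
                    /* Transform walk.nbytes bytes from walk.src.virt.addr
                     * into walk.dst.virt.addr; both map the current
                     * scatterlist chunk. */
                    err = skcipher_walk_done(&walk, 0);
            }
            return err;
    }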
D | vmac.c
     435          u8 in[16] = { 0 };   [local, in vmac_setkey()]
     447          in[0] = 0x80;
     449                  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);
     452                  in[15]++;
     456          in[0] = 0xC0;
     457          in[15] = 0;
     459                  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);
     462                  in[15]++;
     466          in[0] = 0xE0;
     467          in[15] = 0;
      [all …]
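The pattern is VMAC's subkey derivation: each subkey class is generated by encrypting counter blocks whose first byte tags the class (0x80, 0xC0, 0xE0 in the matches) and whose last byte is a running counter. Condensed sketch; nblocks and out are illustrative names, and the real function also endian-swaps and masks some of the outputs:

    u8 in[16] = { 0 };
    unsigned int i;

    in[0] = 0x80;                   /* subkey-class tag */
    for (i = 0; i < nblocks; i++) {
            crypto_cipher_encrypt_one(tctx->cipher, out + 16 * i, in);
            in[15]++;               /* per-block counter */
    }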
D | twofish_generic.c
      86          x = get_unaligned_le32(in + (n) * 4) ^ ctx->w[m]
      95  static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     130  static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
D | polyval-generic.c
     108  void polyval_update_non4k(const u8 *key, const u8 *in,   [argument]
     112                  crypto_xor(accumulator, in, POLYVAL_BLOCK_SIZE);
     114                  in += POLYVAL_BLOCK_SIZE;
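Line 112 is the XOR half of a POLYVAL update: each block folds in as accumulator = (accumulator ^ block) * key over GF(2^128). The surrounding loop has roughly this shape (polyval_mul() is a hypothetical stand-in for the field multiplication, which the match does not show):

    while (nblocks--) {
            crypto_xor(accumulator, in, POLYVAL_BLOCK_SIZE);  /* fold the block in */
            polyval_mul(accumulator, key);   /* hypothetical GF(2^128) multiply */
            in += POLYVAL_BLOCK_SIZE;
    }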
D | seed.c
     364  static void seed_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     367          const __be32 *src = (const __be32 *)in;
     402  static void seed_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     405          const __be32 *src = (const __be32 *)in;
D | camellia_generic.c
     986  static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
     993          tmp[0] = get_unaligned_be32(in);
     994          tmp[1] = get_unaligned_be32(in + 4);
     995          tmp[2] = get_unaligned_be32(in + 8);
     996          tmp[3] = get_unaligned_be32(in + 12);
    1012  static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)   [argument]
    1019          tmp[0] = get_unaligned_be32(in);
    1020          tmp[1] = get_unaligned_be32(in + 4);
    1021          tmp[2] = get_unaligned_be32(in + 8);
    1022          tmp[3] = get_unaligned_be32(in + 12);
D | fips140-module.c
     559                                  u8 *out, const u8 *in, int *hook_inuse)   [argument, in fips140_aes_encrypt()]
     561          aes_encrypt(ctx, out, in);
     566                                  u8 *out, const u8 *in, int *hook_inuse)   [argument, in fips140_aes_decrypt()]
     568          aes_decrypt(ctx, out, in);
D | blake2b_generic.c
     126                                           const u8 *in, unsigned int inlen)   [argument, in crypto_blake2b_update_generic()]
     128          return crypto_blake2b_update(desc, in, inlen, blake2b_compress_generic);
D | drbg.c
     311                               const struct drbg_string *in);
     322                           struct list_head *in)   [argument, in drbg_ctr_bcc()]
     333          list_for_each_entry(curr, in, list) {
     636                                const struct list_head *in);
    1724                            const struct list_head *in)   [argument, in drbg_kcapi_hash()]
    1730          list_for_each_entry(input, in, list)
    1827                           const struct drbg_string *in)   [argument, in drbg_kcapi_sym()]
    1832          BUG_ON(in->len < drbg_blocklen(drbg));
    1833          crypto_cipher_encrypt_one(tfm, outval, in->buf);
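drbg_ctr_bcc() walks a list of input segments to compute BCC from NIST SP 800-90A: a CBC-MAC-style chain over the concatenated input. Assuming the input were already split into full drbg_blocklen()-sized blocks, the core would look like this (sketch only; the real code also handles segments that straddle block boundaries):

    memset(chain, 0, blocklen);                     /* zero initial chaining value */
    list_for_each_entry(curr, in, list) {
            crypto_xor(chain, curr->buf, blocklen); /* XOR next block into the chain */
            crypto_cipher_encrypt_one(tfm, chain, chain);   /* encrypt in place */
    }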
D | shash.c
     243  static int shash_default_import(struct shash_desc *desc, const void *in)   [argument]
     245          memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
     360  static int shash_async_import(struct ahash_request *req, const void *in)   [argument]
     367          return crypto_shash_import(desc, in);
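shash_default_import() shows the fallback contract: when an algorithm defines no import handler, the serialized state is simply the raw descriptor context. Caller-side, the export/import pair is used like this (minimal sketch, error checks omitted; tfm and desc are an allocated shash and its descriptor):

    unsigned int ssize = crypto_shash_statesize(tfm);
    u8 *state = kmalloc(ssize, GFP_KERNEL);

    crypto_shash_init(desc);
    crypto_shash_update(desc, data, len);
    crypto_shash_export(desc, state);       /* snapshot the partial hash */

    /* ... later, in a fresh descriptor for the same tfm ... */
    crypto_shash_import(desc, state);
    crypto_shash_final(desc, digest);
    kfree(state);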
D | hmac.c
      94  static int hmac_import(struct shash_desc *pdesc, const void *in)   [argument]
     101          return crypto_shash_import(desc, in);
/crypto/asymmetric_keys/
D | asymmetric_keys.h
      17                                     const void *in, void *out);
D | asymmetric_type.c
     572                                 const void *in, void *out)   [argument, in asymmetric_key_eds_op()]
     589          ret = subtype->eds_op(params, in, out);
     596                                                  const void *in, const void *in2)   [argument, in asymmetric_key_verify_signature()]
     603          .digest = (void *)in,