/crypto/ |
D | ecc.c |
    614  u64 tmp[2 * ECC_MAX_DIGITS];  in vli_mmod_slow() local
    615  u64 *v[2] = { tmp, product };  in vli_mmod_slow()
    687  const u64 *curve_prime, u64 *tmp)  in vli_mmod_fast_192() argument
    694  vli_set(tmp, &product[3], ndigits);  in vli_mmod_fast_192()
    695  carry = vli_add(result, result, tmp, ndigits);  in vli_mmod_fast_192()
    697  tmp[0] = 0;  in vli_mmod_fast_192()
    698  tmp[1] = product[3];  in vli_mmod_fast_192()
    699  tmp[2] = product[4];  in vli_mmod_fast_192()
    700  carry += vli_add(result, result, tmp, ndigits);  in vli_mmod_fast_192()
    702  tmp[0] = tmp[1] = product[5];  in vli_mmod_fast_192()
    [all …]
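
The vli_mmod_fast_192() hits implement the standard fast reduction for NIST P-192, where p = 2^192 - 2^64 - 1 and therefore 2^192 ≡ 2^64 + 1 (mod p): the high half of the 384-bit product is folded back in as three shifted additions. A minimal userspace sketch of that folding; vli_add3() is a simplified stand-in for the kernel's vli_add() (which takes an ndigits argument, fixed here at 3 limbs):

    #include <stdint.h>

    #define NDIGITS 3   /* 192 bits as 3 x 64-bit limbs, little-endian */

    /* Carry-propagating add; returns the carry out. */
    static uint64_t vli_add3(uint64_t *r, const uint64_t *a, const uint64_t *b)
    {
        uint64_t carry = 0;
        for (int i = 0; i < NDIGITS; i++) {
            uint64_t sum = a[i] + b[i] + carry;
            carry = (sum < a[i]) || (carry && sum == a[i]);
            r[i] = sum;
        }
        return carry;
    }

    /*
     * Fold a 6-limb product into 3 limbs mod p192 = 2^192 - 2^64 - 1.
     * Since 2^192 = 2^64 + 1 (mod p192), the high half H contributes
     * H + (H << 64); the limb of H << 64 that overflows past 2^192
     * wraps around once more as (a5, a5, 0).  The caller finishes by
     * subtracting p192 while a carry remains, as the kernel does.
     */
    static uint64_t fold_p192(uint64_t result[NDIGITS],
                              const uint64_t product[2 * NDIGITS])
    {
        uint64_t tmp[NDIGITS];
        uint64_t carry;

        for (int i = 0; i < NDIGITS; i++)        /* result = low half */
            result[i] = product[i];

        carry = vli_add3(result, result, &product[3]);  /* + (a3, a4, a5) */

        tmp[0] = 0; tmp[1] = product[3]; tmp[2] = product[4];
        carry += vli_add3(result, result, tmp);         /* + (0, a3, a4)  */

        tmp[0] = tmp[1] = product[5]; tmp[2] = 0;
        carry += vli_add3(result, result, tmp);         /* + (a5, a5, 0)  */

        return carry;
    }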
|
D | aegis128-core.c |
     63  union aegis_block tmp;  in crypto_aegis128_update() local
     66  tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];  in crypto_aegis128_update()
     70  crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);  in crypto_aegis128_update()
    156  union aegis_block tmp;  in crypto_aegis128_encrypt_chunk() local
    165  tmp = state->blocks[2];  in crypto_aegis128_encrypt_chunk()
    166  crypto_aegis_block_and(&tmp, &state->blocks[3]);  in crypto_aegis128_encrypt_chunk()
    167  crypto_aegis_block_xor(&tmp, &state->blocks[4]);  in crypto_aegis128_encrypt_chunk()
    168  crypto_aegis_block_xor(&tmp, &state->blocks[1]);  in crypto_aegis128_encrypt_chunk()
    169  crypto_aegis_block_xor(&tmp, src_blk);  in crypto_aegis128_encrypt_chunk()
    173  *dst_blk = tmp;  in crypto_aegis128_encrypt_chunk()
    [all …]
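
crypto_aegis128_update() rotates the five-block AEGIS-128 state: tmp saves the last block before the loop overwrites it, so it can wrap around into block 0. A self-contained sketch of that rotation, with a plain XOR standing in for the AES round that crypto_aegis_aesenc() performs, plus the message fold that the _update_a variant adds afterwards:

    #define NBLOCKS 5
    #define BLKSZ   16

    struct blk { unsigned char b[BLKSZ]; };

    /* Stand-in for one AES round (crypto_aegis_aesenc in the kernel);
     * a bare XOR keeps the sketch self-contained. */
    static void round_fn(struct blk *dst, const struct blk *src,
                         const struct blk *key)
    {
        for (int i = 0; i < BLKSZ; i++)
            dst->b[i] = src->b[i] ^ key->b[i];
    }

    /* State rotation from the excerpt: block i becomes
     * round(block i-1, block i), and the old last block, saved in tmp,
     * wraps around into block 0.  The final XOR is the message fold
     * performed by the _update_a variant. */
    static void aegis_update(struct blk state[NBLOCKS], const struct blk *msg)
    {
        struct blk tmp = state[NBLOCKS - 1];

        for (int i = NBLOCKS - 1; i > 0; i--)
            round_fn(&state[i], &state[i - 1], &state[i]);
        round_fn(&state[0], &tmp, &state[0]);

        for (int i = 0; i < BLKSZ; i++)
            state[0].b[i] ^= msg->b[i];
    }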
|
D | authencesn.c |
     99  u32 tmp[2];  in crypto_authenc_esn_genicv_tail() local
    102  scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);  in crypto_authenc_esn_genicv_tail()
    103  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_genicv_tail()
    104  scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);  in crypto_authenc_esn_genicv_tail()
    133  u32 tmp[2];  in crypto_authenc_esn_genicv() local
    139  scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);  in crypto_authenc_esn_genicv()
    140  scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);  in crypto_authenc_esn_genicv()
    141  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);  in crypto_authenc_esn_genicv()
    235  u32 tmp[2];  in crypto_authenc_esn_decrypt_tail() local
    241  scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);  in crypto_authenc_esn_decrypt_tail()
    [all …]
|
D | jitterentropy.c |
    416  __u64 tmp = time << (DATA_SIZE_BITS - i);  in jent_lfsr_time() local
    418  tmp = tmp >> (DATA_SIZE_BITS - 1);  in jent_lfsr_time()
    430  tmp ^= ((new >> 63) & 1);  in jent_lfsr_time()
    431  tmp ^= ((new >> 60) & 1);  in jent_lfsr_time()
    432  tmp ^= ((new >> 55) & 1);  in jent_lfsr_time()
    433  tmp ^= ((new >> 30) & 1);  in jent_lfsr_time()
    434  tmp ^= ((new >> 27) & 1);  in jent_lfsr_time()
    435  tmp ^= ((new >> 22) & 1);  in jent_lfsr_time()
    437  new ^= tmp;  in jent_lfsr_time()
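
jent_lfsr_time() folds each bit of a time delta through a Fibonacci LFSR whose taps (polynomial x^64 + x^61 + x^56 + x^31 + x^28 + x^23 + 1) are visible at lines 430-435. A standalone sketch: the tap positions follow the excerpt, but the shift/rotate plumbing around them is reconstructed from the truncated context, and `new` is renamed `state` for readability:

    #include <stdint.h>

    #define DATA_SIZE_BITS 64

    static uint64_t rol64(uint64_t x, unsigned n)
    {
        return (x << n) | (x >> (64 - n));
    }

    static uint64_t lfsr_fold(uint64_t state, uint64_t time)
    {
        for (unsigned i = 1; i <= DATA_SIZE_BITS; i++) {
            uint64_t tmp = time << (DATA_SIZE_BITS - i);

            tmp = tmp >> (DATA_SIZE_BITS - 1);  /* isolate bit i-1 of time */
            tmp ^= (state >> 63) & 1;           /* XOR in the LFSR taps    */
            tmp ^= (state >> 60) & 1;
            tmp ^= (state >> 55) & 1;
            tmp ^= (state >> 30) & 1;
            tmp ^= (state >> 27) & 1;
            tmp ^= (state >> 22) & 1;
            tmp = tmp << (DATA_SIZE_BITS - i);  /* back to bit position i-1 */
            tmp = rol64(tmp, 1);
            state ^= tmp;
        }
        return state;
    }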
|
D | cipher.c |
     70  u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);  in cipher_crypt_one() local
     72  memcpy(tmp, src, bs);  in cipher_crypt_one()
     73  fn(crypto_cipher_tfm(tfm), tmp, tmp);  in cipher_crypt_one()
     74  memcpy(dst, tmp, bs);  in cipher_crypt_one()
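
cipher_crypt_one() bounces a possibly misaligned block through an aligned slot carved out of an oversized stack buffer, so the cipher callback always sees memory that satisfies the algorithm's alignmask. A userspace sketch of the same pattern, with a local ALIGN_PTR() macro standing in for the kernel's ALIGN():

    #include <stdint.h>
    #include <string.h>

    /* Round up to the next power-of-two boundary (mask = boundary - 1),
     * as the kernel's ALIGN() does. */
    #define ALIGN_PTR(p, mask) \
        ((uint8_t *)(((uintptr_t)(p) + (mask)) & ~(uintptr_t)(mask)))

    #define BS        16   /* block size */
    #define ALIGNMASK 15   /* cipher's alignment requirement - 1 */

    typedef void (*blk_fn)(uint8_t *dst, const uint8_t *src);

    static void crypt_one(blk_fn fn, uint8_t *dst, const uint8_t *src)
    {
        uint8_t buffer[BS + ALIGNMASK];
        uint8_t *tmp = ALIGN_PTR(buffer, ALIGNMASK);

        memcpy(tmp, src, BS);   /* bounce in */
        fn(tmp, tmp);           /* cipher sees an aligned buffer */
        memcpy(dst, tmp, BS);   /* bounce out */
    }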
|
D | jitterentropy-kcapi.c |
     89  __u64 tmp = 0;  in jent_get_nstime() local
     91  tmp = random_get_entropy();  in jent_get_nstime()
     98  if (tmp == 0)  in jent_get_nstime()
     99  tmp = ktime_get_ns();  in jent_get_nstime()
    101  *out = tmp;  in jent_get_nstime()
|
D | cfb.c |
     47  u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_cfb_final() local
     48  u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_cfb_final()
     88  u8 tmp[MAX_CIPHER_BLOCKSIZE];  in crypto_cfb_encrypt_inplace() local
     91  crypto_cfb_encrypt_one(tfm, iv, tmp);  in crypto_cfb_encrypt_inplace()
     92  crypto_xor(src, tmp, bsize);  in crypto_cfb_encrypt_inplace()
    158  u8 tmp[MAX_CIPHER_BLOCKSIZE];  in crypto_cfb_decrypt_inplace() local
    161  crypto_cfb_encrypt_one(tfm, iv, tmp);  in crypto_cfb_decrypt_inplace()
    163  crypto_xor(src, tmp, bsize);  in crypto_cfb_decrypt_inplace()
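
The cfb.c hits show the CFB dataflow: the keystream E_k(IV) is produced into tmp, XORed over the data with crypto_xor(), and the resulting ciphertext block becomes the next IV (which is why decryption at line 161 still calls the *encrypt* primitive). A self-contained sketch of the in-place encrypt loop; the kernel advances an IV pointer rather than copying, but the effect is the same:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BSIZE 16

    /* Any block cipher E_k; stands in for crypto_cfb_encrypt_one(). */
    typedef void (*encrypt_one_fn)(uint8_t *out, const uint8_t *in);

    static void xor_bytes(uint8_t *dst, const uint8_t *src, size_t n)
    {
        while (n--)
            *dst++ ^= *src++;
    }

    static void cfb_encrypt_inplace(encrypt_one_fn enc, uint8_t *buf,
                                    size_t nbytes, uint8_t *iv)
    {
        uint8_t tmp[BSIZE];

        while (nbytes >= BSIZE) {
            enc(tmp, iv);                /* keystream = E_k(IV)           */
            xor_bytes(buf, tmp, BSIZE);  /* ciphertext now in buf         */
            memcpy(iv, buf, BSIZE);      /* chain: IV = last ciphertext   */
            buf += BSIZE;
            nbytes -= BSIZE;
        }
    }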
|
D | ansi_cprng.c |
     84  unsigned char tmp[DEFAULT_BLK_SZ];  in _get_more_prng_bytes() local
    106  memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
    108  hexdump("tmp stage 0: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
    117  xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
    118  hexdump("tmp stage 1: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
    147  xor_vectors(ctx->rand_data, ctx->I, tmp,  in _get_more_prng_bytes()
    150  hexdump("tmp stage 2: ", tmp, DEFAULT_BLK_SZ);  in _get_more_prng_bytes()
    156  crypto_cipher_encrypt_one(ctx->tfm, output, tmp);  in _get_more_prng_bytes()
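
_get_more_prng_bytes() stages one round of the ANSI X9.31 PRNG, and the "tmp stage 0/1/2" hexdumps mark the three block-cipher inputs: DT, I ^ V, and R ^ I. A condensed sketch of the round below; the kernel version also increments DT, checks for stuck output, and handles the test-mode path, all omitted here:

    #include <stdint.h>
    #include <string.h>

    #define BLK 16

    /* The keyed block cipher E; stands in for crypto_cipher_encrypt_one(). */
    typedef void (*blkcipher_fn)(uint8_t *out, const uint8_t *in);

    static void xor_vec(uint8_t *out, const uint8_t *a, const uint8_t *b)
    {
        for (int i = 0; i < BLK; i++)
            out[i] = a[i] ^ b[i];
    }

    /* One X9.31 round: DT is the date/time vector, V the seed,
     * R the random output block. */
    static void x931_round(blkcipher_fn E, const uint8_t *DT,
                           uint8_t *V, uint8_t *R)
    {
        uint8_t I[BLK], tmp[BLK];

        memcpy(tmp, DT, BLK);   /* stage 0: I = E(DT)             */
        E(I, tmp);

        xor_vec(tmp, I, V);     /* stage 1: R = E(I ^ V)          */
        E(R, tmp);

        xor_vec(tmp, R, I);     /* stage 2: V = E(R ^ I), next seed */
        E(V, tmp);
    }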
|
D | camellia_generic.c |
     991  u32 tmp[4];  in camellia_encrypt() local
     993  tmp[0] = get_unaligned_be32(in);  in camellia_encrypt()
     994  tmp[1] = get_unaligned_be32(in + 4);  in camellia_encrypt()
     995  tmp[2] = get_unaligned_be32(in + 8);  in camellia_encrypt()
     996  tmp[3] = get_unaligned_be32(in + 12);  in camellia_encrypt()
    1003  camellia_do_encrypt(cctx->key_table, tmp, max);  in camellia_encrypt()
    1006  put_unaligned_be32(tmp[2], out);  in camellia_encrypt()
    1007  put_unaligned_be32(tmp[3], out + 4);  in camellia_encrypt()
    1008  put_unaligned_be32(tmp[0], out + 8);  in camellia_encrypt()
    1009  put_unaligned_be32(tmp[1], out + 12);  in camellia_encrypt()
    [all …]
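
camellia_encrypt() loads the input with get_unaligned_be32() and, at lines 1006-1009, stores the state with its 64-bit halves swapped, folding Camellia's final swap into the store order. Portable equivalents of the accessors and the swapped store, for illustration:

    #include <stdint.h>

    /* Equivalents of get/put_unaligned_be32(): byte-wise access avoids
     * alignment traps, the shifts fix the byte order. */
    static uint32_t get_be32(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
    }

    static void put_be32(uint32_t v, uint8_t *p)
    {
        p[0] = v >> 24;
        p[1] = v >> 16;
        p[2] = v >> 8;
        p[3] = v;
    }

    /* Store in the order used at lines 1006-1009: the two 64-bit halves
     * of the state swap places on output. */
    static void store_swapped(const uint32_t tmp[4], uint8_t out[16])
    {
        put_be32(tmp[2], out);
        put_be32(tmp[3], out + 4);
        put_be32(tmp[0], out + 8);
        put_be32(tmp[1], out + 12);
    }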
|
D | twofish_common.c |
    470  tmp = poly_to_exp[key[i] - 1]; \
    471  (a) ^= exp_to_poly[tmp + (w)]; \
    472  (b) ^= exp_to_poly[tmp + (x)]; \
    473  (c) ^= exp_to_poly[tmp + (y)]; \
    474  (d) ^= exp_to_poly[tmp + (z)]; \
    583  u8 tmp;  in __twofish_setkey() local
|
D | polyval-generic.c |
    158  u8 tmp[POLYVAL_BLOCK_SIZE];  in polyval_update() local
    176  copy_and_reverse(tmp, src);  in polyval_update()
    177  crypto_xor(dctx->buffer, tmp, POLYVAL_BLOCK_SIZE);  in polyval_update()
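
copy_and_reverse() exists because POLYVAL is the little-endian mirror image of GHASH; the generic driver byte-reverses each block so it can reuse big-endian GHASH field arithmetic. The helper is small enough to sketch in full:

    #include <stdint.h>

    #define POLYVAL_BLOCK_SIZE 16

    /* Byte-reverse a 16-byte block before (and after) feeding it to the
     * big-endian GHASH core. */
    static void copy_and_reverse(uint8_t dst[POLYVAL_BLOCK_SIZE],
                                 const uint8_t src[POLYVAL_BLOCK_SIZE])
    {
        for (int i = 0; i < POLYVAL_BLOCK_SIZE; i++)
            dst[i] = src[POLYVAL_BLOCK_SIZE - 1 - i];
    }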
|
D | scatterwalk.c |
     59  struct scatterlist tmp[2];  in scatterwalk_map_and_copy() local
     64  sg = scatterwalk_ffwd(tmp, sg, start);  in scatterwalk_map_and_copy()
|
D | sm3_generic.c |
     57  unsigned int tmp;  in sm3_expand() local
     64  tmp = w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15);  in sm3_expand()
     65  w[i] = p1(tmp) ^ (rol32(w[i - 13], 7)) ^ w[i - 6];  in sm3_expand()
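
sm3_expand() computes the SM3 message schedule: words 16-67 derive from earlier words through the P1 permutation, P1(X) = X ^ (X <<< 15) ^ (X <<< 23). A standalone version of the loop at lines 64-65:

    #include <stdint.h>

    static uint32_t rol32(uint32_t x, unsigned n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* SM3's P1 permutation. */
    static uint32_t p1(uint32_t x)
    {
        return x ^ rol32(x, 15) ^ rol32(x, 23);
    }

    /* Expand the first 16 message words into the full 68-word schedule. */
    static void sm3_expand_words(uint32_t w[68])
    {
        for (int i = 16; i < 68; i++) {
            uint32_t tmp = w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15);
            w[i] = p1(tmp) ^ rol32(w[i - 13], 7) ^ w[i - 6];
        }
    }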
|
D | ctr.c |
     34  u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_ctr_crypt_final() local
     35  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_ctr_crypt_final()
     82  u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_ctr_crypt_inplace() local
     83  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_ctr_crypt_inplace()
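
ctr.c (like xctr.c below) aligns a stack keystream buffer with PTR_ALIGN() so the underlying cipher's alignmask is honoured. The loop around that buffer is plain CTR: encrypt the counter, XOR it over the data, increment. A sketch without the alignment dance, which an ordinary userspace buffer does not need; XCTR differs by combining the counter little-endian rather than incrementing it big-endian:

    #include <stddef.h>
    #include <stdint.h>

    #define BSIZE 16

    /* Big-endian counter increment, as the kernel's crypto_inc() does
     * for CTR: the last byte is least significant. */
    static void ctr_inc(uint8_t ctr[BSIZE])
    {
        for (int i = BSIZE - 1; i >= 0; i--)
            if (++ctr[i] != 0)
                break;
    }

    typedef void (*encrypt_one_fn)(uint8_t *out, const uint8_t *in);

    static void ctr_crypt_inplace(encrypt_one_fn enc, uint8_t *buf,
                                  size_t nbytes, uint8_t ctr[BSIZE])
    {
        uint8_t keystream[BSIZE];

        while (nbytes >= BSIZE) {
            enc(keystream, ctr);           /* keystream = E_k(counter) */
            for (int i = 0; i < BSIZE; i++)
                buf[i] ^= keystream[i];    /* same op encrypts/decrypts */
            ctr_inc(ctr);
            buf += BSIZE;
            nbytes -= BSIZE;
        }
    }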
|
D | ghash-generic.c |
    120  u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_flush() local
    123  *tmp++ ^= 0;  in ghash_flush()
|
D | xctr.c |
     82  u8 tmp[XCTR_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];  in crypto_xctr_crypt_inplace() local
     83  u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);  in crypto_xctr_crypt_inplace()
|
D | lrw.c |
     75  be128 tmp = { 0 };  in lrw_setkey() local
     95  lrw_setbit128_bbe(&tmp, i);  in lrw_setkey()
     96  ctx->mulinc[i] = tmp;  in lrw_setkey()
|
D | skcipher.c |
    184  struct skcipher_walk_buffer *p, *tmp;  in skcipher_walk_complete() local
    186  list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {  in skcipher_walk_complete()
    289  u8 *tmp = walk->page;  in skcipher_next_copy() local
    292  memcpy(tmp, walk->src.virt.addr, walk->nbytes);  in skcipher_next_copy()
    295  walk->src.virt.addr = tmp;  in skcipher_next_copy()
    296  walk->dst.virt.addr = tmp;  in skcipher_next_copy()
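
The skcipher.c hits, and the fips140-module.c, algif_aead.c, and af_alg.c ones further down, all rely on list_for_each_entry_safe(), which caches the next element in tmp so the current one can be freed mid-walk. The same idea on a minimal singly-linked list:

    #include <stdlib.h>

    struct node {
        int val;
        struct node *next;
    };

    /* Safe-iteration deletion: tmp holds the successor before the
     * current node is freed, so freeing never invalidates the cursor. */
    static void free_all(struct node **head)
    {
        struct node *p, *tmp;

        for (p = *head; p; p = tmp) {
            tmp = p->next;   /* saved before p is freed */
            free(p);
        }
        *head = NULL;
    }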
|
D | fips140-module.c |
    229  struct crypto_alg *alg, *tmp;  in unregister_existing_fips140_algos() local
    242  list_for_each_entry_safe(alg, tmp, &crypto_alg_list, cra_list) {  in unregister_existing_fips140_algos()
|
D | algif_aead.c |
    101  struct af_alg_tsgl *tsgl, *tmp;  in _aead_recvmsg() local
    182  list_for_each_entry_safe(tsgl, tmp, &ctx->tsgl_list, list) {  in _aead_recvmsg()
|
D | drbg.c |
    828  unsigned char *tmp = drbg->scratchpad + drbg_statelen(drbg);  in drbg_hash_df() local
    843  ret = drbg_kcapi_hash(drbg, tmp, entropylist);  in drbg_hash_df()
    850  memcpy(outval + len, tmp, blocklen);  in drbg_hash_df()
    855  memset(tmp, 0, drbg_blocklen(drbg));  in drbg_hash_df()
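
drbg_hash_df() is the SP 800-90A (section 10.3.1) hash derivation function: hash a one-byte counter plus the input material, copy one digest per iteration into the output until outlen bytes are filled, then scrub the scratch buffer (line 855). A sketch with the hash abstracted behind a callback; the kernel additionally prepends the requested output bit length to the hashed data, omitted here:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BLOCKLEN 32   /* hash output size, e.g. SHA-256 */

    /* Any hash over (prefix || input); stands in for drbg_kcapi_hash(). */
    typedef void (*hash_fn)(uint8_t out[BLOCKLEN],
                            const uint8_t *prefix, size_t plen,
                            const uint8_t *in, size_t inlen);

    static void hash_df(hash_fn H, uint8_t *outval, size_t outlen,
                        const uint8_t *in, size_t inlen)
    {
        uint8_t tmp[BLOCKLEN];
        uint8_t counter = 1;   /* the counter is 1-based in SP 800-90A */

        for (size_t len = 0; len < outlen; len += BLOCKLEN, counter++) {
            size_t blocklen = outlen - len;

            if (blocklen > BLOCKLEN)
                blocklen = BLOCKLEN;
            H(tmp, &counter, 1, in, inlen);     /* Hash(counter || in)  */
            memcpy(outval + len, tmp, blocklen);
        }
        memset(tmp, 0, sizeof(tmp));            /* scrub, as line 855 does */
    }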
|
D | af_alg.c |
    667  struct af_alg_rsgl *rsgl, *tmp;  in af_alg_free_areq_sgls() local
    672  list_for_each_entry_safe(rsgl, tmp, &areq->rsgl_list, list) {  in af_alg_free_areq_sgls()
|
D | Kconfig |
     79  adb shell cp /sys/kernel/debug/fips140/{text,rodata} /data/local/tmp/
     80  adb pull /data/local/tmp/text text.checked
     81  adb pull /data/local/tmp/rodata rodata.checked
|
/crypto/asymmetric_keys/ |
D | verify_pefile.c |
    249  unsigned *canon, tmp, loop, i, hashed_bytes;  in pefile_digest_pe_contents() local
    259  tmp = ctx->image_checksum_offset + sizeof(uint32_t);  in pefile_digest_pe_contents()
    260  ret = crypto_shash_update(desc, pebuf + tmp,  in pefile_digest_pe_contents()
    261  ctx->cert_dirent_offset - tmp);  in pefile_digest_pe_contents()
    265  tmp = ctx->cert_dirent_offset + sizeof(struct data_dirent);  in pefile_digest_pe_contents()
    266  ret = crypto_shash_update(desc, pebuf + tmp, ctx->header_size - tmp);  in pefile_digest_pe_contents()
    307  tmp = hashed_bytes + ctx->certs_size;  in pefile_digest_pe_contents()
    310  pelen - tmp);  in pefile_digest_pe_contents()
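
pefile_digest_pe_contents() digests the PE image while skipping the two header fields that signing rewrites: the CheckSum word and the certificate data-directory entry. A sketch of that skip-the-holes hashing, with the ctx->* offsets turned into parameters and the hash update behind a callback (both hypothetical stand-ins for the excerpt's crypto_shash_update() calls):

    #include <stddef.h>
    #include <stdint.h>

    typedef void (*update_fn)(void *desc, const uint8_t *data, size_t len);

    static void digest_skipping_holes(update_fn update, void *desc,
                                      const uint8_t *pebuf,
                                      size_t csum_off, size_t csum_len,
                                      size_t dirent_off, size_t dirent_len,
                                      size_t header_size)
    {
        size_t tmp;

        update(desc, pebuf, csum_off);                 /* up to CheckSum    */
        tmp = csum_off + csum_len;                     /* skip CheckSum     */
        update(desc, pebuf + tmp, dirent_off - tmp);   /* up to cert dirent */
        tmp = dirent_off + dirent_len;                 /* skip cert dirent  */
        update(desc, pebuf + tmp, header_size - tmp);  /* rest of headers   */
    }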
|
/crypto/async_tx/ |
D | async_xor.c |
     38  dma_addr_t tmp;  in do_async_xor() local
     62  tmp = src_list[0];  in do_async_xor()
     80  src_list[0] = tmp;  in do_async_xor()
|