
Searched refs:hash (Results 1 – 18 of 18) sorted by relevance

/lib/
hashtable_test.c
37 DEFINE_HASHTABLE(hash, 1); in hashtable_test_hash_empty()
39 KUNIT_EXPECT_TRUE(test, hash_empty(hash)); in hashtable_test_hash_empty()
43 hash_add(hash, &a.node, a.key); in hashtable_test_hash_empty()
46 KUNIT_EXPECT_FALSE(test, hash_empty(hash)); in hashtable_test_hash_empty()
52 DEFINE_HASHTABLE(hash, 4); in hashtable_test_hash_hashed()
56 hash_add(hash, &a.node, a.key); in hashtable_test_hash_hashed()
59 hash_add(hash, &b.node, b.key); in hashtable_test_hash_hashed()
69 DEFINE_HASHTABLE(hash, 3); in hashtable_test_hash_add()
74 hash_add(hash, &a.node, a.key); in hashtable_test_hash_add()
78 hash_add(hash, &b.node, b.key); in hashtable_test_hash_add()
[all …]
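
The KUnit test above exercises the fixed-size hashtable API from
<linux/hashtable.h>. A minimal sketch of the same pattern; the struct,
table name, and key are illustrative, not the test's own:

    #include <linux/hashtable.h>

    struct my_entry {
            int key;
            struct hlist_node node;
    };

    /* Table with 2^4 = 16 buckets, statically initialized empty. */
    static DEFINE_HASHTABLE(my_table, 4);

    static void example(void)
    {
            struct my_entry a = { .key = 1 };
            struct my_entry *cur;

            hash_add(my_table, &a.node, a.key);  /* insert, keyed by a.key */

            hash_for_each_possible(my_table, cur, node, 1)  /* walk one bucket */
                    if (cur->key == 1)
                            break;

            hash_del(&a.node);  /* remove; hash_empty() is true again */
    }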
oid_registry.c
31 unsigned i, j, k, hash; in look_up_OID() local
35 hash = datasize - 1; in look_up_OID()
38 hash += octets[i] * 33; in look_up_OID()
39 hash = (hash >> 24) ^ (hash >> 16) ^ (hash >> 8) ^ hash; in look_up_OID()
40 hash &= 0xff; in look_up_OID()
51 xhash = oid_search_table[j].hash; in look_up_OID()
52 if (xhash > hash) { in look_up_OID()
56 if (xhash < hash) { in look_up_OID()
build_OID_registry
125 my $hash = $#octets;
127 $hash += $_ * 33;
130 $hash = ($hash >> 24) ^ ($hash >> 16) ^ ($hash >> 8) ^ ($hash);
132 push @hash_values, $hash & 0xff;
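
The Perl generator and the C lookup above must compute the same 8-bit
hash: seed with the octet count minus one, add each octet times 33, then
XOR-fold the 32-bit value down to one byte. A standalone sketch of that
folding, derived directly from the two snippets (userspace C):

    #include <stdint.h>
    #include <stddef.h>

    static uint8_t oid_hash(const uint8_t *octets, size_t datasize)
    {
            uint32_t hash = datasize - 1;  /* == $#octets in the script */
            size_t i;

            for (i = 0; i < datasize; i++)
                    hash += octets[i] * 33;
            /* fold all four bytes together, keep the low eight bits */
            hash = (hash >> 24) ^ (hash >> 16) ^ (hash >> 8) ^ hash;
            return hash & 0xff;
    }

look_up_OID() then binary-searches oid_search_table, which the script
emits sorted by this hash, so mismatched hashes prune the search early.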
stackdepot.c
60 u32 hash; /* Hash in the hash table */ member
260 depot_alloc_stack(unsigned long *entries, int size, u32 hash, void **prealloc) in depot_alloc_stack() argument
302 stack->hash = hash; in depot_alloc_stack()
345 u32 hash) in find_stack() argument
350 if (found->hash == hash && in find_stack()
367 u32 hash; in __stack_depot_save() local
382 hash = hash_stack(entries, nr_entries); in __stack_depot_save()
383 bucket = &stack_table[hash & stack_hash_mask]; in __stack_depot_save()
390 found = find_stack(smp_load_acquire(bucket), entries, nr_entries, hash); in __stack_depot_save()
417 found = find_stack(*bucket, entries, nr_entries, hash); in __stack_depot_save()
[all …]
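
The pattern in __stack_depot_save() is hash-then-verify: a 32-bit hash of
the trace picks a bucket, and the cached hash filters chain entries before
the expensive full comparison. A sketch with illustrative types (the
kernel's actual stack_record carries more fields):

    struct stack_record {
            struct stack_record *next;  /* bucket chain */
            u32 hash;                   /* cached hash of the trace */
            u32 size;                   /* number of entries */
            unsigned long entries[];
    };

    static struct stack_record *find(struct stack_record *bucket,
                                     unsigned long *entries, u32 size,
                                     u32 hash)
    {
            struct stack_record *found;

            for (found = bucket; found; found = found->next) {
                    /* cheap hash check first, memcmp only on a match */
                    if (found->hash == hash && found->size == size &&
                        !memcmp(entries, found->entries,
                                size * sizeof(unsigned long)))
                            return found;
            }
            return NULL;
    }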
rhashtable.c
53 int lockdep_rht_bucket_is_held(const struct bucket_table *tbl, u32 hash) in lockdep_rht_bucket_is_held() argument
59 return bit_spin_is_locked(0, (unsigned long *)&tbl->buckets[hash]); in lockdep_rht_bucket_is_held()
492 struct bucket_table *tbl, unsigned int hash, in rhashtable_lookup_one() argument
504 rht_for_each_from(head, rht_ptr(bkt, tbl, hash), tbl, hash) { in rhashtable_lookup_one()
524 head = rht_dereference_bucket(head->next, tbl, hash); in rhashtable_lookup_one()
543 struct bucket_table *tbl, unsigned int hash, struct rhash_head *obj, in rhashtable_insert_one() argument
568 head = rht_ptr(bkt, tbl, hash); in rhashtable_insert_one()
597 unsigned int hash; in rhashtable_try_insert() local
604 hash = rht_head_hashfn(ht, tbl, obj, ht->p); in rhashtable_try_insert()
607 bkt = rht_bucket_var(tbl, hash); in rhashtable_try_insert()
[all …]
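
rhashtable is the resizable variant: callers describe where the key and
the rhash_head live inside their object via rhashtable_params, and the
table rehashes itself as it grows. A minimal sketch, assuming a u32 key
(struct and parameter names are illustrative):

    #include <linux/rhashtable.h>

    struct my_obj {
            u32 key;
            struct rhash_head node;
    };

    static const struct rhashtable_params my_params = {
            .key_len             = sizeof(u32),
            .key_offset          = offsetof(struct my_obj, key),
            .head_offset         = offsetof(struct my_obj, node),
            .automatic_shrinking = true,
    };

    static int example(struct rhashtable *ht, struct my_obj *obj)
    {
            int err = rhashtable_init(ht, &my_params);

            if (err)
                    return err;
            err = rhashtable_insert_fast(ht, &obj->node, my_params);
            if (!err && !rhashtable_lookup_fast(ht, &obj->key, my_params))
                    err = -ENOENT;  /* only if a remover raced us */
            rhashtable_destroy(ht);
            return err;
    }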
digsig.c
205 unsigned char hash[SHA1_DIGEST_SIZE]; in digsig_verify() local
244 crypto_shash_final(desc, hash); in digsig_verify()
250 hash, sizeof(hash)); in digsig_verify()
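
digsig_verify() uses the synchronous shash interface. A hedged sketch of
the same SHA-1 digest as a one-shot call, equivalent to the snippet's
init/update/final sequence (helper name is illustrative, error paths
trimmed):

    #include <crypto/hash.h>
    #include <crypto/sha1.h>

    static int sha1_digest(const u8 *data, unsigned int len,
                           u8 hash[SHA1_DIGEST_SIZE])
    {
            struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
            int err;

            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);
            {
                    SHASH_DESC_ON_STACK(desc, tfm);

                    desc->tfm = tfm;
                    /* init + update + final in one call */
                    err = crypto_shash_digest(desc, data, len, hash);
            }
            crypto_free_shash(tfm);
            return err;
    }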
siphash_kunit.c
113 #define chk(hash, vector, fmt...) \ argument
114 KUNIT_EXPECT_EQ_MSG(test, hash, vector, fmt)
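
chk() feeds test vectors to siphash(), the keyed 64-bit PRF from
<linux/siphash.h>. Typical use outside the test (key name illustrative):

    #include <linux/siphash.h>
    #include <linux/random.h>

    static siphash_key_t my_key;  /* fill once with get_random_bytes() */

    static u64 hash_buf(const void *data, size_t len)
    {
            /* a secret random key is what makes siphash DoS-resistant */
            return siphash(data, len, &my_key);
    }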
debugobjects.c
490 unsigned long hash; in get_bucket() local
492 hash = hash_long((addr >> ODEBUG_CHUNK_SHIFT), ODEBUG_HASH_BITS); in get_bucket()
493 return &obj_hash[hash]; in get_bucket()
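
get_bucket() shows the <linux/hash.h> idiom: hash_long() reduces a long to
exactly the requested number of bits, so the result can index the bucket
array directly. The same shape, with illustrative names:

    #include <linux/hash.h>

    #define MY_HASH_BITS 14
    static struct hlist_head my_buckets[1 << MY_HASH_BITS];

    static struct hlist_head *bucket_for(unsigned long addr)
    {
            return &my_buckets[hash_long(addr, MY_HASH_BITS)];
    }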
iov_iter.c
1241 struct ahash_request *hash = hashp; in hash_and_copy_to_iter() local
1247 ahash_request_set_crypt(hash, &sg, NULL, copied); in hash_and_copy_to_iter()
1248 crypto_ahash_update(hash); in hash_and_copy_to_iter()
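
hash_and_copy_to_iter() drives the asynchronous ahash API. A sketch of a
one-shot digest over a linear buffer, made synchronous with
crypto_wait_req() (helper name is illustrative; buf must be kmalloc'd
memory, not stack, because it goes into a scatterlist):

    #include <crypto/hash.h>
    #include <linux/scatterlist.h>

    static int ahash_digest_buf(struct crypto_ahash *tfm, void *buf,
                                unsigned int len, u8 *out)
    {
            struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
            struct scatterlist sg;
            DECLARE_CRYPTO_WAIT(wait);
            int err;

            if (!req)
                    return -ENOMEM;
            sg_init_one(&sg, buf, len);
            ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                       crypto_req_done, &wait);
            ahash_request_set_crypt(req, &sg, out, len);
            /* wait for the async operation to complete */
            err = crypto_wait_req(crypto_ahash_digest(req), &wait);
            ahash_request_free(req);
            return err;
    }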
Kconfig.debug
2399 tristate "Perform selftest on resizable hash table"
2563 tristate "KUnit Test for integer hash functions" if !KUNIT_ALL_TESTS
2568 integer (<linux/hash.h>) hash functions on boot.
2789 Enable this option to test the kernel's siphash (<linux/siphash.h>) hash
test_bpf.c
14346 skb->hash = SKB_HASH; in populate_skb()
/lib/zstd/compress/
zstd_ldm.c
68 U64 hash = state->rolling; in ZSTD_ldm_gear_reset() local
72 hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \ in ZSTD_ldm_gear_reset()
101 U64 hash, mask; in ZSTD_ldm_gear_feed() local
103 hash = state->rolling; in ZSTD_ldm_gear_feed()
108 hash = (hash << 1) + ZSTD_ldm_gearTab[data[n] & 0xff]; \ in ZSTD_ldm_gear_feed()
110 if (UNLIKELY((hash & mask) == 0)) { \ in ZSTD_ldm_gear_feed()
131 state->rolling = hash; in ZSTD_ldm_gear_feed()
173 ldmState_t* ldmState, size_t hash, ldmParams_t const ldmParams) in ZSTD_ldm_getBucket() argument
175 return ldmState->hashTable + (hash << ldmParams.bucketSizeLog); in ZSTD_ldm_getBucket()
181 size_t const hash, const ldmEntry_t entry, in ZSTD_ldm_insertEntry() argument
[all …]
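
The gear hash above is a rolling hash built for cut-point detection: each
step shifts the state left and adds a random per-byte constant, so old
bytes age out of the high bits and (hash & mask) == 0 marks a
content-defined boundary. The core loop as a standalone sketch (gearTab
stands in for ZSTD_ldm_gearTab):

    #include <stdint.h>
    #include <stddef.h>

    static uint64_t gear_feed(uint64_t hash, const uint8_t *data,
                              size_t n, const uint64_t gearTab[256])
    {
            size_t i;

            for (i = 0; i < n; i++)
                    hash = (hash << 1) + gearTab[data[i]];
            return hash;
    }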
zstd_compress_internal.h
246 U32 hash; member
842 static U64 ZSTD_rollingHash_append(U64 hash, void const* buf, size_t size) in ZSTD_rollingHash_append() argument
847 hash *= prime8bytes; in ZSTD_rollingHash_append()
848 hash += istart[pos] + ZSTD_ROLL_HASH_CHAR_OFFSET; in ZSTD_rollingHash_append()
850 return hash; in ZSTD_rollingHash_append()
873 MEM_STATIC U64 ZSTD_rollingHash_rotate(U64 hash, BYTE toRemove, BYTE toAdd, U64 primePower) in ZSTD_rollingHash_rotate() argument
875 hash -= (toRemove + ZSTD_ROLL_HASH_CHAR_OFFSET) * primePower; in ZSTD_rollingHash_rotate()
876 hash *= prime8bytes; in ZSTD_rollingHash_rotate()
877 hash += toAdd + ZSTD_ROLL_HASH_CHAR_OFFSET; in ZSTD_rollingHash_rotate()
878 return hash; in ZSTD_rollingHash_rotate()
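
ZSTD_rollingHash_rotate() is a classic Rabin-Karp update: subtract the
departing byte's term (scaled by prime^(windowSize-1)), shift the whole
polynomial with one multiply, add the entering byte. A sketch; the prime
and the character offset are assumed from zstd's source and should be
treated as illustrative constants:

    #include <stdint.h>

    #define PRIME8   0xCF1BBCDCB7A56463ULL  /* assumed: zstd's prime8bytes */
    #define CHAR_OFF 10                     /* assumed: ZSTD_ROLL_HASH_CHAR_OFFSET */

    /* primePower must equal PRIME8^(windowSize - 1) */
    static uint64_t roll_rotate(uint64_t hash, uint8_t out_byte,
                                uint8_t in_byte, uint64_t primePower)
    {
            hash -= (out_byte + CHAR_OFF) * primePower;
            hash *= PRIME8;
            hash += in_byte + CHAR_OFF;
            return hash;
    }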
zstd_lazy.c
873 U32 const hash = (U32)ZSTD_hashPtr(base + idx, hashLog + ZSTD_ROW_HASH_TAG_BITS, mls); in ZSTD_row_fillHashCache() local
874 U32 const row = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog; in ZSTD_row_fillHashCache()
876 ms->hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK] = hash; in ZSTD_row_fillHashCache()
896 { U32 const hash = cache[idx & ZSTD_ROW_HASH_CACHE_MASK]; in ZSTD_row_nextCachedHash() local
898 return hash; in ZSTD_row_nextCachedHash()
917 …U32 const hash = useCache ? ZSTD_row_nextCachedHash(ms->hashCache, hashTable, tagTable, base, upda… in ZSTD_row_update_internalImpl() local
919 U32 const relRow = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog; in ZSTD_row_update_internalImpl()
925 assert(hash == ZSTD_hashPtr(base + updateStartIdx, hashLog + ZSTD_ROW_HASH_TAG_BITS, mls)); in ZSTD_row_update_internalImpl()
926 ((BYTE*)tagRow)[pos + ZSTD_ROW_HASH_TAG_OFFSET] = hash & ZSTD_ROW_HASH_TAG_MASK; in ZSTD_row_update_internalImpl()
1183 …U32 const hash = ZSTD_row_nextCachedHash(hashCache, hashTable, tagTable, base, curr, hashLog, rowL… in ZSTD_RowFindBestMatch() local
[all …]
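
The row-based matcher splits each 32-bit hash in two: the high bits
(shifted by rowLog) select a row of the table, while the low tag bits
become a one-byte tag that cheap, often SIMD, comparisons reject on
before any index row is touched. A sketch, with the tag width assumed to
be 8 bits:

    #include <stdint.h>

    #define TAG_BITS 8  /* assumed value of ZSTD_ROW_HASH_TAG_BITS */

    static void split_hash(uint32_t hash, uint32_t rowLog,
                           uint32_t *row, uint8_t *tag)
    {
            *row = (hash >> TAG_BITS) << rowLog;
            *tag = hash & ((1u << TAG_BITS) - 1);
    }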
zstd_fast.c
39 size_t const hash = ZSTD_hashPtr(ip + p, hBits, mls); in ZSTD_fillHashTable()
40 if (hashTable[hash] == 0) { /* not yet filled */ in ZSTD_fillHashTable()
41 hashTable[hash] = curr + p; in ZSTD_fillHashTable()
/lib/xz/
xz_dec_stream.c
93 struct xz_dec_hash hash; member
115 struct xz_dec_hash hash; member
259 s->block.hash.unpadded += s->block_header.size in dec_block()
263 s->block.hash.unpadded += check_sizes[s->check_type]; in dec_block()
266 s->block.hash.unpadded += 4; in dec_block()
269 s->block.hash.uncompressed += s->block.uncompressed; in dec_block()
270 s->block.hash.crc32 = xz_crc32( in dec_block()
271 (const uint8_t *)&s->block.hash, in dec_block()
272 sizeof(s->block.hash), s->block.hash.crc32); in dec_block()
323 s->index.hash.unpadded += s->vli; in dec_index()
[all …]
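
dec_block() and dec_index() keep a running record of unpadded and
uncompressed sizes and repeatedly fold the whole record into a chained
CRC32; xz_crc32() continues from the crc value passed in. The shape of
that update as a sketch (fields as in xz_dec_stream.c, where vli_type is
a 64-bit size):

    struct xz_dec_hash {
            uint64_t unpadded;
            uint64_t uncompressed;
            uint32_t crc32;
    };

    static void record_block(struct xz_dec_hash *h, uint64_t unpadded,
                             uint64_t uncompressed)
    {
            h->unpadded += unpadded;
            h->uncompressed += uncompressed;
            /* chain: the previous crc32 seeds the next round, and the
             * record is hashed including its own crc32 field */
            h->crc32 = xz_crc32((const uint8_t *)h, sizeof(*h), h->crc32);
    }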
/lib/crypto/
blake2s-selftest.c
552 u8 hash[BLAKE2S_HASH_SIZE]; in blake2s_digest_test() local
568 blake2s(hash, buf, key + BLAKE2S_KEY_SIZE - keylen, outlen, i, in blake2s_digest_test()
570 if (memcmp(hash, blake2s_testvecs[i], outlen)) { in blake2s_digest_test()
584 blake2s_final(&state, hash); in blake2s_digest_test()
585 if (memcmp(hash, blake2s_testvecs[i], outlen)) { in blake2s_digest_test()
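
The selftest checks that the one-shot blake2s() helper and the
incremental init/update/final sequence agree. Both forms, unkeyed for
brevity (helper name is illustrative):

    #include <crypto/blake2s.h>

    static void blake2s_both(const u8 *buf, size_t len,
                             u8 out[BLAKE2S_HASH_SIZE])
    {
            struct blake2s_state state;
            u8 out2[BLAKE2S_HASH_SIZE];

            /* one-shot: key == NULL, keylen == 0 */
            blake2s(out, buf, NULL, BLAKE2S_HASH_SIZE, len, 0);

            /* incremental, must produce the same digest */
            blake2s_init(&state, BLAKE2S_HASH_SIZE);
            blake2s_update(&state, buf, len);
            blake2s_final(&state, out2);
    }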
/lib/zstd/decompress/
zstd_decompress.c
88 const U64 hash = xxh64(&dictID, sizeof(U32), 0); in ZSTD_DDictHashSet_getIndex() local
90 return hash & (hashSet->ddictPtrTableSize - 1); in ZSTD_DDictHashSet_getIndex()
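
ZSTD_DDictHashSet_getIndex() relies on the dictionary table size being a
power of two, so the modulo reduces to a mask. The same reduction as a
standalone sketch:

    #include <stdint.h>
    #include <stddef.h>

    static size_t index_for(uint64_t hash, size_t table_size_pow2)
    {
            /* valid only when table_size_pow2 is a power of two */
            return (size_t)(hash & (table_size_pow2 - 1));
    }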