
Searched refs:hash (Results 1 – 20 of 20) sorted by relevance

/kernel/trace/
ftrace.c
403 struct hlist_head *hash; member
580 memset(stat->hash, 0, in ftrace_profile_reset()
647 if (stat->hash) { in ftrace_profile_init_cpu()
659 stat->hash = kcalloc(size, sizeof(struct hlist_head), GFP_KERNEL); in ftrace_profile_init_cpu()
661 if (!stat->hash) in ftrace_profile_init_cpu()
666 kfree(stat->hash); in ftrace_profile_init_cpu()
667 stat->hash = NULL; in ftrace_profile_init_cpu()
697 hhd = &stat->hash[key]; in ftrace_find_profiled_func()
716 hlist_add_head_rcu(&rec->node, &stat->hash[key]); in ftrace_add_profile()
769 if (!stat->hash || !ftrace_profile_enabled) in function_profile_call()
[all …]
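
Taken together, these ftrace.c matches show the function profiler's hash idiom: stat->hash is a kcalloc()'d array of hlist_head buckets, wiped in a single memset() on reset and extended with hlist_add_head_rcu() on insert. A minimal userspace sketch of the same shape; the names (profile_stat, FUNC_HASH_SIZE) are invented, and plain masking stands in for the key derivation the kernel performs:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define FUNC_HASH_SIZE 1024u          /* power of two, like the ftrace table */

struct profile_rec {
    unsigned long ip;                 /* key: the profiled function's address */
    unsigned long counter;            /* hit count */
    struct profile_rec *next;         /* bucket chain (hlist in the kernel) */
};

struct profile_stat {
    struct profile_rec **hash;        /* array of bucket heads, as stat->hash */
};

/* Mirrors ftrace_profile_init_cpu(): one zeroed allocation for all heads. */
static int profile_init(struct profile_stat *stat)
{
    stat->hash = calloc(FUNC_HASH_SIZE, sizeof(*stat->hash));
    return stat->hash ? 0 : -1;
}

/* Mirrors ftrace_profile_reset(): drop every chain with one memset().
 * The kernel's records live in a separate page pool, so this leaks nothing. */
static void profile_reset(struct profile_stat *stat)
{
    memset(stat->hash, 0, FUNC_HASH_SIZE * sizeof(*stat->hash));
}

/* Mirrors ftrace_find_profiled_func(): derive the key, walk the chain. */
static struct profile_rec *profile_find(struct profile_stat *stat, unsigned long ip)
{
    struct profile_rec *rec = stat->hash[ip & (FUNC_HASH_SIZE - 1)];

    for (; rec; rec = rec->next)
        if (rec->ip == ip)
            return rec;
    return NULL;
}

/* Mirrors ftrace_add_profile(): push at the bucket head, as
 * hlist_add_head_rcu() does (minus the RCU publication). */
static void profile_add(struct profile_stat *stat, struct profile_rec *rec)
{
    struct profile_rec **head = &stat->hash[rec->ip & (FUNC_HASH_SIZE - 1)];

    rec->next = *head;
    *head = rec;
}

int main(void)
{
    struct profile_stat stat;
    struct profile_rec rec = { .ip = 0xc0ffee };

    if (profile_init(&stat))
        return 1;
    profile_add(&stat, &rec);
    printf("found: %d\n", profile_find(&stat, 0xc0ffee) == &rec);
    profile_reset(&stat);
    printf("after reset found: %d\n", profile_find(&stat, 0xc0ffee) != NULL);
    free(stat.hash);
    return 0;
}
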
trace_functions.c
804 struct ftrace_hash *hash, char *glob, in ftrace_trace_probe_callback() argument
841 ftrace_trace_onoff_callback(struct trace_array *tr, struct ftrace_hash *hash, in ftrace_trace_onoff_callback() argument
855 return ftrace_trace_probe_callback(tr, ops, hash, glob, cmd, in ftrace_trace_onoff_callback()
860 ftrace_stacktrace_callback(struct trace_array *tr, struct ftrace_hash *hash, in ftrace_stacktrace_callback() argument
870 return ftrace_trace_probe_callback(tr, ops, hash, glob, cmd, in ftrace_stacktrace_callback()
875 ftrace_dump_callback(struct trace_array *tr, struct ftrace_hash *hash, in ftrace_dump_callback() argument
886 return ftrace_trace_probe_callback(tr, ops, hash, glob, cmd, in ftrace_dump_callback()
891 ftrace_cpudump_callback(struct trace_array *tr, struct ftrace_hash *hash, in ftrace_cpudump_callback() argument
902 return ftrace_trace_probe_callback(tr, ops, hash, glob, cmd, in ftrace_cpudump_callback()
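
All five trace_functions.c matches are the same thin-wrapper shape: each per-command callback (onoff, stacktrace, dump, cpudump) forwards its arguments to the shared ftrace_trace_probe_callback() together with command-specific probe ops. A hedged sketch of that funnel, with every type and name invented for illustration:

#include <stdio.h>

struct probe_ops {
    const char *name;
    void (*probe)(void);            /* action run when the probe fires */
};

static void do_stacktrace(void) { puts("  -> stacktrace"); }
static void do_dump(void)       { puts("  -> dump"); }

/* Shared helper: every command callback funnels here, the way the
 * kernel callbacks funnel into ftrace_trace_probe_callback(). */
static int probe_callback(const struct probe_ops *ops, const char *glob,
                          const char *cmd)
{
    printf("register '%s' probe for glob '%s' (cmd '%s')\n",
           ops->name, glob, cmd);
    ops->probe();                   /* simulate one hit */
    return 0;
}

/* Thin per-command wrappers, like ftrace_stacktrace_callback() and
 * ftrace_dump_callback(): only the ops differ. */
static int stacktrace_callback(const char *glob, const char *cmd)
{
    static const struct probe_ops ops = { "stacktrace", do_stacktrace };
    return probe_callback(&ops, glob, cmd);
}

static int dump_callback(const char *glob, const char *cmd)
{
    static const struct probe_ops ops = { "dump", do_dump };
    return probe_callback(&ops, glob, cmd);
}

int main(void)
{
    stacktrace_callback("sched_*", "stacktrace");
    dump_callback("do_fault", "dump");
    return 0;
}
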
trace.h
818 ftrace_lookup_ip(struct ftrace_hash *hash, unsigned long ip);
820 static __always_inline bool ftrace_hash_empty(struct ftrace_hash *hash) in ftrace_hash_empty() argument
822 return !hash || !(hash->count || (hash->flags & FTRACE_HASH_FL_MOD)); in ftrace_hash_empty()
873 struct ftrace_hash *hash; in ftrace_graph_addr() local
883 hash = rcu_dereference_protected(ftrace_graph_hash, !preemptible()); in ftrace_graph_addr()
885 if (ftrace_hash_empty(hash)) { in ftrace_graph_addr()
890 if (ftrace_lookup_ip(hash, addr)) { in ftrace_graph_addr()
989 struct ftrace_hash *hash,
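
The trace.h matches define the emptiness test those lookups rely on: a filter hash counts as empty when the pointer is NULL or it has no entries and no FTRACE_HASH_FL_MOD flag, and ftrace_graph_addr() treats an empty hash as "trace everything". A toy sketch of that flow; the flat ips[] storage replaces the kernel's hlist buckets, and the RCU dereference is only noted in a comment:

#include <stdbool.h>
#include <stdio.h>

#define HASH_FL_MOD (1u << 0)     /* stands in for FTRACE_HASH_FL_MOD */

struct ip_hash {
    unsigned int count;           /* number of filtered addresses */
    unsigned int flags;
    unsigned long ips[8];         /* toy storage; the kernel uses hlist buckets */
};

/* Mirrors ftrace_hash_empty(): NULL, or no entries and no MOD flag set. */
static bool ip_hash_empty(const struct ip_hash *hash)
{
    return !hash || !(hash->count || (hash->flags & HASH_FL_MOD));
}

/* Stand-in for ftrace_lookup_ip(). */
static bool ip_hash_lookup(const struct ip_hash *hash, unsigned long addr)
{
    for (unsigned int i = 0; i < hash->count; i++)
        if (hash->ips[i] == addr)
            return true;
    return false;
}

/* Mirrors the ftrace_graph_addr() flow: an empty filter means "trace
 * everything"; otherwise the address must be present.  The kernel also
 * fetches the hash pointer via rcu_dereference_protected() first. */
static bool should_trace(const struct ip_hash *hash, unsigned long addr)
{
    return ip_hash_empty(hash) || ip_hash_lookup(hash, addr);
}

int main(void)
{
    struct ip_hash filter = { .count = 1, .ips = { 0xdeadbeef } };

    printf("no filter:   %d\n", should_trace(NULL, 0x1000));
    printf("filtered ip: %d\n", should_trace(&filter, 0x1000));
    return 0;
}
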
Kconfig
746 to be aggregated into hash tables and dumped to stdout by
trace_events.c
3329 event_enable_func(struct trace_array *tr, struct ftrace_hash *hash, in event_enable_func() argument
trace.c
8632 ftrace_trace_snapshot_callback(struct trace_array *tr, struct ftrace_hash *hash, in ftrace_trace_snapshot_callback() argument
/kernel/bpf/
hashtab.c
126 u32 hash; member
160 struct bucket *b, u32 hash, in htab_lock_bucket() argument
166 hash = hash & min_t(u32, HASHTAB_MAP_LOCK_MASK, htab->n_buckets - 1); in htab_lock_bucket()
173 if (unlikely(__this_cpu_inc_return(*(htab->map_locked[hash])) != 1)) { in htab_lock_bucket()
174 __this_cpu_dec(*(htab->map_locked[hash])); in htab_lock_bucket()
192 struct bucket *b, u32 hash, in htab_unlock_bucket() argument
197 hash = hash & min_t(u32, HASHTAB_MAP_LOCK_MASK, htab->n_buckets - 1); in htab_unlock_bucket()
202 __this_cpu_dec(*(htab->map_locked[hash])); in htab_unlock_bucket()
301 u32 hash) in prealloc_lru_pop() argument
303 struct bpf_lru_node *node = bpf_lru_pop_free(&htab->lru, hash); in prealloc_lru_pop()
[all …]
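
The hashtab.c matches sketch the bucket-locking scheme: the bucket hash is folded with min_t() into a smaller array of per-cpu counters, and an increment-and-check-for-1 on that counter rejects re-entrant attempts to take the same bucket lock. A userspace sketch with plain counters where the kernel uses per-cpu counters plus a raw spinlock; the constants here are invented:

#include <stdbool.h>
#include <stdio.h>

#define MAP_LOCK_MASK 7u          /* stands in for HASHTAB_MAP_LOCK_MASK */
#define N_BUCKETS     16u

struct toy_htab {
    unsigned int map_locked[MAP_LOCK_MASK + 1];  /* per-cpu in the kernel */
};

/* min_t(u32, MAP_LOCK_MASK, n_buckets - 1) from the kernel source. */
static unsigned int lock_index(unsigned int hash)
{
    unsigned int mask = MAP_LOCK_MASK < N_BUCKETS - 1 ? MAP_LOCK_MASK
                                                      : N_BUCKETS - 1;
    return hash & mask;
}

/* Mirrors htab_lock_bucket(): an increment-and-check-for-1 counter
 * detects re-entry on the same lock; the kernel keeps these counters
 * per-cpu and pairs them with a raw spinlock on the bucket itself. */
static bool htab_lock_bucket(struct toy_htab *htab, unsigned int hash)
{
    unsigned int i = lock_index(hash);

    if (++htab->map_locked[i] != 1) {
        htab->map_locked[i]--;    /* already held: back off, like -EBUSY */
        return false;
    }
    return true;
}

/* Mirrors htab_unlock_bucket(). */
static void htab_unlock_bucket(struct toy_htab *htab, unsigned int hash)
{
    htab->map_locked[lock_index(hash)]--;
}

int main(void)
{
    struct toy_htab htab = { { 0 } };

    printf("first acquire: %d\n", htab_lock_bucket(&htab, 42));
    printf("re-entry:      %d\n", htab_lock_bucket(&htab, 42));
    htab_unlock_bucket(&htab, 42);
    return 0;
}
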
bpf_lru_list.c
356 u32 hash) in __local_list_add_pending() argument
358 *(u32 *)((void *)node + lru->hash_offset) = hash; in __local_list_add_pending()
405 u32 hash) in bpf_percpu_lru_pop_free() argument
426 *(u32 *)((void *)node + lru->hash_offset) = hash; in bpf_percpu_lru_pop_free()
437 u32 hash) in bpf_common_lru_pop_free() argument
457 __local_list_add_pending(lru, loc_l, cpu, node, hash); in bpf_common_lru_pop_free()
492 __local_list_add_pending(lru, loc_l, cpu, node, hash); in bpf_common_lru_pop_free()
499 struct bpf_lru_node *bpf_lru_pop_free(struct bpf_lru *lru, u32 hash) in bpf_lru_pop_free() argument
502 return bpf_percpu_lru_pop_free(lru, hash); in bpf_lru_pop_free()
504 return bpf_common_lru_pop_free(lru, hash); in bpf_lru_pop_free()
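
The bpf_lru_list.c matches all write the hash through lru->hash_offset, a raw byte offset the map owner hands to the LRU layer at setup, so one LRU implementation can stamp the hash into any element layout without knowing its type. A sketch of the same trick, measuring the offset from the element start (the kernel measures from the embedded LRU node, which is the same idea):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* The owner's element layout; the LRU layer never sees this type. */
struct htab_elem {
    struct { int dummy; } lru_node;   /* embedded LRU bookkeeping */
    uint32_t hash;                    /* bucket hash, stamped by the LRU layer */
};

struct toy_lru {
    size_t hash_offset;               /* byte offset of the hash field */
};

/* Mirrors the *(u32 *)((void *)node + lru->hash_offset) = hash writes:
 * the hash goes through a raw byte offset, so one LRU implementation
 * serves any element layout. */
static void lru_record_hash(const struct toy_lru *lru, void *node, uint32_t hash)
{
    *(uint32_t *)((char *)node + lru->hash_offset) = hash;
}

int main(void)
{
    struct htab_elem elem = { { 0 }, 0 };
    /* The map tells the LRU where the hash lives, as hashtab does at setup. */
    struct toy_lru lru = { offsetof(struct htab_elem, hash) };

    lru_record_hash(&lru, &elem, 0xabcdu);
    printf("stamped hash = 0x%x\n", elem.hash);
    return 0;
}
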
stackmap.c
21 u32 hash; member
263 u32 hash, id, trace_nr, trace_len; in __bpf_get_stackid() local
275 hash = jhash2((u32 *)ips, trace_len / sizeof(u32), 0); in __bpf_get_stackid()
276 id = hash & (smap->n_buckets - 1); in __bpf_get_stackid()
279 hash_matches = bucket && bucket->hash == hash; in __bpf_get_stackid()
318 new_bucket->hash = hash; in __bpf_get_stackid()
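
The stackmap.c matches outline the whole stack-id scheme: hash the captured instruction pointers (jhash2() in the kernel), mask the hash into a power-of-two bucket array to get the id, and store the full hash in the bucket so later lookups can reject colliding ids cheaply before comparing whole traces. A toy sketch with an FNV-style mix standing in for jhash2():

#include <stdint.h>
#include <stdio.h>

#define N_BUCKETS 1024u              /* power of two, as stack maps require */

struct stack_bucket {
    uint32_t hash;                   /* full hash, kept to reject collisions */
    /* saved instruction pointers omitted */
};

/* Toy 32-bit word hash (FNV-1a style); the kernel uses jhash2() here. */
static uint32_t hash_words(const uint32_t *w, unsigned int n)
{
    uint32_t h = 2166136261u;

    while (n--) {
        h ^= *w++;
        h *= 16777619u;
    }
    return h;
}

int main(void)
{
    uint32_t ips[] = { 0x1000, 0x2040, 0x30a0 };   /* fake stack trace */
    static struct stack_bucket buckets[N_BUCKETS];

    /* Mirrors __bpf_get_stackid(): hash the trace words, mask into the
     * table for the id, and remember the full hash so a later lookup can
     * reject a colliding id before comparing the whole trace. */
    uint32_t hash = hash_words(ips, sizeof(ips) / sizeof(ips[0]));
    uint32_t id = hash & (N_BUCKETS - 1);

    buckets[id].hash = hash;
    printf("stackid=%u hash=0x%08x\n", id, hash);
    return 0;
}
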
devmap.c
94 struct hlist_head *hash; in dev_map_create_hash() local
96 hash = bpf_map_area_alloc((u64) entries * sizeof(*hash), numa_node); in dev_map_create_hash()
97 if (hash != NULL) in dev_map_create_hash()
99 INIT_HLIST_HEAD(&hash[i]); in dev_map_create_hash()
101 return hash; in dev_map_create_hash()
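
dev_map_create_hash() is the minimal table constructor: size the allocation as entries * sizeof(*hash), computed in 64-bit math so a huge entry count cannot overflow, and initialize every bucket head on success. A userspace sketch with malloc() in place of bpf_map_area_alloc() (which also takes the NUMA node):

#include <stdio.h>
#include <stdlib.h>

struct hlist_head { void *first; };

/* Mirrors dev_map_create_hash(): widen before multiplying, allocate,
 * then initialize every bucket head. */
static struct hlist_head *map_create_hash(unsigned int entries)
{
    struct hlist_head *hash;

    hash = malloc((unsigned long long)entries * sizeof(*hash));
    if (hash != NULL)
        for (unsigned int i = 0; i < entries; i++)
            hash[i].first = NULL;              /* INIT_HLIST_HEAD() */

    return hash;
}

int main(void)
{
    struct hlist_head *h = map_create_hash(256);

    printf("%s\n", h ? "allocated 256 empty buckets" : "allocation failed");
    free(h);
    return 0;
}
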
bpf_lru_list.h
75 struct bpf_lru_node *bpf_lru_pop_free(struct bpf_lru *lru, u32 hash);
/kernel/locking/
qspinlock_paravirt.h
207 #define for_each_hash_entry(he, offset, hash) \ argument
208 for (hash &= ~(PV_HE_PER_LINE - 1), he = &pv_lock_hash[hash], offset = 0; \
210 offset++, he = &pv_lock_hash[(hash + offset) & ((1 << pv_lock_hash_bits) - 1)])
214 unsigned long offset, hash = hash_ptr(lock, pv_lock_hash_bits); in pv_hash() local
218 for_each_hash_entry(he, offset, hash) { in pv_hash()
241 unsigned long offset, hash = hash_ptr(lock, pv_lock_hash_bits); in pv_unhash() local
245 for_each_hash_entry(he, offset, hash) { in pv_unhash()
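
The qspinlock_paravirt.h matches show an open-addressed table rather than chaining: for_each_hash_entry() rounds the start slot down to a cache-line boundary (hash &= ~(PV_HE_PER_LINE - 1)), then probes linearly and wraps with the table mask, so a probe run walks whole cache lines. A sketch of that probe loop; the kernel claims a free slot with cmpxchg() and sizes the table so one always exists:

#include <stdio.h>

#define HASH_BITS   6u
#define HASH_SIZE   (1u << HASH_BITS)
#define HE_PER_LINE 4u                /* hash entries per cache line */

struct hash_entry {
    void *lock;                       /* key; NULL means the slot is free */
    int   cpu;                        /* payload stored for the key */
};

static struct hash_entry table[HASH_SIZE];

/* Mirrors pv_hash() + for_each_hash_entry(): round the start slot down
 * to a cache-line boundary, then probe linearly, wrapping with the
 * table mask. */
static struct hash_entry *pv_hash_insert(void *lock, int cpu, unsigned long hash)
{
    hash &= ~(unsigned long)(HE_PER_LINE - 1);       /* line-aligned start */
    for (unsigned long off = 0; off < HASH_SIZE; off++) {
        struct hash_entry *he = &table[(hash + off) & (HASH_SIZE - 1)];

        if (!he->lock) {              /* the kernel uses cmpxchg() here */
            he->lock = lock;
            he->cpu = cpu;
            return he;
        }
    }
    return NULL;                      /* table full */
}

int main(void)
{
    int dummy;                        /* stands in for a qspinlock */

    /* The kernel derives the start hash with hash_ptr(lock, bits). */
    printf("inserted: %d\n", pv_hash_insert(&dummy, 3, 17ul) != NULL);
    return 0;
}
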
lockdep.c
509 u32 hash; member
523 return t1->hash == t2->hash && t1->nr_entries == t2->nr_entries && in traces_identical()
532 u32 hash; in save_trace() local
553 hash = jhash(trace->entries, trace->nr_entries * in save_trace()
555 trace->hash = hash; in save_trace()
556 hash_head = stack_trace_hash + (hash & (STACK_TRACE_HASH_SIZE - 1)); in save_trace()
1167 unsigned long hash = hash_long((uintptr_t)key, KEYHASH_BITS); in keyhashentry() local
1169 return lock_keys_hash + hash; in keyhashentry()
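
The lockdep.c matches implement stack-trace deduplication: save_trace() jhash()es the raw entry words, picks a bucket in stack_trace_hash, and traces_identical() compares hash, length, and bytes, in that order, so most non-matches are rejected without a memcmp(). A compact sketch with a toy byte mix in place of jhash():

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define TRACE_HASH_SIZE 64u
#define MAX_ENTRIES     16u

struct stack_trace {
    uint32_t hash;
    unsigned int nr_entries;
    unsigned long entries[MAX_ENTRIES];
    struct stack_trace *next;         /* bucket chain */
};

static struct stack_trace *trace_hash[TRACE_HASH_SIZE];

/* Mirrors traces_identical(): cheap fields first, memcmp() last. */
static int traces_identical(const struct stack_trace *a, const struct stack_trace *b)
{
    return a->hash == b->hash && a->nr_entries == b->nr_entries &&
           !memcmp(a->entries, b->entries, a->nr_entries * sizeof(a->entries[0]));
}

/* Mirrors save_trace(): hash the raw entry bytes (jhash() in the
 * kernel, a toy FNV mix here), then reuse an identical stored trace
 * if the bucket already holds one. */
static struct stack_trace *save_trace(struct stack_trace *t)
{
    const unsigned char *p = (const unsigned char *)t->entries;
    uint32_t h = 2166136261u;
    struct stack_trace **head, *e;

    for (size_t i = 0; i < t->nr_entries * sizeof(t->entries[0]); i++)
        h = (h ^ p[i]) * 16777619u;
    t->hash = h;

    head = &trace_hash[h & (TRACE_HASH_SIZE - 1)];
    for (e = *head; e; e = e->next)
        if (traces_identical(e, t))
            return e;                 /* duplicate: share the stored copy */
    t->next = *head;
    *head = t;
    return t;
}

int main(void)
{
    static struct stack_trace a = { .nr_entries = 2, .entries = { 1, 2 } };
    static struct stack_trace b = { .nr_entries = 2, .entries = { 1, 2 } };

    printf("deduplicated: %d\n", save_trace(&a) == save_trace(&b));
    return 0;
}
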
/kernel/
audit_tree.c
26 struct list_head hash; member
197 INIT_LIST_HEAD(&chunk->hash); in alloc_chunk()
238 list_add_rcu(&chunk->hash, list); in insert_hash()
248 list_for_each_entry_rcu(p, list, hash) { in audit_tree_lookup()
322 list_replace_rcu(&old->hash, &new->hash); in replace_chunk()
367 list_del_rcu(&chunk->hash); in untag_chunk()
1025 list_del_rcu(&chunk->hash); in evict_chunk()
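
The audit_tree.c matches use list_head chains as hash buckets, with the RCU list primitives doing the concurrency work: list_add_rcu() to insert, list_for_each_entry_rcu() to look up, list_replace_rcu() to swap a chunk without a window where its key is absent, and list_del_rcu() to evict. A single-threaded sketch of those operations; the RCU variants are only noted in comments:

#include <stdio.h>

#define CHUNK_BUCKETS 32u

struct chunk {
    unsigned long key;                /* the watched object's identity */
    struct chunk *next;               /* chain; list_head + RCU in the kernel */
};

static struct chunk *chunk_hash[CHUNK_BUCKETS];

static struct chunk **chunk_bucket(unsigned long key)
{
    return &chunk_hash[key & (CHUNK_BUCKETS - 1)];
}

/* insert_hash() does this with list_add_rcu(). */
static void insert_chunk(struct chunk *c)
{
    struct chunk **head = chunk_bucket(c->key);

    c->next = *head;
    *head = c;
}

/* audit_tree_lookup() walks the chain with list_for_each_entry_rcu(). */
static struct chunk *lookup_chunk(unsigned long key)
{
    for (struct chunk *c = *chunk_bucket(key); c; c = c->next)
        if (c->key == key)
            return c;
    return NULL;
}

/* replace_chunk() swaps nodes in place with list_replace_rcu(), so
 * concurrent readers never observe a chain missing the key. */
static void replace_chunk(struct chunk *old, struct chunk *new)
{
    for (struct chunk **p = chunk_bucket(old->key); *p; p = &(*p)->next)
        if (*p == old) {
            new->key = old->key;
            new->next = old->next;
            *p = new;
            return;
        }
}

int main(void)
{
    struct chunk a = { 42, NULL }, b = { 0, NULL };

    insert_chunk(&a);
    replace_chunk(&a, &b);
    printf("found replacement: %d\n", lookup_chunk(42) == &b);
    return 0;
}
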
module_signature.c
34 ms->hash != 0 || in mod_check_sig()
workqueue.c
3451 u32 hash = 0; in wqattrs_hash() local
3453 hash = jhash_1word(attrs->nice, hash); in wqattrs_hash()
3454 hash = jhash(cpumask_bits(attrs->cpumask), in wqattrs_hash()
3455 BITS_TO_LONGS(nr_cpumask_bits) * sizeof(long), hash); in wqattrs_hash()
3456 return hash; in wqattrs_hash()
3662 u32 hash = wqattrs_hash(attrs); in get_unbound_pool() local
3670 hash_for_each_possible(unbound_pool_hash, pool, hash_node, hash) { in get_unbound_pool()
3711 hash_add(unbound_pool_hash, &pool->hash_node, hash); in get_unbound_pool()
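
The workqueue.c matches pair a multi-field hash with lookup-or-create: wqattrs_hash() chains jhash_1word() and jhash() over the nice value and cpumask, and get_unbound_pool() scans that bucket with hash_for_each_possible(), reusing a pool with equal attributes or hash_add()ing a fresh one. A sketch with a one-word toy cpumask and toy mixing:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define POOL_BUCKETS 64u

struct pool_attrs {
    int nice;
    uint32_t cpumask;                 /* one toy word; a real cpumask in the kernel */
};

struct worker_pool {
    struct pool_attrs attrs;
    struct worker_pool *next;         /* bucket chain (hlist_node in the kernel) */
};

static struct worker_pool *pool_hash[POOL_BUCKETS];

/* Mirrors wqattrs_hash(): fold each attribute into a running hash.
 * The kernel chains jhash_1word() and jhash(); this is a toy mix. */
static uint32_t attrs_hash(const struct pool_attrs *a)
{
    uint32_t h = 0;

    h = (h ^ (uint32_t)a->nice) * 2654435761u;
    h = (h ^ a->cpumask) * 2654435761u;
    return h;
}

/* Mirrors get_unbound_pool(): scan the attrs-hash bucket for a pool
 * with equal attributes; create and publish one on a miss. */
static struct worker_pool *get_pool(const struct pool_attrs *a)
{
    uint32_t h = attrs_hash(a) & (POOL_BUCKETS - 1);
    struct worker_pool *p;

    for (p = pool_hash[h]; p; p = p->next)
        if (p->attrs.nice == a->nice && p->attrs.cpumask == a->cpumask)
            return p;                 /* share the existing pool */

    p = calloc(1, sizeof(*p));
    if (p) {
        p->attrs = *a;
        p->next = pool_hash[h];
        pool_hash[h] = p;             /* hash_add() in the kernel */
    }
    return p;
}

int main(void)
{
    struct pool_attrs a = { .nice = -5, .cpumask = 0xf };

    printf("pool shared: %d\n", get_pool(&a) == get_pool(&a));
    return 0;
}
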
/kernel/time/
posix-timers.c
113 static int hash(struct signal_struct *sig, unsigned int nr) in hash() function
135 struct hlist_head *head = &posix_timers_hashtable[hash(sig, id)]; in posix_timer_by_id()
149 head = &posix_timers_hashtable[hash(sig, sig->posix_timer_id)]; in posix_timer_add()
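
posix-timers.c keys its table on two values at once: hash(sig, nr) mixes the owning signal_struct pointer with the timer id, so equal ids from different processes land in different buckets, and both lookup and insert call the same function. A toy sketch of such a two-part key; the mixing constant here is invented:

#include <stdio.h>

#define TIMER_BUCKETS 512u

/* Mirrors the spirit of posix-timers' hash(): mix the owner (a
 * signal_struct pointer in the kernel) with the timer id, so equal
 * ids from different processes land in different buckets. */
static unsigned int timer_hash(const void *owner, unsigned int nr)
{
    unsigned long h = (unsigned long)owner / sizeof(void *);

    return (unsigned int)((h ^ nr) * 2654435761u) & (TIMER_BUCKETS - 1);
}

int main(void)
{
    int proc_a, proc_b;               /* stand-ins for two signal_structs */

    printf("A id 7 -> bucket %u\n", timer_hash(&proc_a, 7));
    printf("B id 7 -> bucket %u\n", timer_hash(&proc_b, 7));
    return 0;
}
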
/kernel/power/
Kconfig
252 dmesg -s 1000000 | grep 'hash matches'
/kernel/futex/
core.c
420 u32 hash = jhash2((u32 *)key, offsetof(typeof(*key), both.offset) / 4, in hash_futex() local
423 return &futex_queues[hash & (futex_hashsize - 1)]; in hash_futex()
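
hash_futex() hashes the words of the futex key up to its offset field (jhash2() seeded with that offset in the kernel source) and masks the result into futex_queues, whose size is a power of two. A sketch with an invented key layout and a toy mix in place of jhash2():

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define FUTEX_HASHSIZE 256u          /* a power of two, as in the kernel */

struct futex_key {                   /* invented layout for the sketch */
    uint64_t ptr;                    /* identity of the backing object */
    uint32_t word;
    uint32_t offset;
};

static struct futex_bucket { int waiters; } futex_queues[FUTEX_HASHSIZE];

/* Mirrors hash_futex(): hash the key words up to the offset field
 * (jhash2() seeded with that offset in the kernel; a toy mix here),
 * then mask into the bucket array. */
static struct futex_bucket *hash_futex(const struct futex_key *key)
{
    const uint32_t *w = (const uint32_t *)key;
    uint32_t h = key->offset;

    for (size_t i = 0; i < offsetof(struct futex_key, offset) / 4; i++)
        h = (h ^ w[i]) * 2654435761u;
    return &futex_queues[h & (FUTEX_HASHSIZE - 1)];
}

int main(void)
{
    struct futex_key key = { 0x7f0000100000ull, 1, 0x18 };

    printf("bucket index: %ld\n", (long)(hash_futex(&key) - futex_queues));
    return 0;
}
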
/kernel/events/
core.c
9570 u64 hash = swevent_hash(type, event_id); in __find_swevent_head() local
9572 return &hlist->heads[hash]; in __find_swevent_head()
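
The last match reduces to the same one-liner shape as the futex case: swevent_hash() folds the event type and id into one value, and the result indexes the per-cpu hlist heads. A sketch using the golden-ratio multiply that the kernel's hash_64() is built on; the table size here is invented:

#include <stdint.h>
#include <stdio.h>

#define SWEVENT_BITS 8u              /* 256 heads; size invented for the sketch */

/* Mirrors the shape of swevent_hash(): pack the event type and id into
 * one 64-bit word and take the top bits of a golden-ratio multiply,
 * the hash_64() idiom. */
static unsigned int swevent_bucket(uint64_t type, uint32_t event_id)
{
    uint64_t val = (type << 32) | event_id;

    return (unsigned int)((val * 0x61c8864680b583ebull) >> (64 - SWEVENT_BITS));
}

int main(void)
{
    printf("head index: %u\n", swevent_bucket(1, 42));
    return 0;
}
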