Searched refs:depth (Results 1 – 25 of 25) sorted by relevance

/kernel/trace/
trace_functions_graph.c 24 int depth; member
195 .depth = 0, in __trace_graph_function()
199 .depth = 0, in __trace_graph_function()
651 cpu_data->depth = call->depth - 1; in print_graph_entry_leaf()
654 if (call->depth < FTRACE_RETFUNC_DEPTH && in print_graph_entry_leaf()
655 !WARN_ON_ONCE(call->depth < 0)) in print_graph_entry_leaf()
656 cpu_data->enter_funcs[call->depth] = 0; in print_graph_entry_leaf()
663 for (i = 0; i < call->depth * TRACE_GRAPH_INDENT; i++) in print_graph_entry_leaf()
689 cpu_data->depth = call->depth; in print_graph_entry_nested()
692 if (call->depth < FTRACE_RETFUNC_DEPTH && in print_graph_entry_nested()
[all …]
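
The trace_functions_graph.c hits show the two roles depth plays in the graph printer: it indexes the per-CPU enter_funcs[] bookkeeping (bounded by FTRACE_RETFUNC_DEPTH) and it scales the printed indentation by TRACE_GRAPH_INDENT. A minimal standalone sketch of the indentation rule follows; the constant values and the record layout are assumptions for illustration, not the kernel's definitions.

#include <stdio.h>

/* Assumed values for illustration; the kernel defines its own. */
#define TRACE_GRAPH_INDENT   2
#define FTRACE_RETFUNC_DEPTH 50

struct graph_ent {              /* stand-in for the traced entry record */
        const char *func;
        int depth;              /* nesting level at the time of the call */
};

static void print_graph_entry(const struct graph_ent *call)
{
        int i;

        /* Range-check depth before using it as an index or an
         * indentation factor, mirroring the checks in the hits above. */
        if (call->depth < 0 || call->depth >= FTRACE_RETFUNC_DEPTH)
                return;

        for (i = 0; i < call->depth * TRACE_GRAPH_INDENT; i++)
                putchar(' ');
        printf("%s();\n", call->func);
}

int main(void)
{
        struct graph_ent calls[] = {
                { "vfs_read", 0 },
                { "rw_verify_area", 1 },
                { "security_file_permission", 2 },
        };
        for (unsigned int i = 0; i < sizeof(calls) / sizeof(calls[0]); i++)
                print_graph_entry(&calls[i]);
        return 0;
}

Deeper calls simply get more leading spaces, which is all the graph tracer needs to make nesting visible in the text output.
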
trace_entries.h 82 __field_packed( int, graph_ent, depth )
85 F_printk("--> %ps (%d)", (void *)__entry->func, __entry->depth)
96 __field_packed( int, ret, depth )
103 (void *)__entry->func, __entry->depth,
105 __entry->depth)
fgraph.c 128 trace.depth = ++current->curr_ret_depth; in function_graph_enter()
194 trace->depth = current->curr_ret_depth--; in ftrace_pop_return_trace()
trace.h 897 trace_recursion_set_depth(trace->depth); in ftrace_graph_addr()
919 trace->depth == trace_recursion_depth()) in ftrace_graph_addr_finish()
966 (trace->depth < 0) || in ftrace_graph_ignore_func()
967 (fgraph_max_depth && trace->depth >= fgraph_max_depth); in ftrace_graph_ignore_func()
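
The fgraph.c and trace.h hits together show how the per-task depth counter is maintained and consumed: function_graph_enter() stamps the entry event with ++current->curr_ret_depth, ftrace_pop_return_trace() stamps the return event with curr_ret_depth--, and ftrace_graph_ignore_func() drops events whose depth is negative or at/above fgraph_max_depth. A standalone sketch of that lifecycle; the task structure, the initial value of -1, and the max-depth setting are simplified assumptions.

#include <stdio.h>
#include <stdbool.h>

struct task { int curr_ret_depth; };

static struct task current_task = { .curr_ret_depth = -1 }; /* assumed initial value */
static int fgraph_max_depth = 3;                             /* assumed filter setting */

/* Entry: pre-increment, so the outermost call is recorded at depth 0. */
static int graph_enter(void)  { return ++current_task.curr_ret_depth; }

/* Return: record the current level, then pop it. */
static int graph_return(void) { return current_task.curr_ret_depth--; }

/* Mirror of the ignore check: negative or too-deep events are dropped. */
static bool ignore(int depth)
{
        return depth < 0 || (fgraph_max_depth && depth >= fgraph_max_depth);
}

int main(void)
{
        for (int i = 0; i < 5; i++) {
                int depth = graph_enter();
                printf("enter depth %d -> %s\n", depth,
                       ignore(depth) ? "dropped" : "traced");
        }
        while (current_task.curr_ret_depth >= 0)
                printf("return depth %d\n", graph_return());
        return 0;
}
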
blktrace.c 957 unsigned int depth, bool explicit) in blk_add_trace_unplug() argument
964 __be64 rpdu = cpu_to_be64(depth); in blk_add_trace_unplug()
Kconfig 258 kernel executes, and keeping a maximum stack depth value and
/kernel/locking/
lockdep.c 750 int i, depth = READ_ONCE(p->lockdep_depth); in lockdep_print_held_locks() local
752 if (!depth) in lockdep_print_held_locks()
755 printk("%d lock%s held by %s/%d:\n", depth, in lockdep_print_held_locks()
756 depth > 1 ? "s" : "", p->comm, task_pid_nr(p)); in lockdep_print_held_locks()
763 for (i = 0; i < depth; i++) { in lockdep_print_held_locks()
1006 for (i = chain->base; i < chain->base + chain->depth; i++) in check_lock_chain_key()
1496 int depth = 0; in get_lock_depth() local
1501 depth++; in get_lock_depth()
1503 return depth; in get_lock_depth()
1821 print_circular_bug_entry(struct lock_list *target, int depth) in print_circular_bug_entry() argument
[all …]
rtmutex.c 614 int ret = 0, depth = 0; in rt_mutex_adjust_prio_chain() local
631 if (++depth > max_lock_depth) { in rt_mutex_adjust_prio_chain()
lockdep_proc.c 171 for (i = 0; i < chain->depth; i++) { in lc_show()
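
In the locking hits, depth is a chain length: lockdep_print_held_locks() reports how many locks a task holds, get_lock_depth() counts parent links from a lock_list entry, and rt_mutex_adjust_prio_chain() aborts once ++depth exceeds max_lock_depth. A self-contained sketch of that bounded parent walk; the node type and the cap of 1024 are assumptions for illustration (the kernel's max_lock_depth is a tunable).

#include <stdio.h>
#include <stddef.h>

/* Assumed cap for illustration; in the kernel max_lock_depth is a sysctl. */
static int max_lock_depth = 1024;

struct lock_node {
        const char *name;
        struct lock_node *parent;       /* link toward the root of the chain */
};

/* Count how many parent links separate a node from the root. */
static int get_lock_depth(const struct lock_node *child)
{
        int depth = 0;

        while (child->parent) {
                child = child->parent;
                depth++;
        }
        return depth;
}

/* Walk toward the root, bailing out if the chain is suspiciously long. */
static int walk_chain(const struct lock_node *node)
{
        int depth = 0;

        for (; node; node = node->parent) {
                if (++depth > max_lock_depth) {
                        fprintf(stderr, "chain too long, giving up\n");
                        return -1;
                }
        }
        return 0;
}

int main(void)
{
        struct lock_node a = { "a", NULL };
        struct lock_node b = { "b", &a };
        struct lock_node c = { "c", &b };

        printf("depth of c: %d\n", get_lock_depth(&c));  /* prints 2 */
        return walk_chain(&c) ? 1 : 0;
}

The cap matters because the chain walk happens with interrupts constrained; an unbounded walk over a corrupted or pathological chain would otherwise never terminate.
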
/kernel/
umh.c 282 void __usermodehelper_set_disable_depth(enum umh_disable_depth depth) in __usermodehelper_set_disable_depth() argument
285 usermodehelper_disabled = depth; in __usermodehelper_set_disable_depth()
296 int __usermodehelper_disable(enum umh_disable_depth depth) in __usermodehelper_disable() argument
300 if (!depth) in __usermodehelper_disable()
304 usermodehelper_disabled = depth; in __usermodehelper_disable()
stackleak.c 57 const unsigned int depth = STACKLEAK_SEARCH_DEPTH / sizeof(unsigned long); in stackleak_erase() local
67 while (kstack_ptr > boundary && poison_count <= depth) { in stackleak_erase()
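
In stackleak.c, depth is a search window: STACKLEAK_SEARCH_DEPTH bytes expressed in words, and the erase loop keeps scanning toward the stack boundary until it has seen a long enough run of consecutive poison words. A user-space sketch of the same "find a run of sentinel words" scan; the poison value, the window of 16 words, and the array layout are assumptions for illustration.

#include <stdio.h>
#include <stddef.h>

#define POISON 0xCCCCCCCCul     /* assumed sentinel; the kernel has its own poison value */

/* Walk from index 'start' down toward 'boundary', counting consecutive poison
 * words and stopping once more than 'depth' of them have been seen in a row.
 * Returns the index where the scan stopped. */
static size_t scan_for_poison_run(const unsigned long *stack,
                                  size_t start, size_t boundary, size_t depth)
{
        size_t poison_count = 0;
        size_t i = start;

        while (i > boundary && poison_count <= depth) {
                if (stack[i] == POISON)
                        poison_count++;
                else
                        poison_count = 0;
                i--;
        }
        return i;
}

int main(void)
{
        unsigned long stack[64];
        size_t i;
        /* 16 words, i.e. a 128-byte search window divided by the word size
         * on a 64-bit machine (assumed here). */
        const size_t depth = 16;

        for (i = 0; i < 64; i++)
                stack[i] = POISON;
        for (i = 40; i < 64; i++)
                stack[i] = i;           /* pretend the top of the stack was used */

        printf("scan stopped at word %zu\n",
               scan_for_poison_run(stack, 63, 0, depth));
        return 0;
}
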
resource.c 102 int depth; in r_show() local
104 for (depth = 0, p = r; depth < MAX_IORES_LEVEL; depth++, p = p->parent) in r_show()
116 depth * 2, "", in r_show()
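
resource.c uses depth purely for display: r_show() walks ->parent for at most MAX_IORES_LEVEL levels and indents each /proc/iomem-style line by depth * 2 spaces. A standalone sketch of that pattern; the node type is an assumption, and 5 is used here as the level cap in place of the kernel's MAX_IORES_LEVEL.

#include <stdio.h>
#include <stddef.h>

#define MAX_IORES_LEVEL 5       /* assumed cap, standing in for the kernel constant */

struct res {
        const char *name;
        struct res *parent;
};

/* Indent each entry by two spaces per ancestor, capped at MAX_IORES_LEVEL. */
static void r_show(const struct res *r)
{
        const struct res *p;
        int depth;

        for (depth = 0, p = r; depth < MAX_IORES_LEVEL; depth++, p = p->parent)
                if (p->parent == NULL)
                        break;

        printf("%*s%s\n", depth * 2, "", r->name);
}

int main(void)
{
        struct res root = { "PCI Bus 0000:00", NULL };
        struct res dev  = { "0000:00:02.0",    &root };
        struct res bar  = { "bar 0",           &dev };

        r_show(&root);
        r_show(&dev);
        r_show(&bar);
        return 0;
}
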
/kernel/irq/
pm.c 21 desc->depth++; in irq_pm_check_wakeup()
176 desc->depth++; in resume_irq()
debug.h 19 irq, desc, desc->depth, desc->irq_count, desc->irqs_unhandled); in print_irq_desc()
chip.c 262 desc->depth = 0; in irq_startup()
311 desc->depth = 1; in irq_shutdown()
1031 desc->depth = 1; in __irq_do_set_handler()
1113 WARN_ON_ONCE(!desc->depth && (set & _IRQ_NOAUTOEN)); in irq_modify_status()
manage.c 692 if (!desc->depth++) in __disable_irq()
787 switch (desc->depth) { in __enable_irq()
809 desc->depth--; in __enable_irq()
1788 desc->depth = 1; in __setup_irq()
2092 if (WARN_ON(desc->depth == 0)) in free_nmi()
spurious.c 425 desc->depth++; in note_interrupt()
debugfs.c 174 seq_printf(m, "ddepth: %u\n", desc->depth); in irq_debug_show()
irqdesc.c 119 desc->depth = 1; in desc_set_defaults()
557 .depth = 1,
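
Across the irq hits, desc->depth is a nested-disable count: irqdesc.c starts descriptors at depth 1 (disabled), chip.c drops it to 0 on startup and back to 1 on shutdown, __disable_irq() only masks the line when the count leaves 0, and __enable_irq() only unmasks when it returns to 0, warning on an unbalanced enable. A minimal sketch of that reference-count discipline; the types, helper names, and messages are illustrative, not the kernel's.

#include <stdio.h>

struct irq_desc {
        unsigned int depth;     /* nested disable count; 0 means enabled */
        int irq;
};

static void hw_mask(struct irq_desc *d)   { printf("irq %d masked\n", d->irq); }
static void hw_unmask(struct irq_desc *d) { printf("irq %d unmasked\n", d->irq); }

/* Only touch the hardware on the first disable. */
static void disable_irq(struct irq_desc *desc)
{
        if (!desc->depth++)
                hw_mask(desc);
}

/* Only touch the hardware when the last disable is undone. */
static void enable_irq(struct irq_desc *desc)
{
        switch (desc->depth) {
        case 0:
                fprintf(stderr, "unbalanced enable for irq %d\n", desc->irq);
                return;
        case 1:
                hw_unmask(desc);
                /* fall through to drop the count */
        default:
                desc->depth--;
        }
}

int main(void)
{
        struct irq_desc desc = { .depth = 1, .irq = 42 };  /* starts disabled */

        disable_irq(&desc);     /* depth 1 -> 2, hardware untouched */
        enable_irq(&desc);      /* depth 2 -> 1, still masked */
        enable_irq(&desc);      /* depth 1 -> 0, unmasks */
        enable_irq(&desc);      /* unbalanced, warns */
        return 0;
}

The count is what lets pm.c, spurious.c, and NMI teardown all pile extra disables onto the same line without any of them accidentally re-enabling it early.
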
/kernel/rcu/
tree_exp.h 735 int depth = rcu_preempt_depth(); in rcu_exp_handler() local
746 if (!depth) { in rcu_exp_handler()
770 if (depth > 0) { in rcu_exp_handler()
/kernel/bpf/
btf.c 357 u8 depth; member
968 if ((indent - show->state.depth) >= indents) in __btf_show_indent()
969 return indent - show->state.depth; in __btf_show_indent()
985 if (show->state.depth == 0) in btf_show_delim()
1014 show->state.depth == 0) { \
1020 if (show->state.depth > show->state.depth_to_show) \
1021 show->state.depth_to_show = show->state.depth; \
1031 if (show->state.depth > show->state.depth_to_show) \
1032 show->state.depth_to_show = show->state.depth; \
1111 if (show->state.depth == 0) { in btf_show_obj_safe()
[all …]
verifier.c 3859 int depth = 0, frame = 0, idx = 0, i = 0, subprog_end; in check_max_stack_depth() local
3887 if (idx && subprog[idx].has_tail_call && depth >= 256) { in check_max_stack_depth()
3890 depth); in check_max_stack_depth()
3896 depth += round_up(max_t(u32, subprog[idx].stack_depth, 1), 32); in check_max_stack_depth()
3897 if (depth > MAX_BPF_STACK) { in check_max_stack_depth()
3899 frame + 1, depth); in check_max_stack_depth()
3960 depth -= round_up(max_t(u32, subprog[idx].stack_depth, 1), 32); in check_max_stack_depth()
12848 int i, depth; in fixup_call_args() local
12883 depth = get_callee_stack_depth(env, insn, i); in fixup_call_args()
12884 if (depth < 0) in fixup_call_args()
[all …]
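
The verifier.c hits show depth as accumulated stack usage along a call chain: each subprogram frame contributes round_up(max(stack_depth, 1), 32) bytes, tail calls are rejected once 256 bytes are already in use, and the whole chain must stay within MAX_BPF_STACK. A standalone sketch of the accumulation against MAX_BPF_STACK; the 512-byte limit and the helper names are assumptions here.

#include <stdio.h>

#define MAX_BPF_STACK 512       /* assumed limit for illustration */

static unsigned int round_up_32(unsigned int v)
{
        return (v + 31) & ~31u;
}

static unsigned int max_u32(unsigned int a, unsigned int b)
{
        return a > b ? a : b;
}

/* Sum the (rounded) frame sizes of a call chain and reject it if the total
 * would exceed the stack budget. */
static int check_max_stack_depth(const unsigned int *frame_sizes, int nframes)
{
        unsigned int depth = 0;
        int i;

        for (i = 0; i < nframes; i++) {
                depth += round_up_32(max_u32(frame_sizes[i], 1));
                if (depth > MAX_BPF_STACK) {
                        fprintf(stderr,
                                "combined stack size of %d calls is %u, too large\n",
                                i + 1, depth);
                        return -1;
                }
        }
        printf("call chain fits: %u bytes\n", depth);
        return 0;
}

int main(void)
{
        unsigned int chain[] = { 120, 200, 64, 96 };  /* per-subprog stack sizes */

        return check_max_stack_depth(chain, 4) ? 1 : 0;
}
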
/kernel/events/
uprobes.c 1773 n_utask->depth++; in dup_utask()
1855 utask->depth--; in cleanup_return_instances()
1874 if (utask->depth >= MAX_URETPROBE_DEPTH) { in prepare_uretprobe()
1917 utask->depth++; in prepare_uretprobe()
2159 utask->depth--; in handle_trampoline()
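
In uprobes.c, depth counts return-probe instances stacked on a task: dup_utask() copies the pending count to a forked child, prepare_uretprobe() refuses to chain another instance once MAX_URETPROBE_DEPTH is reached, and handle_trampoline()/cleanup_return_instances() decrement it as instances are consumed. A minimal sketch of that bounded push/pop; the cap of 64 and the structure names are illustrative assumptions.

#include <stdio.h>

#define MAX_URETPROBE_DEPTH 64  /* assumed cap for illustration */

struct utask {
        unsigned int depth;     /* return instances currently pending */
};

/* Queue another return-probe instance, unless too many are already pending. */
static int prepare_uretprobe(struct utask *utask)
{
        if (utask->depth >= MAX_URETPROBE_DEPTH) {
                fprintf(stderr, "uretprobe nesting limit reached, skipping\n");
                return -1;
        }
        utask->depth++;
        return 0;
}

/* A return instance was consumed (or cleaned up). */
static void handle_trampoline(struct utask *utask)
{
        if (utask->depth)
                utask->depth--;
}

int main(void)
{
        struct utask task = { 0 };
        int i, queued = 0;

        for (i = 0; i < 70; i++)
                if (prepare_uretprobe(&task) == 0)
                        queued++;
        printf("queued %d of 70 requested return probes\n", queued);  /* 64 */

        while (task.depth)
                handle_trampoline(&task);
        return 0;
}
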
/kernel/cgroup/
cgroup.c 3592 int depth = READ_ONCE(cgrp->max_depth); in cgroup_max_depth_show() local
3594 if (depth == INT_MAX) in cgroup_max_depth_show()
3597 seq_printf(seq, "%d\n", depth); in cgroup_max_depth_show()
3607 int depth; in cgroup_max_depth_write() local
3611 depth = INT_MAX; in cgroup_max_depth_write()
3613 ret = kstrtoint(buf, 0, &depth); in cgroup_max_depth_write()
3618 if (depth < 0) in cgroup_max_depth_write()
3625 cgrp->max_depth = depth; in cgroup_max_depth_write()
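
The cgroup.c hits implement the cgroup.max.depth interface: cgroup_max_depth_show() prints "max" when the stored value is INT_MAX, and cgroup_max_depth_write() accepts either "max" or a non-negative integer. A user-space sketch of the same parse-and-print convention; the struct, helper names, and exact error codes are illustrative.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>
#include <errno.h>

struct cgroup { int max_depth; };

/* "max" stands for no limit, i.e. INT_MAX internally. */
static void max_depth_show(const struct cgroup *cgrp)
{
        if (cgrp->max_depth == INT_MAX)
                puts("max");
        else
                printf("%d\n", cgrp->max_depth);
}

/* Accept "max" or a non-negative integer, mirroring the write handler. */
static int max_depth_write(struct cgroup *cgrp, const char *buf)
{
        int depth;

        if (!strcmp(buf, "max")) {
                depth = INT_MAX;
        } else {
                char *end;
                long val;

                errno = 0;
                val = strtol(buf, &end, 0);
                if (errno || *end != '\0' || val > INT_MAX || val < INT_MIN)
                        return -EINVAL;
                depth = (int)val;
        }
        if (depth < 0)
                return -ERANGE;

        cgrp->max_depth = depth;
        return 0;
}

int main(void)
{
        struct cgroup cgrp = { .max_depth = INT_MAX };

        max_depth_show(&cgrp);                  /* prints "max" */
        max_depth_write(&cgrp, "3");
        max_depth_show(&cgrp);                  /* prints 3 */
        return max_depth_write(&cgrp, "-1") ? 0 : 1;    /* negative is rejected */
}
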
/kernel/sched/
fair.c 421 se_depth = (*se)->depth; in find_matching_se()
422 pse_depth = (*pse)->depth; in find_matching_se()
7691 int se_depth = se->depth; in pick_next_task_fair()
7692 int pse_depth = pse->depth; in pick_next_task_fair()
11599 int sea_depth = sea->depth; in cfs_prio_less()
11600 int seb_depth = seb->depth; in cfs_prio_less()
11799 se->depth = se->parent ? se->parent->depth + 1 : 0; in attach_entity_cfs_rq()
11905 se->depth = se->parent ? se->parent->depth + 1 : 0; in task_set_group_fair()
12053 se->depth = 0; in init_tg_cfs_entry()
12056 se->depth = parent->depth + 1; in init_tg_cfs_entry()
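
In fair.c, depth caches a scheduling entity's distance from the root of the task-group hierarchy (se->depth = se->parent ? se->parent->depth + 1 : 0, set in attach_entity_cfs_rq(), task_set_group_fair(), and init_tg_cfs_entry()), which lets find_matching_se(), pick_next_task_fair(), and cfs_prio_less() walk the deeper of two entities upward until both sit under a common parent before comparing them. A standalone sketch of that depth-equalizing walk; the entity type is simplified for illustration.

#include <stdio.h>
#include <stddef.h>

struct sched_entity {
        const char *name;
        int depth;
        struct sched_entity *parent;
};

static void set_depth(struct sched_entity *se)
{
        se->depth = se->parent ? se->parent->depth + 1 : 0;
}

/* Walk the deeper entity up until both are at the same level, then walk both
 * up together until they share a parent (or both reach the root). */
static void find_matching_se(struct sched_entity **se, struct sched_entity **pse)
{
        while ((*se)->depth > (*pse)->depth)
                *se = (*se)->parent;
        while ((*pse)->depth > (*se)->depth)
                *pse = (*pse)->parent;
        while ((*se)->parent != (*pse)->parent) {
                *se = (*se)->parent;
                *pse = (*pse)->parent;
        }
}

int main(void)
{
        struct sched_entity root  = { "root",  0, NULL };
        struct sched_entity grp_a = { "grp_a", 0, &root };
        struct sched_entity grp_b = { "grp_b", 0, &root };
        struct sched_entity t1    = { "t1",    0, &grp_a };
        struct sched_entity t2    = { "t2",    0, &grp_b };
        struct sched_entity *se = &t1, *pse = &t2;

        set_depth(&root); set_depth(&grp_a); set_depth(&grp_b);
        set_depth(&t1);   set_depth(&t2);

        find_matching_se(&se, &pse);
        printf("comparable pair: %s vs %s\n", se->name, pse->name);  /* grp_a vs grp_b */
        return 0;
}

Caching depth in each entity turns the ancestor search into two bounded pointer walks instead of repeated tree measurements on every comparison.
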