/tools/perf/util/
unwind.h
    26   struct perf_sample *data, int max_stack, bool best_effort);
    37   struct perf_sample *data, int max_stack,
    66   int max_stack __maybe_unused,    in unwind__get_entries()
|
unwind-libdw.c
    253  return entry(pc, ui) || !(--ui->max_stack) ?    in frame_callback()
    260  int max_stack,    in unwind__get_entries() argument
    269  .max_stack = max_stack,    in unwind__get_entries()
    279  ui = zalloc(sizeof(ui_buf) + sizeof(ui_buf.entries[0]) * max_stack);    in unwind__get_entries()
    303  if (err && ui->max_stack != max_stack)    in unwind__get_entries()
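
The hit at 253 shows the depth limit being enforced inside the per-frame callback: the unwind state carries max_stack and the walk is aborted once the counter runs out. Below is a minimal sketch of that countdown pattern only; walk_frames(), struct unwind_state and the CB_OK/CB_ABORT values are hypothetical stand-ins, whereas the real callback returns DWARF_CB_* codes to elfutils' libdwfl.

    #include <stdio.h>

    enum cb_ret { CB_OK, CB_ABORT };

    struct unwind_state {
        int max_stack;      /* frames still allowed for this unwind */
        int collected;      /* frames emitted so far */
    };

    /* Called once per unwound frame; returning CB_ABORT stops the walk early. */
    static enum cb_ret frame_cb(unsigned long pc, struct unwind_state *ui)
    {
        printf("frame %d: %#lx\n", ui->collected++, pc);

        /* Stop once the depth budget is spent, as the !(--ui->max_stack) test does. */
        return --ui->max_stack ? CB_OK : CB_ABORT;
    }

    /* Hypothetical walker: feeds synthetic PCs to the callback until it aborts. */
    static void walk_frames(struct unwind_state *ui)
    {
        for (unsigned long pc = 0x401000; ; pc += 0x10)
            if (frame_cb(pc, ui) == CB_ABORT)
                break;
    }

    int main(void)
    {
        struct unwind_state ui = { .max_stack = 4 };

        walk_frames(&ui);
        return 0;
    }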
|
unwind-libunwind.c
    84   struct perf_sample *data, int max_stack,    in unwind__get_entries() argument
    90   return ops->get_entries(cb, arg, thread, data, max_stack, best_effort);    in unwind__get_entries()
|
unwind-libunwind-local.c
    735  void *arg, int max_stack)    in get_entries() argument
    739  unw_word_t ips[max_stack];    in get_entries()
    755  if (max_stack - 1 > 0) {    in get_entries()
    766  while (!ret && (unw_step(&c) > 0) && i < max_stack) {    in get_entries()
    782  max_stack = i;    in get_entries()
    788  for (i = 0; i < max_stack && !ret; i++) {    in get_entries()
    792  j = max_stack - i - 1;    in get_entries()
    801  struct perf_sample *data, int max_stack,    in _unwind__get_entries() argument
    814  if (max_stack <= 0)    in _unwind__get_entries()
    817  return get_entries(&ui, cb, arg, max_stack);    in _unwind__get_entries()
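
The get_entries() hits show the core libunwind loop: an ips[] buffer of max_stack slots, unw_step() iterated at most max_stack times, and the collected IPs handed to the per-entry callback in a second loop. A minimal local-unwinding sketch of that depth bound follows; it is an illustration under assumptions, not perf's remote/sampled-stack unwinder, and the 32-entry cap simply stands in for max_stack. Build with something like: cc sketch.c -lunwind.

    #define UNW_LOCAL_ONLY
    #include <libunwind.h>
    #include <stdio.h>

    /* Collect up to max_stack return addresses of the calling thread. */
    static int collect_ips(unw_word_t *ips, int max_stack)
    {
        unw_context_t ctx;
        unw_cursor_t cursor;
        int i = 0;

        if (unw_getcontext(&ctx) != 0 || unw_init_local(&cursor, &ctx) != 0)
            return -1;

        /* Stop when either the stack is exhausted or the depth cap is hit. */
        while (i < max_stack && unw_step(&cursor) > 0) {
            unw_word_t ip;

            if (unw_get_reg(&cursor, UNW_REG_IP, &ip) != 0)
                break;
            ips[i++] = ip;
        }
        return i;
    }

    int main(void)
    {
        unw_word_t ips[32];             /* 32 stands in for max_stack */
        int n = collect_ips(ips, 32);

        for (int i = 0; i < n; i++)
            printf("#%-2d %#lx\n", i, (unsigned long)ips[i]);
        return n < 0;
    }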
|
unwind-libdw.h
    21   int max_stack;    member
|
evsel_config.h
    41   int max_stack;    member
|
top.h
    34   int max_stack;    member
|
callchain.h
    104  u16 max_stack;    member
    254  int max_stack);
    317  struct perf_sample *sample, int max_stack,
|
machine.h
    187  int max_stack,
    196  int max_stack)    in thread__resolve_callchain() argument
    204  max_stack,    in thread__resolve_callchain()
|
lock-contention.h
    143  int max_stack;    member
|
bpf_lock_contention.c
    35   bpf_map__set_value_size(skel->maps.stacks, con->max_stack * sizeof(u64));    in lock_contention_prepare()
    384  idx < con->max_stack - 1)    in lock_contention_get_name()
    414  size_t stack_size = con->max_stack * sizeof(*stack_trace);    in lock_contention_read()
|
kwork.h
    233  unsigned int max_stack;    member
|
machine.c
    2557  int max_stack,    in resolve_lbr_callchain_sample() argument
    2563  int chain_nr = min(max_stack, (int)chain->nr), i;    in resolve_lbr_callchain_sample()
    2676  int max_stack,    in thread__resolve_callchain_sample() argument
    2696  root_al, max_stack,    in thread__resolve_callchain_sample()
    2722  int nr = min(max_stack, (int)branch->nr);    in thread__resolve_callchain_sample()
    2791  i < chain_nr && nr_entries < max_stack; i++) {    in thread__resolve_callchain_sample()
    2924  int max_stack, bool symbols)    in thread__resolve_callchain_unwind() argument
    2940  thread, sample, max_stack, false);    in thread__resolve_callchain_unwind()
    2949  int max_stack,    in __thread__resolve_callchain() argument
    2963  max_stack, symbols);    in __thread__resolve_callchain()
    [all …]
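
The min(max_stack, ...) hits at 2563 and 2722 show how the resolvers clamp the walk: never more entries than the sample carries, never more than max_stack allows. A minimal sketch of that clamp is below; struct ip_chain is a hypothetical stand-in for perf's counted array of sampled IPs (struct ip_callchain), and the addresses are made up.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for perf's struct ip_callchain: a counted IP array. */
    struct ip_chain {
        uint64_t nr;
        uint64_t ips[];
    };

    /* Walk at most min(max_stack, chain->nr) entries, mirroring the clamp above. */
    static void resolve_chain(const struct ip_chain *chain, int max_stack)
    {
        int chain_nr = chain->nr < (uint64_t)max_stack ? (int)chain->nr : max_stack;

        for (int i = 0; i < chain_nr; i++)
            printf("ip[%d] = %#llx\n", i, (unsigned long long)chain->ips[i]);
    }

    int main(void)
    {
        const uint64_t sample_ips[] = { 0x400100, 0x400200, 0x400300, 0x400400, 0x400500 };
        size_t nr = sizeof(sample_ips) / sizeof(sample_ips[0]);
        struct ip_chain *chain = malloc(sizeof(*chain) + sizeof(sample_ips));

        if (!chain)
            return 1;
        chain->nr = nr;
        for (size_t i = 0; i < nr; i++)
            chain->ips[i] = sample_ips[i];

        resolve_chain(chain, 3);    /* cap the walk at 3 frames */
        free(chain);
        return 0;
    }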
|
callchain.c
    280   param->max_stack = size;    in parse_callchain_record()
    1120  int max_stack)    in sample__resolve_callchain() argument
    1128  parent, al, max_stack);    in sample__resolve_callchain()
    1802  struct perf_sample *sample, int max_stack,    in sample__for_each_callchain_node() argument
    1814  max_stack, symbols);    in sample__for_each_callchain_node()
|
evsel.c
    869   attr->sample_max_stack = param->max_stack;    in __evsel__config_callchain()
    956   int max_stack = 0;    in evsel__apply_config_terms() local
    996   max_stack = term->val.max_stack;    in evsel__apply_config_terms()
    1031  if ((callgraph_buf != NULL) || (dump_size > 0) || max_stack) {    in evsel__apply_config_terms()
    1034  if (max_stack) {    in evsel__apply_config_terms()
    1035  param.max_stack = max_stack;    in evsel__apply_config_terms()
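
The hit at 869 is where the tool-side max_stack reaches the kernel: it is written into perf_event_attr.sample_max_stack, which bounds the callchain depth the kernel records per sample. A minimal sketch of opening such a sampling event is below; the software-clock event, the period and the value 16 are illustrative assumptions, not what perf itself configures.

    #include <linux/perf_event.h>
    #include <sys/syscall.h>
    #include <string.h>
    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        struct perf_event_attr attr;
        int fd;

        memset(&attr, 0, sizeof(attr));
        attr.size = sizeof(attr);
        attr.type = PERF_TYPE_SOFTWARE;
        attr.config = PERF_COUNT_SW_CPU_CLOCK;
        attr.sample_period = 100000;
        attr.sample_type = PERF_SAMPLE_IP | PERF_SAMPLE_CALLCHAIN;
        attr.sample_max_stack = 16;    /* kernel-side cap on recorded frames */
        attr.exclude_kernel = 1;

        /* Sample the calling thread on any CPU; no group leader. */
        fd = syscall(SYS_perf_event_open, &attr, 0, -1, -1, 0);
        if (fd < 0) {
            perror("perf_event_open");
            return 1;
        }
        printf("sampling event opened, sample_max_stack=%u\n",
               (unsigned int)attr.sample_max_stack);
        close(fd);
        return 0;
    }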
|
parse-events.c
    1218  ADD_CONFIG_TERM_VAL(MAX_STACK, max_stack,    in get_config_terms()
|
/tools/perf/
builtin-report.c
    102   int max_stack;    member
    334   ret = hist_entry_iter__add(&iter, &al, rep->max_stack, rep);    in process_sample_event()
    1293  .max_stack = PERF_MAX_STACK_DEPTH,    in cmd_report()
    1357  OPT_INTEGER(0, "max-stack", &report.max_stack,    in cmd_report()
    1544  (int)itrace_synth_opts.callchain_sz > report.max_stack)    in cmd_report()
    1545  report.max_stack = itrace_synth_opts.callchain_sz;    in cmd_report()
|
builtin-top.c
    845   if (hist_entry_iter__add(&iter, &al, top->max_stack, top) < 0)    in perf_event__process_sample()
    1458  .max_stack = sysctl__max_stack(),    in cmd_top()
    1534  OPT_INTEGER(0, "max-stack", &top.max_stack,    in cmd_top()
|
builtin-lock.c
    1038  static u64 *get_callstack(struct perf_sample *sample, int max_stack)    in get_callstack() argument
    1044  callstack = calloc(max_stack, sizeof(*callstack));    in get_callstack()
    1048  for (i = 0, c = 0; i < sample->callchain->nr && c < max_stack; i++) {    in get_callstack()
    2067  .max_stack = max_stack_depth,    in __cmd_contention()
|
builtin-trace.c
    186   unsigned int max_stack;    member
    2738  int max_stack = evsel->core.attr.sample_max_stack ?    local
    2740  trace->max_stack;
    2747  err = thread__resolve_callchain(al.thread, cursor, evsel, sample, NULL, NULL, max_stack);
    4404  evsel->core.attr.sample_max_stack = trace->max_stack;
    5115  .max_stack = UINT_MAX,
    5179  OPT_UINTEGER(0, "max-stack", &trace.max_stack,
    5334  if (trace.max_stack == UINT_MAX) {
    5335  trace.max_stack = input_name ? PERF_MAX_STACK_DEPTH : sysctl__max_stack();
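
The hits at 5115, 5334 and 5335 show the default selection: max_stack starts as a UINT_MAX sentinel, and if --max-stack was not given it falls back to PERF_MAX_STACK_DEPTH when reading a file or to the kernel.perf_event_max_stack sysctl when tracing live. A minimal sketch of that fallback is below; read_sysctl_max_stack() is a stand-in for perf's sysctl__max_stack(), and treating argv[1] as the input file mimics input_name purely for illustration.

    #include <linux/perf_event.h>    /* PERF_MAX_STACK_DEPTH */
    #include <stdio.h>

    /* Stand-in for sysctl__max_stack(): read kernel.perf_event_max_stack. */
    static unsigned int read_sysctl_max_stack(void)
    {
        unsigned int val = PERF_MAX_STACK_DEPTH;    /* fallback if the read fails */
        FILE *f = fopen("/proc/sys/kernel/perf_event_max_stack", "r");

        if (f) {
            if (fscanf(f, "%u", &val) != 1)
                val = PERF_MAX_STACK_DEPTH;
            fclose(f);
        }
        return val;
    }

    int main(int argc, char *argv[])
    {
        const char *input_name = argc > 1 ? argv[1] : NULL;    /* pretend file mode */
        unsigned int max_stack = input_name ? PERF_MAX_STACK_DEPTH
                                            : read_sysctl_max_stack();

        printf("max_stack defaults to %u\n", max_stack);
        return 0;
    }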
|
builtin-kwork.c
    711   NULL, NULL, kwork->max_stack + 2) != 0) {    in timehist_save_callchain()
    2338  .max_stack = 5,    in cmd_kwork()
    2404  OPT_UINTEGER(0, "max-stack", &kwork.max_stack,    in cmd_kwork()
|
builtin-sched.c
    226   unsigned int max_stack;    member
    2343  NULL, NULL, sched->max_stack + 2) != 0) {    in save_task_callchain()
    3806  .max_stack = 5,    in cmd_sched()
    3854  OPT_UINTEGER(0, "max-stack", &sched.max_stack,    in cmd_sched()
|