• Home
  • Raw
  • Download

Lines matching references to the identifier: pt

146 	struct intel_pt *pt;  member
185 static void intel_pt_dump(struct intel_pt *pt __maybe_unused, in intel_pt_dump()
225 static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf, in intel_pt_dump_event() argument
229 intel_pt_dump(pt, buf, len); in intel_pt_dump_event()
245 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_dump_sample() local
249 intel_pt_dump(pt, sample->aux_sample.data, sample->aux_sample.size); in intel_pt_dump_sample()
252 static bool intel_pt_log_events(struct intel_pt *pt, u64 tm) in intel_pt_log_events() argument
254 struct perf_time_interval *range = pt->synth_opts.ptime_range; in intel_pt_log_events()
255 int n = pt->synth_opts.range_num; in intel_pt_log_events()
257 if (pt->synth_opts.log_plus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS) in intel_pt_log_events()
260 if (pt->synth_opts.log_minus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS) in intel_pt_log_events()
270 static int intel_pt_do_fix_overlap(struct intel_pt *pt, struct auxtrace_buffer *a, in intel_pt_do_fix_overlap() argument
277 pt->have_tsc, &consecutive); in intel_pt_do_fix_overlap()
295 int fd = perf_data__fd(ptq->pt->session->data); in intel_pt_get_buffer()
302 might_overlap = ptq->pt->snapshot_mode || ptq->pt->sampling_mode; in intel_pt_get_buffer()
304 intel_pt_do_fix_overlap(ptq->pt, old_buffer, buffer)) in intel_pt_get_buffer()
346 queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; in intel_pt_lookahead()
396 queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; in intel_pt_get_trace()
552 static inline u8 intel_pt_cpumode(struct intel_pt *pt, uint64_t ip) in intel_pt_cpumode() argument
554 return ip >= pt->kernel_start ? in intel_pt_cpumode()
565 struct machine *machine = ptq->pt->machine; in intel_pt_walk_next_insn()
581 cpumode = intel_pt_cpumode(ptq->pt, *ip); in intel_pt_walk_next_insn()
587 thread = ptq->pt->unknown_thread; in intel_pt_walk_next_insn()
691 static bool intel_pt_match_pgd_ip(struct intel_pt *pt, uint64_t ip, in intel_pt_match_pgd_ip() argument
699 list_for_each_entry(filt, &pt->filts.head, list) { in intel_pt_match_pgd_ip()
737 if (ip >= ptq->pt->kernel_start) in __intel_pt_pgd_ip()
738 return intel_pt_match_pgd_ip(ptq->pt, ip, ip, NULL); in __intel_pt_pgd_ip()
751 return intel_pt_match_pgd_ip(ptq->pt, ip, offset, in __intel_pt_pgd_ip()
760 static bool intel_pt_get_config(struct intel_pt *pt, in intel_pt_get_config() argument
763 if (attr->type == pt->pmu_type) { in intel_pt_get_config()
772 static bool intel_pt_exclude_kernel(struct intel_pt *pt) in intel_pt_exclude_kernel() argument
776 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_exclude_kernel()
777 if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && in intel_pt_exclude_kernel()
784 static bool intel_pt_return_compression(struct intel_pt *pt) in intel_pt_return_compression() argument
789 if (!pt->noretcomp_bit) in intel_pt_return_compression()
792 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_return_compression()
793 if (intel_pt_get_config(pt, &evsel->core.attr, &config) && in intel_pt_return_compression()
794 (config & pt->noretcomp_bit)) in intel_pt_return_compression()
800 static bool intel_pt_branch_enable(struct intel_pt *pt) in intel_pt_branch_enable() argument
805 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_branch_enable()
806 if (intel_pt_get_config(pt, &evsel->core.attr, &config) && in intel_pt_branch_enable()
813 static unsigned int intel_pt_mtc_period(struct intel_pt *pt) in intel_pt_mtc_period() argument
819 if (!pt->mtc_freq_bits) in intel_pt_mtc_period()
822 for (shift = 0, config = pt->mtc_freq_bits; !(config & 1); shift++) in intel_pt_mtc_period()
825 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_mtc_period()
826 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) in intel_pt_mtc_period()
827 return (config & pt->mtc_freq_bits) >> shift; in intel_pt_mtc_period()
832 static bool intel_pt_timeless_decoding(struct intel_pt *pt) in intel_pt_timeless_decoding() argument
838 if (!pt->tsc_bit || !pt->cap_user_time_zero) in intel_pt_timeless_decoding()
841 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_timeless_decoding()
844 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { in intel_pt_timeless_decoding()
845 if (config & pt->tsc_bit) in intel_pt_timeless_decoding()
854 static bool intel_pt_tracing_kernel(struct intel_pt *pt) in intel_pt_tracing_kernel() argument
858 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_tracing_kernel()
859 if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && in intel_pt_tracing_kernel()
866 static bool intel_pt_have_tsc(struct intel_pt *pt) in intel_pt_have_tsc() argument
872 if (!pt->tsc_bit) in intel_pt_have_tsc()
875 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_have_tsc()
876 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { in intel_pt_have_tsc()
877 if (config & pt->tsc_bit) in intel_pt_have_tsc()
886 static bool intel_pt_sampling_mode(struct intel_pt *pt) in intel_pt_sampling_mode() argument
890 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_sampling_mode()
898 static u64 intel_pt_ctl(struct intel_pt *pt) in intel_pt_ctl() argument
903 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_ctl()
904 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) in intel_pt_ctl()
910 static u64 intel_pt_ns_to_ticks(const struct intel_pt *pt, u64 ns) in intel_pt_ns_to_ticks() argument
914 quot = ns / pt->tc.time_mult; in intel_pt_ns_to_ticks()
915 rem = ns % pt->tc.time_mult; in intel_pt_ns_to_ticks()
916 return (quot << pt->tc.time_shift) + (rem << pt->tc.time_shift) / in intel_pt_ns_to_ticks()
917 pt->tc.time_mult; in intel_pt_ns_to_ticks()
920 static struct ip_callchain *intel_pt_alloc_chain(struct intel_pt *pt) in intel_pt_alloc_chain() argument
925 sz += (pt->synth_opts.callchain_sz + 1) * sizeof(u64); in intel_pt_alloc_chain()
929 static int intel_pt_callchain_init(struct intel_pt *pt) in intel_pt_callchain_init() argument
933 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_callchain_init()
938 pt->chain = intel_pt_alloc_chain(pt); in intel_pt_callchain_init()
939 if (!pt->chain) in intel_pt_callchain_init()
945 static void intel_pt_add_callchain(struct intel_pt *pt, in intel_pt_add_callchain() argument
948 struct thread *thread = machine__findnew_thread(pt->machine, in intel_pt_add_callchain()
952 thread_stack__sample_late(thread, sample->cpu, pt->chain, in intel_pt_add_callchain()
953 pt->synth_opts.callchain_sz + 1, sample->ip, in intel_pt_add_callchain()
954 pt->kernel_start); in intel_pt_add_callchain()
956 sample->callchain = pt->chain; in intel_pt_add_callchain()
967 static int intel_pt_br_stack_init(struct intel_pt *pt) in intel_pt_br_stack_init() argument
971 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_br_stack_init()
976 pt->br_stack = intel_pt_alloc_br_stack(pt->br_stack_sz); in intel_pt_br_stack_init()
977 if (!pt->br_stack) in intel_pt_br_stack_init()
983 static void intel_pt_add_br_stack(struct intel_pt *pt, in intel_pt_add_br_stack() argument
986 struct thread *thread = machine__findnew_thread(pt->machine, in intel_pt_add_br_stack()
990 thread_stack__br_sample_late(thread, sample->cpu, pt->br_stack, in intel_pt_add_br_stack()
991 pt->br_stack_sz, sample->ip, in intel_pt_add_br_stack()
992 pt->kernel_start); in intel_pt_add_br_stack()
994 sample->branch_stack = pt->br_stack; in intel_pt_add_br_stack()
1000 static struct intel_pt_queue *intel_pt_alloc_queue(struct intel_pt *pt, in intel_pt_alloc_queue() argument
1004 struct perf_env *env = pt->machine->env; in intel_pt_alloc_queue()
1011 if (pt->synth_opts.callchain) { in intel_pt_alloc_queue()
1012 ptq->chain = intel_pt_alloc_chain(pt); in intel_pt_alloc_queue()
1017 if (pt->synth_opts.last_branch || pt->synth_opts.other_events) { in intel_pt_alloc_queue()
1018 unsigned int entry_cnt = max(LBRS_MAX, pt->br_stack_sz); in intel_pt_alloc_queue()
1029 ptq->pt = pt; in intel_pt_alloc_queue()
1031 ptq->exclude_kernel = intel_pt_exclude_kernel(pt); in intel_pt_alloc_queue()
1041 params.return_compression = intel_pt_return_compression(pt); in intel_pt_alloc_queue()
1042 params.branch_enable = intel_pt_branch_enable(pt); in intel_pt_alloc_queue()
1043 params.ctl = intel_pt_ctl(pt); in intel_pt_alloc_queue()
1044 params.max_non_turbo_ratio = pt->max_non_turbo_ratio; in intel_pt_alloc_queue()
1045 params.mtc_period = intel_pt_mtc_period(pt); in intel_pt_alloc_queue()
1046 params.tsc_ctc_ratio_n = pt->tsc_ctc_ratio_n; in intel_pt_alloc_queue()
1047 params.tsc_ctc_ratio_d = pt->tsc_ctc_ratio_d; in intel_pt_alloc_queue()
1048 params.quick = pt->synth_opts.quick; in intel_pt_alloc_queue()
1050 if (pt->filts.cnt > 0) in intel_pt_alloc_queue()
1053 if (pt->synth_opts.instructions) { in intel_pt_alloc_queue()
1054 if (pt->synth_opts.period) { in intel_pt_alloc_queue()
1055 switch (pt->synth_opts.period_type) { in intel_pt_alloc_queue()
1059 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
1063 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
1067 params.period = intel_pt_ns_to_ticks(pt, in intel_pt_alloc_queue()
1068 pt->synth_opts.period); in intel_pt_alloc_queue()
1112 static void intel_pt_set_pid_tid_cpu(struct intel_pt *pt, in intel_pt_set_pid_tid_cpu() argument
1117 if (queue->tid == -1 || pt->have_sched_switch) { in intel_pt_set_pid_tid_cpu()
1118 ptq->tid = machine__get_current_tid(pt->machine, ptq->cpu); in intel_pt_set_pid_tid_cpu()
1125 ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid); in intel_pt_set_pid_tid_cpu()
1166 static void intel_pt_setup_time_range(struct intel_pt *pt, in intel_pt_setup_time_range() argument
1169 if (!pt->range_cnt) in intel_pt_setup_time_range()
1172 ptq->sel_timestamp = pt->time_ranges[0].start; in intel_pt_setup_time_range()
1178 ptq->sel_timestamp = pt->time_ranges[0].end; in intel_pt_setup_time_range()
1183 static int intel_pt_setup_queue(struct intel_pt *pt, in intel_pt_setup_queue() argument
1193 ptq = intel_pt_alloc_queue(pt, queue_nr); in intel_pt_setup_queue()
1204 if (pt->sampling_mode && !pt->snapshot_mode && in intel_pt_setup_queue()
1205 pt->timeless_decoding) in intel_pt_setup_queue()
1208 ptq->sync_switch = pt->sync_switch; in intel_pt_setup_queue()
1210 intel_pt_setup_time_range(pt, ptq); in intel_pt_setup_queue()
1219 if (pt->timeless_decoding) in intel_pt_setup_queue()
1256 ret = auxtrace_heap__add(&pt->heap, queue_nr, ptq->timestamp); in intel_pt_setup_queue()
1265 static int intel_pt_setup_queues(struct intel_pt *pt) in intel_pt_setup_queues() argument
1270 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_setup_queues()
1271 ret = intel_pt_setup_queue(pt, &pt->queues.queue_array[i], i); in intel_pt_setup_queues()
1278 static inline bool intel_pt_skip_event(struct intel_pt *pt) in intel_pt_skip_event() argument
1280 return pt->synth_opts.initial_skip && in intel_pt_skip_event()
1281 pt->num_events++ < pt->synth_opts.initial_skip; in intel_pt_skip_event()
1289 static inline bool intel_pt_skip_cbr_event(struct intel_pt *pt) in intel_pt_skip_cbr_event() argument
1291 return pt->synth_opts.initial_skip && in intel_pt_skip_cbr_event()
1292 pt->num_events + 4 < pt->synth_opts.initial_skip; in intel_pt_skip_cbr_event()
1309 static void intel_pt_prep_b_sample(struct intel_pt *pt, in intel_pt_prep_b_sample() argument
1316 if (!pt->timeless_decoding) in intel_pt_prep_b_sample()
1317 sample->time = tsc_to_perf_time(ptq->timestamp, &pt->tc); in intel_pt_prep_b_sample()
1320 sample->cpumode = intel_pt_cpumode(pt, sample->ip); in intel_pt_prep_b_sample()
1335 static inline int intel_pt_opt_inject(struct intel_pt *pt, in intel_pt_opt_inject() argument
1339 if (!pt->synth_opts.inject) in intel_pt_opt_inject()
1345 static int intel_pt_deliver_synth_event(struct intel_pt *pt, in intel_pt_deliver_synth_event() argument
1351 ret = intel_pt_opt_inject(pt, event, sample, type); in intel_pt_deliver_synth_event()
1355 ret = perf_session__deliver_synth_event(pt->session, event, sample); in intel_pt_deliver_synth_event()
1364 struct intel_pt *pt = ptq->pt; in intel_pt_synth_branch_sample() local
1373 if (pt->branches_filter && !(pt->branches_filter & ptq->flags)) in intel_pt_synth_branch_sample()
1376 if (intel_pt_skip_event(pt)) in intel_pt_synth_branch_sample()
1379 intel_pt_prep_b_sample(pt, ptq, event, &sample); in intel_pt_synth_branch_sample()
1381 sample.id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
1382 sample.stream_id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
1388 if (pt->synth_opts.last_branch && sort__mode == SORT_MODE__BRANCH) { in intel_pt_synth_branch_sample()
1408 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_branch_sample()
1409 pt->branches_sample_type); in intel_pt_synth_branch_sample()
1412 static void intel_pt_prep_sample(struct intel_pt *pt, in intel_pt_prep_sample() argument
1417 intel_pt_prep_b_sample(pt, ptq, event, sample); in intel_pt_prep_sample()
1419 if (pt->synth_opts.callchain) { in intel_pt_prep_sample()
1421 pt->synth_opts.callchain_sz + 1, in intel_pt_prep_sample()
1422 sample->ip, pt->kernel_start); in intel_pt_prep_sample()
1426 if (pt->synth_opts.last_branch) { in intel_pt_prep_sample()
1428 pt->br_stack_sz); in intel_pt_prep_sample()
1435 struct intel_pt *pt = ptq->pt; in intel_pt_synth_instruction_sample() local
1439 if (intel_pt_skip_event(pt)) in intel_pt_synth_instruction_sample()
1442 intel_pt_prep_sample(pt, ptq, event, &sample); in intel_pt_synth_instruction_sample()
1444 sample.id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1445 sample.stream_id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1446 if (pt->synth_opts.quick) in intel_pt_synth_instruction_sample()
1461 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_instruction_sample()
1462 pt->instructions_sample_type); in intel_pt_synth_instruction_sample()
1467 struct intel_pt *pt = ptq->pt; in intel_pt_synth_transaction_sample() local
1471 if (intel_pt_skip_event(pt)) in intel_pt_synth_transaction_sample()
1474 intel_pt_prep_sample(pt, ptq, event, &sample); in intel_pt_synth_transaction_sample()
1476 sample.id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1477 sample.stream_id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1479 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_transaction_sample()
1480 pt->transactions_sample_type); in intel_pt_synth_transaction_sample()
1483 static void intel_pt_prep_p_sample(struct intel_pt *pt, in intel_pt_prep_p_sample() argument
1488 intel_pt_prep_sample(pt, ptq, event, sample); in intel_pt_prep_p_sample()
1500 struct intel_pt *pt = ptq->pt; in intel_pt_synth_ptwrite_sample() local
1505 if (intel_pt_skip_event(pt)) in intel_pt_synth_ptwrite_sample()
1508 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_ptwrite_sample()
1510 sample.id = ptq->pt->ptwrites_id; in intel_pt_synth_ptwrite_sample()
1511 sample.stream_id = ptq->pt->ptwrites_id; in intel_pt_synth_ptwrite_sample()
1520 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_ptwrite_sample()
1521 pt->ptwrites_sample_type); in intel_pt_synth_ptwrite_sample()
1526 struct intel_pt *pt = ptq->pt; in intel_pt_synth_cbr_sample() local
1532 if (intel_pt_skip_cbr_event(pt)) in intel_pt_synth_cbr_sample()
1537 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_cbr_sample()
1539 sample.id = ptq->pt->cbr_id; in intel_pt_synth_cbr_sample()
1540 sample.stream_id = ptq->pt->cbr_id; in intel_pt_synth_cbr_sample()
1542 flags = (u16)ptq->state->cbr_payload | (pt->max_non_turbo_ratio << 16); in intel_pt_synth_cbr_sample()
1544 raw.freq = cpu_to_le32(raw.cbr * pt->cbr2khz); in intel_pt_synth_cbr_sample()
1550 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_cbr_sample()
1551 pt->pwr_events_sample_type); in intel_pt_synth_cbr_sample()
1556 struct intel_pt *pt = ptq->pt; in intel_pt_synth_mwait_sample() local
1561 if (intel_pt_skip_event(pt)) in intel_pt_synth_mwait_sample()
1564 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_mwait_sample()
1566 sample.id = ptq->pt->mwait_id; in intel_pt_synth_mwait_sample()
1567 sample.stream_id = ptq->pt->mwait_id; in intel_pt_synth_mwait_sample()
1575 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_mwait_sample()
1576 pt->pwr_events_sample_type); in intel_pt_synth_mwait_sample()
1581 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pwre_sample() local
1586 if (intel_pt_skip_event(pt)) in intel_pt_synth_pwre_sample()
1589 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_pwre_sample()
1591 sample.id = ptq->pt->pwre_id; in intel_pt_synth_pwre_sample()
1592 sample.stream_id = ptq->pt->pwre_id; in intel_pt_synth_pwre_sample()
1600 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_pwre_sample()
1601 pt->pwr_events_sample_type); in intel_pt_synth_pwre_sample()
1606 struct intel_pt *pt = ptq->pt; in intel_pt_synth_exstop_sample() local
1611 if (intel_pt_skip_event(pt)) in intel_pt_synth_exstop_sample()
1614 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_exstop_sample()
1616 sample.id = ptq->pt->exstop_id; in intel_pt_synth_exstop_sample()
1617 sample.stream_id = ptq->pt->exstop_id; in intel_pt_synth_exstop_sample()
1625 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_exstop_sample()
1626 pt->pwr_events_sample_type); in intel_pt_synth_exstop_sample()
1631 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pwrx_sample() local
1636 if (intel_pt_skip_event(pt)) in intel_pt_synth_pwrx_sample()
1639 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_pwrx_sample()
1641 sample.id = ptq->pt->pwrx_id; in intel_pt_synth_pwrx_sample()
1642 sample.stream_id = ptq->pt->pwrx_id; in intel_pt_synth_pwrx_sample()
1650 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_pwrx_sample()
1651 pt->pwr_events_sample_type); in intel_pt_synth_pwrx_sample()
1786 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pebs_sample() local
1787 struct evsel *evsel = pt->pebs_evsel; in intel_pt_synth_pebs_sample()
1793 if (intel_pt_skip_event(pt)) in intel_pt_synth_pebs_sample()
1813 cpumode = sample.ip < ptq->pt->kernel_start ? in intel_pt_synth_pebs_sample()
1826 else if (!pt->timeless_decoding) in intel_pt_synth_pebs_sample()
1829 sample.time = tsc_to_perf_time(timestamp, &pt->tc); in intel_pt_synth_pebs_sample()
1833 pt->synth_opts.callchain) { in intel_pt_synth_pebs_sample()
1835 pt->synth_opts.callchain_sz, sample.ip, in intel_pt_synth_pebs_sample()
1836 pt->kernel_start); in intel_pt_synth_pebs_sample()
1861 } else if (pt->synth_opts.last_branch) { in intel_pt_synth_pebs_sample()
1864 pt->br_stack_sz); in intel_pt_synth_pebs_sample()
1898 return intel_pt_deliver_synth_event(pt, event, &sample, sample_type); in intel_pt_synth_pebs_sample()
1901 static int intel_pt_synth_error(struct intel_pt *pt, int code, int cpu, in intel_pt_synth_error() argument
1908 if (pt->synth_opts.error_minus_flags) { in intel_pt_synth_error()
1910 pt->synth_opts.error_minus_flags & AUXTRACE_ERR_FLG_OVERFLOW) in intel_pt_synth_error()
1913 pt->synth_opts.error_minus_flags & AUXTRACE_ERR_FLG_DATA_LOST) in intel_pt_synth_error()
1922 err = perf_session__deliver_synth_event(pt->session, &event, NULL); in intel_pt_synth_error()
1933 struct intel_pt *pt = ptq->pt; in intel_ptq_synth_error() local
1936 tm = pt->timeless_decoding ? 0 : tsc_to_perf_time(tm, &pt->tc); in intel_ptq_synth_error()
1938 return intel_pt_synth_error(pt, state->err, ptq->cpu, ptq->pid, in intel_ptq_synth_error()
1942 static int intel_pt_next_tid(struct intel_pt *pt, struct intel_pt_queue *ptq) in intel_pt_next_tid() argument
1953 err = machine__set_current_tid(pt->machine, ptq->cpu, -1, tid); in intel_pt_next_tid()
1955 queue = &pt->queues.queue_array[ptq->queue_nr]; in intel_pt_next_tid()
1956 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_next_tid()
1965 struct intel_pt *pt = ptq->pt; in intel_pt_is_switch_ip() local
1967 return ip == pt->switch_ip && in intel_pt_is_switch_ip()
1979 struct intel_pt *pt = ptq->pt; in intel_pt_sample() local
1994 if (pt->sample_pebs && state->type & INTEL_PT_BLK_ITEMS) { in intel_pt_sample()
2000 if (pt->sample_pwr_events) { in intel_pt_sample()
2030 if (pt->sample_instructions && (state->type & INTEL_PT_INSTRUCTION)) { in intel_pt_sample()
2036 if (pt->sample_transactions && (state->type & INTEL_PT_TRANSACTION)) { in intel_pt_sample()
2042 if (pt->sample_ptwrites && (state->type & INTEL_PT_PTW)) { in intel_pt_sample()
2051 if (pt->use_thread_stack) { in intel_pt_sample()
2054 state->trace_nr, pt->callstack, in intel_pt_sample()
2055 pt->br_stack_sz_plus, in intel_pt_sample()
2056 pt->mispred_all); in intel_pt_sample()
2061 if (pt->sample_branches) { in intel_pt_sample()
2075 err = intel_pt_next_tid(pt, ptq); in intel_pt_sample()
2089 state->to_ip == pt->ptss_ip && in intel_pt_sample()
2097 static u64 intel_pt_switch_ip(struct intel_pt *pt, u64 *ptss_ip) in intel_pt_switch_ip() argument
2099 struct machine *machine = pt->machine; in intel_pt_switch_ip()
2131 if (pt->have_sched_switch == 1) in intel_pt_switch_ip()
2149 static void intel_pt_enable_sync_switch(struct intel_pt *pt) in intel_pt_enable_sync_switch() argument
2153 pt->sync_switch = true; in intel_pt_enable_sync_switch()
2155 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_enable_sync_switch()
2156 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_enable_sync_switch()
2170 struct intel_pt *pt = ptq->pt; in intel_pt_next_time() local
2175 ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].end; in intel_pt_next_time()
2177 } else if (ptq->sel_idx + 1 < pt->range_cnt) { in intel_pt_next_time()
2181 ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].start; in intel_pt_next_time()
2209 intel_pt_next_tid(ptq->pt, ptq); in intel_pt_time_filter()
2239 struct intel_pt *pt = ptq->pt; in intel_pt_run_decoder() local
2243 if (!pt->kernel_start) { in intel_pt_run_decoder()
2244 pt->kernel_start = machine__kernel_start(pt->machine); in intel_pt_run_decoder()
2245 if (pt->per_cpu_mmaps && in intel_pt_run_decoder()
2246 (pt->have_sched_switch == 1 || pt->have_sched_switch == 3) && in intel_pt_run_decoder()
2247 !pt->timeless_decoding && intel_pt_tracing_kernel(pt) && in intel_pt_run_decoder()
2248 !pt->sampling_mode) { in intel_pt_run_decoder()
2249 pt->switch_ip = intel_pt_switch_ip(pt, &pt->ptss_ip); in intel_pt_run_decoder()
2250 if (pt->switch_ip) { in intel_pt_run_decoder()
2252 pt->switch_ip, pt->ptss_ip); in intel_pt_run_decoder()
2253 intel_pt_enable_sync_switch(pt); in intel_pt_run_decoder()
2270 state->from_ip >= pt->kernel_start) { in intel_pt_run_decoder()
2272 intel_pt_next_tid(pt, ptq); in intel_pt_run_decoder()
2275 if (pt->synth_opts.errors) { in intel_pt_run_decoder()
2288 if (pt->est_tsc && in intel_pt_run_decoder()
2289 (state->from_ip >= pt->kernel_start || !state->from_ip) && in intel_pt_run_decoder()
2290 state->to_ip && state->to_ip < pt->kernel_start) { in intel_pt_run_decoder()
2312 if (!pt->timeless_decoding && ptq->timestamp >= *timestamp) { in intel_pt_run_decoder()
2320 static inline int intel_pt_update_queues(struct intel_pt *pt) in intel_pt_update_queues() argument
2322 if (pt->queues.new_data) { in intel_pt_update_queues()
2323 pt->queues.new_data = false; in intel_pt_update_queues()
2324 return intel_pt_setup_queues(pt); in intel_pt_update_queues()
2329 static int intel_pt_process_queues(struct intel_pt *pt, u64 timestamp) in intel_pt_process_queues() argument
2339 if (!pt->heap.heap_cnt) in intel_pt_process_queues()
2342 if (pt->heap.heap_array[0].ordinal >= timestamp) in intel_pt_process_queues()
2345 queue_nr = pt->heap.heap_array[0].queue_nr; in intel_pt_process_queues()
2346 queue = &pt->queues.queue_array[queue_nr]; in intel_pt_process_queues()
2350 queue_nr, pt->heap.heap_array[0].ordinal, in intel_pt_process_queues()
2353 auxtrace_heap__pop(&pt->heap); in intel_pt_process_queues()
2355 if (pt->heap.heap_cnt) { in intel_pt_process_queues()
2356 ts = pt->heap.heap_array[0].ordinal + 1; in intel_pt_process_queues()
2363 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_queues()
2368 auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
2373 ret = auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
2384 static int intel_pt_process_timeless_queues(struct intel_pt *pt, pid_t tid, in intel_pt_process_timeless_queues() argument
2387 struct auxtrace_queues *queues = &pt->queues; in intel_pt_process_timeless_queues()
2392 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_process_timeless_queues()
2397 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_timeless_queues()
2408 struct machine *m = ptq->pt->machine; in intel_pt_sample_set_pid_tid_cpu()
2432 static int intel_pt_process_timeless_sample(struct intel_pt *pt, in intel_pt_process_timeless_sample() argument
2439 queue = auxtrace_queues__sample_queue(&pt->queues, sample, pt->session); in intel_pt_process_timeless_sample()
2454 static int intel_pt_lost(struct intel_pt *pt, struct perf_sample *sample) in intel_pt_lost() argument
2456 return intel_pt_synth_error(pt, INTEL_PT_ERR_LOST, sample->cpu, in intel_pt_lost()
2460 static struct intel_pt_queue *intel_pt_cpu_to_ptq(struct intel_pt *pt, int cpu) in intel_pt_cpu_to_ptq() argument
2464 if (cpu < 0 || !pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
2467 if ((unsigned)cpu >= pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
2468 i = pt->queues.nr_queues - 1; in intel_pt_cpu_to_ptq()
2472 if (pt->queues.queue_array[i].cpu == cpu) in intel_pt_cpu_to_ptq()
2473 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
2476 if (pt->queues.queue_array[--i].cpu == cpu) in intel_pt_cpu_to_ptq()
2477 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
2480 for (; j < pt->queues.nr_queues; j++) { in intel_pt_cpu_to_ptq()
2481 if (pt->queues.queue_array[j].cpu == cpu) in intel_pt_cpu_to_ptq()
2482 return pt->queues.queue_array[j].priv; in intel_pt_cpu_to_ptq()
2488 static int intel_pt_sync_switch(struct intel_pt *pt, int cpu, pid_t tid, in intel_pt_sync_switch() argument
2494 if (!pt->sync_switch) in intel_pt_sync_switch()
2497 ptq = intel_pt_cpu_to_ptq(pt, cpu); in intel_pt_sync_switch()
2512 &pt->tc); in intel_pt_sync_switch()
2513 err = auxtrace_heap__add(&pt->heap, ptq->queue_nr, in intel_pt_sync_switch()
2533 static int intel_pt_process_switch(struct intel_pt *pt, in intel_pt_process_switch() argument
2540 evsel = perf_evlist__id2evsel(pt->session->evlist, sample->id); in intel_pt_process_switch()
2541 if (evsel != pt->switch_evsel) in intel_pt_process_switch()
2549 &pt->tc)); in intel_pt_process_switch()
2551 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_process_switch()
2555 return machine__set_current_tid(pt->machine, cpu, -1, tid); in intel_pt_process_switch()
2558 static int intel_pt_context_switch_in(struct intel_pt *pt, in intel_pt_context_switch_in() argument
2565 if (pt->sync_switch) { in intel_pt_context_switch_in()
2568 ptq = intel_pt_cpu_to_ptq(pt, cpu); in intel_pt_context_switch_in()
2590 if (machine__get_current_tid(pt->machine, cpu) == tid) in intel_pt_context_switch_in()
2593 return machine__set_current_tid(pt->machine, cpu, pid, tid); in intel_pt_context_switch_in()
2596 static int intel_pt_context_switch(struct intel_pt *pt, union perf_event *event, in intel_pt_context_switch() argument
2605 if (pt->have_sched_switch == 3) { in intel_pt_context_switch()
2607 return intel_pt_context_switch_in(pt, sample); in intel_pt_context_switch()
2624 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_context_switch()
2628 return machine__set_current_tid(pt->machine, cpu, pid, tid); in intel_pt_context_switch()
2631 static int intel_pt_process_itrace_start(struct intel_pt *pt, in intel_pt_process_itrace_start() argument
2635 if (!pt->per_cpu_mmaps) in intel_pt_process_itrace_start()
2641 perf_time_to_tsc(sample->time, &pt->tc)); in intel_pt_process_itrace_start()
2643 return machine__set_current_tid(pt->machine, sample->cpu, in intel_pt_process_itrace_start()
2660 static int intel_pt_text_poke(struct intel_pt *pt, union perf_event *event) in intel_pt_text_poke() argument
2666 struct thread *thread = pt->unknown_thread; in intel_pt_text_poke()
2668 struct machine *machine = pt->machine; in intel_pt_text_poke()
2714 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_event() local
2728 timestamp = perf_time_to_tsc(sample->time, &pt->tc); in intel_pt_process_event()
2732 if (timestamp || pt->timeless_decoding) { in intel_pt_process_event()
2733 err = intel_pt_update_queues(pt); in intel_pt_process_event()
2738 if (pt->timeless_decoding) { in intel_pt_process_event()
2739 if (pt->sampling_mode) { in intel_pt_process_event()
2741 err = intel_pt_process_timeless_sample(pt, in intel_pt_process_event()
2744 err = intel_pt_process_timeless_queues(pt, in intel_pt_process_event()
2749 err = intel_pt_process_queues(pt, timestamp); in intel_pt_process_event()
2755 if (pt->synth_opts.add_callchain && !sample->callchain) in intel_pt_process_event()
2756 intel_pt_add_callchain(pt, sample); in intel_pt_process_event()
2757 if (pt->synth_opts.add_last_branch && !sample->branch_stack) in intel_pt_process_event()
2758 intel_pt_add_br_stack(pt, sample); in intel_pt_process_event()
2763 pt->synth_opts.errors) { in intel_pt_process_event()
2764 err = intel_pt_lost(pt, sample); in intel_pt_process_event()
2769 if (pt->switch_evsel && event->header.type == PERF_RECORD_SAMPLE) in intel_pt_process_event()
2770 err = intel_pt_process_switch(pt, sample); in intel_pt_process_event()
2772 err = intel_pt_process_itrace_start(pt, event, sample); in intel_pt_process_event()
2775 err = intel_pt_context_switch(pt, event, sample); in intel_pt_process_event()
2778 err = intel_pt_text_poke(pt, event); in intel_pt_process_event()
2780 if (intel_pt_enable_logging && intel_pt_log_events(pt, sample->time)) { in intel_pt_process_event()
2791 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_flush() local
2801 ret = intel_pt_update_queues(pt); in intel_pt_flush()
2805 if (pt->timeless_decoding) in intel_pt_flush()
2806 return intel_pt_process_timeless_queues(pt, -1, in intel_pt_flush()
2809 return intel_pt_process_queues(pt, MAX_TIMESTAMP); in intel_pt_flush()
2814 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free_events() local
2816 struct auxtrace_queues *queues = &pt->queues; in intel_pt_free_events()
2829 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free() local
2832 auxtrace_heap__free(&pt->heap); in intel_pt_free()
2835 thread__put(pt->unknown_thread); in intel_pt_free()
2836 addr_filters__exit(&pt->filts); in intel_pt_free()
2837 zfree(&pt->chain); in intel_pt_free()
2838 zfree(&pt->filter); in intel_pt_free()
2839 zfree(&pt->time_ranges); in intel_pt_free()
2840 free(pt); in intel_pt_free()
2846 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_evsel_is_auxtrace() local
2849 return evsel->core.attr.type == pt->pmu_type; in intel_pt_evsel_is_auxtrace()
2856 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_auxtrace_event() local
2859 if (!pt->data_queued) { in intel_pt_process_auxtrace_event()
2873 err = auxtrace_queues__add_event(&pt->queues, session, event, in intel_pt_process_auxtrace_event()
2881 intel_pt_dump_event(pt, buffer->data, in intel_pt_process_auxtrace_event()
2895 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_queue_data() local
2900 return auxtrace_queues__add_event(&pt->queues, session, event, in intel_pt_queue_data()
2905 timestamp = perf_time_to_tsc(sample->time, &pt->tc); in intel_pt_queue_data()
2909 return auxtrace_queues__add_sample(&pt->queues, session, sample, in intel_pt_queue_data()
2966 static struct evsel *intel_pt_evsel(struct intel_pt *pt, in intel_pt_evsel() argument
2972 if (evsel->core.attr.type == pt->pmu_type && evsel->core.ids) in intel_pt_evsel()
2979 static int intel_pt_synth_events(struct intel_pt *pt, in intel_pt_synth_events() argument
2983 struct evsel *evsel = intel_pt_evsel(pt, evlist); in intel_pt_synth_events()
2999 if (pt->timeless_decoding) in intel_pt_synth_events()
3003 if (!pt->per_cpu_mmaps) in intel_pt_synth_events()
3017 if (pt->synth_opts.branches) { in intel_pt_synth_events()
3024 pt->sample_branches = true; in intel_pt_synth_events()
3025 pt->branches_sample_type = attr.sample_type; in intel_pt_synth_events()
3026 pt->branches_id = id; in intel_pt_synth_events()
3031 if (pt->synth_opts.callchain) in intel_pt_synth_events()
3033 if (pt->synth_opts.last_branch) { in intel_pt_synth_events()
3043 if (pt->synth_opts.instructions) { in intel_pt_synth_events()
3045 if (pt->synth_opts.period_type == PERF_ITRACE_PERIOD_NANOSECS) in intel_pt_synth_events()
3047 intel_pt_ns_to_ticks(pt, pt->synth_opts.period); in intel_pt_synth_events()
3049 attr.sample_period = pt->synth_opts.period; in intel_pt_synth_events()
3053 pt->sample_instructions = true; in intel_pt_synth_events()
3054 pt->instructions_sample_type = attr.sample_type; in intel_pt_synth_events()
3055 pt->instructions_id = id; in intel_pt_synth_events()
3062 if (pt->synth_opts.transactions) { in intel_pt_synth_events()
3067 pt->sample_transactions = true; in intel_pt_synth_events()
3068 pt->transactions_sample_type = attr.sample_type; in intel_pt_synth_events()
3069 pt->transactions_id = id; in intel_pt_synth_events()
3077 if (pt->synth_opts.ptwrites) { in intel_pt_synth_events()
3082 pt->sample_ptwrites = true; in intel_pt_synth_events()
3083 pt->ptwrites_sample_type = attr.sample_type; in intel_pt_synth_events()
3084 pt->ptwrites_id = id; in intel_pt_synth_events()
3089 if (pt->synth_opts.pwr_events) { in intel_pt_synth_events()
3090 pt->sample_pwr_events = true; in intel_pt_synth_events()
3091 pt->pwr_events_sample_type = attr.sample_type; in intel_pt_synth_events()
3097 pt->cbr_id = id; in intel_pt_synth_events()
3102 if (pt->synth_opts.pwr_events && (evsel->core.attr.config & 0x10)) { in intel_pt_synth_events()
3107 pt->mwait_id = id; in intel_pt_synth_events()
3115 pt->pwre_id = id; in intel_pt_synth_events()
3123 pt->exstop_id = id; in intel_pt_synth_events()
3131 pt->pwrx_id = id; in intel_pt_synth_events()
3139 static void intel_pt_setup_pebs_events(struct intel_pt *pt) in intel_pt_setup_pebs_events() argument
3143 if (!pt->synth_opts.other_events) in intel_pt_setup_pebs_events()
3146 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_setup_pebs_events()
3148 pt->sample_pebs = true; in intel_pt_setup_pebs_events()
3149 pt->pebs_evsel = evsel; in intel_pt_setup_pebs_events()
3183 struct intel_pt *pt = data; in intel_pt_perf_config() local
3186 pt->mispred_all = perf_config_bool(var, value); in intel_pt_perf_config()
3192 static u64 intel_pt_tsc_start(u64 ns, struct intel_pt *pt) in intel_pt_tsc_start() argument
3196 tsc = perf_time_to_tsc(ns, &pt->tc); in intel_pt_tsc_start()
3199 tm = tsc_to_perf_time(tsc, &pt->tc); in intel_pt_tsc_start()
3206 tm = tsc_to_perf_time(++tsc, &pt->tc); in intel_pt_tsc_start()
3212 static u64 intel_pt_tsc_end(u64 ns, struct intel_pt *pt) in intel_pt_tsc_end() argument
3216 tsc = perf_time_to_tsc(ns, &pt->tc); in intel_pt_tsc_end()
3219 tm = tsc_to_perf_time(tsc, &pt->tc); in intel_pt_tsc_end()
3226 tm = tsc_to_perf_time(--tsc, &pt->tc); in intel_pt_tsc_end()
3231 static int intel_pt_setup_time_ranges(struct intel_pt *pt, in intel_pt_setup_time_ranges() argument
3238 if (!n || !p || pt->timeless_decoding) in intel_pt_setup_time_ranges()
3241 pt->time_ranges = calloc(n, sizeof(struct range)); in intel_pt_setup_time_ranges()
3242 if (!pt->time_ranges) in intel_pt_setup_time_ranges()
3245 pt->range_cnt = n; in intel_pt_setup_time_ranges()
3250 struct range *r = &pt->time_ranges[i]; in intel_pt_setup_time_ranges()
3258 r->start = ts ? intel_pt_tsc_start(ts, pt) : 0; in intel_pt_setup_time_ranges()
3259 r->end = te ? intel_pt_tsc_end(te, pt) : 0; in intel_pt_setup_time_ranges()
3319 struct intel_pt *pt; in intel_pt_process_auxtrace_info() local
3328 pt = zalloc(sizeof(struct intel_pt)); in intel_pt_process_auxtrace_info()
3329 if (!pt) in intel_pt_process_auxtrace_info()
3332 addr_filters__init(&pt->filts); in intel_pt_process_auxtrace_info()
3334 err = perf_config(intel_pt_perf_config, pt); in intel_pt_process_auxtrace_info()
3338 err = auxtrace_queues__init(&pt->queues); in intel_pt_process_auxtrace_info()
3344 pt->session = session; in intel_pt_process_auxtrace_info()
3345 pt->machine = &session->machines.host; /* No kvm support */ in intel_pt_process_auxtrace_info()
3346 pt->auxtrace_type = auxtrace_info->type; in intel_pt_process_auxtrace_info()
3347 pt->pmu_type = auxtrace_info->priv[INTEL_PT_PMU_TYPE]; in intel_pt_process_auxtrace_info()
3348 pt->tc.time_shift = auxtrace_info->priv[INTEL_PT_TIME_SHIFT]; in intel_pt_process_auxtrace_info()
3349 pt->tc.time_mult = auxtrace_info->priv[INTEL_PT_TIME_MULT]; in intel_pt_process_auxtrace_info()
3350 pt->tc.time_zero = auxtrace_info->priv[INTEL_PT_TIME_ZERO]; in intel_pt_process_auxtrace_info()
3351 pt->cap_user_time_zero = auxtrace_info->priv[INTEL_PT_CAP_USER_TIME_ZERO]; in intel_pt_process_auxtrace_info()
3352 pt->tsc_bit = auxtrace_info->priv[INTEL_PT_TSC_BIT]; in intel_pt_process_auxtrace_info()
3353 pt->noretcomp_bit = auxtrace_info->priv[INTEL_PT_NORETCOMP_BIT]; in intel_pt_process_auxtrace_info()
3354 pt->have_sched_switch = auxtrace_info->priv[INTEL_PT_HAVE_SCHED_SWITCH]; in intel_pt_process_auxtrace_info()
3355 pt->snapshot_mode = auxtrace_info->priv[INTEL_PT_SNAPSHOT_MODE]; in intel_pt_process_auxtrace_info()
3356 pt->per_cpu_mmaps = auxtrace_info->priv[INTEL_PT_PER_CPU_MMAPS]; in intel_pt_process_auxtrace_info()
3361 pt->mtc_bit = auxtrace_info->priv[INTEL_PT_MTC_BIT]; in intel_pt_process_auxtrace_info()
3362 pt->mtc_freq_bits = auxtrace_info->priv[INTEL_PT_MTC_FREQ_BITS]; in intel_pt_process_auxtrace_info()
3363 pt->tsc_ctc_ratio_n = auxtrace_info->priv[INTEL_PT_TSC_CTC_N]; in intel_pt_process_auxtrace_info()
3364 pt->tsc_ctc_ratio_d = auxtrace_info->priv[INTEL_PT_TSC_CTC_D]; in intel_pt_process_auxtrace_info()
3365 pt->cyc_bit = auxtrace_info->priv[INTEL_PT_CYC_BIT]; in intel_pt_process_auxtrace_info()
3371 pt->max_non_turbo_ratio = in intel_pt_process_auxtrace_info()
3398 pt->filter = memdup(filter, len); in intel_pt_process_auxtrace_info()
3399 if (!pt->filter) { in intel_pt_process_auxtrace_info()
3404 mem_bswap_64(pt->filter, len); in intel_pt_process_auxtrace_info()
3405 if (pt->filter[len - 1]) { in intel_pt_process_auxtrace_info()
3410 err = addr_filters__parse_bare_filter(&pt->filts, in intel_pt_process_auxtrace_info()
3415 intel_pt_print_info_str("Filter string", pt->filter); in intel_pt_process_auxtrace_info()
3418 pt->timeless_decoding = intel_pt_timeless_decoding(pt); in intel_pt_process_auxtrace_info()
3419 if (pt->timeless_decoding && !pt->tc.time_mult) in intel_pt_process_auxtrace_info()
3420 pt->tc.time_mult = 1; in intel_pt_process_auxtrace_info()
3421 pt->have_tsc = intel_pt_have_tsc(pt); in intel_pt_process_auxtrace_info()
3422 pt->sampling_mode = intel_pt_sampling_mode(pt); in intel_pt_process_auxtrace_info()
3423 pt->est_tsc = !pt->timeless_decoding; in intel_pt_process_auxtrace_info()
3425 pt->unknown_thread = thread__new(999999999, 999999999); in intel_pt_process_auxtrace_info()
3426 if (!pt->unknown_thread) { in intel_pt_process_auxtrace_info()
3437 INIT_LIST_HEAD(&pt->unknown_thread->node); in intel_pt_process_auxtrace_info()
3439 err = thread__set_comm(pt->unknown_thread, "unknown", 0); in intel_pt_process_auxtrace_info()
3442 if (thread__init_maps(pt->unknown_thread, pt->machine)) { in intel_pt_process_auxtrace_info()
3447 pt->auxtrace.process_event = intel_pt_process_event; in intel_pt_process_auxtrace_info()
3448 pt->auxtrace.process_auxtrace_event = intel_pt_process_auxtrace_event; in intel_pt_process_auxtrace_info()
3449 pt->auxtrace.queue_data = intel_pt_queue_data; in intel_pt_process_auxtrace_info()
3450 pt->auxtrace.dump_auxtrace_sample = intel_pt_dump_sample; in intel_pt_process_auxtrace_info()
3451 pt->auxtrace.flush_events = intel_pt_flush; in intel_pt_process_auxtrace_info()
3452 pt->auxtrace.free_events = intel_pt_free_events; in intel_pt_process_auxtrace_info()
3453 pt->auxtrace.free = intel_pt_free; in intel_pt_process_auxtrace_info()
3454 pt->auxtrace.evsel_is_auxtrace = intel_pt_evsel_is_auxtrace; in intel_pt_process_auxtrace_info()
3455 session->auxtrace = &pt->auxtrace; in intel_pt_process_auxtrace_info()
3460 if (pt->have_sched_switch == 1) { in intel_pt_process_auxtrace_info()
3461 pt->switch_evsel = intel_pt_find_sched_switch(session->evlist); in intel_pt_process_auxtrace_info()
3462 if (!pt->switch_evsel) { in intel_pt_process_auxtrace_info()
3467 } else if (pt->have_sched_switch == 2 && in intel_pt_process_auxtrace_info()
3475 pt->synth_opts = *session->itrace_synth_opts; in intel_pt_process_auxtrace_info()
3477 itrace_synth_opts__set_default(&pt->synth_opts, in intel_pt_process_auxtrace_info()
3481 pt->synth_opts.branches = false; in intel_pt_process_auxtrace_info()
3482 pt->synth_opts.callchain = true; in intel_pt_process_auxtrace_info()
3483 pt->synth_opts.add_callchain = true; in intel_pt_process_auxtrace_info()
3485 pt->synth_opts.thread_stack = in intel_pt_process_auxtrace_info()
3489 if (pt->synth_opts.log) in intel_pt_process_auxtrace_info()
3493 if (pt->tc.time_mult) { in intel_pt_process_auxtrace_info()
3494 u64 tsc_freq = intel_pt_ns_to_ticks(pt, 1000000000); in intel_pt_process_auxtrace_info()
3496 if (!pt->max_non_turbo_ratio) in intel_pt_process_auxtrace_info()
3497 pt->max_non_turbo_ratio = in intel_pt_process_auxtrace_info()
3501 pt->max_non_turbo_ratio); in intel_pt_process_auxtrace_info()
3502 pt->cbr2khz = tsc_freq / pt->max_non_turbo_ratio / 1000; in intel_pt_process_auxtrace_info()
3505 err = intel_pt_setup_time_ranges(pt, session->itrace_synth_opts); in intel_pt_process_auxtrace_info()
3509 if (pt->synth_opts.calls) in intel_pt_process_auxtrace_info()
3510 pt->branches_filter |= PERF_IP_FLAG_CALL | PERF_IP_FLAG_ASYNC | in intel_pt_process_auxtrace_info()
3512 if (pt->synth_opts.returns) in intel_pt_process_auxtrace_info()
3513 pt->branches_filter |= PERF_IP_FLAG_RETURN | in intel_pt_process_auxtrace_info()
3516 if ((pt->synth_opts.callchain || pt->synth_opts.add_callchain) && in intel_pt_process_auxtrace_info()
3521 pt->synth_opts.callchain = false; in intel_pt_process_auxtrace_info()
3522 pt->synth_opts.add_callchain = false; in intel_pt_process_auxtrace_info()
3526 if (pt->synth_opts.add_callchain) { in intel_pt_process_auxtrace_info()
3527 err = intel_pt_callchain_init(pt); in intel_pt_process_auxtrace_info()
3532 if (pt->synth_opts.last_branch || pt->synth_opts.add_last_branch) { in intel_pt_process_auxtrace_info()
3533 pt->br_stack_sz = pt->synth_opts.last_branch_sz; in intel_pt_process_auxtrace_info()
3534 pt->br_stack_sz_plus = pt->br_stack_sz; in intel_pt_process_auxtrace_info()
3537 if (pt->synth_opts.add_last_branch) { in intel_pt_process_auxtrace_info()
3538 err = intel_pt_br_stack_init(pt); in intel_pt_process_auxtrace_info()
3548 if (intel_pt_tracing_kernel(pt)) in intel_pt_process_auxtrace_info()
3549 pt->br_stack_sz_plus += 1024; in intel_pt_process_auxtrace_info()
3551 pt->br_stack_sz_plus += 1; in intel_pt_process_auxtrace_info()
3554 pt->use_thread_stack = pt->synth_opts.callchain || in intel_pt_process_auxtrace_info()
3555 pt->synth_opts.add_callchain || in intel_pt_process_auxtrace_info()
3556 pt->synth_opts.thread_stack || in intel_pt_process_auxtrace_info()
3557 pt->synth_opts.last_branch || in intel_pt_process_auxtrace_info()
3558 pt->synth_opts.add_last_branch; in intel_pt_process_auxtrace_info()
3560 pt->callstack = pt->synth_opts.callchain || in intel_pt_process_auxtrace_info()
3561 pt->synth_opts.add_callchain || in intel_pt_process_auxtrace_info()
3562 pt->synth_opts.thread_stack; in intel_pt_process_auxtrace_info()
3564 err = intel_pt_synth_events(pt, session); in intel_pt_process_auxtrace_info()
3568 intel_pt_setup_pebs_events(pt); in intel_pt_process_auxtrace_info()
3570 if (pt->sampling_mode || list_empty(&session->auxtrace_index)) in intel_pt_process_auxtrace_info()
3573 err = auxtrace_queues__process_index(&pt->queues, session); in intel_pt_process_auxtrace_info()
3577 if (pt->queues.populated) in intel_pt_process_auxtrace_info()
3578 pt->data_queued = true; in intel_pt_process_auxtrace_info()
3580 if (pt->timeless_decoding) in intel_pt_process_auxtrace_info()
3586 zfree(&pt->chain); in intel_pt_process_auxtrace_info()
3587 thread__zput(pt->unknown_thread); in intel_pt_process_auxtrace_info()
3590 auxtrace_queues__free(&pt->queues); in intel_pt_process_auxtrace_info()
3593 addr_filters__exit(&pt->filts); in intel_pt_process_auxtrace_info()
3594 zfree(&pt->filter); in intel_pt_process_auxtrace_info()
3595 zfree(&pt->time_ranges); in intel_pt_process_auxtrace_info()
3596 free(pt); in intel_pt_process_auxtrace_info()