Lines matching refs:pt — references to the identifier pt (the struct intel_pt pointer) in tools/perf/util/intel-pt.c

152 	struct intel_pt *pt;  member
194 static void intel_pt_dump(struct intel_pt *pt __maybe_unused, in intel_pt_dump()
234 static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf, in intel_pt_dump_event() argument
238 intel_pt_dump(pt, buf, len); in intel_pt_dump_event()
254 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_dump_sample() local
258 intel_pt_dump(pt, sample->aux_sample.data, sample->aux_sample.size); in intel_pt_dump_sample()
261 static bool intel_pt_log_events(struct intel_pt *pt, u64 tm) in intel_pt_log_events() argument
263 struct perf_time_interval *range = pt->synth_opts.ptime_range; in intel_pt_log_events()
264 int n = pt->synth_opts.range_num; in intel_pt_log_events()
266 if (pt->synth_opts.log_plus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS) in intel_pt_log_events()
269 if (pt->synth_opts.log_minus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS) in intel_pt_log_events()
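
The three checks above (lines 261-269) gate whether a perf event is written to the Intel PT debug log: an explicit log-plus flag always logs, a log-minus flag always suppresses, and only then does the time-range filter apply. A minimal sketch of that precedence, with an illustrative macro standing in for AUXTRACE_LOG_FLG_ALL_PERF_EVTS and a caller-supplied boolean standing in for perf's perf_time__ranges_skip_sample() test; it assumes, as range filters elsewhere in perf do, that no configured range means everything is logged:

#include <stdbool.h>

#define LOG_ALL_PERF_EVTS 0x1   /* stand-in for AUXTRACE_LOG_FLG_ALL_PERF_EVTS */

static bool log_events(unsigned int plus_flags, unsigned int minus_flags,
                       int range_num, bool tm_in_range)
{
        if (plus_flags & LOG_ALL_PERF_EVTS)     /* line 266: force logging */
                return true;
        if (minus_flags & LOG_ALL_PERF_EVTS)    /* line 269: force suppression */
                return false;
        if (!range_num)                         /* no --time ranges configured */
                return true;
        return tm_in_range;
}
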
316 struct intel_pt *pt = ptq->pt; in intel_pt_findnew_vmcs_info() local
318 if (!vmcs && !pt->dflt_tsc_offset) in intel_pt_findnew_vmcs_info()
321 return intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, pt->dflt_tsc_offset); in intel_pt_findnew_vmcs_info()
324 static void intel_pt_free_vmcs_info(struct intel_pt *pt) in intel_pt_free_vmcs_info() argument
329 n = rb_first(&pt->vmcs_info); in intel_pt_free_vmcs_info()
333 rb_erase(&v->rb_node, &pt->vmcs_info); in intel_pt_free_vmcs_info()
338 static int intel_pt_do_fix_overlap(struct intel_pt *pt, struct auxtrace_buffer *a, in intel_pt_do_fix_overlap() argument
345 pt->have_tsc, &consecutive, in intel_pt_do_fix_overlap()
346 pt->synth_opts.vm_time_correlation); in intel_pt_do_fix_overlap()
354 if (pt->synth_opts.vm_time_correlation) in intel_pt_do_fix_overlap()
371 int fd = perf_data__fd(ptq->pt->session->data); in intel_pt_get_buffer()
378 might_overlap = ptq->pt->snapshot_mode || ptq->pt->sampling_mode; in intel_pt_get_buffer()
380 intel_pt_do_fix_overlap(ptq->pt, old_buffer, buffer)) in intel_pt_get_buffer()
422 queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; in intel_pt_lookahead()
472 queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; in intel_pt_get_trace()
642 return ip >= ptq->pt->kernel_start ? in intel_pt_nr_cpumode()
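
Line 642 is the entire cpumode decision for a non-guest IP: anything at or above the machine's kernel start address is attributed to kernel space. A hypothetical standalone form, using perf's PERF_RECORD_MISC_* constants:

#include <stdint.h>
#include <linux/perf_event.h>   /* PERF_RECORD_MISC_KERNEL / _USER */

static int ip_cpumode(uint64_t ip, uint64_t kernel_start)
{
        return ip >= kernel_start ? PERF_RECORD_MISC_KERNEL
                                  : PERF_RECORD_MISC_USER;
}
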
657 struct machines *machines = &ptq->pt->session->machines; in intel_pt_get_guest()
687 struct machine *machine = ptq->pt->machine; in intel_pt_walk_next_insn()
718 thread = ptq->pt->unknown_thread; in intel_pt_walk_next_insn()
823 static bool intel_pt_match_pgd_ip(struct intel_pt *pt, uint64_t ip, in intel_pt_match_pgd_ip() argument
831 list_for_each_entry(filt, &pt->filts.head, list) { in intel_pt_match_pgd_ip()
871 return intel_pt_match_pgd_ip(ptq->pt, ip, ip, NULL); in __intel_pt_pgd_ip()
874 } else if (ip >= ptq->pt->kernel_start) { in __intel_pt_pgd_ip()
875 return intel_pt_match_pgd_ip(ptq->pt, ip, ip, NULL); in __intel_pt_pgd_ip()
889 return intel_pt_match_pgd_ip(ptq->pt, ip, offset, in __intel_pt_pgd_ip()
898 static bool intel_pt_get_config(struct intel_pt *pt, in intel_pt_get_config() argument
901 if (attr->type == pt->pmu_type) { in intel_pt_get_config()
910 static bool intel_pt_exclude_kernel(struct intel_pt *pt) in intel_pt_exclude_kernel() argument
914 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_exclude_kernel()
915 if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && in intel_pt_exclude_kernel()
922 static bool intel_pt_return_compression(struct intel_pt *pt) in intel_pt_return_compression() argument
927 if (!pt->noretcomp_bit) in intel_pt_return_compression()
930 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_return_compression()
931 if (intel_pt_get_config(pt, &evsel->core.attr, &config) && in intel_pt_return_compression()
932 (config & pt->noretcomp_bit)) in intel_pt_return_compression()
938 static bool intel_pt_branch_enable(struct intel_pt *pt) in intel_pt_branch_enable() argument
943 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_branch_enable()
944 if (intel_pt_get_config(pt, &evsel->core.attr, &config) && in intel_pt_branch_enable()
951 static unsigned int intel_pt_mtc_period(struct intel_pt *pt) in intel_pt_mtc_period() argument
957 if (!pt->mtc_freq_bits) in intel_pt_mtc_period()
960 for (shift = 0, config = pt->mtc_freq_bits; !(config & 1); shift++) in intel_pt_mtc_period()
963 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_mtc_period()
964 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) in intel_pt_mtc_period()
965 return (config & pt->mtc_freq_bits) >> shift; in intel_pt_mtc_period()
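
Lines 951-965 recover the MTC period from perf_event_attr.config: the loop measures how far the PMU's mtc_freq_bits mask sits within config, and the masked value is shifted down by that amount. A self-contained version of the arithmetic (names are illustrative):

#include <stdint.h>

static unsigned int mtc_period(uint64_t mtc_freq_bits, uint64_t config)
{
        unsigned int shift;
        uint64_t bits;

        if (!mtc_freq_bits)     /* guard shown at line 957 */
                return 0;

        /* Count up to the mask's lowest set bit. */
        for (shift = 0, bits = mtc_freq_bits; !(bits & 1); shift++)
                bits >>= 1;

        /* Extract the field from this event's config. */
        return (config & mtc_freq_bits) >> shift;
}
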
970 static bool intel_pt_timeless_decoding(struct intel_pt *pt) in intel_pt_timeless_decoding() argument
976 if (!pt->tsc_bit || !pt->cap_user_time_zero || pt->synth_opts.timeless_decoding) in intel_pt_timeless_decoding()
979 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_timeless_decoding()
982 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { in intel_pt_timeless_decoding()
983 if (config & pt->tsc_bit) in intel_pt_timeless_decoding()
992 static bool intel_pt_tracing_kernel(struct intel_pt *pt) in intel_pt_tracing_kernel() argument
996 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_tracing_kernel()
997 if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && in intel_pt_tracing_kernel()
1004 static bool intel_pt_have_tsc(struct intel_pt *pt) in intel_pt_have_tsc() argument
1010 if (!pt->tsc_bit) in intel_pt_have_tsc()
1013 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_have_tsc()
1014 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { in intel_pt_have_tsc()
1015 if (config & pt->tsc_bit) in intel_pt_have_tsc()
1024 static bool intel_pt_have_mtc(struct intel_pt *pt) in intel_pt_have_mtc() argument
1029 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_have_mtc()
1030 if (intel_pt_get_config(pt, &evsel->core.attr, &config) && in intel_pt_have_mtc()
1031 (config & pt->mtc_bit)) in intel_pt_have_mtc()
1037 static bool intel_pt_sampling_mode(struct intel_pt *pt) in intel_pt_sampling_mode() argument
1041 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_sampling_mode()
1049 static u64 intel_pt_ctl(struct intel_pt *pt) in intel_pt_ctl() argument
1054 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_ctl()
1055 if (intel_pt_get_config(pt, &evsel->core.attr, &config)) in intel_pt_ctl()
1061 static u64 intel_pt_ns_to_ticks(const struct intel_pt *pt, u64 ns) in intel_pt_ns_to_ticks() argument
1065 quot = ns / pt->tc.time_mult; in intel_pt_ns_to_ticks()
1066 rem = ns % pt->tc.time_mult; in intel_pt_ns_to_ticks()
1067 return (quot << pt->tc.time_shift) + (rem << pt->tc.time_shift) / in intel_pt_ns_to_ticks()
1068 pt->tc.time_mult; in intel_pt_ns_to_ticks()
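
Lines 1061-1068 invert the kernel's clock mapping (time = ticks * time_mult >> time_shift, per the cap_user_time fields mirrored into pt->tc). Splitting ns into the quotient and remainder of time_mult keeps the left shift from overflowing 64 bits; a self-contained version:

#include <stdint.h>

static uint64_t ns_to_ticks(uint64_t ns, uint32_t time_mult, uint16_t time_shift)
{
        uint64_t quot = ns / time_mult;
        uint64_t rem  = ns % time_mult;

        /* (ns << shift) / mult, without shifting the full ns value */
        return (quot << time_shift) + (rem << time_shift) / time_mult;
}

time_zero plays no part here because the helper converts durations (sampling periods, or one second when computing tsc_freq at line 3827), not absolute timestamps.
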
1071 static struct ip_callchain *intel_pt_alloc_chain(struct intel_pt *pt) in intel_pt_alloc_chain() argument
1076 sz += (pt->synth_opts.callchain_sz + 1) * sizeof(u64); in intel_pt_alloc_chain()
1080 static int intel_pt_callchain_init(struct intel_pt *pt) in intel_pt_callchain_init() argument
1084 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_callchain_init()
1089 pt->chain = intel_pt_alloc_chain(pt); in intel_pt_callchain_init()
1090 if (!pt->chain) in intel_pt_callchain_init()
1096 static void intel_pt_add_callchain(struct intel_pt *pt, in intel_pt_add_callchain() argument
1099 struct thread *thread = machine__findnew_thread(pt->machine, in intel_pt_add_callchain()
1103 thread_stack__sample_late(thread, sample->cpu, pt->chain, in intel_pt_add_callchain()
1104 pt->synth_opts.callchain_sz + 1, sample->ip, in intel_pt_add_callchain()
1105 pt->kernel_start); in intel_pt_add_callchain()
1107 sample->callchain = pt->chain; in intel_pt_add_callchain()
1118 static int intel_pt_br_stack_init(struct intel_pt *pt) in intel_pt_br_stack_init() argument
1122 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_br_stack_init()
1127 pt->br_stack = intel_pt_alloc_br_stack(pt->br_stack_sz); in intel_pt_br_stack_init()
1128 if (!pt->br_stack) in intel_pt_br_stack_init()
1134 static void intel_pt_add_br_stack(struct intel_pt *pt, in intel_pt_add_br_stack() argument
1137 struct thread *thread = machine__findnew_thread(pt->machine, in intel_pt_add_br_stack()
1141 thread_stack__br_sample_late(thread, sample->cpu, pt->br_stack, in intel_pt_add_br_stack()
1142 pt->br_stack_sz, sample->ip, in intel_pt_add_br_stack()
1143 pt->kernel_start); in intel_pt_add_br_stack()
1145 sample->branch_stack = pt->br_stack; in intel_pt_add_br_stack()
1151 static struct intel_pt_queue *intel_pt_alloc_queue(struct intel_pt *pt, in intel_pt_alloc_queue() argument
1155 struct perf_env *env = pt->machine->env; in intel_pt_alloc_queue()
1162 if (pt->synth_opts.callchain) { in intel_pt_alloc_queue()
1163 ptq->chain = intel_pt_alloc_chain(pt); in intel_pt_alloc_queue()
1168 if (pt->synth_opts.last_branch || pt->synth_opts.other_events) { in intel_pt_alloc_queue()
1169 unsigned int entry_cnt = max(LBRS_MAX, pt->br_stack_sz); in intel_pt_alloc_queue()
1180 ptq->pt = pt; in intel_pt_alloc_queue()
1182 ptq->exclude_kernel = intel_pt_exclude_kernel(pt); in intel_pt_alloc_queue()
1193 params.return_compression = intel_pt_return_compression(pt); in intel_pt_alloc_queue()
1194 params.branch_enable = intel_pt_branch_enable(pt); in intel_pt_alloc_queue()
1195 params.ctl = intel_pt_ctl(pt); in intel_pt_alloc_queue()
1196 params.max_non_turbo_ratio = pt->max_non_turbo_ratio; in intel_pt_alloc_queue()
1197 params.mtc_period = intel_pt_mtc_period(pt); in intel_pt_alloc_queue()
1198 params.tsc_ctc_ratio_n = pt->tsc_ctc_ratio_n; in intel_pt_alloc_queue()
1199 params.tsc_ctc_ratio_d = pt->tsc_ctc_ratio_d; in intel_pt_alloc_queue()
1200 params.quick = pt->synth_opts.quick; in intel_pt_alloc_queue()
1201 params.vm_time_correlation = pt->synth_opts.vm_time_correlation; in intel_pt_alloc_queue()
1202 params.vm_tm_corr_dry_run = pt->synth_opts.vm_tm_corr_dry_run; in intel_pt_alloc_queue()
1203 params.first_timestamp = pt->first_timestamp; in intel_pt_alloc_queue()
1204 params.max_loops = pt->max_loops; in intel_pt_alloc_queue()
1206 if (pt->filts.cnt > 0) in intel_pt_alloc_queue()
1209 if (pt->synth_opts.instructions) { in intel_pt_alloc_queue()
1210 if (pt->synth_opts.period) { in intel_pt_alloc_queue()
1211 switch (pt->synth_opts.period_type) { in intel_pt_alloc_queue()
1215 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
1219 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
1223 params.period = intel_pt_ns_to_ticks(pt, in intel_pt_alloc_queue()
1224 pt->synth_opts.period); in intel_pt_alloc_queue()
1269 static void intel_pt_first_timestamp(struct intel_pt *pt, u64 timestamp) in intel_pt_first_timestamp() argument
1273 pt->first_timestamp = timestamp; in intel_pt_first_timestamp()
1275 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_first_timestamp()
1276 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_first_timestamp()
1284 static void intel_pt_set_pid_tid_cpu(struct intel_pt *pt, in intel_pt_set_pid_tid_cpu() argument
1289 if (queue->tid == -1 || pt->have_sched_switch) { in intel_pt_set_pid_tid_cpu()
1290 ptq->tid = machine__get_current_tid(pt->machine, ptq->cpu); in intel_pt_set_pid_tid_cpu()
1297 ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid); in intel_pt_set_pid_tid_cpu()
1340 static void intel_pt_setup_time_range(struct intel_pt *pt, in intel_pt_setup_time_range() argument
1343 if (!pt->range_cnt) in intel_pt_setup_time_range()
1346 ptq->sel_timestamp = pt->time_ranges[0].start; in intel_pt_setup_time_range()
1352 ptq->sel_timestamp = pt->time_ranges[0].end; in intel_pt_setup_time_range()
1357 static int intel_pt_setup_queue(struct intel_pt *pt, in intel_pt_setup_queue() argument
1367 ptq = intel_pt_alloc_queue(pt, queue_nr); in intel_pt_setup_queue()
1378 if (pt->sampling_mode && !pt->snapshot_mode && in intel_pt_setup_queue()
1379 pt->timeless_decoding) in intel_pt_setup_queue()
1382 ptq->sync_switch = pt->sync_switch; in intel_pt_setup_queue()
1384 intel_pt_setup_time_range(pt, ptq); in intel_pt_setup_queue()
1393 if (pt->timeless_decoding) in intel_pt_setup_queue()
1430 ret = auxtrace_heap__add(&pt->heap, queue_nr, ptq->timestamp); in intel_pt_setup_queue()
1439 static int intel_pt_setup_queues(struct intel_pt *pt) in intel_pt_setup_queues() argument
1444 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_setup_queues()
1445 ret = intel_pt_setup_queue(pt, &pt->queues.queue_array[i], i); in intel_pt_setup_queues()
1452 static inline bool intel_pt_skip_event(struct intel_pt *pt) in intel_pt_skip_event() argument
1454 return pt->synth_opts.initial_skip && in intel_pt_skip_event()
1455 pt->num_events++ < pt->synth_opts.initial_skip; in intel_pt_skip_event()
1463 static inline bool intel_pt_skip_cbr_event(struct intel_pt *pt) in intel_pt_skip_cbr_event() argument
1465 return pt->synth_opts.initial_skip && in intel_pt_skip_cbr_event()
1466 pt->num_events + 4 < pt->synth_opts.initial_skip; in intel_pt_skip_cbr_event()
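
The two helpers at lines 1452-1466 implement --itrace's initial-skip: every synthesized event bumps num_events until the skip count is consumed. The CBR variant compares num_events + 4 without incrementing, so CBR events neither count against the skip budget nor wait for its very end. Illustrative stand-ins:

#include <stdbool.h>

static bool skip_event(unsigned long *num_events, unsigned long initial_skip)
{
        return initial_skip && (*num_events)++ < initial_skip;
}

static bool skip_cbr_event(unsigned long num_events, unsigned long initial_skip)
{
        /* No increment: CBR events are not charged to the skip budget. */
        return initial_skip && num_events + 4 < initial_skip;
}
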
1483 static void intel_pt_prep_b_sample(struct intel_pt *pt, in intel_pt_prep_b_sample() argument
1490 if (!pt->timeless_decoding) in intel_pt_prep_b_sample()
1491 sample->time = tsc_to_perf_time(ptq->timestamp, &pt->tc); in intel_pt_prep_b_sample()
1509 static inline int intel_pt_opt_inject(struct intel_pt *pt, in intel_pt_opt_inject() argument
1513 if (!pt->synth_opts.inject) in intel_pt_opt_inject()
1519 static int intel_pt_deliver_synth_event(struct intel_pt *pt, in intel_pt_deliver_synth_event() argument
1525 ret = intel_pt_opt_inject(pt, event, sample, type); in intel_pt_deliver_synth_event()
1529 ret = perf_session__deliver_synth_event(pt->session, event, sample); in intel_pt_deliver_synth_event()
1538 struct intel_pt *pt = ptq->pt; in intel_pt_synth_branch_sample() local
1547 if (pt->branches_filter && !(pt->branches_filter & ptq->flags)) in intel_pt_synth_branch_sample()
1550 if (intel_pt_skip_event(pt)) in intel_pt_synth_branch_sample()
1553 intel_pt_prep_b_sample(pt, ptq, event, &sample); in intel_pt_synth_branch_sample()
1555 sample.id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
1556 sample.stream_id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
1562 if (pt->synth_opts.last_branch && sort__mode == SORT_MODE__BRANCH) { in intel_pt_synth_branch_sample()
1582 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_branch_sample()
1583 pt->branches_sample_type); in intel_pt_synth_branch_sample()
1586 static void intel_pt_prep_sample(struct intel_pt *pt, in intel_pt_prep_sample() argument
1591 intel_pt_prep_b_sample(pt, ptq, event, sample); in intel_pt_prep_sample()
1593 if (pt->synth_opts.callchain) { in intel_pt_prep_sample()
1595 pt->synth_opts.callchain_sz + 1, in intel_pt_prep_sample()
1596 sample->ip, pt->kernel_start); in intel_pt_prep_sample()
1600 if (pt->synth_opts.last_branch) { in intel_pt_prep_sample()
1602 pt->br_stack_sz); in intel_pt_prep_sample()
1609 struct intel_pt *pt = ptq->pt; in intel_pt_synth_instruction_sample() local
1613 if (intel_pt_skip_event(pt)) in intel_pt_synth_instruction_sample()
1616 intel_pt_prep_sample(pt, ptq, event, &sample); in intel_pt_synth_instruction_sample()
1618 sample.id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1619 sample.stream_id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1620 if (pt->synth_opts.quick) in intel_pt_synth_instruction_sample()
1635 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_instruction_sample()
1636 pt->instructions_sample_type); in intel_pt_synth_instruction_sample()
1641 struct intel_pt *pt = ptq->pt; in intel_pt_synth_transaction_sample() local
1645 if (intel_pt_skip_event(pt)) in intel_pt_synth_transaction_sample()
1648 intel_pt_prep_sample(pt, ptq, event, &sample); in intel_pt_synth_transaction_sample()
1650 sample.id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1651 sample.stream_id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1653 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_transaction_sample()
1654 pt->transactions_sample_type); in intel_pt_synth_transaction_sample()
1657 static void intel_pt_prep_p_sample(struct intel_pt *pt, in intel_pt_prep_p_sample() argument
1662 intel_pt_prep_sample(pt, ptq, event, sample); in intel_pt_prep_p_sample()
1674 struct intel_pt *pt = ptq->pt; in intel_pt_synth_ptwrite_sample() local
1679 if (intel_pt_skip_event(pt)) in intel_pt_synth_ptwrite_sample()
1682 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_ptwrite_sample()
1684 sample.id = ptq->pt->ptwrites_id; in intel_pt_synth_ptwrite_sample()
1685 sample.stream_id = ptq->pt->ptwrites_id; in intel_pt_synth_ptwrite_sample()
1694 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_ptwrite_sample()
1695 pt->ptwrites_sample_type); in intel_pt_synth_ptwrite_sample()
1700 struct intel_pt *pt = ptq->pt; in intel_pt_synth_cbr_sample() local
1706 if (intel_pt_skip_cbr_event(pt)) in intel_pt_synth_cbr_sample()
1711 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_cbr_sample()
1713 sample.id = ptq->pt->cbr_id; in intel_pt_synth_cbr_sample()
1714 sample.stream_id = ptq->pt->cbr_id; in intel_pt_synth_cbr_sample()
1716 flags = (u16)ptq->state->cbr_payload | (pt->max_non_turbo_ratio << 16); in intel_pt_synth_cbr_sample()
1718 raw.freq = cpu_to_le32(raw.cbr * pt->cbr2khz); in intel_pt_synth_cbr_sample()
1724 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_cbr_sample()
1725 pt->pwr_events_sample_type); in intel_pt_synth_cbr_sample()
1730 struct intel_pt *pt = ptq->pt; in intel_pt_synth_psb_sample() local
1735 if (intel_pt_skip_event(pt)) in intel_pt_synth_psb_sample()
1738 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_psb_sample()
1740 sample.id = ptq->pt->psb_id; in intel_pt_synth_psb_sample()
1741 sample.stream_id = ptq->pt->psb_id; in intel_pt_synth_psb_sample()
1750 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_psb_sample()
1751 pt->pwr_events_sample_type); in intel_pt_synth_psb_sample()
1756 struct intel_pt *pt = ptq->pt; in intel_pt_synth_mwait_sample() local
1761 if (intel_pt_skip_event(pt)) in intel_pt_synth_mwait_sample()
1764 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_mwait_sample()
1766 sample.id = ptq->pt->mwait_id; in intel_pt_synth_mwait_sample()
1767 sample.stream_id = ptq->pt->mwait_id; in intel_pt_synth_mwait_sample()
1775 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_mwait_sample()
1776 pt->pwr_events_sample_type); in intel_pt_synth_mwait_sample()
1781 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pwre_sample() local
1786 if (intel_pt_skip_event(pt)) in intel_pt_synth_pwre_sample()
1789 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_pwre_sample()
1791 sample.id = ptq->pt->pwre_id; in intel_pt_synth_pwre_sample()
1792 sample.stream_id = ptq->pt->pwre_id; in intel_pt_synth_pwre_sample()
1800 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_pwre_sample()
1801 pt->pwr_events_sample_type); in intel_pt_synth_pwre_sample()
1806 struct intel_pt *pt = ptq->pt; in intel_pt_synth_exstop_sample() local
1811 if (intel_pt_skip_event(pt)) in intel_pt_synth_exstop_sample()
1814 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_exstop_sample()
1816 sample.id = ptq->pt->exstop_id; in intel_pt_synth_exstop_sample()
1817 sample.stream_id = ptq->pt->exstop_id; in intel_pt_synth_exstop_sample()
1825 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_exstop_sample()
1826 pt->pwr_events_sample_type); in intel_pt_synth_exstop_sample()
1831 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pwrx_sample() local
1836 if (intel_pt_skip_event(pt)) in intel_pt_synth_pwrx_sample()
1839 intel_pt_prep_p_sample(pt, ptq, event, &sample); in intel_pt_synth_pwrx_sample()
1841 sample.id = ptq->pt->pwrx_id; in intel_pt_synth_pwrx_sample()
1842 sample.stream_id = ptq->pt->pwrx_id; in intel_pt_synth_pwrx_sample()
1850 return intel_pt_deliver_synth_event(pt, event, &sample, in intel_pt_synth_pwrx_sample()
1851 pt->pwr_events_sample_type); in intel_pt_synth_pwrx_sample()
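
Every synth_*_sample function from intel_pt_synth_ptwrite_sample() through intel_pt_synth_pwrx_sample() (lines 1674-1851) follows one shape: check the skip counter, prep the sample, set id and stream_id to the per-event-type id recorded during intel_pt_synth_events(), attach a raw payload, and hand off to intel_pt_deliver_synth_event() with the recorded sample_type. A compressed skeleton of that shape, with stand-in types replacing perf's and the endianness conversion of line 1718 (cpu_to_le32) omitted:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

struct sample {                         /* stand-in for struct perf_sample */
        uint64_t id, stream_id;
        const void *raw_data;
        uint32_t raw_size;
};

typedef int (*deliver_fn)(struct sample *s, uint64_t sample_type);

static int synth_cbr_like_sample(bool skip, uint64_t type_id,
                                 uint64_t sample_type, uint32_t cbr,
                                 uint32_t cbr2khz, deliver_fn deliver)
{
        struct { uint32_t cbr, freq; } raw;     /* illustrative payload */
        struct sample s;

        if (skip)                       /* intel_pt_skip_cbr_event() role */
                return 0;

        memset(&s, 0, sizeof(s));       /* intel_pt_prep_p_sample() role */
        s.id = type_id;                 /* e.g. pt->cbr_id */
        s.stream_id = type_id;

        raw.cbr = cbr;
        raw.freq = cbr * cbr2khz;       /* cf. line 1718 */
        s.raw_data = &raw;
        s.raw_size = sizeof(raw);

        return deliver(&s, sample_type);
}
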
1986 struct intel_pt *pt = ptq->pt; in intel_pt_synth_pebs_sample() local
1987 struct evsel *evsel = pt->pebs_evsel; in intel_pt_synth_pebs_sample()
1993 if (intel_pt_skip_event(pt)) in intel_pt_synth_pebs_sample()
2023 else if (!pt->timeless_decoding) in intel_pt_synth_pebs_sample()
2026 sample.time = tsc_to_perf_time(timestamp, &pt->tc); in intel_pt_synth_pebs_sample()
2030 pt->synth_opts.callchain) { in intel_pt_synth_pebs_sample()
2032 pt->synth_opts.callchain_sz, sample.ip, in intel_pt_synth_pebs_sample()
2033 pt->kernel_start); in intel_pt_synth_pebs_sample()
2058 } else if (pt->synth_opts.last_branch) { in intel_pt_synth_pebs_sample()
2061 pt->br_stack_sz); in intel_pt_synth_pebs_sample()
2112 return intel_pt_deliver_synth_event(pt, event, &sample, sample_type); in intel_pt_synth_pebs_sample()
2115 static int intel_pt_synth_error(struct intel_pt *pt, int code, int cpu, in intel_pt_synth_error() argument
2122 if (pt->synth_opts.error_minus_flags) { in intel_pt_synth_error()
2124 pt->synth_opts.error_minus_flags & AUXTRACE_ERR_FLG_OVERFLOW) in intel_pt_synth_error()
2127 pt->synth_opts.error_minus_flags & AUXTRACE_ERR_FLG_DATA_LOST) in intel_pt_synth_error()
2136 err = perf_session__deliver_synth_event(pt->session, &event, NULL); in intel_pt_synth_error()
2147 struct intel_pt *pt = ptq->pt; in intel_ptq_synth_error() local
2150 tm = pt->timeless_decoding ? 0 : tsc_to_perf_time(tm, &pt->tc); in intel_ptq_synth_error()
2152 return intel_pt_synth_error(pt, state->err, ptq->cpu, ptq->pid, in intel_ptq_synth_error()
2156 static int intel_pt_next_tid(struct intel_pt *pt, struct intel_pt_queue *ptq) in intel_pt_next_tid() argument
2167 err = machine__set_current_tid(pt->machine, ptq->cpu, -1, tid); in intel_pt_next_tid()
2169 queue = &pt->queues.queue_array[ptq->queue_nr]; in intel_pt_next_tid()
2170 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_next_tid()
2179 struct intel_pt *pt = ptq->pt; in intel_pt_is_switch_ip() local
2181 return ip == pt->switch_ip && in intel_pt_is_switch_ip()
2193 struct intel_pt *pt = ptq->pt; in intel_pt_sample() local
2208 if (pt->sample_pebs && state->type & INTEL_PT_BLK_ITEMS) { in intel_pt_sample()
2214 if (pt->sample_pwr_events) { in intel_pt_sample()
2249 if (pt->sample_instructions && (state->type & INTEL_PT_INSTRUCTION)) { in intel_pt_sample()
2255 if (pt->sample_transactions && (state->type & INTEL_PT_TRANSACTION)) { in intel_pt_sample()
2261 if (pt->sample_ptwrites && (state->type & INTEL_PT_PTW)) { in intel_pt_sample()
2270 if (pt->use_thread_stack) { in intel_pt_sample()
2273 state->trace_nr, pt->callstack, in intel_pt_sample()
2274 pt->br_stack_sz_plus, in intel_pt_sample()
2275 pt->mispred_all); in intel_pt_sample()
2280 if (pt->sample_branches) { in intel_pt_sample()
2314 err = intel_pt_next_tid(pt, ptq); in intel_pt_sample()
2328 state->to_ip == pt->ptss_ip && in intel_pt_sample()
2336 static u64 intel_pt_switch_ip(struct intel_pt *pt, u64 *ptss_ip) in intel_pt_switch_ip() argument
2338 struct machine *machine = pt->machine; in intel_pt_switch_ip()
2370 if (pt->have_sched_switch == 1) in intel_pt_switch_ip()
2388 static void intel_pt_enable_sync_switch(struct intel_pt *pt) in intel_pt_enable_sync_switch() argument
2392 pt->sync_switch = true; in intel_pt_enable_sync_switch()
2394 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_enable_sync_switch()
2395 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_enable_sync_switch()
2409 struct intel_pt *pt = ptq->pt; in intel_pt_next_time() local
2414 ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].end; in intel_pt_next_time()
2416 } else if (ptq->sel_idx + 1 < pt->range_cnt) { in intel_pt_next_time()
2420 ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].start; in intel_pt_next_time()
2448 intel_pt_next_tid(ptq->pt, ptq); in intel_pt_time_filter()
2478 struct intel_pt *pt = ptq->pt; in intel_pt_run_decoder() local
2482 if (!pt->kernel_start) { in intel_pt_run_decoder()
2483 pt->kernel_start = machine__kernel_start(pt->machine); in intel_pt_run_decoder()
2484 if (pt->per_cpu_mmaps && in intel_pt_run_decoder()
2485 (pt->have_sched_switch == 1 || pt->have_sched_switch == 3) && in intel_pt_run_decoder()
2486 !pt->timeless_decoding && intel_pt_tracing_kernel(pt) && in intel_pt_run_decoder()
2487 !pt->sampling_mode && !pt->synth_opts.vm_time_correlation) { in intel_pt_run_decoder()
2488 pt->switch_ip = intel_pt_switch_ip(pt, &pt->ptss_ip); in intel_pt_run_decoder()
2489 if (pt->switch_ip) { in intel_pt_run_decoder()
2491 pt->switch_ip, pt->ptss_ip); in intel_pt_run_decoder()
2492 intel_pt_enable_sync_switch(pt); in intel_pt_run_decoder()
2509 state->from_ip >= pt->kernel_start) { in intel_pt_run_decoder()
2511 intel_pt_next_tid(pt, ptq); in intel_pt_run_decoder()
2513 if (pt->synth_opts.errors) { in intel_pt_run_decoder()
2526 if (pt->est_tsc && in intel_pt_run_decoder()
2527 (state->from_ip >= pt->kernel_start || !state->from_ip) && in intel_pt_run_decoder()
2528 state->to_ip && state->to_ip < pt->kernel_start) { in intel_pt_run_decoder()
2550 if (!pt->timeless_decoding && ptq->timestamp >= *timestamp) { in intel_pt_run_decoder()
2558 static inline int intel_pt_update_queues(struct intel_pt *pt) in intel_pt_update_queues() argument
2560 if (pt->queues.new_data) { in intel_pt_update_queues()
2561 pt->queues.new_data = false; in intel_pt_update_queues()
2562 return intel_pt_setup_queues(pt); in intel_pt_update_queues()
2567 static int intel_pt_process_queues(struct intel_pt *pt, u64 timestamp) in intel_pt_process_queues() argument
2577 if (!pt->heap.heap_cnt) in intel_pt_process_queues()
2580 if (pt->heap.heap_array[0].ordinal >= timestamp) in intel_pt_process_queues()
2583 queue_nr = pt->heap.heap_array[0].queue_nr; in intel_pt_process_queues()
2584 queue = &pt->queues.queue_array[queue_nr]; in intel_pt_process_queues()
2588 queue_nr, pt->heap.heap_array[0].ordinal, in intel_pt_process_queues()
2591 auxtrace_heap__pop(&pt->heap); in intel_pt_process_queues()
2593 if (pt->heap.heap_cnt) { in intel_pt_process_queues()
2594 ts = pt->heap.heap_array[0].ordinal + 1; in intel_pt_process_queues()
2601 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_queues()
2606 auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
2611 ret = auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
2622 static int intel_pt_process_timeless_queues(struct intel_pt *pt, pid_t tid, in intel_pt_process_timeless_queues() argument
2625 struct auxtrace_queues *queues = &pt->queues; in intel_pt_process_timeless_queues()
2630 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_process_timeless_queues()
2635 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_timeless_queues()
2646 struct machine *m = ptq->pt->machine; in intel_pt_sample_set_pid_tid_cpu()
2670 static int intel_pt_process_timeless_sample(struct intel_pt *pt, in intel_pt_process_timeless_sample() argument
2677 queue = auxtrace_queues__sample_queue(&pt->queues, sample, pt->session); in intel_pt_process_timeless_sample()
2692 static int intel_pt_lost(struct intel_pt *pt, struct perf_sample *sample) in intel_pt_lost() argument
2694 return intel_pt_synth_error(pt, INTEL_PT_ERR_LOST, sample->cpu, in intel_pt_lost()
2698 static struct intel_pt_queue *intel_pt_cpu_to_ptq(struct intel_pt *pt, int cpu) in intel_pt_cpu_to_ptq() argument
2702 if (cpu < 0 || !pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
2705 if ((unsigned)cpu >= pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
2706 i = pt->queues.nr_queues - 1; in intel_pt_cpu_to_ptq()
2710 if (pt->queues.queue_array[i].cpu == cpu) in intel_pt_cpu_to_ptq()
2711 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
2714 if (pt->queues.queue_array[--i].cpu == cpu) in intel_pt_cpu_to_ptq()
2715 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
2718 for (; j < pt->queues.nr_queues; j++) { in intel_pt_cpu_to_ptq()
2719 if (pt->queues.queue_array[j].cpu == cpu) in intel_pt_cpu_to_ptq()
2720 return pt->queues.queue_array[j].priv; in intel_pt_cpu_to_ptq()
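
The search at lines 2698-2720 leans on queues normally being created one per CPU in CPU order: probe queue_array[cpu] directly, then scan downward, then finish with an upward scan that skips the indices the downward pass already covered. A self-contained sketch over a plain array:

#include <stddef.h>

struct queue { int cpu; void *priv; };

static void *cpu_to_priv(struct queue *q, unsigned int nr, int cpu)
{
        unsigned int i, j;

        if (cpu < 0 || !nr)
                return NULL;

        i = (unsigned int)cpu >= nr ? nr - 1 : (unsigned int)cpu;

        if (q[i].cpu == cpu)            /* common case: queue i is cpu i */
                return q[i].priv;

        for (j = 0; i > 0; j++) {       /* scan down towards index 0 */
                if (q[--i].cpu == cpu)
                        return q[i].priv;
        }

        for (; j < nr; j++) {           /* scan up over the remainder */
                if (q[j].cpu == cpu)
                        return q[j].priv;
        }

        return NULL;
}
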
2726 static int intel_pt_sync_switch(struct intel_pt *pt, int cpu, pid_t tid, in intel_pt_sync_switch() argument
2732 if (!pt->sync_switch) in intel_pt_sync_switch()
2735 ptq = intel_pt_cpu_to_ptq(pt, cpu); in intel_pt_sync_switch()
2750 &pt->tc); in intel_pt_sync_switch()
2751 err = auxtrace_heap__add(&pt->heap, ptq->queue_nr, in intel_pt_sync_switch()
2771 static int intel_pt_process_switch(struct intel_pt *pt, in intel_pt_process_switch() argument
2776 struct evsel *evsel = evlist__id2evsel(pt->session->evlist, sample->id); in intel_pt_process_switch()
2778 if (evsel != pt->switch_evsel) in intel_pt_process_switch()
2786 &pt->tc)); in intel_pt_process_switch()
2788 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_process_switch()
2792 return machine__set_current_tid(pt->machine, cpu, -1, tid); in intel_pt_process_switch()
2795 static int intel_pt_context_switch_in(struct intel_pt *pt, in intel_pt_context_switch_in() argument
2802 if (pt->sync_switch) { in intel_pt_context_switch_in()
2805 ptq = intel_pt_cpu_to_ptq(pt, cpu); in intel_pt_context_switch_in()
2827 if (machine__get_current_tid(pt->machine, cpu) == tid) in intel_pt_context_switch_in()
2830 return machine__set_current_tid(pt->machine, cpu, pid, tid); in intel_pt_context_switch_in()
2833 static int intel_pt_context_switch(struct intel_pt *pt, union perf_event *event, in intel_pt_context_switch() argument
2842 if (pt->have_sched_switch == 3) { in intel_pt_context_switch()
2844 return intel_pt_context_switch_in(pt, sample); in intel_pt_context_switch()
2861 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_context_switch()
2865 return machine__set_current_tid(pt->machine, cpu, pid, tid); in intel_pt_context_switch()
2868 static int intel_pt_process_itrace_start(struct intel_pt *pt, in intel_pt_process_itrace_start() argument
2872 if (!pt->per_cpu_mmaps) in intel_pt_process_itrace_start()
2878 perf_time_to_tsc(sample->time, &pt->tc)); in intel_pt_process_itrace_start()
2880 return machine__set_current_tid(pt->machine, sample->cpu, in intel_pt_process_itrace_start()
2897 static int intel_pt_text_poke(struct intel_pt *pt, union perf_event *event) in intel_pt_text_poke() argument
2903 struct thread *thread = pt->unknown_thread; in intel_pt_text_poke()
2905 struct machine *machine = pt->machine; in intel_pt_text_poke()
2951 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_event() local
2965 timestamp = perf_time_to_tsc(sample->time, &pt->tc); in intel_pt_process_event()
2969 if (timestamp || pt->timeless_decoding) { in intel_pt_process_event()
2970 err = intel_pt_update_queues(pt); in intel_pt_process_event()
2975 if (pt->timeless_decoding) { in intel_pt_process_event()
2976 if (pt->sampling_mode) { in intel_pt_process_event()
2978 err = intel_pt_process_timeless_sample(pt, in intel_pt_process_event()
2981 err = intel_pt_process_timeless_queues(pt, in intel_pt_process_event()
2986 if (!pt->first_timestamp) in intel_pt_process_event()
2987 intel_pt_first_timestamp(pt, timestamp); in intel_pt_process_event()
2988 err = intel_pt_process_queues(pt, timestamp); in intel_pt_process_event()
2994 if (pt->synth_opts.add_callchain && !sample->callchain) in intel_pt_process_event()
2995 intel_pt_add_callchain(pt, sample); in intel_pt_process_event()
2996 if (pt->synth_opts.add_last_branch && !sample->branch_stack) in intel_pt_process_event()
2997 intel_pt_add_br_stack(pt, sample); in intel_pt_process_event()
3002 pt->synth_opts.errors) { in intel_pt_process_event()
3003 err = intel_pt_lost(pt, sample); in intel_pt_process_event()
3008 if (pt->switch_evsel && event->header.type == PERF_RECORD_SAMPLE) in intel_pt_process_event()
3009 err = intel_pt_process_switch(pt, sample); in intel_pt_process_event()
3011 err = intel_pt_process_itrace_start(pt, event, sample); in intel_pt_process_event()
3014 err = intel_pt_context_switch(pt, event, sample); in intel_pt_process_event()
3017 err = intel_pt_text_poke(pt, event); in intel_pt_process_event()
3019 if (intel_pt_enable_logging && intel_pt_log_events(pt, sample->time)) { in intel_pt_process_event()
3030 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_flush() local
3040 ret = intel_pt_update_queues(pt); in intel_pt_flush()
3044 if (pt->timeless_decoding) in intel_pt_flush()
3045 return intel_pt_process_timeless_queues(pt, -1, in intel_pt_flush()
3048 return intel_pt_process_queues(pt, MAX_TIMESTAMP); in intel_pt_flush()
3053 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free_events() local
3055 struct auxtrace_queues *queues = &pt->queues; in intel_pt_free_events()
3068 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free() local
3071 auxtrace_heap__free(&pt->heap); in intel_pt_free()
3074 intel_pt_free_vmcs_info(pt); in intel_pt_free()
3075 thread__put(pt->unknown_thread); in intel_pt_free()
3076 addr_filters__exit(&pt->filts); in intel_pt_free()
3077 zfree(&pt->chain); in intel_pt_free()
3078 zfree(&pt->filter); in intel_pt_free()
3079 zfree(&pt->time_ranges); in intel_pt_free()
3080 free(pt); in intel_pt_free()
3086 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_evsel_is_auxtrace() local
3089 return evsel->core.attr.type == pt->pmu_type; in intel_pt_evsel_is_auxtrace()
3096 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_auxtrace_event() local
3099 if (!pt->data_queued) { in intel_pt_process_auxtrace_event()
3113 err = auxtrace_queues__add_event(&pt->queues, session, event, in intel_pt_process_auxtrace_event()
3121 intel_pt_dump_event(pt, buffer->data, in intel_pt_process_auxtrace_event()
3135 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_queue_data() local
3140 return auxtrace_queues__add_event(&pt->queues, session, event, in intel_pt_queue_data()
3145 timestamp = perf_time_to_tsc(sample->time, &pt->tc); in intel_pt_queue_data()
3149 return auxtrace_queues__add_sample(&pt->queues, session, sample, in intel_pt_queue_data()
3206 static struct evsel *intel_pt_evsel(struct intel_pt *pt, in intel_pt_evsel() argument
3212 if (evsel->core.attr.type == pt->pmu_type && evsel->core.ids) in intel_pt_evsel()
3219 static int intel_pt_synth_events(struct intel_pt *pt, in intel_pt_synth_events() argument
3223 struct evsel *evsel = intel_pt_evsel(pt, evlist); in intel_pt_synth_events()
3239 if (pt->timeless_decoding) in intel_pt_synth_events()
3243 if (!pt->per_cpu_mmaps) in intel_pt_synth_events()
3257 if (pt->synth_opts.branches) { in intel_pt_synth_events()
3264 pt->sample_branches = true; in intel_pt_synth_events()
3265 pt->branches_sample_type = attr.sample_type; in intel_pt_synth_events()
3266 pt->branches_id = id; in intel_pt_synth_events()
3271 if (pt->synth_opts.callchain) in intel_pt_synth_events()
3273 if (pt->synth_opts.last_branch) { in intel_pt_synth_events()
3283 if (pt->synth_opts.instructions) { in intel_pt_synth_events()
3285 if (pt->synth_opts.period_type == PERF_ITRACE_PERIOD_NANOSECS) in intel_pt_synth_events()
3287 intel_pt_ns_to_ticks(pt, pt->synth_opts.period); in intel_pt_synth_events()
3289 attr.sample_period = pt->synth_opts.period; in intel_pt_synth_events()
3293 pt->sample_instructions = true; in intel_pt_synth_events()
3294 pt->instructions_sample_type = attr.sample_type; in intel_pt_synth_events()
3295 pt->instructions_id = id; in intel_pt_synth_events()
3302 if (pt->synth_opts.transactions) { in intel_pt_synth_events()
3307 pt->sample_transactions = true; in intel_pt_synth_events()
3308 pt->transactions_sample_type = attr.sample_type; in intel_pt_synth_events()
3309 pt->transactions_id = id; in intel_pt_synth_events()
3317 if (pt->synth_opts.ptwrites) { in intel_pt_synth_events()
3322 pt->sample_ptwrites = true; in intel_pt_synth_events()
3323 pt->ptwrites_sample_type = attr.sample_type; in intel_pt_synth_events()
3324 pt->ptwrites_id = id; in intel_pt_synth_events()
3329 if (pt->synth_opts.pwr_events) { in intel_pt_synth_events()
3330 pt->sample_pwr_events = true; in intel_pt_synth_events()
3331 pt->pwr_events_sample_type = attr.sample_type; in intel_pt_synth_events()
3337 pt->cbr_id = id; in intel_pt_synth_events()
3345 pt->psb_id = id; in intel_pt_synth_events()
3350 if (pt->synth_opts.pwr_events && (evsel->core.attr.config & 0x10)) { in intel_pt_synth_events()
3355 pt->mwait_id = id; in intel_pt_synth_events()
3363 pt->pwre_id = id; in intel_pt_synth_events()
3371 pt->exstop_id = id; in intel_pt_synth_events()
3379 pt->pwrx_id = id; in intel_pt_synth_events()
3387 static void intel_pt_setup_pebs_events(struct intel_pt *pt) in intel_pt_setup_pebs_events() argument
3391 if (!pt->synth_opts.other_events) in intel_pt_setup_pebs_events()
3394 evlist__for_each_entry(pt->session->evlist, evsel) { in intel_pt_setup_pebs_events()
3396 pt->sample_pebs = true; in intel_pt_setup_pebs_events()
3397 pt->pebs_evsel = evsel; in intel_pt_setup_pebs_events()
3431 struct intel_pt *pt = data; in intel_pt_perf_config() local
3434 pt->mispred_all = perf_config_bool(var, value); in intel_pt_perf_config()
3437 perf_config_int(&pt->max_loops, var, value); in intel_pt_perf_config()
3443 static u64 intel_pt_tsc_start(u64 ns, struct intel_pt *pt) in intel_pt_tsc_start() argument
3447 tsc = perf_time_to_tsc(ns, &pt->tc); in intel_pt_tsc_start()
3450 tm = tsc_to_perf_time(tsc, &pt->tc); in intel_pt_tsc_start()
3457 tm = tsc_to_perf_time(++tsc, &pt->tc); in intel_pt_tsc_start()
3463 static u64 intel_pt_tsc_end(u64 ns, struct intel_pt *pt) in intel_pt_tsc_end() argument
3467 tsc = perf_time_to_tsc(ns, &pt->tc); in intel_pt_tsc_end()
3470 tm = tsc_to_perf_time(tsc, &pt->tc); in intel_pt_tsc_end()
3477 tm = tsc_to_perf_time(--tsc, &pt->tc); in intel_pt_tsc_end()
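
perf_time_to_tsc() only approximates the inverse mapping, so the two helpers at lines 3443-3477 nudge the converted value until it round-trips to the correct side of the boundary: intel_pt_tsc_start() lands on the smallest TSC whose perf time is at or after ns (the ++tsc at line 3457), and intel_pt_tsc_end() on the largest whose perf time is at or before ns (the --tsc at line 3477). A sketch of the start side, with conversion callbacks standing in for perf_time_to_tsc()/tsc_to_perf_time():

#include <stdint.h>

typedef uint64_t (*ns_to_tsc_fn)(uint64_t ns);
typedef uint64_t (*tsc_to_ns_fn)(uint64_t tsc);

/* Smallest tsc whose round-trip time is >= ns; like the original, this
 * assumes ns lies within the traced range so tsc does not underflow. */
static uint64_t tsc_range_start(uint64_t ns, ns_to_tsc_fn to_tsc,
                                tsc_to_ns_fn to_ns)
{
        uint64_t tsc = to_tsc(ns);      /* approximate inverse */
        uint64_t tm;

        while ((tm = to_ns(tsc)) >= ns) /* step below the boundary... */
                tsc -= 1;

        while (tm < ns)                 /* ...then up to the first tick at/after it */
                tm = to_ns(++tsc);

        return tsc;
}

intel_pt_setup_time_ranges() (lines 3482-3510, next) then maps each --time endpoint through these helpers, keeping 0 for an open-ended start or end.
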
3482 static int intel_pt_setup_time_ranges(struct intel_pt *pt, in intel_pt_setup_time_ranges() argument
3489 if (!n || !p || pt->timeless_decoding) in intel_pt_setup_time_ranges()
3492 pt->time_ranges = calloc(n, sizeof(struct range)); in intel_pt_setup_time_ranges()
3493 if (!pt->time_ranges) in intel_pt_setup_time_ranges()
3496 pt->range_cnt = n; in intel_pt_setup_time_ranges()
3501 struct range *r = &pt->time_ranges[i]; in intel_pt_setup_time_ranges()
3509 r->start = ts ? intel_pt_tsc_start(ts, pt) : 0; in intel_pt_setup_time_ranges()
3510 r->end = te ? intel_pt_tsc_end(te, pt) : 0; in intel_pt_setup_time_ranges()
3521 static int intel_pt_parse_vm_tm_corr_arg(struct intel_pt *pt, char **args) in intel_pt_parse_vm_tm_corr_arg() argument
3538 pt->dflt_tsc_offset = tsc_offset; in intel_pt_parse_vm_tm_corr_arg()
3548 vmcs_info = intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, tsc_offset); in intel_pt_parse_vm_tm_corr_arg()
3560 static int intel_pt_parse_vm_tm_corr_args(struct intel_pt *pt) in intel_pt_parse_vm_tm_corr_args() argument
3562 char *args = pt->synth_opts.vm_tm_corr_args; in intel_pt_parse_vm_tm_corr_args()
3569 ret = intel_pt_parse_vm_tm_corr_arg(pt, &args); in intel_pt_parse_vm_tm_corr_args()
3629 struct intel_pt *pt; in intel_pt_process_auxtrace_info() local
3638 pt = zalloc(sizeof(struct intel_pt)); in intel_pt_process_auxtrace_info()
3639 if (!pt) in intel_pt_process_auxtrace_info()
3642 pt->vmcs_info = RB_ROOT; in intel_pt_process_auxtrace_info()
3644 addr_filters__init(&pt->filts); in intel_pt_process_auxtrace_info()
3646 err = perf_config(intel_pt_perf_config, pt); in intel_pt_process_auxtrace_info()
3650 err = auxtrace_queues__init(&pt->queues); in intel_pt_process_auxtrace_info()
3657 pt->synth_opts = *session->itrace_synth_opts; in intel_pt_process_auxtrace_info()
3661 itrace_synth_opts__set_default(&pt->synth_opts, opts->default_no_sample); in intel_pt_process_auxtrace_info()
3663 pt->synth_opts.branches = false; in intel_pt_process_auxtrace_info()
3664 pt->synth_opts.callchain = true; in intel_pt_process_auxtrace_info()
3665 pt->synth_opts.add_callchain = true; in intel_pt_process_auxtrace_info()
3667 pt->synth_opts.thread_stack = opts->thread_stack; in intel_pt_process_auxtrace_info()
3670 pt->session = session; in intel_pt_process_auxtrace_info()
3671 pt->machine = &session->machines.host; /* No kvm support */ in intel_pt_process_auxtrace_info()
3672 pt->auxtrace_type = auxtrace_info->type; in intel_pt_process_auxtrace_info()
3673 pt->pmu_type = auxtrace_info->priv[INTEL_PT_PMU_TYPE]; in intel_pt_process_auxtrace_info()
3674 pt->tc.time_shift = auxtrace_info->priv[INTEL_PT_TIME_SHIFT]; in intel_pt_process_auxtrace_info()
3675 pt->tc.time_mult = auxtrace_info->priv[INTEL_PT_TIME_MULT]; in intel_pt_process_auxtrace_info()
3676 pt->tc.time_zero = auxtrace_info->priv[INTEL_PT_TIME_ZERO]; in intel_pt_process_auxtrace_info()
3677 pt->cap_user_time_zero = auxtrace_info->priv[INTEL_PT_CAP_USER_TIME_ZERO]; in intel_pt_process_auxtrace_info()
3678 pt->tsc_bit = auxtrace_info->priv[INTEL_PT_TSC_BIT]; in intel_pt_process_auxtrace_info()
3679 pt->noretcomp_bit = auxtrace_info->priv[INTEL_PT_NORETCOMP_BIT]; in intel_pt_process_auxtrace_info()
3680 pt->have_sched_switch = auxtrace_info->priv[INTEL_PT_HAVE_SCHED_SWITCH]; in intel_pt_process_auxtrace_info()
3681 pt->snapshot_mode = auxtrace_info->priv[INTEL_PT_SNAPSHOT_MODE]; in intel_pt_process_auxtrace_info()
3682 pt->per_cpu_mmaps = auxtrace_info->priv[INTEL_PT_PER_CPU_MMAPS]; in intel_pt_process_auxtrace_info()
3687 pt->mtc_bit = auxtrace_info->priv[INTEL_PT_MTC_BIT]; in intel_pt_process_auxtrace_info()
3688 pt->mtc_freq_bits = auxtrace_info->priv[INTEL_PT_MTC_FREQ_BITS]; in intel_pt_process_auxtrace_info()
3689 pt->tsc_ctc_ratio_n = auxtrace_info->priv[INTEL_PT_TSC_CTC_N]; in intel_pt_process_auxtrace_info()
3690 pt->tsc_ctc_ratio_d = auxtrace_info->priv[INTEL_PT_TSC_CTC_D]; in intel_pt_process_auxtrace_info()
3691 pt->cyc_bit = auxtrace_info->priv[INTEL_PT_CYC_BIT]; in intel_pt_process_auxtrace_info()
3697 pt->max_non_turbo_ratio = in intel_pt_process_auxtrace_info()
3724 pt->filter = memdup(filter, len); in intel_pt_process_auxtrace_info()
3725 if (!pt->filter) { in intel_pt_process_auxtrace_info()
3730 mem_bswap_64(pt->filter, len); in intel_pt_process_auxtrace_info()
3731 if (pt->filter[len - 1]) { in intel_pt_process_auxtrace_info()
3736 err = addr_filters__parse_bare_filter(&pt->filts, in intel_pt_process_auxtrace_info()
3741 intel_pt_print_info_str("Filter string", pt->filter); in intel_pt_process_auxtrace_info()
3744 pt->timeless_decoding = intel_pt_timeless_decoding(pt); in intel_pt_process_auxtrace_info()
3745 if (pt->timeless_decoding && !pt->tc.time_mult) in intel_pt_process_auxtrace_info()
3746 pt->tc.time_mult = 1; in intel_pt_process_auxtrace_info()
3747 pt->have_tsc = intel_pt_have_tsc(pt); in intel_pt_process_auxtrace_info()
3748 pt->sampling_mode = intel_pt_sampling_mode(pt); in intel_pt_process_auxtrace_info()
3749 pt->est_tsc = !pt->timeless_decoding; in intel_pt_process_auxtrace_info()
3751 if (pt->synth_opts.vm_time_correlation) { in intel_pt_process_auxtrace_info()
3752 if (pt->timeless_decoding) { in intel_pt_process_auxtrace_info()
3763 if (!intel_pt_have_mtc(pt)) { in intel_pt_process_auxtrace_info()
3768 err = intel_pt_parse_vm_tm_corr_args(pt); in intel_pt_process_auxtrace_info()
3773 pt->unknown_thread = thread__new(999999999, 999999999); in intel_pt_process_auxtrace_info()
3774 if (!pt->unknown_thread) { in intel_pt_process_auxtrace_info()
3785 INIT_LIST_HEAD(&pt->unknown_thread->node); in intel_pt_process_auxtrace_info()
3787 err = thread__set_comm(pt->unknown_thread, "unknown", 0); in intel_pt_process_auxtrace_info()
3790 if (thread__init_maps(pt->unknown_thread, pt->machine)) { in intel_pt_process_auxtrace_info()
3795 pt->auxtrace.process_event = intel_pt_process_event; in intel_pt_process_auxtrace_info()
3796 pt->auxtrace.process_auxtrace_event = intel_pt_process_auxtrace_event; in intel_pt_process_auxtrace_info()
3797 pt->auxtrace.queue_data = intel_pt_queue_data; in intel_pt_process_auxtrace_info()
3798 pt->auxtrace.dump_auxtrace_sample = intel_pt_dump_sample; in intel_pt_process_auxtrace_info()
3799 pt->auxtrace.flush_events = intel_pt_flush; in intel_pt_process_auxtrace_info()
3800 pt->auxtrace.free_events = intel_pt_free_events; in intel_pt_process_auxtrace_info()
3801 pt->auxtrace.free = intel_pt_free; in intel_pt_process_auxtrace_info()
3802 pt->auxtrace.evsel_is_auxtrace = intel_pt_evsel_is_auxtrace; in intel_pt_process_auxtrace_info()
3803 session->auxtrace = &pt->auxtrace; in intel_pt_process_auxtrace_info()
3808 if (pt->have_sched_switch == 1) { in intel_pt_process_auxtrace_info()
3809 pt->switch_evsel = intel_pt_find_sched_switch(session->evlist); in intel_pt_process_auxtrace_info()
3810 if (!pt->switch_evsel) { in intel_pt_process_auxtrace_info()
3815 } else if (pt->have_sched_switch == 2 && in intel_pt_process_auxtrace_info()
3822 if (pt->synth_opts.log) in intel_pt_process_auxtrace_info()
3826 if (pt->tc.time_mult) { in intel_pt_process_auxtrace_info()
3827 u64 tsc_freq = intel_pt_ns_to_ticks(pt, 1000000000); in intel_pt_process_auxtrace_info()
3829 if (!pt->max_non_turbo_ratio) in intel_pt_process_auxtrace_info()
3830 pt->max_non_turbo_ratio = in intel_pt_process_auxtrace_info()
3834 pt->max_non_turbo_ratio); in intel_pt_process_auxtrace_info()
3835 pt->cbr2khz = tsc_freq / pt->max_non_turbo_ratio / 1000; in intel_pt_process_auxtrace_info()
3838 err = intel_pt_setup_time_ranges(pt, session->itrace_synth_opts); in intel_pt_process_auxtrace_info()
3842 if (pt->synth_opts.calls) in intel_pt_process_auxtrace_info()
3843 pt->branches_filter |= PERF_IP_FLAG_CALL | PERF_IP_FLAG_ASYNC | in intel_pt_process_auxtrace_info()
3845 if (pt->synth_opts.returns) in intel_pt_process_auxtrace_info()
3846 pt->branches_filter |= PERF_IP_FLAG_RETURN | in intel_pt_process_auxtrace_info()
3849 if ((pt->synth_opts.callchain || pt->synth_opts.add_callchain) && in intel_pt_process_auxtrace_info()
3854 pt->synth_opts.callchain = false; in intel_pt_process_auxtrace_info()
3855 pt->synth_opts.add_callchain = false; in intel_pt_process_auxtrace_info()
3859 if (pt->synth_opts.add_callchain) { in intel_pt_process_auxtrace_info()
3860 err = intel_pt_callchain_init(pt); in intel_pt_process_auxtrace_info()
3865 if (pt->synth_opts.last_branch || pt->synth_opts.add_last_branch) { in intel_pt_process_auxtrace_info()
3866 pt->br_stack_sz = pt->synth_opts.last_branch_sz; in intel_pt_process_auxtrace_info()
3867 pt->br_stack_sz_plus = pt->br_stack_sz; in intel_pt_process_auxtrace_info()
3870 if (pt->synth_opts.add_last_branch) { in intel_pt_process_auxtrace_info()
3871 err = intel_pt_br_stack_init(pt); in intel_pt_process_auxtrace_info()
3881 if (intel_pt_tracing_kernel(pt)) in intel_pt_process_auxtrace_info()
3882 pt->br_stack_sz_plus += 1024; in intel_pt_process_auxtrace_info()
3884 pt->br_stack_sz_plus += 1; in intel_pt_process_auxtrace_info()
3887 pt->use_thread_stack = pt->synth_opts.callchain || in intel_pt_process_auxtrace_info()
3888 pt->synth_opts.add_callchain || in intel_pt_process_auxtrace_info()
3889 pt->synth_opts.thread_stack || in intel_pt_process_auxtrace_info()
3890 pt->synth_opts.last_branch || in intel_pt_process_auxtrace_info()
3891 pt->synth_opts.add_last_branch; in intel_pt_process_auxtrace_info()
3893 pt->callstack = pt->synth_opts.callchain || in intel_pt_process_auxtrace_info()
3894 pt->synth_opts.add_callchain || in intel_pt_process_auxtrace_info()
3895 pt->synth_opts.thread_stack; in intel_pt_process_auxtrace_info()
3897 err = intel_pt_synth_events(pt, session); in intel_pt_process_auxtrace_info()
3901 intel_pt_setup_pebs_events(pt); in intel_pt_process_auxtrace_info()
3903 if (pt->sampling_mode || list_empty(&session->auxtrace_index)) in intel_pt_process_auxtrace_info()
3906 err = auxtrace_queues__process_index(&pt->queues, session); in intel_pt_process_auxtrace_info()
3910 if (pt->queues.populated) in intel_pt_process_auxtrace_info()
3911 pt->data_queued = true; in intel_pt_process_auxtrace_info()
3913 if (pt->timeless_decoding) in intel_pt_process_auxtrace_info()
3919 zfree(&pt->chain); in intel_pt_process_auxtrace_info()
3920 thread__zput(pt->unknown_thread); in intel_pt_process_auxtrace_info()
3923 auxtrace_queues__free(&pt->queues); in intel_pt_process_auxtrace_info()
3926 addr_filters__exit(&pt->filts); in intel_pt_process_auxtrace_info()
3927 zfree(&pt->filter); in intel_pt_process_auxtrace_info()
3928 zfree(&pt->time_ranges); in intel_pt_process_auxtrace_info()
3929 free(pt); in intel_pt_process_auxtrace_info()