Lines Matching refs:evsel

26 	struct perf_evsel *evsel;  member
57 if (a->evsel == NULL && b->evsel == NULL) { in saved_value_cmp()
67 if (a->evsel == b->evsel) in saved_value_cmp()
69 if ((char *)a->evsel < (char *)b->evsel) in saved_value_cmp()
95 static struct saved_value *saved_value_lookup(struct perf_evsel *evsel, in saved_value_lookup() argument
106 .evsel = evsel, in saved_value_lookup()
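
The hits at source lines 26-106 outline a small cache of per-event values: a struct with an evsel member, a comparator, and a lookup that builds a key from an evsel. The helpers named here appear to come from perf's shadow-stat code (tools/perf/util/stat-shadow.c). Below is a minimal sketch of the ordering the comparator fragments imply, assuming the entries sit in some ordered container; every name other than the evsel member is illustrative, not taken from the file.

/* Sketch of the comparison at source lines 57-69: entries are keyed by the
 * evsel pointer itself, compared by address, with the NULL/NULL pair handled
 * specially.  Only "evsel" (source line 26) is a real member name. */
struct perf_evsel;                              /* opaque here */

struct saved_value_sketch {
	struct perf_evsel *evsel;
};

static int saved_value_cmp_sketch(const struct saved_value_sketch *a,
				  const struct saved_value_sketch *b)
{
	if (a->evsel == NULL && b->evsel == NULL)
		return 0;       /* the real code falls back to another key here */
	if (a->evsel == b->evsel)
		return 0;
	return (char *)a->evsel < (char *)b->evsel ? -1 : 1;
}

saved_value_lookup() (source lines 95-106) then builds an on-stack key with .evsel = evsel and searches the container for the cached value, presumably creating a node when the lookup misses.
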
147 static int evsel_context(struct perf_evsel *evsel) in evsel_context() argument
151 if (evsel->attr.exclude_kernel) in evsel_context()
153 if (evsel->attr.exclude_user) in evsel_context()
155 if (evsel->attr.exclude_hv) in evsel_context()
157 if (evsel->attr.exclude_host) in evsel_context()
159 if (evsel->attr.exclude_idle) in evsel_context()
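
The evsel_context() hits show the "context" being derived from the event's exclude_* attributes, so shadow totals for, say, kernel-only counters are only ever combined with other kernel-only counters. A sketch of that mapping follows, assuming a bit-per-flag encoding; the CTX_BIT_* names are assumptions, and the real function takes the evsel and reads evsel->attr, while this sketch takes the attr directly to stay self-contained.

#include <linux/perf_event.h>

/* Bit names and layout are assumptions; only the exclude_* tests themselves
 * appear in the hits (source lines 151-159). */
enum {
	CTX_BIT_USER   = 1 << 0,
	CTX_BIT_KERNEL = 1 << 1,
	CTX_BIT_HV     = 1 << 2,
	CTX_BIT_HOST   = 1 << 3,
	CTX_BIT_IDLE   = 1 << 4,
};

int evsel_context_sketch(const struct perf_event_attr *attr)
{
	int ctx = 0;

	if (attr->exclude_kernel)
		ctx |= CTX_BIT_KERNEL;
	if (attr->exclude_user)
		ctx |= CTX_BIT_USER;
	if (attr->exclude_hv)
		ctx |= CTX_BIT_HV;
	if (attr->exclude_host)
		ctx |= CTX_BIT_HOST;
	if (attr->exclude_idle)
		ctx |= CTX_BIT_IDLE;

	return ctx;
}
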
414 struct perf_evsel *evsel, double avg, in print_stalled_cycles_frontend() argument
420 int ctx = evsel_context(evsel); in print_stalled_cycles_frontend()
437 struct perf_evsel *evsel, double avg, in print_stalled_cycles_backend() argument
443 int ctx = evsel_context(evsel); in print_stalled_cycles_backend()
456 struct perf_evsel *evsel, in print_branch_misses() argument
463 int ctx = evsel_context(evsel); in print_branch_misses()
476 struct perf_evsel *evsel, in print_l1_dcache_misses() argument
484 int ctx = evsel_context(evsel); in print_l1_dcache_misses()
497 struct perf_evsel *evsel, in print_l1_icache_misses() argument
505 int ctx = evsel_context(evsel); in print_l1_icache_misses()
517 struct perf_evsel *evsel, in print_dtlb_cache_misses() argument
524 int ctx = evsel_context(evsel); in print_dtlb_cache_misses()
536 struct perf_evsel *evsel, in print_itlb_cache_misses() argument
543 int ctx = evsel_context(evsel); in print_itlb_cache_misses()
555 struct perf_evsel *evsel, in print_ll_cache_misses() argument
562 int ctx = evsel_context(evsel); in print_ll_cache_misses()
677 static void print_smi_cost(int cpu, struct perf_evsel *evsel, in print_smi_cost() argument
682 int ctx = evsel_context(evsel); in print_smi_cost()
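
Every print_* helper listed above (source lines 414-682) starts by computing the context via evsel_context(), and judging by their names each reports the event as a percentage of a related base total (misses per branch, per L1-dcache access, and so on). The standalone sketch below shows only that ratio-and-color arithmetic; the thresholds, color names, and function names are made up for illustration, and the real helpers print through perf's output callbacks rather than printf.

#include <stdio.h>

static const char *ratio_color_sketch(double ratio)
{
	if (ratio > 20.0)
		return "red";
	if (ratio > 5.0)
		return "yellow";
	return "normal";
}

/* "misses as a percentage of a base total", the shape shared by the helpers */
static void print_misses_sketch(const char *base, double misses, double total)
{
	double ratio = total ? misses / total * 100.0 : 0.0;

	printf("%7.2f%% of all %s (%s)\n", ratio, base, ratio_color_sketch(ratio));
}

int main(void)
{
	print_misses_sketch("branches", 1.2e6, 80e6);   /* -> 1.50% of all branches */
	return 0;
}
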
753 void perf_stat__print_shadow_stats(struct perf_evsel *evsel, in perf_stat__print_shadow_stats() argument
763 int ctx = evsel_context(evsel); in perf_stat__print_shadow_stats()
767 if (perf_evsel__match(evsel, HARDWARE, HW_INSTRUCTIONS)) { in perf_stat__print_shadow_stats()
795 } else if (perf_evsel__match(evsel, HARDWARE, HW_BRANCH_MISSES)) { in perf_stat__print_shadow_stats()
797 print_branch_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
801 evsel->attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
802 evsel->attr.config == ( PERF_COUNT_HW_CACHE_L1D | in perf_stat__print_shadow_stats()
807 print_l1_dcache_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
811 evsel->attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
812 evsel->attr.config == ( PERF_COUNT_HW_CACHE_L1I | in perf_stat__print_shadow_stats()
817 print_l1_icache_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
821 evsel->attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
822 evsel->attr.config == ( PERF_COUNT_HW_CACHE_DTLB | in perf_stat__print_shadow_stats()
827 print_dtlb_cache_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
831 evsel->attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
832 evsel->attr.config == ( PERF_COUNT_HW_CACHE_ITLB | in perf_stat__print_shadow_stats()
837 print_itlb_cache_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
841 evsel->attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
842 evsel->attr.config == ( PERF_COUNT_HW_CACHE_LL | in perf_stat__print_shadow_stats()
847 print_ll_cache_misses(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
850 } else if (perf_evsel__match(evsel, HARDWARE, HW_CACHE_MISSES)) { in perf_stat__print_shadow_stats()
861 } else if (perf_evsel__match(evsel, HARDWARE, HW_STALLED_CYCLES_FRONTEND)) { in perf_stat__print_shadow_stats()
862 print_stalled_cycles_frontend(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
863 } else if (perf_evsel__match(evsel, HARDWARE, HW_STALLED_CYCLES_BACKEND)) { in perf_stat__print_shadow_stats()
864 print_stalled_cycles_backend(cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
865 } else if (perf_evsel__match(evsel, HARDWARE, HW_CPU_CYCLES)) { in perf_stat__print_shadow_stats()
874 } else if (perf_stat_evsel__is(evsel, CYCLES_IN_TX)) { in perf_stat__print_shadow_stats()
884 } else if (perf_stat_evsel__is(evsel, CYCLES_IN_TX_CP)) { in perf_stat__print_shadow_stats()
895 } else if (perf_stat_evsel__is(evsel, TRANSACTION_START)) { in perf_stat__print_shadow_stats()
908 } else if (perf_stat_evsel__is(evsel, ELISION_START)) { in perf_stat__print_shadow_stats()
916 } else if (perf_evsel__is_clock(evsel)) { in perf_stat__print_shadow_stats()
919 avg / (ratio * evsel->scale)); in perf_stat__print_shadow_stats()
922 } else if (perf_stat_evsel__is(evsel, TOPDOWN_FETCH_BUBBLES)) { in perf_stat__print_shadow_stats()
929 } else if (perf_stat_evsel__is(evsel, TOPDOWN_SLOTS_RETIRED)) { in perf_stat__print_shadow_stats()
936 } else if (perf_stat_evsel__is(evsel, TOPDOWN_RECOVERY_BUBBLES)) { in perf_stat__print_shadow_stats()
943 } else if (perf_stat_evsel__is(evsel, TOPDOWN_SLOTS_ISSUED)) { in perf_stat__print_shadow_stats()
962 } else if (evsel->metric_expr) { in perf_stat__print_shadow_stats()
963 generic_metric(evsel->metric_expr, evsel->metric_events, evsel->name, in perf_stat__print_shadow_stats()
964 evsel->metric_name, avg, cpu, out, st); in perf_stat__print_shadow_stats()
979 } else if (perf_stat_evsel__is(evsel, SMI_NUM)) { in perf_stat__print_shadow_stats()
980 print_smi_cost(cpu, evsel, out, st); in perf_stat__print_shadow_stats()
985 if ((me = metricgroup__lookup(metric_events, evsel, false)) != NULL) { in perf_stat__print_shadow_stats()
992 evsel->name, mexp->metric_name, in perf_stat__print_shadow_stats()
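
The PERF_TYPE_HW_CACHE branches of perf_stat__print_shadow_stats() (source lines 801-847) compare attr.config against composed cache-event values. The composition follows the standard encoding documented in linux/perf_event.h: cache id in bits 0-7, operation in bits 8-15, result in bits 16-23; the op and result parts are simply truncated out of the hits above. A small sketch, with the helper name being mine:

#include <linux/perf_event.h>
#include <stdio.h>

static unsigned long long hw_cache_config(unsigned int cache, unsigned int op,
					  unsigned int result)
{
	return cache | (op << 8) | (result << 16);
}

int main(void)
{
	/* The value the L1D read-miss branch (source lines 801-807) would match
	 * before calling print_l1_dcache_misses(). */
	printf("L1D read misses: %#llx\n",
	       hw_cache_config(PERF_COUNT_HW_CACHE_L1D,
			       PERF_COUNT_HW_CACHE_OP_READ,
			       PERF_COUNT_HW_CACHE_RESULT_MISS));
	return 0;
}

Events that match none of the hardware cases fall through to the metric-expression path (evsel->metric_expr, source lines 962-964) and finally to the metricgroup__lookup() check at source line 985.
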