Lines matching refs: map_idx
36 int map_idx; /* cpu or thread map index */ member
50 if (a->map_idx != b->map_idx) in saved_value_cmp()
51 return a->map_idx - b->map_idx; in saved_value_cmp()
98 int map_idx, in saved_value_lookup() argument
108 .map_idx = map_idx, in saved_value_lookup()
206 int map_idx, u64 count, in update_runtime_stat() argument
209 struct saved_value *v = saved_value_lookup(NULL, map_idx, true, type, in update_runtime_stat()
222 int map_idx, struct runtime_stat *st) in perf_stat__update_shadow_stats() argument
234 update_runtime_stat(st, STAT_NSECS, map_idx, count_ns, &rsd); in perf_stat__update_shadow_stats()
236 update_runtime_stat(st, STAT_CYCLES, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
238 update_runtime_stat(st, STAT_CYCLES_IN_TX, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
240 update_runtime_stat(st, STAT_TRANSACTION, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
242 update_runtime_stat(st, STAT_ELISION, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
245 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
248 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
251 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
254 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
257 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
260 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
263 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
266 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
269 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
272 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
275 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
278 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
281 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
284 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
287 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
289 update_runtime_stat(st, STAT_BRANCHES, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
291 update_runtime_stat(st, STAT_CACHEREFS, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
293 update_runtime_stat(st, STAT_L1_DCACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
295 update_runtime_stat(st, STAT_L1_ICACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
297 update_runtime_stat(st, STAT_LL_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
299 update_runtime_stat(st, STAT_DTLB_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
301 update_runtime_stat(st, STAT_ITLB_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
303 update_runtime_stat(st, STAT_SMI_NUM, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
305 update_runtime_stat(st, STAT_APERF, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
308 v = saved_value_lookup(counter, map_idx, true, STAT_NONE, 0, st, in perf_stat__update_shadow_stats()
315 map_idx, true, STAT_NONE, 0, st, rsd.cgrp); in perf_stat__update_shadow_stats()
457 enum stat_type type, int map_idx, in runtime_stat_avg() argument
462 v = saved_value_lookup(NULL, map_idx, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_avg()
470 enum stat_type type, int map_idx, in runtime_stat_n() argument
475 v = saved_value_lookup(NULL, map_idx, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_n()
483 int map_idx, double avg, in print_stalled_cycles_frontend() argument
491 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_stalled_cycles_frontend()
506 int map_idx, double avg, in print_stalled_cycles_backend() argument
514 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_stalled_cycles_backend()
525 int map_idx, double avg, in print_branch_misses() argument
533 total = runtime_stat_avg(st, STAT_BRANCHES, map_idx, rsd); in print_branch_misses()
544 int map_idx, double avg, in print_l1_dcache_misses() argument
552 total = runtime_stat_avg(st, STAT_L1_DCACHE, map_idx, rsd); in print_l1_dcache_misses()
563 int map_idx, double avg, in print_l1_icache_misses() argument
571 total = runtime_stat_avg(st, STAT_L1_ICACHE, map_idx, rsd); in print_l1_icache_misses()
581 int map_idx, double avg, in print_dtlb_cache_misses() argument
589 total = runtime_stat_avg(st, STAT_DTLB_CACHE, map_idx, rsd); in print_dtlb_cache_misses()
599 int map_idx, double avg, in print_itlb_cache_misses() argument
607 total = runtime_stat_avg(st, STAT_ITLB_CACHE, map_idx, rsd); in print_itlb_cache_misses()
617 int map_idx, double avg, in print_ll_cache_misses() argument
625 total = runtime_stat_avg(st, STAT_LL_CACHE, map_idx, rsd); in print_ll_cache_misses()
683 static double td_total_slots(int map_idx, struct runtime_stat *st, in td_total_slots() argument
686 return runtime_stat_avg(st, STAT_TOPDOWN_TOTAL_SLOTS, map_idx, rsd); in td_total_slots()
689 static double td_bad_spec(int map_idx, struct runtime_stat *st, in td_bad_spec() argument
696 total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, map_idx, rsd) - in td_bad_spec()
697 runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, map_idx, rsd) + in td_bad_spec()
698 runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, map_idx, rsd); in td_bad_spec()
700 total_slots = td_total_slots(map_idx, st, rsd); in td_bad_spec()
706 static double td_retiring(int map_idx, struct runtime_stat *st, in td_retiring() argument
710 double total_slots = td_total_slots(map_idx, st, rsd); in td_retiring()
712 map_idx, rsd); in td_retiring()
719 static double td_fe_bound(int map_idx, struct runtime_stat *st, in td_fe_bound() argument
723 double total_slots = td_total_slots(map_idx, st, rsd); in td_fe_bound()
725 map_idx, rsd); in td_fe_bound()
732 static double td_be_bound(int map_idx, struct runtime_stat *st, in td_be_bound() argument
735 double sum = (td_fe_bound(map_idx, st, rsd) + in td_be_bound()
736 td_bad_spec(map_idx, st, rsd) + in td_be_bound()
737 td_retiring(map_idx, st, rsd)); in td_be_bound()
748 static double td_metric_ratio(int map_idx, enum stat_type type, in td_metric_ratio() argument
752 double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, map_idx, rsd) + in td_metric_ratio()
753 runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, map_idx, rsd) + in td_metric_ratio()
754 runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, map_idx, rsd) + in td_metric_ratio()
755 runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, map_idx, rsd); in td_metric_ratio()
756 double d = runtime_stat_avg(stat, type, map_idx, rsd); in td_metric_ratio()
768 static bool full_td(int map_idx, struct runtime_stat *stat, in full_td() argument
773 if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, map_idx, rsd) > 0) in full_td()
775 if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, map_idx, rsd) > 0) in full_td()
777 if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, map_idx, rsd) > 0) in full_td()
779 if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, map_idx, rsd) > 0) in full_td()
784 static void print_smi_cost(struct perf_stat_config *config, int map_idx, in print_smi_cost() argument
792 smi_num = runtime_stat_avg(st, STAT_SMI_NUM, map_idx, rsd); in print_smi_cost()
793 aperf = runtime_stat_avg(st, STAT_APERF, map_idx, rsd); in print_smi_cost()
794 cycles = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_smi_cost()
811 int map_idx, in prepare_metric() argument
850 v = saved_value_lookup(metric_events[i], map_idx, false, in prepare_metric()
893 int map_idx, in generic_metric() argument
911 i = prepare_metric(metric_events, metric_refs, pctx, map_idx, st); in generic_metric()
956 double test_generic_metric(struct metric_expr *mexp, int map_idx, struct runtime_stat *st) in test_generic_metric() argument
965 if (prepare_metric(mexp->metric_events, mexp->metric_refs, pctx, map_idx, st) < 0) in test_generic_metric()
978 double avg, int map_idx, in perf_stat__print_shadow_stats() argument
997 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1007 total = runtime_stat_avg(st, STAT_STALLED_CYCLES_FRONT, map_idx, &rsd); in perf_stat__print_shadow_stats()
1011 map_idx, &rsd)); in perf_stat__print_shadow_stats()
1021 if (runtime_stat_n(st, STAT_BRANCHES, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1022 print_branch_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1031 if (runtime_stat_n(st, STAT_L1_DCACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1032 print_l1_dcache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1041 if (runtime_stat_n(st, STAT_L1_ICACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1042 print_l1_icache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1051 if (runtime_stat_n(st, STAT_DTLB_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1052 print_dtlb_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1061 if (runtime_stat_n(st, STAT_ITLB_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1062 print_itlb_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1071 if (runtime_stat_n(st, STAT_LL_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1072 print_ll_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1076 total = runtime_stat_avg(st, STAT_CACHEREFS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1081 if (runtime_stat_n(st, STAT_CACHEREFS, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1087 print_stalled_cycles_frontend(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1089 print_stalled_cycles_backend(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1091 total = runtime_stat_avg(st, STAT_NSECS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1100 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1110 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1111 total2 = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1121 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1126 if (runtime_stat_n(st, STAT_CYCLES_IN_TX, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1133 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1146 double fe_bound = td_fe_bound(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1153 double retiring = td_retiring(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1160 double bad_spec = td_bad_spec(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1167 double be_bound = td_be_bound(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1180 if (td_total_slots(map_idx, st, &rsd) > 0) in perf_stat__print_shadow_stats()
1186 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1187 double retiring = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1195 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1196 double fe_bound = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1204 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1205 double be_bound = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1213 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1214 double bad_spec = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1222 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1223 double retiring = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1226 double heavy_ops = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1242 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1243 double bad_spec = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1246 double br_mis = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1262 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1263 double fe_bound = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1266 double fetch_lat = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1282 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1283 double be_bound = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1286 double mem_bound = td_metric_ratio(map_idx, in perf_stat__print_shadow_stats()
1304 map_idx, out, st); in perf_stat__print_shadow_stats()
1305 } else if (runtime_stat_n(st, STAT_NSECS, map_idx, &rsd) != 0) { in perf_stat__print_shadow_stats()
1309 total = runtime_stat_avg(st, STAT_NSECS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1317 print_smi_cost(config, map_idx, out, st, &rsd); in perf_stat__print_shadow_stats()
1331 map_idx, out, st); in perf_stat__print_shadow_stats()