Lines matching refs:rsd — cross-reference listing; each entry gives the source line number, the matched text, and the enclosing function (plus the symbol's role, "argument" or "local", where rsd is defined).
207 struct runtime_stat_data *rsd) in update_runtime_stat() argument
210 rsd->ctx, st, rsd->cgrp); in update_runtime_stat()
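
The two matches above (source lines 207 and 210) are the definition of update_runtime_stat(); judging by the function names, these fragments come from perf's shadow-statistics code (stat-shadow.c under tools/perf/util). rsd is a small struct runtime_stat_data whose ctx and cgrp fields, together with the stat type and map_idx, key the saved-value lookup that the running statistics live under. A minimal, self-contained sketch of that pattern; the fixed table below stands in for perf's saved_value_lookup()/update_stats(), and everything other than the names visible above is a simplification:

#include <stdio.h>

enum stat_type { STAT_NSECS, STAT_CYCLES, STAT_BRANCHES, STAT_TYPE_MAX };
#define CTX_MAX 8

/* Simplified stand-in for perf's struct runtime_stat_data: the fragments
 * show it carrying an event context id and a cgroup pointer. */
struct runtime_stat_data {
	int ctx;
	void *cgrp;	/* struct cgroup * in perf; opaque here */
};

struct stats { double sum; unsigned long n; };

/* toy table standing in for saved_value_lookup(NULL, map_idx, true, type,
 * rsd->ctx, st, rsd->cgrp); the cgroup part of the key is dropped here */
static struct stats saved[STAT_TYPE_MAX][CTX_MAX];

static void update_runtime_stat(enum stat_type type, int map_idx,
				unsigned long long count,
				const struct runtime_stat_data *rsd)
{
	struct stats *v = &saved[type][rsd->ctx];

	(void)map_idx;	/* the real key also includes the CPU/thread map index */
	v->sum += (double)count;
	v->n++;
}

int main(void)
{
	struct runtime_stat_data rsd = { .ctx = 0, .cgrp = NULL };

	update_runtime_stat(STAT_CYCLES, 0, 1200000ULL, &rsd);
	update_runtime_stat(STAT_CYCLES, 0, 1300000ULL, &rsd);
	printf("avg cycles: %.0f over %lu samples\n",
	       saved[STAT_CYCLES][0].sum / saved[STAT_CYCLES][0].n,
	       saved[STAT_CYCLES][0].n);
	return 0;
}
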
226 struct runtime_stat_data rsd = { in perf_stat__update_shadow_stats() local
234 update_runtime_stat(st, STAT_NSECS, map_idx, count_ns, &rsd); in perf_stat__update_shadow_stats()
236 update_runtime_stat(st, STAT_CYCLES, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
238 update_runtime_stat(st, STAT_CYCLES_IN_TX, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
240 update_runtime_stat(st, STAT_TRANSACTION, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
242 update_runtime_stat(st, STAT_ELISION, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
245 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
248 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
251 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
254 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
257 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
260 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
263 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
266 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
269 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
272 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
275 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
278 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
281 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
284 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
287 map_idx, count, &rsd); in perf_stat__update_shadow_stats()
289 update_runtime_stat(st, STAT_BRANCHES, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
291 update_runtime_stat(st, STAT_CACHEREFS, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
293 update_runtime_stat(st, STAT_L1_DCACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
295 update_runtime_stat(st, STAT_L1_ICACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
297 update_runtime_stat(st, STAT_LL_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
299 update_runtime_stat(st, STAT_DTLB_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
301 update_runtime_stat(st, STAT_ITLB_CACHE, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
303 update_runtime_stat(st, STAT_SMI_NUM, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
305 update_runtime_stat(st, STAT_APERF, map_idx, count, &rsd); in perf_stat__update_shadow_stats()
309 rsd.cgrp); in perf_stat__update_shadow_stats()
315 map_idx, true, STAT_NONE, 0, st, rsd.cgrp); in perf_stat__update_shadow_stats()
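
Source lines 226 through 315 above are perf_stat__update_shadow_stats(): a local rsd is initialised once per counter and then passed unchanged to every update_runtime_stat() call, and the many truncated "map_idx, count, &rsd" lines are the continuation lines of one long event-type dispatch. The last two matches (309 and 315) hand rsd.cgrp straight to a lookup that files the count under STAT_NONE, apparently for events with no dedicated STAT_* slot. A hedged sketch of that dispatch shape; the integer event_kind switch is an illustrative stand-in for perf's evsel__match() chains:

enum stat_type { STAT_NSECS, STAT_CYCLES, STAT_BRANCHES, STAT_CACHEREFS };

struct runtime_stat_data { int ctx; void *cgrp; };

static void update_runtime_stat(enum stat_type type, int map_idx,
				unsigned long long count,
				const struct runtime_stat_data *rsd)
{
	/* stub; see the earlier sketch for the lookup-and-accumulate body */
	(void)type; (void)map_idx; (void)count; (void)rsd;
}

void shadow_update(int event_kind, int map_idx, unsigned long long count,
		   int ctx, void *cgrp)
{
	struct runtime_stat_data rsd = { .ctx = ctx, .cgrp = cgrp };

	switch (event_kind) {
	case 0: update_runtime_stat(STAT_NSECS,     map_idx, count, &rsd); break;
	case 1: update_runtime_stat(STAT_CYCLES,    map_idx, count, &rsd); break;
	case 2: update_runtime_stat(STAT_BRANCHES,  map_idx, count, &rsd); break;
	case 3: update_runtime_stat(STAT_CACHEREFS, map_idx, count, &rsd); break;
	}
}
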
458 struct runtime_stat_data *rsd) in runtime_stat_avg() argument
462 v = saved_value_lookup(NULL, map_idx, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_avg()
471 struct runtime_stat_data *rsd) in runtime_stat_n() argument
475 v = saved_value_lookup(NULL, map_idx, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_n()
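
runtime_stat_avg() and runtime_stat_n() are the read-side counterparts: they look up the same (type, map_idx, rsd->ctx, rsd->cgrp) key, but without creating an entry (the third argument is false), and return the running average or the number of samples, 0 when nothing was recorded; the "!= 0" guards further down rely on that. A minimal sketch of the two accessors over the toy stats record used earlier:

struct stats { double sum; unsigned long n; };

/* average of the recorded counts, 0.0 for an empty or missing bucket */
static double runtime_stat_avg(const struct stats *v)
{
	return (v && v->n) ? v->sum / (double)v->n : 0.0;
}

/* number of samples; the callers only compare this against 0 */
static double runtime_stat_n(const struct stats *v)
{
	return v ? (double)v->n : 0.0;
}
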
486 struct runtime_stat_data *rsd) in print_stalled_cycles_frontend() argument
491 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_stalled_cycles_frontend()
509 struct runtime_stat_data *rsd) in print_stalled_cycles_backend() argument
514 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_stalled_cycles_backend()
528 struct runtime_stat_data *rsd) in print_branch_misses() argument
533 total = runtime_stat_avg(st, STAT_BRANCHES, map_idx, rsd); in print_branch_misses()
547 struct runtime_stat_data *rsd) in print_l1_dcache_misses() argument
552 total = runtime_stat_avg(st, STAT_L1_DCACHE, map_idx, rsd); in print_l1_dcache_misses()
566 struct runtime_stat_data *rsd) in print_l1_icache_misses() argument
571 total = runtime_stat_avg(st, STAT_L1_ICACHE, map_idx, rsd); in print_l1_icache_misses()
584 struct runtime_stat_data *rsd) in print_dtlb_cache_misses() argument
589 total = runtime_stat_avg(st, STAT_DTLB_CACHE, map_idx, rsd); in print_dtlb_cache_misses()
602 struct runtime_stat_data *rsd) in print_itlb_cache_misses() argument
607 total = runtime_stat_avg(st, STAT_ITLB_CACHE, map_idx, rsd); in print_itlb_cache_misses()
620 struct runtime_stat_data *rsd) in print_ll_cache_misses() argument
625 total = runtime_stat_avg(st, STAT_LL_CACHE, map_idx, rsd); in print_ll_cache_misses()
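
The print_stalled_cycles_*() and print_*_misses() helpers above all follow one pattern: fetch the matching base rate (cycles for the stall counters, accesses for the cache and TLB miss counters, branches for branch misses) through runtime_stat_avg() with the same map_idx and rsd, then report the event as a percentage of it. The colour selection and out->print_metric() call are omitted from this sketch:

/* avg: the miss/stall count being printed; total: the base rate fetched
 * with runtime_stat_avg() as in source lines 491-625 above. */
static double shadow_ratio_pct(double avg, double total)
{
	return total ? 100.0 * avg / total : 0.0;
}

For print_branch_misses(), for example, avg would be the branch-miss count and total the STAT_BRANCHES average.
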
684 struct runtime_stat_data *rsd) in td_total_slots() argument
686 return runtime_stat_avg(st, STAT_TOPDOWN_TOTAL_SLOTS, map_idx, rsd); in td_total_slots()
690 struct runtime_stat_data *rsd) in td_bad_spec() argument
696 total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, map_idx, rsd) - in td_bad_spec()
697 runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, map_idx, rsd) + in td_bad_spec()
698 runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, map_idx, rsd); in td_bad_spec()
700 total_slots = td_total_slots(map_idx, st, rsd); in td_bad_spec()
707 struct runtime_stat_data *rsd) in td_retiring() argument
710 double total_slots = td_total_slots(map_idx, st, rsd); in td_retiring()
712 map_idx, rsd); in td_retiring()
720 struct runtime_stat_data *rsd) in td_fe_bound() argument
723 double total_slots = td_total_slots(map_idx, st, rsd); in td_fe_bound()
725 map_idx, rsd); in td_fe_bound()
733 struct runtime_stat_data *rsd) in td_be_bound() argument
735 double sum = (td_fe_bound(map_idx, st, rsd) + in td_be_bound()
736 td_bad_spec(map_idx, st, rsd) + in td_be_bound()
737 td_retiring(map_idx, st, rsd)); in td_be_bound()
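
The level-1 top-down helpers do their arithmetic directly on these averages: bad speculation is (slots issued - slots retired + recovery bubbles) divided by the total slots from td_total_slots(); retiring and frontend-bound divide their own slot counts by the same total (the frontend numerator sits on a line not matched here); backend-bound is whatever fraction the other three leave over. A reconstruction with plain doubles in place of the runtime_stat_avg() reads; clamp01() is an assumption standing in for however perf sanitises the ratios:

static double clamp01(double x)
{
	return x < 0.0 ? 0.0 : (x > 1.0 ? 1.0 : x);
}

static double td_bad_spec(double slots_issued, double slots_retired,
			  double recovery_bubbles, double total_slots)
{
	double bad = slots_issued - slots_retired + recovery_bubbles;

	return total_slots ? clamp01(bad / total_slots) : 0.0;
}

static double td_retiring(double slots_retired, double total_slots)
{
	return total_slots ? slots_retired / total_slots : 0.0;
}

static double td_be_bound(double fe_bound, double bad_spec, double retiring)
{
	double sum = fe_bound + bad_spec + retiring;

	return sum ? clamp01(1.0 - sum) : 0.0;	/* whatever the other three leave over */
}
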
750 struct runtime_stat_data *rsd) in td_metric_ratio() argument
752 double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, map_idx, rsd) + in td_metric_ratio()
753 runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, map_idx, rsd) + in td_metric_ratio()
754 runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, map_idx, rsd) + in td_metric_ratio()
755 runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, map_idx, rsd); in td_metric_ratio()
756 double d = runtime_stat_avg(stat, type, map_idx, rsd); in td_metric_ratio()
769 struct runtime_stat_data *rsd) in full_td() argument
773 if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, map_idx, rsd) > 0) in full_td()
775 if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, map_idx, rsd) > 0) in full_td()
777 if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, map_idx, rsd) > 0) in full_td()
779 if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, map_idx, rsd) > 0) in full_td()
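
For the newer TOPDOWN_* events, td_metric_ratio() reports one component as its share of the sum of the four level-1 averages, and full_td() gates the printout on those averages actually being populated. The fragments show the four "> 0" tests but not the threshold, so the ">= 2" below is an assumption:

static double td_metric_ratio(double d, double retiring, double fe_bound,
			      double be_bound, double bad_spec)
{
	double sum = retiring + fe_bound + be_bound + bad_spec;

	return sum ? d / sum : 0.0;
}

static int full_td(double retiring, double fe_bound, double be_bound,
		   double bad_spec)
{
	int c = 0;

	c += retiring > 0.0;
	c += be_bound > 0.0;
	c += fe_bound > 0.0;
	c += bad_spec > 0.0;
	return c >= 2;	/* assumed threshold */
}
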
787 struct runtime_stat_data *rsd) in print_smi_cost() argument
792 smi_num = runtime_stat_avg(st, STAT_SMI_NUM, map_idx, rsd); in print_smi_cost()
793 aperf = runtime_stat_avg(st, STAT_APERF, map_idx, rsd); in print_smi_cost()
794 cycles = runtime_stat_avg(st, STAT_CYCLES, map_idx, rsd); in print_smi_cost()
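
print_smi_cost() reads three averages through the same rsd: the SMI count, APERF, and cycles. The matched lines stop there, so the combination below is only a plausible illustration (the share of APERF cycles not covered by the cycles event, attributed to SMM time when SMIs occurred), not necessarily the exact figure perf prints:

static double smi_cost_pct(double smi_num, double aperf, double cycles)
{
	if (aperf == 0.0 || cycles == 0.0)
		return 0.0;
	return smi_num ? (aperf - cycles) / aperf * 100.0 : 0.0;
}
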
987 struct runtime_stat_data rsd = { in perf_stat__print_shadow_stats() local
997 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1007 total = runtime_stat_avg(st, STAT_STALLED_CYCLES_FRONT, map_idx, &rsd); in perf_stat__print_shadow_stats()
1011 map_idx, &rsd)); in perf_stat__print_shadow_stats()
1021 if (runtime_stat_n(st, STAT_BRANCHES, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1022 print_branch_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1031 if (runtime_stat_n(st, STAT_L1_DCACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1032 print_l1_dcache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1041 if (runtime_stat_n(st, STAT_L1_ICACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1042 print_l1_icache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1051 if (runtime_stat_n(st, STAT_DTLB_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1052 print_dtlb_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1061 if (runtime_stat_n(st, STAT_ITLB_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1062 print_itlb_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1071 if (runtime_stat_n(st, STAT_LL_CACHE, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1072 print_ll_cache_misses(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1076 total = runtime_stat_avg(st, STAT_CACHEREFS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1081 if (runtime_stat_n(st, STAT_CACHEREFS, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1087 print_stalled_cycles_frontend(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1089 print_stalled_cycles_backend(config, map_idx, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1091 total = runtime_stat_avg(st, STAT_NSECS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1100 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1110 total = runtime_stat_avg(st, STAT_CYCLES, map_idx, &rsd); in perf_stat__print_shadow_stats()
1111 total2 = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1121 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1126 if (runtime_stat_n(st, STAT_CYCLES_IN_TX, map_idx, &rsd) != 0) in perf_stat__print_shadow_stats()
1133 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, map_idx, &rsd); in perf_stat__print_shadow_stats()
1146 double fe_bound = td_fe_bound(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1153 double retiring = td_retiring(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1160 double bad_spec = td_bad_spec(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1167 double be_bound = td_be_bound(map_idx, st, &rsd); in perf_stat__print_shadow_stats()
1180 if (td_total_slots(map_idx, st, &rsd) > 0) in perf_stat__print_shadow_stats()
1186 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1189 &rsd); in perf_stat__print_shadow_stats()
1195 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1198 &rsd); in perf_stat__print_shadow_stats()
1204 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1207 &rsd); in perf_stat__print_shadow_stats()
1213 full_td(map_idx, st, &rsd)) { in perf_stat__print_shadow_stats()
1216 &rsd); in perf_stat__print_shadow_stats()
1222 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1225 &rsd); in perf_stat__print_shadow_stats()
1228 &rsd); in perf_stat__print_shadow_stats()
1242 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1245 &rsd); in perf_stat__print_shadow_stats()
1248 &rsd); in perf_stat__print_shadow_stats()
1262 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1265 &rsd); in perf_stat__print_shadow_stats()
1268 &rsd); in perf_stat__print_shadow_stats()
1282 full_td(map_idx, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1285 &rsd); in perf_stat__print_shadow_stats()
1288 &rsd); in perf_stat__print_shadow_stats()
1305 } else if (runtime_stat_n(st, STAT_NSECS, map_idx, &rsd) != 0) { in perf_stat__print_shadow_stats()
1309 total = runtime_stat_avg(st, STAT_NSECS, map_idx, &rsd); in perf_stat__print_shadow_stats()
1317 print_smi_cost(config, map_idx, out, st, &rsd); in perf_stat__print_shadow_stats()
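
perf_stat__print_shadow_stats() builds one local rsd (source line 987) and then, branch by branch, guards each derived metric with runtime_stat_n() before dividing by the matching runtime_stat_avg(): cycles for insn-per-cycle and the transaction metrics, NSECS for the rate and utilisation rows, and the td_*/full_td() helpers for the top-down rows. A compact sketch of that guard-then-ratio step; print_metric() here is a stand-in for the out->print_metric callback the real code uses:

#include <stdio.h>

static void print_metric(const char *unit, double val)
{
	printf("%18.2f %s\n", val, unit);
}

/* avg: the event being printed; base_n/base_avg: sample count and average
 * of the base event fetched with runtime_stat_n()/runtime_stat_avg(). */
static void shadow_print_ratio(const char *unit, double avg,
			       double base_n, double base_avg)
{
	if (base_n != 0.0 && base_avg != 0.0)
		print_metric(unit, avg / base_avg);
	else
		print_metric(unit, 0.0);	/* perf prints a zeroed/blank metric here */
}

With an instruction count as avg and the STAT_CYCLES entry as the base, this corresponds to the insn-per-cycle row; the other branches differ only in which STAT_* entry supplies the base and which helper formats the result.
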