Lines matching refs:rsd (identifier cross-reference hits in perf's stat-shadow code)

215 struct runtime_stat_data *rsd) in update_runtime_stat() argument
218 rsd->ctx, st, rsd->cgrp); in update_runtime_stat()
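The two hits above pin down the shape of the context struct and the write-side helper: rsd carries a ctx and a cgrp, and both are forwarded to saved_value_lookup(). A minimal sketch consistent with these fragments, to be read in the context of the surrounding file; the create flag and the update_stats() call are assumptions, not visible in the hits:

struct runtime_stat_data {
        int ctx;
        struct cgroup *cgrp;    /* counts are keyed per cgroup; may be NULL */
};

static void update_runtime_stat(struct runtime_stat *st,
                                enum stat_type type,
                                int cpu, u64 count,
                                struct runtime_stat_data *rsd)
{
        /* "true" (create if missing) is an assumption; the read-side
         * helpers quoted further down pass "false". */
        struct saved_value *v = saved_value_lookup(NULL, cpu, true, type,
                                                   rsd->ctx, st, rsd->cgrp);

        if (v)
                update_stats(&v->stats, count);
}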
234 struct runtime_stat_data rsd = { in perf_stat__update_shadow_stats() local
242 update_runtime_stat(st, STAT_NSECS, cpu, count_ns, &rsd); in perf_stat__update_shadow_stats()
244 update_runtime_stat(st, STAT_CYCLES, cpu, count, &rsd); in perf_stat__update_shadow_stats()
246 update_runtime_stat(st, STAT_CYCLES_IN_TX, cpu, count, &rsd); in perf_stat__update_shadow_stats()
248 update_runtime_stat(st, STAT_TRANSACTION, cpu, count, &rsd); in perf_stat__update_shadow_stats()
250 update_runtime_stat(st, STAT_ELISION, cpu, count, &rsd); in perf_stat__update_shadow_stats()
253 cpu, count, &rsd); in perf_stat__update_shadow_stats()
256 cpu, count, &rsd); in perf_stat__update_shadow_stats()
259 cpu, count, &rsd); in perf_stat__update_shadow_stats()
262 cpu, count, &rsd); in perf_stat__update_shadow_stats()
265 cpu, count, &rsd); in perf_stat__update_shadow_stats()
268 cpu, count, &rsd); in perf_stat__update_shadow_stats()
271 cpu, count, &rsd); in perf_stat__update_shadow_stats()
274 cpu, count, &rsd); in perf_stat__update_shadow_stats()
277 cpu, count, &rsd); in perf_stat__update_shadow_stats()
280 cpu, count, &rsd); in perf_stat__update_shadow_stats()
283 cpu, count, &rsd); in perf_stat__update_shadow_stats()
286 cpu, count, &rsd); in perf_stat__update_shadow_stats()
289 cpu, count, &rsd); in perf_stat__update_shadow_stats()
292 cpu, count, &rsd); in perf_stat__update_shadow_stats()
295 cpu, count, &rsd); in perf_stat__update_shadow_stats()
297 update_runtime_stat(st, STAT_BRANCHES, cpu, count, &rsd); in perf_stat__update_shadow_stats()
299 update_runtime_stat(st, STAT_CACHEREFS, cpu, count, &rsd); in perf_stat__update_shadow_stats()
301 update_runtime_stat(st, STAT_L1_DCACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
303 update_runtime_stat(st, STAT_L1_ICACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
305 update_runtime_stat(st, STAT_LL_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
307 update_runtime_stat(st, STAT_DTLB_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
309 update_runtime_stat(st, STAT_ITLB_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
311 update_runtime_stat(st, STAT_SMI_NUM, cpu, count, &rsd); in perf_stat__update_shadow_stats()
313 update_runtime_stat(st, STAT_APERF, cpu, count, &rsd); in perf_stat__update_shadow_stats()
317 rsd.cgrp); in perf_stat__update_shadow_stats()
323 cpu, true, STAT_NONE, 0, st, rsd.cgrp); in perf_stat__update_shadow_stats()
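Taken together, the hits at lines 234-323 show perf_stat__update_shadow_stats() declaring rsd as a local and threading &rsd through one update_runtime_stat() call per recognized event type; the two direct saved_value_lookup() hits at 317 and 323 handle the remaining bookkeeping and are elided here. A sketch of the dispatch; the initializer fields and the evsel__match() conditions are assumptions inferred from the surrounding perf code, not shown in the hits:

void perf_stat__update_shadow_stats(struct evsel *counter, u64 count,
                                    int cpu, struct runtime_stat *st)
{
        u64 count_ns = count;
        struct runtime_stat_data rsd = {
                .ctx = evsel_context(counter),  /* assumed initializer */
                .cgrp = counter->cgrp,          /* assumed initializer */
        };

        if (evsel__is_clock(counter))           /* assumed condition */
                update_runtime_stat(st, STAT_NSECS, cpu, count_ns, &rsd);
        else if (evsel__match(counter, HARDWARE, HW_CPU_CYCLES))
                update_runtime_stat(st, STAT_CYCLES, cpu, count, &rsd);
        else if (evsel__match(counter, HARDWARE, HW_BRANCH_INSTRUCTIONS))
                update_runtime_stat(st, STAT_BRANCHES, cpu, count, &rsd);
        /* ... one case per STAT_* hit quoted above (lines 246-313) ... */
}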
461 struct runtime_stat_data *rsd) in runtime_stat_avg() argument
465 v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_avg()
474 struct runtime_stat_data *rsd) in runtime_stat_n() argument
478 v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_n()
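The read side mirrors the write side: both helpers look up the saved value with create == false and the same (type, ctx, cpu, cgrp) key. The lookup lines are quoted verbatim in the hits; the return expressions are assumptions:

static double runtime_stat_avg(struct runtime_stat *st,
                               enum stat_type type, int cpu,
                               struct runtime_stat_data *rsd)
{
        struct saved_value *v;

        v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp);
        if (!v)
                return 0.0;

        return avg_stats(&v->stats);    /* assumed accessor */
}

static double runtime_stat_n(struct runtime_stat *st,
                             enum stat_type type, int cpu,
                             struct runtime_stat_data *rsd)
{
        struct saved_value *v;

        v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp);
        if (!v)
                return 0.0;

        return v->stats.n;      /* assumed: number of updates recorded */
}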
489 struct runtime_stat_data *rsd) in print_stalled_cycles_frontend() argument
494 total = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_stalled_cycles_frontend()
512 struct runtime_stat_data *rsd) in print_stalled_cycles_backend() argument
517 total = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_stalled_cycles_backend()
531 struct runtime_stat_data *rsd) in print_branch_misses() argument
536 total = runtime_stat_avg(st, STAT_BRANCHES, cpu, rsd); in print_branch_misses()
550 struct runtime_stat_data *rsd) in print_l1_dcache_misses() argument
555 total = runtime_stat_avg(st, STAT_L1_DCACHE, cpu, rsd); in print_l1_dcache_misses()
569 struct runtime_stat_data *rsd) in print_l1_icache_misses() argument
574 total = runtime_stat_avg(st, STAT_L1_ICACHE, cpu, rsd); in print_l1_icache_misses()
587 struct runtime_stat_data *rsd) in print_dtlb_cache_misses() argument
592 total = runtime_stat_avg(st, STAT_DTLB_CACHE, cpu, rsd); in print_dtlb_cache_misses()
605 struct runtime_stat_data *rsd) in print_itlb_cache_misses() argument
610 total = runtime_stat_avg(st, STAT_ITLB_CACHE, cpu, rsd); in print_itlb_cache_misses()
623 struct runtime_stat_data *rsd) in print_ll_cache_misses() argument
628 total = runtime_stat_avg(st, STAT_LL_CACHE, cpu, rsd); in print_ll_cache_misses()
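The eight print_*() helpers at lines 489-628 all follow the same pattern: fetch the relevant baseline with runtime_stat_avg(), turn the event average into a percentage of that baseline, and print it. A sketch of one representative helper; everything past the runtime_stat_avg() call (ratio, color, and the print_metric() details) is an assumption:

static void print_branch_misses(struct perf_stat_config *config,
                                int cpu, double avg,
                                struct perf_stat_output_ctx *out,
                                struct runtime_stat *st,
                                struct runtime_stat_data *rsd)
{
        double total, ratio = 0.0;
        const char *color;

        total = runtime_stat_avg(st, STAT_BRANCHES, cpu, rsd);

        if (total)
                ratio = avg / total * 100.0;

        color = get_ratio_color(GRC_CACHE_MISSES, ratio);       /* assumed */
        out->print_metric(config, out->ctx, color, "%7.2f%%",
                          "of all branches", ratio);
}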
687 struct runtime_stat_data *rsd) in td_total_slots() argument
689 return runtime_stat_avg(st, STAT_TOPDOWN_TOTAL_SLOTS, cpu, rsd); in td_total_slots()
693 struct runtime_stat_data *rsd) in td_bad_spec() argument
699 total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, cpu, rsd) - in td_bad_spec()
700 runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, cpu, rsd) + in td_bad_spec()
701 runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, cpu, rsd); in td_bad_spec()
703 total_slots = td_total_slots(cpu, st, rsd); in td_bad_spec()
710 struct runtime_stat_data *rsd) in td_retiring() argument
713 double total_slots = td_total_slots(cpu, st, rsd); in td_retiring()
715 cpu, rsd); in td_retiring()
723 struct runtime_stat_data *rsd) in td_fe_bound() argument
726 double total_slots = td_total_slots(cpu, st, rsd); in td_fe_bound()
728 cpu, rsd); in td_fe_bound()
736 struct runtime_stat_data *rsd) in td_be_bound() argument
738 double sum = (td_fe_bound(cpu, st, rsd) + in td_be_bound()
739 td_bad_spec(cpu, st, rsd) + in td_be_bound()
740 td_retiring(cpu, st, rsd)); in td_be_bound()
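The td_*() helpers at lines 687-740 build the classic top-down breakdown on top of runtime_stat_avg(). The bad-speculation arithmetic is visible in the hits (slots issued minus slots retired plus recovery bubbles, normalized by total slots); the guard conditions and the derivation of backend-bound as the remainder are assumptions:

static double td_bad_spec(int cpu, struct runtime_stat *st,
                          struct runtime_stat_data *rsd)
{
        double bad_spec = 0, total_slots;
        double total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, cpu, rsd) -
                       runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, cpu, rsd) +
                       runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, cpu, rsd);

        total_slots = td_total_slots(cpu, st, rsd);
        if (total_slots)                /* assumed guard */
                bad_spec = total / total_slots;
        return bad_spec;
}

static double td_be_bound(int cpu, struct runtime_stat *st,
                          struct runtime_stat_data *rsd)
{
        double sum = (td_fe_bound(cpu, st, rsd) +
                      td_bad_spec(cpu, st, rsd) +
                      td_retiring(cpu, st, rsd));

        /* assumed: backend-bound is whatever fraction is left over */
        if (sum == 0)
                return 0;
        return 1.0 - sum;
}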
753 struct runtime_stat_data *rsd) in td_metric_ratio() argument
755 double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) + in td_metric_ratio()
756 runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) + in td_metric_ratio()
757 runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) + in td_metric_ratio()
758 runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd); in td_metric_ratio()
759 double d = runtime_stat_avg(stat, type, cpu, rsd); in td_metric_ratio()
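td_metric_ratio() normalizes one top-down counter against the sum of all four, so the level-1 metrics always add up to 1. The sum and the d lookup are quoted verbatim in the hits; only the final division is assumed:

static double td_metric_ratio(int cpu, enum stat_type type,
                              struct runtime_stat *stat,
                              struct runtime_stat_data *rsd)
{
        double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) +
                     runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) +
                     runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) +
                     runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd);
        double d = runtime_stat_avg(stat, type, cpu, rsd);

        if (sum)
                return d / sum;         /* assumed */
        return 0;
}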
772 struct runtime_stat_data *rsd) in full_td() argument
776 if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) > 0) in full_td()
778 if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) > 0) in full_td()
780 if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) > 0) in full_td()
782 if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd) > 0) in full_td()
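full_td() checks how many of the four top-down counters actually have data before the ratios are trusted. The four comparisons appear in the hits; the counting and the threshold are assumptions:

static bool full_td(int cpu, struct runtime_stat *stat,
                    struct runtime_stat_data *rsd)
{
        int c = 0;

        if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) > 0)
                c++;
        if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) > 0)
                c++;
        if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) > 0)
                c++;
        if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd) > 0)
                c++;
        return c >= 2;                  /* assumed threshold */
}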
790 struct runtime_stat_data *rsd) in print_smi_cost() argument
795 smi_num = runtime_stat_avg(st, STAT_SMI_NUM, cpu, rsd); in print_smi_cost()
796 aperf = runtime_stat_avg(st, STAT_APERF, cpu, rsd); in print_smi_cost()
797 cycles = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_smi_cost()
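print_smi_cost() reads three saved averages through the same rsd key. Only the three runtime_stat_avg() calls appear in the hits; the cost formula below (cycles lost to SMIs, taken as the gap between APERF and measured cycles) is an assumption:

static void print_smi_cost(struct perf_stat_config *config, int cpu,
                           struct perf_stat_output_ctx *out,
                           struct runtime_stat *st,
                           struct runtime_stat_data *rsd)
{
        double smi_num, aperf, cycles, cost = 0.0;

        smi_num = runtime_stat_avg(st, STAT_SMI_NUM, cpu, rsd);
        aperf = runtime_stat_avg(st, STAT_APERF, cpu, rsd);
        cycles = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd);

        if (cycles == 0 || aperf == 0)
                return;

        if (smi_num)
                cost = (aperf - cycles) / aperf * 100.0;        /* assumed */

        out->print_metric(config, out->ctx, NULL, "%8.1f%%",
                          "SMI cycles%", cost);
        out->print_metric(config, out->ctx, NULL, "%4.0f", "SMI#", smi_num);
}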
959 struct runtime_stat_data rsd = { in perf_stat__print_shadow_stats() local
969 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
979 total = runtime_stat_avg(st, STAT_STALLED_CYCLES_FRONT, cpu, &rsd); in perf_stat__print_shadow_stats()
983 cpu, &rsd)); in perf_stat__print_shadow_stats()
993 if (runtime_stat_n(st, STAT_BRANCHES, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
994 print_branch_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1003 if (runtime_stat_n(st, STAT_L1_DCACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1004 print_l1_dcache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1013 if (runtime_stat_n(st, STAT_L1_ICACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1014 print_l1_icache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1023 if (runtime_stat_n(st, STAT_DTLB_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1024 print_dtlb_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1033 if (runtime_stat_n(st, STAT_ITLB_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1034 print_itlb_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1043 if (runtime_stat_n(st, STAT_LL_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1044 print_ll_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1048 total = runtime_stat_avg(st, STAT_CACHEREFS, cpu, &rsd); in perf_stat__print_shadow_stats()
1053 if (runtime_stat_n(st, STAT_CACHEREFS, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1059 print_stalled_cycles_frontend(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1061 print_stalled_cycles_backend(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1063 total = runtime_stat_avg(st, STAT_NSECS, cpu, &rsd); in perf_stat__print_shadow_stats()
1072 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
1082 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
1083 total2 = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1093 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1098 if (runtime_stat_n(st, STAT_CYCLES_IN_TX, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1105 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1118 double fe_bound = td_fe_bound(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1125 double retiring = td_retiring(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1132 double bad_spec = td_bad_spec(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1139 double be_bound = td_be_bound(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1152 if (td_total_slots(cpu, st, &rsd) > 0) in perf_stat__print_shadow_stats()
1158 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1161 &rsd); in perf_stat__print_shadow_stats()
1167 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1170 &rsd); in perf_stat__print_shadow_stats()
1176 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1179 &rsd); in perf_stat__print_shadow_stats()
1185 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1188 &rsd); in perf_stat__print_shadow_stats()
1194 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1197 &rsd); in perf_stat__print_shadow_stats()
1200 &rsd); in perf_stat__print_shadow_stats()
1214 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1217 &rsd); in perf_stat__print_shadow_stats()
1220 &rsd); in perf_stat__print_shadow_stats()
1234 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1237 &rsd); in perf_stat__print_shadow_stats()
1240 &rsd); in perf_stat__print_shadow_stats()
1254 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1257 &rsd); in perf_stat__print_shadow_stats()
1260 &rsd); in perf_stat__print_shadow_stats()
1276 } else if (runtime_stat_n(st, STAT_NSECS, cpu, &rsd) != 0) { in perf_stat__print_shadow_stats()
1280 total = runtime_stat_avg(st, STAT_NSECS, cpu, &rsd); in perf_stat__print_shadow_stats()
1288 print_smi_cost(config, cpu, out, st, &rsd); in perf_stat__print_shadow_stats()
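Every branch of perf_stat__print_shadow_stats() quoted above follows the same recipe: declare rsd once as a local (line 959), then pass &rsd to runtime_stat_avg()/runtime_stat_n() or to one of the helpers. A sketch of the instructions-per-cycle branch around line 969; the initializer, the condition, and the print call are assumptions:

        struct runtime_stat_data rsd = {
                .ctx = evsel_context(evsel),    /* assumed initializer */
                .cgrp = evsel->cgrp,            /* assumed initializer */
        };

        if (evsel__match(evsel, HARDWARE, HW_INSTRUCTIONS)) {   /* assumed */
                total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd);
                if (total)
                        ratio = avg / total;    /* instructions per cycle */
                out->print_metric(config, out->ctx, NULL, "%7.2f",
                                  "insn per cycle", ratio);
        }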