Lines matching refs:rsd — cross-reference hits for the identifier rsd in perf's shadow-stat code. The leading number on each hit is the source line; each hit is tagged with its enclosing function ("argument" and "local" mark where rsd enters scope).
217 struct runtime_stat_data *rsd) in update_runtime_stat() argument
220 rsd->ctx, st, rsd->cgrp); in update_runtime_stat()
236 struct runtime_stat_data rsd = { in perf_stat__update_shadow_stats() local
244 update_runtime_stat(st, STAT_NSECS, cpu, count_ns, &rsd); in perf_stat__update_shadow_stats()
246 update_runtime_stat(st, STAT_CYCLES, cpu, count, &rsd); in perf_stat__update_shadow_stats()
248 update_runtime_stat(st, STAT_CYCLES_IN_TX, cpu, count, &rsd); in perf_stat__update_shadow_stats()
250 update_runtime_stat(st, STAT_TRANSACTION, cpu, count, &rsd); in perf_stat__update_shadow_stats()
252 update_runtime_stat(st, STAT_ELISION, cpu, count, &rsd); in perf_stat__update_shadow_stats()
255 cpu, count, &rsd); in perf_stat__update_shadow_stats()
258 cpu, count, &rsd); in perf_stat__update_shadow_stats()
261 cpu, count, &rsd); in perf_stat__update_shadow_stats()
264 cpu, count, &rsd); in perf_stat__update_shadow_stats()
267 cpu, count, &rsd); in perf_stat__update_shadow_stats()
270 cpu, count, &rsd); in perf_stat__update_shadow_stats()
273 cpu, count, &rsd); in perf_stat__update_shadow_stats()
276 cpu, count, &rsd); in perf_stat__update_shadow_stats()
279 cpu, count, &rsd); in perf_stat__update_shadow_stats()
282 cpu, count, &rsd); in perf_stat__update_shadow_stats()
285 cpu, count, &rsd); in perf_stat__update_shadow_stats()
288 cpu, count, &rsd); in perf_stat__update_shadow_stats()
291 cpu, count, &rsd); in perf_stat__update_shadow_stats()
294 cpu, count, &rsd); in perf_stat__update_shadow_stats()
297 cpu, count, &rsd); in perf_stat__update_shadow_stats()
299 update_runtime_stat(st, STAT_BRANCHES, cpu, count, &rsd); in perf_stat__update_shadow_stats()
301 update_runtime_stat(st, STAT_CACHEREFS, cpu, count, &rsd); in perf_stat__update_shadow_stats()
303 update_runtime_stat(st, STAT_L1_DCACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
305 update_runtime_stat(st, STAT_L1_ICACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
307 update_runtime_stat(st, STAT_LL_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
309 update_runtime_stat(st, STAT_DTLB_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
311 update_runtime_stat(st, STAT_ITLB_CACHE, cpu, count, &rsd); in perf_stat__update_shadow_stats()
313 update_runtime_stat(st, STAT_SMI_NUM, cpu, count, &rsd); in perf_stat__update_shadow_stats()
315 update_runtime_stat(st, STAT_APERF, cpu, count, &rsd); in perf_stat__update_shadow_stats()
319 rsd.cgrp); in perf_stat__update_shadow_stats()
325 cpu, true, STAT_NONE, 0, st, rsd.cgrp); in perf_stat__update_shadow_stats()
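perf_stat__update_shadow_stats() builds one rsd on the stack (the "local" hit at 236) and threads it through every update. A sketch of the dispatch pattern implied by the hits; the initializer fields and the evsel_context() helper are assumptions based on how the struct is consumed:

        void perf_stat__update_shadow_stats(struct evsel *counter, u64 count,
                                            int cpu, struct runtime_stat *st)
        {
                u64 count_ns = count;
                struct runtime_stat_data rsd = {
                        .ctx  = evsel_context(counter),  /* assumed helper */
                        .cgrp = counter->cgrp,
                };

                if (evsel__is_clock(counter))
                        update_runtime_stat(st, STAT_NSECS, cpu, count_ns, &rsd);
                else if (evsel__match(counter, HARDWARE, HW_CPU_CYCLES))
                        update_runtime_stat(st, STAT_CYCLES, cpu, count, &rsd);
                else if (evsel__match(counter, HARDWARE, HW_BRANCH_INSTRUCTIONS))
                        update_runtime_stat(st, STAT_BRANCHES, cpu, count, &rsd);
                /* ... one branch per remaining STAT_* type in the hits above ... */
        }

The two direct saved_value_lookup() hits at 319 and 325 show the same rsd.cgrp being passed even where the update_runtime_stat() helper is bypassed.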
468 struct runtime_stat_data *rsd) in runtime_stat_avg() argument
472 v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_avg()
481 struct runtime_stat_data *rsd) in runtime_stat_n() argument
485 v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp); in runtime_stat_n()
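runtime_stat_avg() and runtime_stat_n() are the read-side twins of the updater: both do a no-create lookup under the same (ctx, cgrp) key, then return either the running average or the sample count. A hedged sketch; avg_stats() and the stats.n field are assumed perf accessors:

        static double runtime_stat_avg(struct runtime_stat *st,
                                       enum stat_type type, int cpu,
                                       struct runtime_stat_data *rsd)
        {
                struct saved_value *v;

                v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp);
                if (!v)
                        return 0.0;

                return avg_stats(&v->stats);  /* assumed: mean of recorded counts */
        }

        static double runtime_stat_n(struct runtime_stat *st,
                                     enum stat_type type, int cpu,
                                     struct runtime_stat_data *rsd)
        {
                struct saved_value *v;

                v = saved_value_lookup(NULL, cpu, false, type, rsd->ctx, st, rsd->cgrp);
                if (!v)
                        return 0.0;

                return v->stats.n;  /* assumed field: number of samples seen */
        }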
496 struct runtime_stat_data *rsd) in print_stalled_cycles_frontend() argument
501 total = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_stalled_cycles_frontend()
519 struct runtime_stat_data *rsd) in print_stalled_cycles_backend() argument
524 total = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_stalled_cycles_backend()
538 struct runtime_stat_data *rsd) in print_branch_misses() argument
543 total = runtime_stat_avg(st, STAT_BRANCHES, cpu, rsd); in print_branch_misses()
557 struct runtime_stat_data *rsd) in print_l1_dcache_misses() argument
562 total = runtime_stat_avg(st, STAT_L1_DCACHE, cpu, rsd); in print_l1_dcache_misses()
576 struct runtime_stat_data *rsd) in print_l1_icache_misses() argument
581 total = runtime_stat_avg(st, STAT_L1_ICACHE, cpu, rsd); in print_l1_icache_misses()
594 struct runtime_stat_data *rsd) in print_dtlb_cache_misses() argument
599 total = runtime_stat_avg(st, STAT_DTLB_CACHE, cpu, rsd); in print_dtlb_cache_misses()
612 struct runtime_stat_data *rsd) in print_itlb_cache_misses() argument
617 total = runtime_stat_avg(st, STAT_ITLB_CACHE, cpu, rsd); in print_itlb_cache_misses()
630 struct runtime_stat_data *rsd) in print_ll_cache_misses() argument
635 total = runtime_stat_avg(st, STAT_LL_CACHE, cpu, rsd); in print_ll_cache_misses()
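The print_* helpers at 496-635 all follow one pattern: fetch the relevant baseline via runtime_stat_avg() and print the event as a percentage of it. A representative sketch for print_branch_misses; the print_metric call and get_ratio_color() helper are assumptions modeled on perf's metric printing:

        static void print_branch_misses(struct perf_stat_config *config,
                                        int cpu, double avg,
                                        struct perf_stat_output_ctx *out,
                                        struct runtime_stat *st,
                                        struct runtime_stat_data *rsd)
        {
                double total, ratio = 0.0;

                total = runtime_stat_avg(st, STAT_BRANCHES, cpu, rsd);
                if (total)
                        ratio = avg / total * 100.0;

                /* assumed: color by severity, then emit "of all branches" */
                out->print_metric(config, out->ctx,
                                  get_ratio_color(GRC_CACHE_MISSES, ratio),
                                  "%7.2f%%", "of all branches", ratio);
        }

The cache-miss variants differ only in which STAT_* baseline they divide by.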
694 struct runtime_stat_data *rsd) in td_total_slots() argument
696 return runtime_stat_avg(st, STAT_TOPDOWN_TOTAL_SLOTS, cpu, rsd); in td_total_slots()
700 struct runtime_stat_data *rsd) in td_bad_spec() argument
706 total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, cpu, rsd) - in td_bad_spec()
707 runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, cpu, rsd) + in td_bad_spec()
708 runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, cpu, rsd); in td_bad_spec()
710 total_slots = td_total_slots(cpu, st, rsd); in td_bad_spec()
717 struct runtime_stat_data *rsd) in td_retiring() argument
720 double total_slots = td_total_slots(cpu, st, rsd); in td_retiring()
722 cpu, rsd); in td_retiring()
730 struct runtime_stat_data *rsd) in td_fe_bound() argument
733 double total_slots = td_total_slots(cpu, st, rsd); in td_fe_bound()
735 cpu, rsd); in td_fe_bound()
743 struct runtime_stat_data *rsd) in td_be_bound() argument
745 double sum = (td_fe_bound(cpu, st, rsd) + in td_be_bound()
746 td_bad_spec(cpu, st, rsd) + in td_be_bound()
747 td_retiring(cpu, st, rsd)); in td_be_bound()
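The td_* helpers at 694-747 compute the classic top-down level-1 fractions from slot counters. Reconstructed shapes for the two non-trivial ones; sanitize_val() (assumed, clamping to [0,1]) is carried over from perf's existing topdown code:

        static double td_bad_spec(int cpu, struct runtime_stat *st,
                                  struct runtime_stat_data *rsd)
        {
                /* bad speculation = (issued - retired + recovery bubbles) / slots */
                double total =
                        runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, cpu, rsd) -
                        runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, cpu, rsd) +
                        runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, cpu, rsd);
                double total_slots = td_total_slots(cpu, st, rsd);

                return total_slots ? sanitize_val(total / total_slots) : 0.0;
        }

        static double td_be_bound(int cpu, struct runtime_stat *st,
                                  struct runtime_stat_data *rsd)
        {
                /* back-end bound is whatever the other three fractions leave over */
                double sum = td_fe_bound(cpu, st, rsd) +
                             td_bad_spec(cpu, st, rsd) +
                             td_retiring(cpu, st, rsd);

                return sum ? sanitize_val(1.0 - sum) : 0.0;
        }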
760 struct runtime_stat_data *rsd) in td_metric_ratio() argument
762 double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) + in td_metric_ratio()
763 runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) + in td_metric_ratio()
764 runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) + in td_metric_ratio()
765 runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd); in td_metric_ratio()
766 double d = runtime_stat_avg(stat, type, cpu, rsd); in td_metric_ratio()
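td_metric_ratio() normalizes one of the four hardware-reported top-down metrics against their sum, so each prints as a fraction of all slots. Its shape follows directly from the hits at 762-766:

        static double td_metric_ratio(int cpu, enum stat_type type,
                                      struct runtime_stat *stat,
                                      struct runtime_stat_data *rsd)
        {
                double sum = runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) +
                             runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) +
                             runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) +
                             runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd);
                double d = runtime_stat_avg(stat, type, cpu, rsd);

                return sum ? d / sum : 0.0;
        }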
779 struct runtime_stat_data *rsd) in full_td() argument
783 if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) > 0) in full_td()
785 if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) > 0) in full_td()
787 if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) > 0) in full_td()
789 if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd) > 0) in full_td()
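full_td() gates metric printing on all four top-down counters having data; the four hits above are the individual presence checks. A likely shape, with the all-four threshold an assumption:

        static bool full_td(int cpu, struct runtime_stat *stat,
                            struct runtime_stat_data *rsd)
        {
                int pos = 0;

                if (runtime_stat_avg(stat, STAT_TOPDOWN_RETIRING, cpu, rsd) > 0)
                        pos++;
                if (runtime_stat_avg(stat, STAT_TOPDOWN_BE_BOUND, cpu, rsd) > 0)
                        pos++;
                if (runtime_stat_avg(stat, STAT_TOPDOWN_FE_BOUND, cpu, rsd) > 0)
                        pos++;
                if (runtime_stat_avg(stat, STAT_TOPDOWN_BAD_SPEC, cpu, rsd) > 0)
                        pos++;

                return pos == 4;  /* assumed: require all four metrics present */
        }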
797 struct runtime_stat_data *rsd) in print_smi_cost() argument
802 smi_num = runtime_stat_avg(st, STAT_SMI_NUM, cpu, rsd); in print_smi_cost()
803 aperf = runtime_stat_avg(st, STAT_APERF, cpu, rsd); in print_smi_cost()
804 cycles = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd); in print_smi_cost()
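print_smi_cost() derives SMI overhead from the gap between APERF and measured cycles: cycles stolen by system management interrupts show up in APERF but not in the cycles count. A sketch of the arithmetic implied by the three lookups; the output call is an assumption:

        static void print_smi_cost(struct perf_stat_config *config, int cpu,
                                   struct perf_stat_output_ctx *out,
                                   struct runtime_stat *st,
                                   struct runtime_stat_data *rsd)
        {
                double cost = 0.0;
                double smi_num = runtime_stat_avg(st, STAT_SMI_NUM, cpu, rsd);
                double aperf   = runtime_stat_avg(st, STAT_APERF, cpu, rsd);
                double cycles  = runtime_stat_avg(st, STAT_CYCLES, cpu, rsd);

                if (!cycles || !aperf)
                        return;

                /* fraction of APERF cycles unaccounted for by normal execution */
                if (smi_num)
                        cost = (aperf - cycles) / aperf * 100.0;

                out->print_metric(config, out->ctx, NULL, "%8.1f%%",
                                  "SMI cycles%", cost);
        }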
968 struct runtime_stat_data rsd = { in perf_stat__print_shadow_stats() local
978 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
988 total = runtime_stat_avg(st, STAT_STALLED_CYCLES_FRONT, cpu, &rsd); in perf_stat__print_shadow_stats()
992 cpu, &rsd)); in perf_stat__print_shadow_stats()
1002 if (runtime_stat_n(st, STAT_BRANCHES, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1003 print_branch_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1012 if (runtime_stat_n(st, STAT_L1_DCACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1013 print_l1_dcache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1022 if (runtime_stat_n(st, STAT_L1_ICACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1023 print_l1_icache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1032 if (runtime_stat_n(st, STAT_DTLB_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1033 print_dtlb_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1042 if (runtime_stat_n(st, STAT_ITLB_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1043 print_itlb_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1052 if (runtime_stat_n(st, STAT_LL_CACHE, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1053 print_ll_cache_misses(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1057 total = runtime_stat_avg(st, STAT_CACHEREFS, cpu, &rsd); in perf_stat__print_shadow_stats()
1062 if (runtime_stat_n(st, STAT_CACHEREFS, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1068 print_stalled_cycles_frontend(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1070 print_stalled_cycles_backend(config, cpu, avg, out, st, &rsd); in perf_stat__print_shadow_stats()
1072 total = runtime_stat_avg(st, STAT_NSECS, cpu, &rsd); in perf_stat__print_shadow_stats()
1081 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
1091 total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd); in perf_stat__print_shadow_stats()
1092 total2 = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1102 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1107 if (runtime_stat_n(st, STAT_CYCLES_IN_TX, cpu, &rsd) != 0) in perf_stat__print_shadow_stats()
1114 total = runtime_stat_avg(st, STAT_CYCLES_IN_TX, cpu, &rsd); in perf_stat__print_shadow_stats()
1127 double fe_bound = td_fe_bound(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1134 double retiring = td_retiring(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1141 double bad_spec = td_bad_spec(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1148 double be_bound = td_be_bound(cpu, st, &rsd); in perf_stat__print_shadow_stats()
1161 if (td_total_slots(cpu, st, &rsd) > 0) in perf_stat__print_shadow_stats()
1167 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1170 &rsd); in perf_stat__print_shadow_stats()
1176 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1179 &rsd); in perf_stat__print_shadow_stats()
1185 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1188 &rsd); in perf_stat__print_shadow_stats()
1194 full_td(cpu, st, &rsd)) { in perf_stat__print_shadow_stats()
1197 &rsd); in perf_stat__print_shadow_stats()
1203 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1206 &rsd); in perf_stat__print_shadow_stats()
1209 &rsd); in perf_stat__print_shadow_stats()
1223 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1226 &rsd); in perf_stat__print_shadow_stats()
1229 &rsd); in perf_stat__print_shadow_stats()
1243 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1246 &rsd); in perf_stat__print_shadow_stats()
1249 &rsd); in perf_stat__print_shadow_stats()
1263 full_td(cpu, st, &rsd) && (config->topdown_level > 1)) { in perf_stat__print_shadow_stats()
1266 &rsd); in perf_stat__print_shadow_stats()
1269 &rsd); in perf_stat__print_shadow_stats()
1285 } else if (runtime_stat_n(st, STAT_NSECS, cpu, &rsd) != 0) { in perf_stat__print_shadow_stats()
1289 total = runtime_stat_avg(st, STAT_NSECS, cpu, &rsd); in perf_stat__print_shadow_stats()
1297 print_smi_cost(config, cpu, out, st, &rsd); in perf_stat__print_shadow_stats()
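perf_stat__print_shadow_stats() mirrors the updater: it builds a local rsd from the evsel being printed (the "local" hit at 968) and passes &rsd into every runtime_stat_avg()/runtime_stat_n() call and helper above, so a cgroup-scoped count is only ever divided by a baseline recorded for the same cgroup. A sketch of the setup and one branch; the signature and initializer fields are assumed from the update side:

        void perf_stat__print_shadow_stats(struct perf_stat_config *config,
                                           struct evsel *evsel,
                                           double avg, int cpu,
                                           struct perf_stat_output_ctx *out,
                                           struct runtime_stat *st)
        {
                struct runtime_stat_data rsd = {
                        .ctx  = evsel_context(evsel),  /* assumed helper */
                        .cgrp = evsel->cgrp,
                };

                /* e.g. IPC: instructions divided by the matching cycles baseline */
                if (evsel__match(evsel, HARDWARE, HW_INSTRUCTIONS)) {
                        double total = runtime_stat_avg(st, STAT_CYCLES, cpu, &rsd);

                        if (total)
                                out->print_metric(config, out->ctx, NULL, "%7.2f",
                                                  "insn per cycle", avg / total);
                }
                /* ... the remaining branches follow the hits at 978-1297 ... */
        }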