Lines Matching refs:evsel

27 	struct evsel *evsel;  member
60 if (a->evsel == NULL && b->evsel == NULL) { in saved_value_cmp()
70 if (a->evsel == b->evsel) in saved_value_cmp()
72 if ((char *)a->evsel < (char *)b->evsel) in saved_value_cmp()
98 static struct saved_value *saved_value_lookup(struct evsel *evsel, in saved_value_lookup() argument
109 .evsel = evsel, in saved_value_lookup()
149 static int evsel_context(struct evsel *evsel) in evsel_context() argument
153 if (evsel->core.attr.exclude_kernel) in evsel_context()
155 if (evsel->core.attr.exclude_user) in evsel_context()
157 if (evsel->core.attr.exclude_hv) in evsel_context()
159 if (evsel->core.attr.exclude_host) in evsel_context()
161 if (evsel->core.attr.exclude_idle) in evsel_context()
210 void perf_stat__update_shadow_stats(struct evsel *counter, u64 count, in perf_stat__update_shadow_stats()
321 static struct evsel *perf_stat__find_event(struct evlist *evsel_list, in perf_stat__find_event()
324 struct evsel *c2; in perf_stat__find_event()
336 struct evsel *counter, *leader, **metric_events, *oc; in perf_stat__collect_metric_expr()
359 metric_events = calloc(sizeof(struct evsel *), in perf_stat__collect_metric_expr()
450 struct evsel *evsel, double avg, in print_stalled_cycles_frontend() argument
456 int ctx = evsel_context(evsel); in print_stalled_cycles_frontend()
474 struct evsel *evsel, double avg, in print_stalled_cycles_backend() argument
480 int ctx = evsel_context(evsel); in print_stalled_cycles_backend()
494 struct evsel *evsel, in print_branch_misses() argument
501 int ctx = evsel_context(evsel); in print_branch_misses()
515 struct evsel *evsel, in print_l1_dcache_misses() argument
523 int ctx = evsel_context(evsel); in print_l1_dcache_misses()
537 struct evsel *evsel, in print_l1_icache_misses() argument
545 int ctx = evsel_context(evsel); in print_l1_icache_misses()
558 struct evsel *evsel, in print_dtlb_cache_misses() argument
565 int ctx = evsel_context(evsel); in print_dtlb_cache_misses()
578 struct evsel *evsel, in print_itlb_cache_misses() argument
585 int ctx = evsel_context(evsel); in print_itlb_cache_misses()
598 struct evsel *evsel, in print_ll_cache_misses() argument
605 int ctx = evsel_context(evsel); in print_ll_cache_misses()
762 int cpu, struct evsel *evsel, in print_smi_cost() argument
767 int ctx = evsel_context(evsel); in print_smi_cost()
786 static int prepare_metric(struct evsel **metric_events, in prepare_metric()
846 struct evsel **metric_events, in generic_metric()
923 struct evsel *evsel, in perf_stat__print_shadow_stats() argument
933 int ctx = evsel_context(evsel); in perf_stat__print_shadow_stats()
937 if (evsel__match(evsel, HARDWARE, HW_INSTRUCTIONS)) { in perf_stat__print_shadow_stats()
962 } else if (evsel__match(evsel, HARDWARE, HW_BRANCH_MISSES)) { in perf_stat__print_shadow_stats()
964 print_branch_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
968 evsel->core.attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
969 evsel->core.attr.config == ( PERF_COUNT_HW_CACHE_L1D | in perf_stat__print_shadow_stats()
974 print_l1_dcache_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
978 evsel->core.attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
979 evsel->core.attr.config == ( PERF_COUNT_HW_CACHE_L1I | in perf_stat__print_shadow_stats()
984 print_l1_icache_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
988 evsel->core.attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
989 evsel->core.attr.config == ( PERF_COUNT_HW_CACHE_DTLB | in perf_stat__print_shadow_stats()
994 print_dtlb_cache_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
998 evsel->core.attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
999 evsel->core.attr.config == ( PERF_COUNT_HW_CACHE_ITLB | in perf_stat__print_shadow_stats()
1004 print_itlb_cache_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
1008 evsel->core.attr.type == PERF_TYPE_HW_CACHE && in perf_stat__print_shadow_stats()
1009 evsel->core.attr.config == ( PERF_COUNT_HW_CACHE_LL | in perf_stat__print_shadow_stats()
1014 print_ll_cache_misses(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
1017 } else if (evsel__match(evsel, HARDWARE, HW_CACHE_MISSES)) { in perf_stat__print_shadow_stats()
1028 } else if (evsel__match(evsel, HARDWARE, HW_STALLED_CYCLES_FRONTEND)) { in perf_stat__print_shadow_stats()
1029 print_stalled_cycles_frontend(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
1030 } else if (evsel__match(evsel, HARDWARE, HW_STALLED_CYCLES_BACKEND)) { in perf_stat__print_shadow_stats()
1031 print_stalled_cycles_backend(config, cpu, evsel, avg, out, st); in perf_stat__print_shadow_stats()
1032 } else if (evsel__match(evsel, HARDWARE, HW_CPU_CYCLES)) { in perf_stat__print_shadow_stats()
1041 } else if (perf_stat_evsel__is(evsel, CYCLES_IN_TX)) { in perf_stat__print_shadow_stats()
1051 } else if (perf_stat_evsel__is(evsel, CYCLES_IN_TX_CP)) { in perf_stat__print_shadow_stats()
1062 } else if (perf_stat_evsel__is(evsel, TRANSACTION_START)) { in perf_stat__print_shadow_stats()
1075 } else if (perf_stat_evsel__is(evsel, ELISION_START)) { in perf_stat__print_shadow_stats()
1083 } else if (evsel__is_clock(evsel)) { in perf_stat__print_shadow_stats()
1086 avg / (ratio * evsel->scale)); in perf_stat__print_shadow_stats()
1089 } else if (perf_stat_evsel__is(evsel, TOPDOWN_FETCH_BUBBLES)) { in perf_stat__print_shadow_stats()
1096 } else if (perf_stat_evsel__is(evsel, TOPDOWN_SLOTS_RETIRED)) { in perf_stat__print_shadow_stats()
1103 } else if (perf_stat_evsel__is(evsel, TOPDOWN_RECOVERY_BUBBLES)) { in perf_stat__print_shadow_stats()
1110 } else if (perf_stat_evsel__is(evsel, TOPDOWN_SLOTS_ISSUED)) { in perf_stat__print_shadow_stats()
1129 } else if (perf_stat_evsel__is(evsel, TOPDOWN_RETIRING) && in perf_stat__print_shadow_stats()
1138 } else if (perf_stat_evsel__is(evsel, TOPDOWN_FE_BOUND) && in perf_stat__print_shadow_stats()
1147 } else if (perf_stat_evsel__is(evsel, TOPDOWN_BE_BOUND) && in perf_stat__print_shadow_stats()
1156 } else if (perf_stat_evsel__is(evsel, TOPDOWN_BAD_SPEC) && in perf_stat__print_shadow_stats()
1165 } else if (evsel->metric_expr) { in perf_stat__print_shadow_stats()
1166 generic_metric(config, evsel->metric_expr, evsel->metric_events, NULL, in perf_stat__print_shadow_stats()
1167 evsel->name, evsel->metric_name, NULL, 1, cpu, out, st); in perf_stat__print_shadow_stats()
1182 } else if (perf_stat_evsel__is(evsel, SMI_NUM)) { in perf_stat__print_shadow_stats()
1183 print_smi_cost(config, cpu, evsel, out, st); in perf_stat__print_shadow_stats()
1188 if ((me = metricgroup__lookup(metric_events, evsel, false)) != NULL) { in perf_stat__print_shadow_stats()
1195 mexp->metric_refs, evsel->name, mexp->metric_name, in perf_stat__print_shadow_stats()
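
The evsel_context() references above fold the event's exclude_* attribute bits into a small context index, so that shadow counts for the same hardware event but with different privilege-level exclusions land in separate slots. Below is a minimal standalone sketch of that bitmask idea, not the perf source itself: the CTX_BIT_* values and the simplified attribute struct are illustrative assumptions, and only the exclude_* field names come from the listing.

/*
 * Sketch of the context-index bitmask behind the evsel_context() lines
 * above.  CTX_BIT_* values and struct layout are assumed for illustration.
 */
#include <stdio.h>

enum {
	CTX_BIT_USER   = 1 << 0,
	CTX_BIT_KERNEL = 1 << 1,
	CTX_BIT_HV     = 1 << 2,
	CTX_BIT_HOST   = 1 << 3,
	CTX_BIT_IDLE   = 1 << 4,
};

struct sample_attr {
	unsigned exclude_user   : 1;
	unsigned exclude_kernel : 1;
	unsigned exclude_hv     : 1;
	unsigned exclude_host   : 1;
	unsigned exclude_idle   : 1;
};

static int context_of(const struct sample_attr *attr)
{
	int ctx = 0;

	if (attr->exclude_kernel)
		ctx |= CTX_BIT_KERNEL;
	if (attr->exclude_user)
		ctx |= CTX_BIT_USER;
	if (attr->exclude_hv)
		ctx |= CTX_BIT_HV;
	if (attr->exclude_host)
		ctx |= CTX_BIT_HOST;
	if (attr->exclude_idle)
		ctx |= CTX_BIT_IDLE;

	return ctx;
}

int main(void)
{
	/* e.g. a user-space-only event: kernel and hypervisor samples excluded */
	struct sample_attr attr = { .exclude_kernel = 1, .exclude_hv = 1 };

	printf("context index: %d\n", context_of(&attr));
	return 0;
}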
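
Several perf_stat__print_shadow_stats() branches in the listing compare evsel->core.attr.config against a PERF_COUNT_HW_CACHE_* expression. For PERF_TYPE_HW_CACHE events the config word packs cache id, operation and result as (cache_id) | (op_id << 8) | (result_id << 16), which is what the truncated "( PERF_COUNT_HW_CACHE_L1D | ..." comparisons around source line 969 are building. A small sketch of that encoding using the UAPI constants from <linux/perf_event.h>; the build_cache_config() helper is ours for illustration, not a perf API.

/*
 * PERF_TYPE_HW_CACHE config encoding: cache id in bits 0-7, operation id
 * in bits 8-15, result id in bits 16-23.
 */
#include <stdio.h>
#include <inttypes.h>
#include <linux/perf_event.h>

static uint64_t build_cache_config(unsigned cache, unsigned op, unsigned result)
{
	return cache | (op << 8) | (result << 16);
}

int main(void)
{
	/* L1 data-cache read misses, as matched by the L1D branch above */
	uint64_t config = build_cache_config(PERF_COUNT_HW_CACHE_L1D,
					     PERF_COUNT_HW_CACHE_OP_READ,
					     PERF_COUNT_HW_CACHE_RESULT_MISS);

	printf("type=%d config=0x%" PRIx64 "\n", PERF_TYPE_HW_CACHE, config);
	return 0;
}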