Lines Matching refs:n_events
990 n0 = cpuc->n_events; in x86_schedule_events()
1166 n = cpuc->n_events; in collect_events()
1167 if (!cpuc->n_events) in collect_events()
1295 int n_running = cpuc->n_events - cpuc->n_added; in x86_pmu_enable()
1329 for (i = 0; i < cpuc->n_events; i++) { in x86_pmu_enable()
1455 n0 = cpuc->n_events; in x86_pmu_add()
1489 cpuc->n_events = n; in x86_pmu_add()
1638 for (i = 0; i < cpuc->n_events; i++) { in x86_pmu_del()
1643 if (WARN_ON_ONCE(i == cpuc->n_events)) /* called ->del() without ->add() ? */ in x86_pmu_del()
1647 if (i >= cpuc->n_events - cpuc->n_added) in x86_pmu_del()
1653 while (++i < cpuc->n_events) { in x86_pmu_del()
1658 --cpuc->n_events; in x86_pmu_del()
2270 __this_cpu_sub(cpu_hw_events.n_events, __this_cpu_read(cpu_hw_events.n_txn)); in x86_pmu_cancel_txn()
2296 n = cpuc->n_events; in x86_pmu_commit_txn()
2435 fake_cpuc->n_events = n; in validate_group()
2440 fake_cpuc->n_events = 0; in validate_group()
2490 for (i = 0; i < cpuc->n_events; i++) in perf_clear_dirty_counters()
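Taken together, these hits show one bookkeeping pattern: cpuc->n_events counts the entries in the per-CPU event_list[], ->add() appends and bumps the count, ->del() closes the gap in the list and decrements it, n_added marks events collected but not yet scheduled (line 1295 computes n_running = n_events - n_added), and a cancelled transaction simply subtracts n_txn from n_events (line 2270). The following is a minimal user-space sketch of that pattern, not the kernel code: struct cpu_events, event_add(), event_del(), cancel_txn() and MAX_EVENTS are invented stand-ins for cpu_hw_events, x86_pmu_add(), x86_pmu_del(), x86_pmu_cancel_txn() and the real array bound, and all constraint/scheduling logic is omitted.

/*
 * Simplified model of the n_events / n_added / n_txn bookkeeping visible
 * in the references above. Illustrative only; names and structure are
 * loosely mirrored from arch/x86 perf code, not copied from it.
 */
#include <stdio.h>
#include <string.h>

#define MAX_EVENTS 64

struct event { int id; };

struct cpu_events {
	int n_events;                       /* entries in event_list[]              */
	int n_added;                        /* added since the last enable          */
	int n_txn;                          /* added inside the open transaction    */
	struct event *event_list[MAX_EVENTS];
};

/* Mirrors the ->add() path: remember n0 = n_events, append, commit new total. */
static int event_add(struct cpu_events *c, struct event *ev)
{
	int n0 = c->n_events;

	if (n0 >= MAX_EVENTS)
		return -1;                  /* no room left in the list */
	c->event_list[n0] = ev;
	c->n_events = n0 + 1;
	c->n_added++;
	c->n_txn++;
	return 0;
}

/* Mirrors the ->del() path: locate the event, close the gap, decrement. */
static void event_del(struct cpu_events *c, struct event *ev)
{
	int i;

	for (i = 0; i < c->n_events; i++)
		if (c->event_list[i] == ev)
			break;
	if (i == c->n_events)
		return;                     /* ->del() without ->add(); the kernel warns here */
	while (++i < c->n_events)
		c->event_list[i - 1] = c->event_list[i];
	--c->n_events;
}

/* Mirrors cancel_txn: drop everything added during the transaction. */
static void cancel_txn(struct cpu_events *c)
{
	c->n_events -= c->n_txn;
	c->n_added  -= c->n_txn;
	c->n_txn = 0;
}

int main(void)
{
	struct cpu_events cpuc;
	struct event a = { 1 }, b = { 2 };

	memset(&cpuc, 0, sizeof(cpuc));

	/* A transaction that gets cancelled: both adds are rolled back. */
	event_add(&cpuc, &a);
	event_add(&cpuc, &b);
	cancel_txn(&cpuc);
	printf("after cancel_txn: n_events=%d\n", cpuc.n_events);   /* 0 */

	/* A plain add/del pair. */
	event_add(&cpuc, &a);
	printf("after add:        n_events=%d\n", cpuc.n_events);   /* 1 */
	event_del(&cpuc, &a);
	printf("after del:        n_events=%d\n", cpuc.n_events);   /* 0 */
	return 0;
}

The point of keeping n_added separate in this sketch is the same one visible at lines 1295 and 1647: events at indexes below n_events - n_added are already running on hardware counters, while the tail of the list holds newly collected events that still need to be scheduled (or that can be dropped cheaply if the surrounding transaction is cancelled).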