Lines matching refs: n_events
988 n0 = cpuc->n_events; in x86_schedule_events()
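
These references appear to come from the x86 perf events core (arch/x86/events/core.c), where cpuc->n_events counts the events currently held in the per-CPU event_list[]. At line 988, x86_schedule_events() snapshots it as n0 to tell events that were already scheduled apart from the ones just collected after them. As an orientation aid, here is a minimal userspace model of the bookkeeping fields the lines below manipulate; the struct name, field types and array size are illustrative, not the kernel's definitions.

    /* Simplified stand-in for the per-CPU bookkeeping in struct cpu_hw_events.
     * Only the fields referenced in this listing are modeled. */
    #define NUM_COUNTERS 64

    struct hw_events_model {
            int   n_events;                     /* events in event_list[], i.e. accepted so far */
            int   n_added;                      /* of those, added since the last PMU enable */
            int   n_txn;                        /* of those, added inside the current transaction */
            void *event_list[NUM_COUNTERS];     /* first n_events entries are valid */
            int   assign[NUM_COUNTERS];         /* hardware counter index per event */
    };
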
1164 n = cpuc->n_events; in collect_events()
1165 if (!cpuc->n_events) in collect_events()
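
Lines 1164-1165: collect_events() appends a leader's group to the tail of the list, starting at the current n_events, and fails if the total would exceed the hardware counter budget (line 1165 special-cases an empty list). A minimal sketch of that append-with-budget pattern; the function and parameter names are made up for illustration.

    /* Hypothetical sketch of the collect pattern: append n_new entries after the
     * n already-accepted ones, refusing to exceed the hardware counter budget. */
    static int collect_into(void **list, int n, int max_count,
                            void **new_events, int n_new)
    {
            int i;

            if (n + n_new > max_count)
                    return -1;              /* more events than counters */

            for (i = 0; i < n_new; i++)
                    list[n + i] = new_events[i];

            return n + n_new;               /* tentative new count, not yet committed */
    }
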
1291 int n_running = cpuc->n_events - cpuc->n_added; in x86_pmu_enable()
1325 for (i = 0; i < cpuc->n_events; i++) { in x86_pmu_enable()
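
Lines 1291 and 1325: in x86_pmu_enable(), n_events - n_added is the number of events that were already running before the most recent adds; only those can have been moved by the scheduler and may need stopping, while the loop over all n_events then (re)programs whatever is not already running on its assigned counter. A simplified sketch of that split, with printf standing in for the stop/start machinery; the real code also skips events that are already running in place.

    #include <stdio.h>

    /* Hypothetical sketch: entries [0, n_running) were already on counters and may
     * have been moved; entries [n_running, n_events) were added since the last enable. */
    static void reprogram_after_add(int n_events, int n_added,
                                    const int *prev_idx, const int *new_idx)
    {
            int n_running = n_events - n_added;
            int i;

            for (i = 0; i < n_running; i++) {
                    if (prev_idx[i] != new_idx[i])
                            printf("stop event %d on old counter %d\n", i, prev_idx[i]);
            }

            for (i = 0; i < n_events; i++)
                    printf("start event %d on counter %d\n", i, new_idx[i]);
    }
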
1451 n0 = cpuc->n_events; in x86_pmu_add()
1485 cpuc->n_events = n; in x86_pmu_add()
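
Lines 1451 and 1485: x86_pmu_add() snapshots n0 = cpuc->n_events, tentatively collects the new event, and only publishes cpuc->n_events = n once scheduling succeeds (or when the decision is deferred to a transaction commit); n_added and n_txn then advance by the delta n - n0. A sketch of that commit arithmetic, with hypothetical parameters in place of the collect and schedule calls.

    /* Hypothetical sketch of the add/commit arithmetic: collect tentatively,
     * schedule, and only then publish the new count and the delta. */
    static int try_add(int *n_events, int *n_added, int *n_txn,
                       int n_collected, int in_txn, int schedule_ok)
    {
            int n0 = *n_events;

            if (n_collected < 0)
                    return -1;              /* collecting the event failed */

            if (!in_txn && !schedule_ok)
                    return -1;              /* does not fit on the counters */

            *n_events  = n_collected;       /* commit the collected list */
            *n_added  += n_collected - n0;
            *n_txn    += n_collected - n0;
            return 0;
    }
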
1634 for (i = 0; i < cpuc->n_events; i++) { in x86_pmu_del()
1639 if (WARN_ON_ONCE(i == cpuc->n_events)) /* called ->del() without ->add() ? */ in x86_pmu_del()
1643 if (i >= cpuc->n_events - cpuc->n_added) in x86_pmu_del()
1649 while (++i < cpuc->n_events) { in x86_pmu_del()
1654 --cpuc->n_events; in x86_pmu_del()
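
Lines 1634-1654: x86_pmu_del() does a linear search for the event, warns if it was never added, decrements n_added when the victim sits in the not-yet-enabled tail, then closes the gap in event_list[]/assign[] and shrinks n_events. A self-contained sketch of that compaction; names are illustrative.

    /* Hypothetical sketch of the delete path: find the slot, shift the tail down
     * one position to keep the list dense, and shrink the count. */
    static int del_event(void **event_list, int *assign,
                         int *n_events, int *n_added, void *event)
    {
            int n = *n_events;
            int i;

            for (i = 0; i < n; i++)
                    if (event_list[i] == event)
                            break;

            if (i == n)
                    return -1;              /* ->del() without ->add()? */

            if (i >= n - *n_added)          /* event was in the newly added tail */
                    --*n_added;

            while (++i < n) {               /* close the gap */
                    event_list[i - 1] = event_list[i];
                    assign[i - 1] = assign[i];
            }
            --*n_events;
            return 0;
    }
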
2264 __this_cpu_sub(cpu_hw_events.n_events, __this_cpu_read(cpu_hw_events.n_txn)); in x86_pmu_cancel_txn()
2290 n = cpuc->n_events; in x86_pmu_commit_txn()
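
Lines 2264 and 2290: a failed transaction is unwound by subtracting n_txn (the events added inside it) from both n_added and n_events, which truncates the tentatively collected tail; commit_txn instead schedules all n_events at once and, on success, zeroes n_txn. A sketch of both sides, with a function pointer standing in for schedule_events().

    /* Hypothetical sketch of transaction rollback: drop the tail added inside
     * the transaction by shrinking both counters by n_txn. */
    static void txn_cancel(int *n_events, int *n_added, int n_txn)
    {
            *n_added  -= n_txn;
            *n_events -= n_txn;
    }

    /* Commit: schedule the whole list; only on success does the tail become
     * permanent and the per-transaction count reset. */
    static int txn_commit(int n_events, int *n_txn, int (*schedule)(int n))
    {
            int ret = schedule(n_events);

            if (ret)
                    return ret;             /* caller will cancel the transaction */

            *n_txn = 0;
            return 0;
    }
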
2429 fake_cpuc->n_events = n; in validate_group()
2434 fake_cpuc->n_events = 0; in validate_group()
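
Lines 2429 and 2434: validate_group() rehearses scheduling on a throwaway fake_cpuc: it collects the leader's existing group, sets fake_cpuc->n_events = n so the candidate event is appended after it, then resets n_events to 0 before the dry run so nothing in the scratch context looks like an already-programmed event. A sketch of that dry-run shape; all names here are illustrative.

    /* Hypothetical sketch of group validation against a scratch context. */
    struct fake_ctx {
            int   n_events;
            void *event_list[64];
    };

    static int validate_group_sketch(struct fake_ctx *fake,
                                     void **group, int n_group, void *candidate,
                                     int (*schedule)(void **list, int n))
    {
            int n;

            if (n_group + 1 > 64)
                    return -1;                      /* scratch list too small */

            for (n = 0; n < n_group; n++)           /* collect the existing group */
                    fake->event_list[n] = group[n];
            fake->n_events = n;                     /* so the candidate lands after it */

            fake->event_list[n++] = candidate;      /* collect the new event */

            fake->n_events = 0;                     /* dry run: nothing counts as programmed */
            return schedule(fake->event_list, n);
    }
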
2484 for (i = 0; i < cpuc->n_events; i++) in perf_clear_dirty_counters()
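
Line 2484: perf_clear_dirty_counters() walks the first n_events assignments to drop the counters still in use from the dirty mask before wiping whatever is left over. A sketch of that mask subtraction with a plain 64-bit bitmap; the real code uses the kernel bitmap helpers.

    #include <stdint.h>

    /* Hypothetical sketch: counters still assigned to live events must not be
     * cleared, so remove them from the dirty mask first. assign[i] is assumed
     * to be a counter index below 64. */
    static uint64_t leftover_dirty(uint64_t dirty, const int *assign, int n_events)
    {
            int i;

            for (i = 0; i < n_events; i++)
                    dirty &= ~(UINT64_C(1) << assign[i]);

            return dirty;                   /* these counters can be zeroed */
    }
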