/Linux-v4.19/arch/alpha/kernel/
perf_event.c
    391  static void maybe_change_configuration(struct cpu_hw_events *cpuc)  in maybe_change_configuration() argument
    395  if (cpuc->n_added == 0)  in maybe_change_configuration()
    399  for (j = 0; j < cpuc->n_events; j++) {  in maybe_change_configuration()
    400  struct perf_event *pe = cpuc->event[j];  in maybe_change_configuration()
    402  if (cpuc->current_idx[j] != PMC_NO_INDEX &&  in maybe_change_configuration()
    403  cpuc->current_idx[j] != pe->hw.idx) {  in maybe_change_configuration()
    404  alpha_perf_event_update(pe, &pe->hw, cpuc->current_idx[j], 0);  in maybe_change_configuration()
    405  cpuc->current_idx[j] = PMC_NO_INDEX;  in maybe_change_configuration()
    410  cpuc->idx_mask = 0;  in maybe_change_configuration()
    411  for (j = 0; j < cpuc->n_events; j++) {  in maybe_change_configuration()
    [all …]

/Linux-v4.19/arch/x86/events/intel/
lbr.c
    146  static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
    155  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_enable() local
    169  if (cpuc->lbr_sel)  in __intel_pmu_lbr_enable()
    170  lbr_select = cpuc->lbr_sel->config & x86_pmu.lbr_sel_mask;  in __intel_pmu_lbr_enable()
    171  if (!pmi && cpuc->lbr_sel)  in __intel_pmu_lbr_enable()
    219  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_reset() local
    229  cpuc->last_task_ctx = NULL;  in intel_pmu_lbr_reset()
    230  cpuc->last_log_id = 0;  in intel_pmu_lbr_reset()
    342  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_restore() local
    359  if ((task_ctx == cpuc->last_task_ctx) &&  in __intel_pmu_lbr_restore()
    [all …]

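Nearly every hit in these x86 files opens with the same line: cpu_hw_events is a per-CPU variable, and each PMU routine fetches the current CPU's instance with this_cpu_ptr(). Below is a minimal sketch of that idiom; the two-field struct is an illustrative stand-in, not the kernel's real definition (the real one, with events[], active_mask, lbr_sel and so on, lives in arch/x86/events/perf_event.h):

    #include <linux/percpu.h>

    /* Illustrative stand-in; the real struct cpu_hw_events has
     * many more fields. */
    struct cpu_hw_events {
            int enabled;
            int n_added;
    };

    static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);

    static void pmu_disable_local(void)
    {
            /* Callers run with preemption disabled (often inside the
             * PMU interrupt handler), so the pointer cannot go stale. */
            struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

            if (!cpuc->enabled)
                    return;

            cpuc->n_added = 0;
            cpuc->enabled = 0;
    }

The ARM entries further down differ only in that the per-CPU data hangs off the PMU descriptor, via this_cpu_ptr(cpu_pmu->hw_events), instead of being a file-scope variable.
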
ds.c
    565  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_disable_bts() local
    568  if (!cpuc->ds)  in intel_pmu_disable_bts()
    582  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_drain_bts_buffer() local
    583  struct debug_store *ds = cpuc->ds;  in intel_pmu_drain_bts_buffer()
    589  struct perf_event *event = cpuc->events[INTEL_PMC_IDX_FIXED_BTS];  in intel_pmu_drain_bts_buffer()
    883  static inline bool pebs_needs_sched_cb(struct cpu_hw_events *cpuc)  in pebs_needs_sched_cb() argument
    885  return cpuc->n_pebs && (cpuc->n_pebs == cpuc->n_large_pebs);  in pebs_needs_sched_cb()
    890  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_sched_task() local
    892  if (!sched_in && pebs_needs_sched_cb(cpuc))  in intel_pmu_pebs_sched_task()
    896  static inline void pebs_update_threshold(struct cpu_hw_events *cpuc)  in pebs_update_threshold() argument
    [all …]

core.c
    1876  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_disable_all() local
    1880  if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask))  in __intel_pmu_disable_all()
    1894  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_enable_all() local
    1899  x86_pmu.intel_ctrl & ~cpuc->intel_ctrl_guest_mask);  in __intel_pmu_enable_all()
    1901  if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) {  in __intel_pmu_enable_all()
    1903  cpuc->events[INTEL_PMC_IDX_FIXED_BTS];  in __intel_pmu_enable_all()
    1933  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_nhm_workaround() local
    1966  event = cpuc->events[i];  in intel_pmu_nhm_workaround()
    1980  event = cpuc->events[i];  in intel_pmu_nhm_workaround()
    2032  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_disable_event() local
    [all …]

bts.c
    256  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_start() local
    267  bts->ds_back.bts_buffer_base = cpuc->ds->bts_buffer_base;  in bts_event_start()
    268  bts->ds_back.bts_absolute_maximum = cpuc->ds->bts_absolute_maximum;  in bts_event_start()
    269  bts->ds_back.bts_interrupt_threshold = cpuc->ds->bts_interrupt_threshold;  in bts_event_start()
    301  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_stop() local
    326  cpuc->ds->bts_index = bts->ds_back.bts_buffer_base;  in bts_event_stop()
    327  cpuc->ds->bts_buffer_base = bts->ds_back.bts_buffer_base;  in bts_event_stop()
    328  cpuc->ds->bts_absolute_maximum = bts->ds_back.bts_absolute_maximum;  in bts_event_stop()
    329  cpuc->ds->bts_interrupt_threshold = bts->ds_back.bts_interrupt_threshold;  in bts_event_stop()
    516  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_add() local
    [all …]

knc.c
    216  struct cpu_hw_events *cpuc;  in knc_pmu_handle_irq() local
    221  cpuc = this_cpu_ptr(&cpu_hw_events);  in knc_pmu_handle_irq()
    243  struct perf_event *event = cpuc->events[bit];  in knc_pmu_handle_irq()
    247  if (!test_bit(bit, cpuc->active_mask))  in knc_pmu_handle_irq()
    268  if (cpuc->enabled)  in knc_pmu_handle_irq()

p4.c
    918  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_disable_all() local
    922  struct perf_event *event = cpuc->events[idx];  in p4_pmu_disable_all()
    923  if (!test_bit(idx, cpuc->active_mask))  in p4_pmu_disable_all()
    987  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_enable_all() local
    991  struct perf_event *event = cpuc->events[idx];  in p4_pmu_enable_all()
    992  if (!test_bit(idx, cpuc->active_mask))  in p4_pmu_enable_all()
    1001  struct cpu_hw_events *cpuc;  in p4_pmu_handle_irq() local
    1007  cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_handle_irq()
    1012  if (!test_bit(idx, cpuc->active_mask)) {  in p4_pmu_handle_irq()
    1014  if (__test_and_clear_bit(idx, cpuc->running))  in p4_pmu_handle_irq()
    [all …]

/Linux-v4.19/arch/sparc/kernel/
perf_event.c
    827  static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in…  in sparc_pmu_enable_event() argument
    835  enc = perf_event_get_enc(cpuc->events[idx]);  in sparc_pmu_enable_event()
    837  val = cpuc->pcr[pcr_index];  in sparc_pmu_enable_event()
    840  cpuc->pcr[pcr_index] = val;  in sparc_pmu_enable_event()
    842  pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]);  in sparc_pmu_enable_event()
    845  static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i…  in sparc_pmu_disable_event() argument
    855  val = cpuc->pcr[pcr_index];  in sparc_pmu_disable_event()
    858  cpuc->pcr[pcr_index] = val;  in sparc_pmu_disable_event()
    860  pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]);  in sparc_pmu_disable_event()
    919  static void read_in_all_counters(struct cpu_hw_events *cpuc)  in read_in_all_counters() argument
    [all …]

/Linux-v4.19/arch/x86/events/
core.c
    616  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable_all() local
    622  if (!test_bit(idx, cpuc->active_mask))  in x86_pmu_disable_all()
    647  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable() local
    652  if (!cpuc->enabled)  in x86_pmu_disable()
    655  cpuc->n_added = 0;  in x86_pmu_disable()
    656  cpuc->enabled = 0;  in x86_pmu_disable()
    664  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_enable_all() local
    668  struct hw_perf_event *hwc = &cpuc->events[idx]->hw;  in x86_pmu_enable_all()
    670  if (!test_bit(idx, cpuc->active_mask))  in x86_pmu_enable_all()
    867  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)  in x86_schedule_events() argument
    [all …]

perf_event.h
    528  int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
    547  (*get_event_constraints)(struct cpu_hw_events *cpuc,
    551  void (*put_event_constraints)(struct cpu_hw_events *cpuc,
    554  void (*start_scheduling)(struct cpu_hw_events *cpuc);
    556  void (*commit_scheduling)(struct cpu_hw_events *cpuc, int idx, int cntr);
    558  void (*stop_scheduling)(struct cpu_hw_events *cpuc);
    785  int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign);
    870  x86_get_event_constraints(struct cpu_hw_events *cpuc, int idx,

/Linux-v4.19/arch/x86/events/amd/
core.c
    207  static inline int amd_has_nb(struct cpu_hw_events *cpuc)  in amd_has_nb() argument
    209  struct amd_nb *nb = cpuc->amd_nb;  in amd_has_nb()
    235  static void __amd_put_nb_event_constraints(struct cpu_hw_events *cpuc,  in __amd_put_nb_event_constraints() argument
    238  struct amd_nb *nb = cpuc->amd_nb;  in __amd_put_nb_event_constraints()
    292  __amd_get_nb_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event,  in __amd_get_nb_event_constraints() argument
    296  struct amd_nb *nb = cpuc->amd_nb;  in __amd_get_nb_event_constraints()
    303  if (cpuc->is_fake)  in __amd_get_nb_event_constraints()
    368  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_prepare() local
    370  WARN_ON_ONCE(cpuc->amd_nb);  in amd_pmu_cpu_prepare()
    375  cpuc->amd_nb = amd_alloc_nb(cpu);  in amd_pmu_cpu_prepare()
    [all …]

/Linux-v4.19/arch/sh/kernel/
perf_event.c
    222  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_stop() local
    228  cpuc->events[idx] = NULL;  in sh_pmu_stop()
    240  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_start() local
    250  cpuc->events[idx] = event;  in sh_pmu_start()
    257  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_del() local
    260  __clear_bit(event->hw.idx, cpuc->used_mask);  in sh_pmu_del()
    267  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_add() local
    274  if (__test_and_set_bit(idx, cpuc->used_mask)) {  in sh_pmu_add()
    275  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);  in sh_pmu_add()
    279  __set_bit(idx, cpuc->used_mask);  in sh_pmu_add()

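sh_pmu_add() above adds a small twist to counter allocation: it first retries the counter the event used last time (hwc->idx) and only scans used_mask for a free slot when that one is taken. A hedged sketch of just that allocation step; the helper name and parameters are invented for illustration:

    #include <linux/bitops.h>
    #include <linux/errno.h>

    /* Hypothetical helper: prefer prev_idx, else take any free counter.
     * The non-atomic __test_and_set_bit()/__set_bit() match the sh code,
     * which runs with the PMU context already serialized. */
    static int alloc_counter_sticky(unsigned long *used_mask, int prev_idx,
                                    int num_counters)
    {
            int idx = prev_idx;

            if (__test_and_set_bit(idx, used_mask)) {
                    idx = find_first_zero_bit(used_mask, num_counters);
                    if (idx == num_counters)
                            return -EAGAIN; /* no counter free */
                    __set_bit(idx, used_mask);
            }

            return idx;
    }
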
/Linux-v4.19/arch/arm/kernel/
perf_event_xscale.c
    149  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in xscale1pmu_handle_irq() local
    174  struct perf_event *event = cpuc->events[idx];  in xscale1pmu_handle_irq()
    275  xscale1pmu_get_event_idx(struct pmu_hw_events *cpuc,  in xscale1pmu_get_event_idx() argument
    280  if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    285  if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    288  if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    295  static void xscalepmu_clear_event_idx(struct pmu_hw_events *cpuc,  in xscalepmu_clear_event_idx() argument
    298  clear_bit(event->hw.idx, cpuc->used_mask);  in xscalepmu_clear_event_idx()
    501  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in xscale2pmu_handle_irq() local
    520  struct perf_event *event = cpuc->events[idx];  in xscale2pmu_handle_irq()
    [all …]

perf_event_v6.c
    310  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv6pmu_handle_irq() local
    327  struct perf_event *event = cpuc->events[idx];  in armv6pmu_handle_irq()
    388  armv6pmu_get_event_idx(struct pmu_hw_events *cpuc,  in armv6pmu_get_event_idx() argument
    394  if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))  in armv6pmu_get_event_idx()
    403  if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))  in armv6pmu_get_event_idx()
    406  if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))  in armv6pmu_get_event_idx()
    414  static void armv6pmu_clear_event_idx(struct pmu_hw_events *cpuc,  in armv6pmu_clear_event_idx() argument
    417  clear_bit(event->hw.idx, cpuc->used_mask);  in armv6pmu_clear_event_idx()

perf_event_v7.c
    953  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv7pmu_handle_irq() local
    974  struct perf_event *event = cpuc->events[idx];  in armv7pmu_handle_irq()
    1032  static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,  in armv7pmu_get_event_idx() argument
    1042  if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))  in armv7pmu_get_event_idx()
    1053  if (!test_and_set_bit(idx, cpuc->used_mask))  in armv7pmu_get_event_idx()
    1061  static void armv7pmu_clear_event_idx(struct pmu_hw_events *cpuc,  in armv7pmu_clear_event_idx() argument
    1064  clear_bit(event->hw.idx, cpuc->used_mask);  in armv7pmu_clear_event_idx()
    1606  static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,  in krait_pmu_get_event_idx() argument
    1626  if (test_and_set_bit(bit, cpuc->used_mask))  in krait_pmu_get_event_idx()
    1630  idx = armv7pmu_get_event_idx(cpuc, event);  in krait_pmu_get_event_idx()
    [all …]

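The xscale, v6, and v7 get_event_idx callbacks above share one allocation idiom: claim a hardware counter by atomically setting its bit in cpuc->used_mask, so two events racing for the same counter cannot both win. A reduced sketch follows; the cut-down struct and MAX_COUNTERS are illustrative, and the real armv7pmu_get_event_idx() additionally special-cases the cycle counter:

    #include <linux/bitops.h>
    #include <linux/errno.h>

    #define MAX_COUNTERS 32 /* illustrative bound */

    /* Illustrative stand-in for struct pmu_hw_events, reduced to
     * the one field this idiom needs. */
    struct pmu_hw_events {
            unsigned long used_mask[BITS_TO_LONGS(MAX_COUNTERS)];
    };

    static int get_free_counter(struct pmu_hw_events *cpuc, int num_counters)
    {
            int idx;

            for (idx = 0; idx < num_counters; idx++) {
                    /* test_and_set_bit() returns the old bit value, so
                     * 0 means the counter was free and is now ours. */
                    if (!test_and_set_bit(idx, cpuc->used_mask))
                            return idx;
            }

            return -EAGAIN;
    }

Freeing is the mirror image, as the clear_event_idx callbacks show: clear_bit(event->hw.idx, cpuc->used_mask).
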
/Linux-v4.19/arch/mips/kernel/
perf_event_mipsxx.c
    294  static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc,  in mipsxx_pmu_alloc_counter() argument
    317  !test_and_set_bit(i, cpuc->used_mask))  in mipsxx_pmu_alloc_counter()
    327  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_enable_event() local
    334  cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) |  in mipsxx_pmu_enable_event()
    342  cpuc->saved_ctrl[idx] |=  in mipsxx_pmu_enable_event()
    350  cpuc->saved_ctrl[idx] |= M_TC_EN_ALL;  in mipsxx_pmu_enable_event()
    365  cpuc->saved_ctrl[idx] |= ctrl;  in mipsxx_pmu_enable_event()
    376  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_disable_event() local
    382  cpuc->saved_ctrl[idx] = mipsxx_pmu_read_control(idx) &  in mipsxx_pmu_disable_event()
    384  mipsxx_pmu_write_control(idx, cpuc->saved_ctrl[idx]);  in mipsxx_pmu_disable_event()
    [all …]

/Linux-v4.19/arch/riscv/kernel/
perf_event.c
    299  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in riscv_pmu_add() local
    302  if (cpuc->n_events == riscv_pmu->num_counters)  in riscv_pmu_add()
    314  cpuc->events[hwc->idx] = event;  in riscv_pmu_add()
    315  cpuc->n_events++;  in riscv_pmu_add()
    330  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in riscv_pmu_del() local
    333  cpuc->events[hwc->idx] = NULL;  in riscv_pmu_del()
    334  cpuc->n_events--;  in riscv_pmu_del()

/Linux-v4.19/arch/arm64/kernel/
perf_event.c
    803  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv8pmu_handle_irq() local
    829  struct perf_event *event = cpuc->events[idx];  in armv8pmu_handle_irq()
    866  static int armv8pmu_get_single_idx(struct pmu_hw_events *cpuc,  in armv8pmu_get_single_idx() argument
    872  if (!test_and_set_bit(idx, cpuc->used_mask))  in armv8pmu_get_single_idx()
    878  static int armv8pmu_get_chain_idx(struct pmu_hw_events *cpuc,  in armv8pmu_get_chain_idx() argument
    888  if (!test_and_set_bit(idx, cpuc->used_mask)) {  in armv8pmu_get_chain_idx()
    890  if (!test_and_set_bit(idx - 1, cpuc->used_mask))  in armv8pmu_get_chain_idx()
    893  clear_bit(idx, cpuc->used_mask);  in armv8pmu_get_chain_idx()
    899  static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,  in armv8pmu_get_event_idx() argument
    908  if (!test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))  in armv8pmu_get_event_idx()
    [all …]

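armv8pmu_get_chain_idx() above extends the bitmap idiom to 64-bit chained events, which need an adjacent even/odd counter pair; the interesting detail is the roll-back when only half the pair is free. A simplified sketch of that claim-or-back-out step, reusing the reduced pmu_hw_events from the earlier sketch (the function name is invented for illustration):

    /* Claim an odd counter together with the even counter below it;
     * release the first bit again if the partner is already taken. */
    static int get_chain_pair(struct pmu_hw_events *cpuc, int num_counters)
    {
            int idx;

            for (idx = 1; idx < num_counters; idx += 2) {
                    if (!test_and_set_bit(idx, cpuc->used_mask)) {
                            if (!test_and_set_bit(idx - 1, cpuc->used_mask))
                                    return idx; /* got both halves */
                            /* partner busy: back out so the mask
                             * stays consistent */
                            clear_bit(idx, cpuc->used_mask);
                    }
            }

            return -EAGAIN;
    }
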
/Linux-v4.19/kernel/rcu/
srcutree.c
    261  struct srcu_data *cpuc = per_cpu_ptr(sp->sda, cpu);  in srcu_readers_lock_idx() local
    263  sum += READ_ONCE(cpuc->srcu_lock_count[idx]);  in srcu_readers_lock_idx()
    278  struct srcu_data *cpuc = per_cpu_ptr(sp->sda, cpu);  in srcu_readers_unlock_idx() local
    280  sum += READ_ONCE(cpuc->srcu_unlock_count[idx]);  in srcu_readers_unlock_idx()
    347  struct srcu_data *cpuc = per_cpu_ptr(sp->sda, cpu);  in srcu_readers_active() local
    349  sum += READ_ONCE(cpuc->srcu_lock_count[0]);  in srcu_readers_active()
    350  sum += READ_ONCE(cpuc->srcu_lock_count[1]);  in srcu_readers_active()
    351  sum -= READ_ONCE(cpuc->srcu_unlock_count[0]);  in srcu_readers_active()
    352  sum -= READ_ONCE(cpuc->srcu_unlock_count[1]);  in srcu_readers_active()

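The srcutree.c hits use the complementary half of the per-CPU API: per_cpu_ptr() addresses a chosen CPU's srcu_data, letting the grace-period code sum reader counts across all CPUs. A sketch of that summation pattern, with a cut-down srcu_data standing in for the real one:

    #include <linux/percpu.h>
    #include <linux/cpumask.h>   /* for_each_possible_cpu() */
    #include <linux/compiler.h>  /* READ_ONCE() */

    /* Illustrative stand-in for struct srcu_data. */
    struct srcu_data {
            unsigned long srcu_lock_count[2];   /* one slot per SRCU phase */
            unsigned long srcu_unlock_count[2];
    };

    static unsigned long readers_lock_idx(struct srcu_data __percpu *sda,
                                          int idx)
    {
            unsigned long sum = 0;
            int cpu;

            /* Readers bump their own CPU's counter locklessly, hence
             * READ_ONCE(); the total is only meaningful when compared
             * against the matching unlock sum, as srcu_readers_active()
             * does above. */
            for_each_possible_cpu(cpu) {
                    struct srcu_data *cpuc = per_cpu_ptr(sda, cpu);

                    sum += READ_ONCE(cpuc->srcu_lock_count[idx]);
            }

            return sum;
    }
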