Searched refs:cpuhw (Results 1 – 7 of 7) sorted by relevance
101  struct cpu_hw_events *cpuhw;  in validate_ctr_version()  local
105  cpuhw = &get_cpu_var(cpu_hw_events);  in validate_ctr_version()
111  if (cpuhw->info.cfvn < 1)  in validate_ctr_version()
116  if (cpuhw->info.csvn < 1)  in validate_ctr_version()
118  if ((cpuhw->info.csvn == 1 && hwc->config > 159) ||  in validate_ctr_version()
119  (cpuhw->info.csvn == 2 && hwc->config > 175) ||  in validate_ctr_version()
120  (cpuhw->info.csvn > 2 && hwc->config > 255))  in validate_ctr_version()
124  if (cpuhw->info.csvn <= 3)  in validate_ctr_version()
139  if (!((cpuhw->info.auth_ctl & mtdiag_ctl) &&  in validate_ctr_version()
140  (cpuhw->info.enable_ctl & mtdiag_ctl) &&  in validate_ctr_version()
[all …]
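The validate_ctr_version() hits above gate an event's raw config number on the counter-facility (cfvn) and counter-set (csvn) versions read from the per-CPU info block. Below is a minimal sketch of that gating, using the cutoffs visible in the snippet; the helper name and its shape are hypothetical, not the s390 cpum_cf code.

#include <linux/types.h>

/* Hypothetical helper: accept a raw event number only if the reported
 * counter-set version (csvn) knows about it.  The cutoffs mirror the
 * checks shown in the search result above. */
static bool csvn_accepts_config(unsigned int csvn, unsigned long config)
{
        if (csvn < 1)
                return false;           /* counter sets not reported */
        if (csvn == 1)
                return config <= 159;
        if (csvn == 2)
                return config <= 175;
        return config <= 255;           /* csvn > 2 */
}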
116  static int sf_buffer_available(struct cpu_hw_sf *cpuhw)  in sf_buffer_available()  argument
118  return !!cpuhw->sfb.sdbt;  in sf_buffer_available()
352  static void deallocate_buffers(struct cpu_hw_sf *cpuhw)  in deallocate_buffers()  argument
354  if (cpuhw->sfb.sdbt)  in deallocate_buffers()
355  free_sampling_buffer(&cpuhw->sfb);  in deallocate_buffers()
358  static int allocate_buffers(struct cpu_hw_sf *cpuhw, struct hw_perf_event *hwc)  in allocate_buffers()  argument
388  freq = sample_rate_to_freq(&cpuhw->qsi, SAMPL_RATE(hwc));  in allocate_buffers()
403  if (sf_buffer_available(cpuhw))  in allocate_buffers()
410  sample_size, cpuhw);  in allocate_buffers()
412  return alloc_sampling_buffer(&cpuhw->sfb,  in allocate_buffers()
[all …]
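allocate_buffers() and its sf_buffer_available() guard above implement an allocate-once pattern: the per-CPU sampling state keeps the buffer, allocation is skipped when one is already attached, and teardown funnels through a single free helper. A rough sketch of that shape follows; all names and the kzalloc()-based storage are illustrative stand-ins, not the cpum_sf implementation.

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_sf_buffer {
        void *sdbt;                     /* NULL until a buffer exists */
};

struct my_cpu_hw_sf {
        struct my_sf_buffer sfb;        /* per-CPU sampling buffer state */
};

static bool my_sf_buffer_available(struct my_cpu_hw_sf *cpuhw)
{
        return cpuhw->sfb.sdbt != NULL;
}

static void my_deallocate_buffers(struct my_cpu_hw_sf *cpuhw)
{
        if (cpuhw->sfb.sdbt) {          /* stand-in for free_sampling_buffer() */
                kfree(cpuhw->sfb.sdbt);
                cpuhw->sfb.sdbt = NULL;
        }
}

static int my_allocate_buffers(struct my_cpu_hw_sf *cpuhw, size_t bytes)
{
        if (my_sf_buffer_available(cpuhw))
                return 0;               /* reuse the buffer already attached */

        cpuhw->sfb.sdbt = kzalloc(bytes, GFP_KERNEL);
        return cpuhw->sfb.sdbt ? 0 : -ENOMEM;
}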
121  static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw)  in ebb_switch_in()  argument
123  return cpuhw->mmcr[0];  in ebb_switch_in()
129  static inline void power_pmu_bhrb_read(struct cpu_hw_events *cpuhw) {}  in power_pmu_bhrb_read()  argument
360  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_enable()  local
366  if (event->ctx->task && cpuhw->bhrb_context != event->ctx) {  in power_pmu_bhrb_enable()
368  cpuhw->bhrb_context = event->ctx;  in power_pmu_bhrb_enable()
370  cpuhw->bhrb_users++;  in power_pmu_bhrb_enable()
376  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_disable()  local
381  WARN_ON_ONCE(!cpuhw->bhrb_users);  in power_pmu_bhrb_disable()
382  cpuhw->bhrb_users--;  in power_pmu_bhrb_disable()
[all …]
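power_pmu_bhrb_enable()/power_pmu_bhrb_disable() above keep a per-CPU count of BHRB users and remember which task context last owned the branch buffer so stale entries can be flushed. A sketch of that bookkeeping follows; the structure is an illustrative subset, not the powerpc cpu_hw_events definition, and the actual hardware flush is elided.

#include <linux/bug.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

struct my_cpu_hw_events {
        unsigned int bhrb_users;
        struct perf_event_context *bhrb_context;
};
static DEFINE_PER_CPU(struct my_cpu_hw_events, my_cpu_hw_events);

static void my_bhrb_enable(struct perf_event *event)
{
        struct my_cpu_hw_events *cpuhw = this_cpu_ptr(&my_cpu_hw_events);

        /* A different task context now owns the branch history buffer. */
        if (event->ctx->task && cpuhw->bhrb_context != event->ctx) {
                /* ... clear the hardware branch entries here ... */
                cpuhw->bhrb_context = event->ctx;
        }
        cpuhw->bhrb_users++;
}

static void my_bhrb_disable(struct perf_event *event)
{
        struct my_cpu_hw_events *cpuhw = this_cpu_ptr(&my_cpu_hw_events);

        WARN_ON_ONCE(!cpuhw->bhrb_users);       /* enable/disable imbalance */
        cpuhw->bhrb_users--;
}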
209  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_disable()  local
213  cpuhw = this_cpu_ptr(&cpu_hw_events);  in fsl_emb_pmu_disable()
215  if (!cpuhw->disabled) {  in fsl_emb_pmu_disable()
216  cpuhw->disabled = 1;  in fsl_emb_pmu_disable()
221  if (!cpuhw->pmcs_enabled) {  in fsl_emb_pmu_disable()
223  cpuhw->pmcs_enabled = 1;  in fsl_emb_pmu_disable()
248  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_enable()  local
252  cpuhw = this_cpu_ptr(&cpu_hw_events);  in fsl_emb_pmu_enable()
253  if (!cpuhw->disabled)  in fsl_emb_pmu_enable()
256  cpuhw->disabled = 0;  in fsl_emb_pmu_enable()
[all …]
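The fsl_emb_pmu_disable()/fsl_emb_pmu_enable() hits above show a per-CPU disabled flag that makes the PMU disable path idempotent, plus a pmcs_enabled flag so counter register access is switched on only once per CPU. A sketch of that shape with the hardware writes elided; all names are illustrative.

#include <linux/irqflags.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

struct my_cpu_hw_events {
        int disabled;
        int pmcs_enabled;
};
static DEFINE_PER_CPU(struct my_cpu_hw_events, my_cpu_hw_events);

static void my_pmu_disable(struct pmu *pmu)
{
        struct my_cpu_hw_events *cpuhw;
        unsigned long flags;

        local_irq_save(flags);
        cpuhw = this_cpu_ptr(&my_cpu_hw_events);

        if (!cpuhw->disabled) {
                cpuhw->disabled = 1;

                if (!cpuhw->pmcs_enabled) {
                        /* ... allow PMC register access, once per CPU ... */
                        cpuhw->pmcs_enabled = 1;
                }
                /* ... freeze the counters ... */
        }
        local_irq_restore(flags);
}

static void my_pmu_enable(struct pmu *pmu)
{
        struct my_cpu_hw_events *cpuhw;
        unsigned long flags;

        local_irq_save(flags);
        cpuhw = this_cpu_ptr(&my_cpu_hw_events);
        if (!cpuhw->disabled)
                goto out;               /* already enabled */

        cpuhw->disabled = 0;
        /* ... unfreeze the counters ... */
out:
        local_irq_restore(flags);
}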
415  struct cpu_hw_events *cpuhw;  in amd_pmu_cpu_dead()  local
420  cpuhw = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_dead()
422  if (cpuhw->amd_nb) {  in amd_pmu_cpu_dead()
423  struct amd_nb *nb = cpuhw->amd_nb;  in amd_pmu_cpu_dead()
428  cpuhw->amd_nb = NULL;  in amd_pmu_cpu_dead()
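amd_pmu_cpu_dead() above drops the per-CPU pointer to the shared northbridge structure when a CPU goes away. A sketch of that hot-unplug cleanup; the refcount handling stands in for the lines elided from the snippet, and the names are illustrative, not the x86/amd definitions.

#include <linux/percpu.h>
#include <linux/slab.h>

struct my_shared_nb {
        int refcnt;                     /* CPUs on this node still using it */
        /* ... shared counter constraints ... */
};

struct my_cpu_hw_events {
        struct my_shared_nb *nb;
};
static DEFINE_PER_CPU(struct my_cpu_hw_events, my_cpu_hw_events);

static int my_pmu_cpu_dead(unsigned int cpu)
{
        struct my_cpu_hw_events *cpuhw = &per_cpu(my_cpu_hw_events, cpu);

        if (cpuhw->nb) {
                struct my_shared_nb *nb = cpuhw->nb;

                if (--nb->refcnt == 0)  /* last CPU of the node is gone */
                        kfree(nb);
                cpuhw->nb = NULL;
        }
        return 0;
}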
357  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  in sh_pmu_prepare_cpu()  local
359  memset(cpuhw, 0, sizeof(struct cpu_hw_events));  in sh_pmu_prepare_cpu()
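sh_pmu_prepare_cpu() above simply zeroes the per-CPU event state before a CPU is brought online. Below is a sketch of how such a prepare callback can be wired up through the generic CPU-hotplug API; the callback and structure names are illustrative, only cpuhp_setup_state() and CPUHP_BP_PREPARE_DYN are real kernel interfaces.

#include <linux/cpuhotplug.h>
#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/string.h>

struct my_cpu_hw_events {
        int n_events;
        /* ... per-CPU counter bookkeeping ... */
};
static DEFINE_PER_CPU(struct my_cpu_hw_events, my_cpu_hw_events);

static int my_pmu_prepare_cpu(unsigned int cpu)
{
        struct my_cpu_hw_events *cpuhw = &per_cpu(my_cpu_hw_events, cpu);

        memset(cpuhw, 0, sizeof(*cpuhw));       /* start from a clean slate */
        return 0;
}

static int __init my_pmu_init(void)
{
        int ret;

        /* Prepare-stage callback: runs before each CPU comes online. */
        ret = cpuhp_setup_state(CPUHP_BP_PREPARE_DYN, "perf/my_pmu:prepare",
                                my_pmu_prepare_cpu, NULL);
        return ret < 0 ? ret : 0;       /* dynamic states return the state id */
}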
1510  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_start_txn()  local
1512  WARN_ON_ONCE(cpuhw->txn_flags); /* txn already in flight */  in sparc_pmu_start_txn()
1514  cpuhw->txn_flags = txn_flags;  in sparc_pmu_start_txn()
1528  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_cancel_txn()  local
1531  WARN_ON_ONCE(!cpuhw->txn_flags); /* no txn in flight */  in sparc_pmu_cancel_txn()
1533  txn_flags = cpuhw->txn_flags;  in sparc_pmu_cancel_txn()
1534  cpuhw->txn_flags = 0;  in sparc_pmu_cancel_txn()
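sparc_pmu_start_txn()/sparc_pmu_cancel_txn() above use the per-CPU txn_flags field to implement the perf core's transaction protocol: start_txn records the transaction type (only PERF_PMU_TXN_ADD defers any work) and cancel_txn clears it, with WARN_ON_ONCE catching nesting and imbalance. A sketch of that handshake; apart from the perf core types and PERF_PMU_TXN_ADD, the names are illustrative.

#include <linux/bug.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

struct my_cpu_hw_events {
        unsigned int txn_flags;
};
static DEFINE_PER_CPU(struct my_cpu_hw_events, my_cpu_hw_events);

static void my_pmu_start_txn(struct pmu *pmu, unsigned int txn_flags)
{
        struct my_cpu_hw_events *cpuhw = this_cpu_ptr(&my_cpu_hw_events);

        WARN_ON_ONCE(cpuhw->txn_flags);         /* txn already in flight */
        cpuhw->txn_flags = txn_flags;
        if (txn_flags & ~PERF_PMU_TXN_ADD)
                return;                         /* nothing to defer */
        /* ... postpone event scheduling until commit_txn() ... */
}

static void my_pmu_cancel_txn(struct pmu *pmu)
{
        struct my_cpu_hw_events *cpuhw = this_cpu_ptr(&my_cpu_hw_events);
        unsigned int txn_flags;

        WARN_ON_ONCE(!cpuhw->txn_flags);        /* no txn in flight */
        txn_flags = cpuhw->txn_flags;
        cpuhw->txn_flags = 0;
        if (txn_flags & ~PERF_PMU_TXN_ADD)
                return;
        /* ... drop the events queued since start_txn() ... */
}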