Lines Matching refs:hwc

70 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_update() local
73 int idx = hwc->idx; in x86_perf_event_update()
87 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
88 rdpmcl(hwc->event_base_rdpmc, new_raw_count); in x86_perf_event_update()
90 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
106 local64_sub(delta, &hwc->period_left); in x86_perf_event_update()
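
The block above is the lock-free read-and-fold loop of x86_perf_event_update(): snapshot hwc->prev_count, read the live counter with rdpmcl(), publish the new snapshot with local64_cmpxchg() and retry if an NMI raced in, then sign-extend the delta to the counter width and fold it into event->count and hwc->period_left. A minimal user-space model, assuming a 48-bit counter and using C11 atomics plus a read_counter() stub in place of local64_t and rdpmcl():

#include <stdatomic.h>
#include <stdint.h>

#define CNTVAL_BITS 48	/* assumed counter width; the real value is per-PMU */

struct hw_model {
	_Atomic uint64_t prev_count;	/* models hwc->prev_count */
	_Atomic int64_t  period_left;	/* models hwc->period_left */
	_Atomic int64_t  count;		/* models event->count */
};

extern uint64_t read_counter(void);	/* stand-in for rdpmcl() */

static uint64_t event_update(struct hw_model *hwc)
{
	int shift = 64 - CNTVAL_BITS;
	uint64_t prev, now;
	int64_t delta;

	do {
		/* re-read both on every attempt, like the kernel's "goto again" */
		prev = atomic_load(&hwc->prev_count);
		now  = read_counter();
	} while (!atomic_compare_exchange_strong(&hwc->prev_count, &prev, now));

	/* shift up and back down to sign-extend the CNTVAL_BITS-wide delta */
	delta = ((int64_t)(now << shift) - (int64_t)(prev << shift)) >> shift;

	atomic_fetch_add(&hwc->count, delta);
	atomic_fetch_sub(&hwc->period_left, delta);
	return now;
}
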
300 set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event) in set_ext_hw_attr() argument
331 hwc->config |= val; in set_ext_hw_attr()
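
set_ext_hw_attr() resolves a cache event (type, op and result packed into attr->config one byte each) into a model-specific encoding and ORs it into hwc->config, which is what line 331 shows. A hedged sketch of that decode; the hw_cache_event_ids table contents are illustrative, with 0 meaning unsupported and -1 meaning invalid, mirroring the kernel's convention (the real function goes on to handle extra regs where this sketch returns 0):

#include <stdint.h>
#include <errno.h>

enum { CACHE_TYPE_MAX = 7, CACHE_OP_MAX = 3, CACHE_RESULT_MAX = 2 };

/* model-specific encodings, filled in per CPU model in the kernel */
static uint64_t hw_cache_event_ids[CACHE_TYPE_MAX][CACHE_OP_MAX][CACHE_RESULT_MAX];

static int set_ext_hw_attr(uint64_t attr_config, uint64_t *hwc_config)
{
	unsigned int type   =  attr_config        & 0xff;
	unsigned int op     = (attr_config >>  8) & 0xff;
	unsigned int result = (attr_config >> 16) & 0xff;
	uint64_t val;

	if (type >= CACHE_TYPE_MAX || op >= CACHE_OP_MAX ||
	    result >= CACHE_RESULT_MAX)
		return -EINVAL;

	val = hw_cache_event_ids[type][op][result];
	if (val == 0)
		return -ENOENT;		/* this CPU cannot count it */
	if (val == (uint64_t)-1)
		return -EINVAL;		/* combination makes no sense */

	*hwc_config |= val;		/* the "hwc->config |= val" at line 331 */
	return 0;
}
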
410 struct hw_perf_event *hwc = &event->hw; in x86_setup_perfctr() local
414 hwc->sample_period = x86_pmu.max_period; in x86_setup_perfctr()
415 hwc->last_period = hwc->sample_period; in x86_setup_perfctr()
416 local64_set(&hwc->period_left, hwc->sample_period); in x86_setup_perfctr()
423 return set_ext_hw_attr(hwc, event); in x86_setup_perfctr()
441 hwc->config |= config; in x86_setup_perfctr()
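
Lines 414-416 install the default sampling period: an event created without one (a plain counting event) is armed with the PMU's maximum period, so overflow interrupts stay as rare as the hardware allows. A sketch of just that initialization, with a plain struct standing in for hw_perf_event and its local64_t field:

#include <stdint.h>

struct hw_model {
	uint64_t sample_period;
	uint64_t last_period;
	int64_t  period_left;	/* local64_t in the kernel */
	uint64_t config;	/* ORed with the raw encoding later (line 441) */
};

static void setup_default_period(struct hw_model *hwc, uint64_t max_period)
{
	/* no user-supplied period: wrap as rarely as the counter allows */
	if (!hwc->sample_period) {
		hwc->sample_period = max_period;
		hwc->last_period   = hwc->sample_period;
		hwc->period_left   = hwc->sample_period;
	}
}
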
663 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all() local
668 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in x86_pmu_enable_all()
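
x86_pmu_enable_all() walks every counter slot and, for each active event, rewrites its event-select MSR with the enable bit set. A compilable model, assuming 8 general counters and a write_msr() stub for wrmsrl(); bit 22 is the architectural ARCH_PERFMON_EVENTSEL_ENABLE bit:

#include <stdint.h>

#define NUM_COUNTERS	8		/* assumed; the kernel asks the PMU */
#define EVENTSEL_ENABLE	(1ULL << 22)	/* ARCH_PERFMON_EVENTSEL_ENABLE */

struct hw_model { uint64_t config, config_base; };
struct event_model { struct hw_model hw; };
struct cpu_hw_model {
	unsigned long active_mask;
	struct event_model *events[NUM_COUNTERS];
};

extern void write_msr(uint64_t msr, uint64_t val);	/* wrmsrl() stand-in */

static void enable_all(struct cpu_hw_model *cpuc)
{
	for (int idx = 0; idx < NUM_COUNTERS; idx++) {
		if (!(cpuc->active_mask & (1UL << idx)))
			continue;
		/* line 663: hwc = &cpuc->events[idx]->hw */
		struct hw_model *hwc = &cpuc->events[idx]->hw;
		/* line 668: rewrite the eventsel MSR with the enable bit on */
		write_msr(hwc->config_base, hwc->config | EVENTSEL_ENABLE);
	}
}
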
872 struct hw_perf_event *hwc; in x86_schedule_events() local
917 hwc = &cpuc->event_list[i]->hw; in x86_schedule_events()
921 if (hwc->idx == -1) in x86_schedule_events()
925 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
929 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
932 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
934 assign[i] = hwc->idx; in x86_schedule_events()
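
These lines are the fast path of x86_schedule_events(): before running the full constraint solver, it checks whether every collected event can simply keep the counter it used last time (idx still legal for its constraint mask and not yet claimed by an earlier event in the list). A self-contained model of that check; test_bit()/__set_bit() are single-word stand-ins for the kernel bitmap helpers:

#include <stdbool.h>

#define test_bit(nr, addr)	((*(addr) >> (nr)) & 1UL)
#define __set_bit(nr, addr)	(*(addr) |= 1UL << (nr))

struct constraint_model { unsigned long idxmsk; };
struct hw_model { int idx; };
struct event_model { struct hw_model hw; struct constraint_model *constraint; };

static bool fastpath_schedule(struct event_model **list, int n,
			      unsigned long *used_mask, int *assign)
{
	for (int i = 0; i < n; i++) {
		struct hw_model *hwc = &list[i]->hw;
		struct constraint_model *c = list[i]->constraint;

		if (hwc->idx == -1)			/* never scheduled yet */
			return false;
		if (!test_bit(hwc->idx, &c->idxmsk))	/* counter now forbidden */
			return false;
		if (test_bit(hwc->idx, used_mask))	/* counter already claimed */
			return false;

		__set_bit(hwc->idx, used_mask);
		if (assign)
			assign[i] = hwc->idx;
	}
	return true;	/* everyone stays put; skip the slow path */
}
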
1056 struct hw_perf_event *hwc = &event->hw; in x86_assign_hw_event() local
1058 hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
1059 hwc->last_cpu = smp_processor_id(); in x86_assign_hw_event()
1060 hwc->last_tag = ++cpuc->tags[i]; in x86_assign_hw_event()
1062 if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) { in x86_assign_hw_event()
1063 hwc->config_base = 0; in x86_assign_hw_event()
1064 hwc->event_base = 0; in x86_assign_hw_event()
1065 } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) { in x86_assign_hw_event()
1066 hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL; in x86_assign_hw_event()
1067 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
1068 hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30; in x86_assign_hw_event()
1070 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
1071 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
1072 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
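
x86_assign_hw_event() translates the scheduler's counter index into the MSRs the rest of the code will touch: the BTS pseudo-counter gets no MSRs at all, fixed counters share one control MSR and set bit 30 of the RDPMC index to select the fixed-counter bank, and general counters get one eventsel/perfctr MSR pair each. The kernel indirects through x86_pmu_config_addr()/x86_pmu_event_addr()/x86_pmu_rdpmc_index(); the sketch below hard-codes the architectural Intel base MSRs those resolve to in the plain case:

#include <stdint.h>

#define MSR_ARCH_PERFMON_EVENTSEL0	0x186
#define MSR_ARCH_PERFMON_PERFCTR0	0x0c1
#define MSR_ARCH_PERFMON_FIXED_CTR_CTRL	0x38d
#define MSR_ARCH_PERFMON_FIXED_CTR0	0x309
#define INTEL_PMC_IDX_FIXED		32
#define INTEL_PMC_IDX_FIXED_BTS		(INTEL_PMC_IDX_FIXED + 16)

struct hw_model {
	int idx;
	int event_base_rdpmc;
	uint64_t config_base, event_base;
};

static void assign_hw_event(struct hw_model *hwc)
{
	if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) {
		/* BTS is not a real counter: nothing to program */
		hwc->config_base = 0;
		hwc->event_base  = 0;
	} else if (hwc->idx >= INTEL_PMC_IDX_FIXED) {
		/* fixed counters share one control MSR */
		hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL;
		hwc->event_base  = MSR_ARCH_PERFMON_FIXED_CTR0 +
				   (hwc->idx - INTEL_PMC_IDX_FIXED);
		/* bit 30 of RDPMC's index selects the fixed-counter bank */
		hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | (1 << 30);
	} else {
		/* general counters: one eventsel/perfctr pair per index */
		hwc->config_base = MSR_ARCH_PERFMON_EVENTSEL0 + hwc->idx;
		hwc->event_base  = MSR_ARCH_PERFMON_PERFCTR0 + hwc->idx;
		hwc->event_base_rdpmc = hwc->idx;
	}
}
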
1097 static inline int match_prev_assignment(struct hw_perf_event *hwc, in match_prev_assignment() argument
1101 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
1102 hwc->last_cpu == smp_processor_id() && in match_prev_assignment()
1103 hwc->last_tag == cpuc->tags[i]; in match_prev_assignment()
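
match_prev_assignment() is the gatekeeper for counter reuse: an event may silently stay on its counter across a PMU disable/enable cycle only if the counter index, the CPU, and the per-slot generation tag (bumped at line 1060 whenever a slot is handed out) all still match. Restated as a standalone predicate, with this_cpu() standing in for smp_processor_id():

#include <stdbool.h>
#include <stdint.h>

struct hw_model {
	int idx, last_cpu;
	uint64_t last_tag;
	unsigned int state;	/* PERF_HES_* bits, used in the next sketch */
};
struct cpu_hw_model { int *assign; uint64_t *tags; };

extern int this_cpu(void);	/* stand-in for smp_processor_id() */

static bool match_prev(struct hw_model *hwc, struct cpu_hw_model *cpuc, int i)
{
	/* same counter, same CPU, same generation of that slot */
	return hwc->idx == cpuc->assign[i] &&
	       hwc->last_cpu == this_cpu() &&
	       hwc->last_tag == cpuc->tags[i];
}
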
1112 struct hw_perf_event *hwc; in x86_pmu_enable() local
1131 hwc = &event->hw; in x86_pmu_enable()
1139 if (hwc->idx == -1 || in x86_pmu_enable()
1140 match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1147 if (hwc->state & PERF_HES_STOPPED) in x86_pmu_enable()
1148 hwc->state |= PERF_HES_ARCH; in x86_pmu_enable()
1158 hwc = &event->hw; in x86_pmu_enable()
1160 if (!match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1165 if (hwc->state & PERF_HES_ARCH) in x86_pmu_enable()
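
x86_pmu_enable() applies a new schedule in two passes: pass one stops every already-programmed event that is not staying put, first latching PERF_HES_ARCH for events the user had stopped so the reshuffle cannot accidentally restart them; pass two programs moved events at their new counters and restarts everything not so marked. A model of lines 1131-1165, reusing hw_model and match_prev() from the sketch above, with stop_event()/assign_slot()/start_event() as stand-ins for x86_pmu_stop(), x86_assign_hw_event() and x86_pmu_start():

enum { HES_STOPPED = 0x1, HES_UPTODATE = 0x2, HES_ARCH = 0x4 };	/* PERF_HES_* */

struct event_model { struct hw_model hw; };

extern void stop_event(struct event_model *event);
extern void assign_slot(struct event_model *event, int counter);
extern void start_event(struct event_model *event);

static void pmu_enable_model(struct cpu_hw_model *cpuc,
			     struct event_model **list, int n)
{
	/* pass 1 (lines 1131-1148): pull events off counters they will lose */
	for (int i = 0; i < n; i++) {
		struct hw_model *hwc = &list[i]->hw;

		/* never scheduled yet, or staying put: nothing to undo */
		if (hwc->idx == -1 || match_prev(hwc, cpuc, i))
			continue;

		/* don't let the reshuffle restart a counter the user stopped */
		if (hwc->state & HES_STOPPED)
			hwc->state |= HES_ARCH;

		stop_event(list[i]);
	}

	/* pass 2 (lines 1158-1165): program new homes, restart the rest */
	for (int i = 0; i < n; i++) {
		struct hw_model *hwc = &list[i]->hw;

		if (!match_prev(hwc, cpuc, i))
			assign_slot(list[i], cpuc->assign[i]);

		if (hwc->state & HES_ARCH)
			continue;	/* left stopped on purpose */
		start_event(list[i]);
	}
}
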
1188 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_set_period() local
1189 s64 left = local64_read(&hwc->period_left); in x86_perf_event_set_period()
1190 s64 period = hwc->sample_period; in x86_perf_event_set_period()
1191 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period()
1201 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1202 hwc->last_period = period; in x86_perf_event_set_period()
1208 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1209 hwc->last_period = period; in x86_perf_event_set_period()
1230 local64_set(&hwc->prev_count, (u64)-left); in x86_perf_event_set_period()
1232 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in x86_perf_event_set_period()
1240 wrmsrl(hwc->event_base, in x86_perf_event_set_period()
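
x86_perf_event_set_period() re-arms a sampling counter. period_left is first normalized (a large overshoot restarts a full period; an ordinary overflow carries the remainder into the next period), then the counter is written with -left so it counts up through zero and overflows after exactly `left` more events; prev_count is set to the same value so the next x86_perf_event_update() computes the right delta. The second wrmsrl at line 1240 is a quirk workaround for hardware that needs the value written twice. A user-space model of the main path, assuming a 48-bit counter mask and omitting the minimum-period quirk:

#include <stdint.h>

#define CNTVAL_MASK ((1ULL << 48) - 1)	/* assumed x86_pmu.cntval_mask */

struct hw_model {
	int64_t  period_left, sample_period, last_period;
	uint64_t prev_count, event_base;
};

extern void write_msr(uint64_t msr, uint64_t val);	/* wrmsrl() stand-in */

static int set_period(struct hw_model *hwc, int64_t max_period)
{
	int64_t left = hwc->period_left;
	int64_t period = hwc->sample_period;
	int ret = 0;

	/* way past the deadline: start a fresh full period */
	if (left <= -period) {
		left = period;
		hwc->period_left = left;
		hwc->last_period = period;
		ret = 1;
	}
	/* ordinary overflow: carry the overshoot into the next period */
	if (left <= 0) {
		left += period;
		hwc->period_left = left;
		hwc->last_period = period;
		ret = 1;
	}
	if (left > max_period)
		left = max_period;

	/* arm at -left: the counter overflows after `left` more events */
	hwc->prev_count = (uint64_t)(-left);
	write_msr(hwc->event_base, (uint64_t)(-left) & CNTVAL_MASK);
	return ret;
}
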
1265 struct hw_perf_event *hwc; in x86_pmu_add() local
1269 hwc = &event->hw; in x86_pmu_add()
1276 hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED; in x86_pmu_add()
1278 hwc->state |= PERF_HES_ARCH; in x86_pmu_add()
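
On x86_pmu_add(), a freshly added event starts in PERF_HES_UPTODATE | PERF_HES_STOPPED: it is not counting yet and its count needs no folding. If the caller did not pass PERF_EF_START, PERF_HES_ARCH is also set so later reschedules keep it stopped. A minimal restatement, with local enums mirroring the PERF_HES_*/PERF_EF_* bit values:

enum { HES_STOPPED = 0x1, HES_UPTODATE = 0x2, HES_ARCH = 0x4 };	/* PERF_HES_* */
enum { EF_START = 0x1 };					/* PERF_EF_START */

struct hw_model { unsigned int state; };

static void pmu_add_init_state(struct hw_model *hwc, int flags)
{
	/* not counting yet, nothing pending to fold into the count */
	hwc->state = HES_UPTODATE | HES_STOPPED;

	/* added but not started: record that so reschedules honor it */
	if (!(flags & EF_START))
		hwc->state |= HES_ARCH;
}
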
1410 struct hw_perf_event *hwc = &event->hw; in x86_pmu_stop() local
1412 if (test_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1414 __clear_bit(hwc->idx, cpuc->active_mask); in x86_pmu_stop()
1415 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1416 WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED); in x86_pmu_stop()
1417 hwc->state |= PERF_HES_STOPPED; in x86_pmu_stop()
1420 if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in x86_pmu_stop()
1426 hwc->state |= PERF_HES_UPTODATE; in x86_pmu_stop()
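
x86_pmu_stop() is the inverse: if the event still owns an active counter, it is disabled and the slot released (the WARN_ON_ONCE at line 1416 catches a double stop), and if the caller asked for PERF_EF_UPDATE the final hardware value is folded into event->count exactly once, guarded by PERF_HES_UPTODATE. A sketch along the same lines, with disable_counter()/fold_count() standing in for x86_pmu.disable() and x86_perf_event_update():

#include <stdint.h>

#define PMC_IDX_MAX 64			/* X86_PMC_IDX_MAX */
#define test_bit(nr, addr)	((*(addr) >> (nr)) & 1UL)
#define __clear_bit(nr, addr)	(*(addr) &= ~(1UL << (nr)))

enum { HES_STOPPED = 0x1, HES_UPTODATE = 0x2 };	/* PERF_HES_* */
enum { EF_UPDATE = 0x4 };			/* PERF_EF_UPDATE */

struct hw_model { int idx; unsigned int state; };
struct event_model { struct hw_model hw; };
struct cpu_hw_model {
	unsigned long active_mask;
	struct event_model *events[PMC_IDX_MAX];
};

extern void disable_counter(struct event_model *event);
extern void fold_count(struct event_model *event);

static void pmu_stop(struct cpu_hw_model *cpuc, struct event_model *event,
		     int flags)
{
	struct hw_model *hwc = &event->hw;

	if (test_bit(hwc->idx, &cpuc->active_mask)) {
		disable_counter(event);
		__clear_bit(hwc->idx, &cpuc->active_mask);
		cpuc->events[hwc->idx] = NULL;
		/* the kernel WARN_ON_ONCEs if STOPPED is already set here */
		hwc->state |= HES_STOPPED;
	}

	if ((flags & EF_UPDATE) && !(hwc->state & HES_UPTODATE)) {
		/* fold the final hardware value into event->count, once */
		fold_count(event);
		hwc->state |= HES_UPTODATE;
	}
}
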