| /Linux-v5.4/drivers/iio/buffer/ |
| D | industrialio-hw-consumer.c |
      53  struct iio_hw_consumer *hwc, struct iio_dev *indio_dev)  in iio_hw_consumer_get_buffer() argument
      58  list_for_each_entry(buf, &hwc->buffers, head) {  in iio_hw_consumer_get_buffer()
      72  list_add_tail(&buf->head, &hwc->buffers);  in iio_hw_consumer_get_buffer()
      86  struct iio_hw_consumer *hwc;  in iio_hw_consumer_alloc() local
      90  hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);  in iio_hw_consumer_alloc()
      91  if (!hwc)  in iio_hw_consumer_alloc()
      94  INIT_LIST_HEAD(&hwc->buffers);  in iio_hw_consumer_alloc()
      96  hwc->channels = iio_channel_get_all(dev);  in iio_hw_consumer_alloc()
      97  if (IS_ERR(hwc->channels)) {  in iio_hw_consumer_alloc()
      98  ret = PTR_ERR(hwc->channels);  in iio_hw_consumer_alloc()
      [all …]
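The iio_hw_consumer_alloc() fragment above follows the usual kernel error-pointer idiom: acquire a resource, test it with IS_ERR(), and recover the encoded errno with PTR_ERR() on the unwind path. Below is a minimal userspace model of that encoding; the helper names mirror the kernel's, but the code is an illustrative sketch, not the kernel implementation.

    #include <stdio.h>
    #include <errno.h>

    /* Model of the error-pointer trick: a small negative errno is stored
     * in the otherwise invalid top page of the address space, so a single
     * return value can carry either a valid pointer or an error code. */
    #define MAX_ERRNO 4095

    static inline void *ERR_PTR(long err)      { return (void *)err; }
    static inline long  PTR_ERR(const void *p) { return (long)p; }
    static inline int   IS_ERR(const void *p)
    {
            return (unsigned long)p >= (unsigned long)-MAX_ERRNO;
    }

    /* Stand-in for iio_channel_get_all(): always fails with -ENODEV here. */
    static void *get_channels(void)
    {
            return ERR_PTR(-ENODEV);
    }

    int main(void)
    {
            void *chans = get_channels();

            if (IS_ERR(chans)) {
                    printf("channel lookup failed: %ld\n", PTR_ERR(chans));
                    return 1;
            }
            return 0;
    }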
|
| /Linux-v5.4/arch/s390/include/asm/ |
| D | perf_event.h |
      67  #define OVERFLOW_REG(hwc)      ((hwc)->extra_reg.config)  argument
      68  #define SFB_ALLOC_REG(hwc)     ((hwc)->extra_reg.alloc)  argument
      69  #define TEAR_REG(hwc)          ((hwc)->last_tag)  argument
      70  #define SAMPL_RATE(hwc)        ((hwc)->event_base)  argument
      71  #define SAMPL_FLAGS(hwc)       ((hwc)->config_base)  argument
      72  #define SAMPL_DIAG_MODE(hwc)   (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_DIAG_MODE)  argument
      73  #define SDB_FULL_BLOCKS(hwc)   (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FULL_BLOCKS)  argument
      74  #define SAMPLE_FREQ_MODE(hwc)  (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FREQ_MODE)  argument
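These macros simply give sampling-specific names to generic hw_perf_event fields (extra_reg, config_base, event_base, last_tag). A hedged sketch of how such accessors are typically used follows; the struct and the flag value are cut-down stand-ins for the sketch, not the real hw_perf_event or the real PERF_CPUM_SF_* constants.

    #include <stdio.h>
    #include <stdint.h>

    /* Cut-down stand-in for struct hw_perf_event (illustrative only). */
    struct hw_event {
            uint64_t config_base;   /* sampling flags live here */
            uint64_t event_base;    /* sampling rate lives here */
    };

    #define PERF_CPUM_SF_DIAG_MODE  0x0002  /* assumed flag value for the sketch */

    #define SAMPL_RATE(hwc)       ((hwc)->event_base)
    #define SAMPL_FLAGS(hwc)      ((hwc)->config_base)
    #define SAMPL_DIAG_MODE(hwc)  (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_DIAG_MODE)

    int main(void)
    {
            struct hw_event ev = { .config_base = PERF_CPUM_SF_DIAG_MODE,
                                   .event_base  = 4000 };

            if (SAMPL_DIAG_MODE(&ev))
                    printf("diagnostic sampling at rate %llu\n",
                           (unsigned long long)SAMPL_RATE(&ev));
            return 0;
    }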
|
| /Linux-v5.4/arch/alpha/kernel/ |
| D | perf_event.c |
      253  struct hw_perf_event *hwc, int idx)  in alpha_perf_event_set_period() argument
      255  long left = local64_read(&hwc->period_left);  in alpha_perf_event_set_period()
      256  long period = hwc->sample_period;  in alpha_perf_event_set_period()
      261  local64_set(&hwc->period_left, left);  in alpha_perf_event_set_period()
      262  hwc->last_period = period;  in alpha_perf_event_set_period()
      268  local64_set(&hwc->period_left, left);  in alpha_perf_event_set_period()
      269  hwc->last_period = period;  in alpha_perf_event_set_period()
      283  local64_set(&hwc->prev_count, (unsigned long)(-left));  in alpha_perf_event_set_period()
      308  struct hw_perf_event *hwc, int idx, long ovf)  in alpha_perf_event_update() argument
      314  prev_raw_count = local64_read(&hwc->prev_count);  in alpha_perf_event_update()
      [all …]
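alpha_perf_event_set_period() implements the arithmetic that nearly every PMU driver in this listing repeats: reload period_left, clamp it to what the counter can hold, and program the counter with -left so it overflows after the remaining events. The following is a self-contained model of that arithmetic; the counter width and the field layout are assumptions made for the sketch.

    #include <stdio.h>
    #include <stdint.h>

    #define MAX_PERIOD 0xffff  /* assumed 16-bit counter for the sketch */

    struct hw_event {
            int64_t  period_left;
            int64_t  sample_period;
            int64_t  last_period;
            uint64_t prev_count;   /* value programmed into the counter */
    };

    /* Returns nonzero when a missed/elapsed period was folded back in. */
    static int event_set_period(struct hw_event *hwc)
    {
            int64_t left = hwc->period_left;
            int64_t period = hwc->sample_period;
            int overflow = 0;

            if (left <= -period) {            /* missed at least one full period */
                    left = period;
                    hwc->last_period = period;
                    overflow = 1;
            }
            if (left <= 0) {                  /* period elapsed, start the next one */
                    left += period;
                    hwc->last_period = period;
                    overflow = 1;
            }
            if (left > MAX_PERIOD)            /* clamp to the counter width */
                    left = MAX_PERIOD;

            hwc->period_left = left;
            hwc->prev_count = (uint64_t)(-left) & MAX_PERIOD;
            return overflow;
    }

    int main(void)
    {
            struct hw_event ev = { .period_left = -10, .sample_period = 1000 };

            event_set_period(&ev);
            printf("left=%lld counter=0x%llx\n",
                   (long long)ev.period_left,
                   (unsigned long long)ev.prev_count);
            return 0;
    }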
|
| /Linux-v5.4/drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_pmu.c |
      48  struct hw_perf_event *hwc = &event->hw;  in amdgpu_perf_event_init() local
      55  hwc->conf = event->attr.config;  in amdgpu_perf_event_init()
      63  struct hw_perf_event *hwc = &event->hw;  in amdgpu_perf_start() local
      68  if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))  in amdgpu_perf_start()
      71  WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));  in amdgpu_perf_start()
      72  hwc->state = 0;  in amdgpu_perf_start()
      77  pe->adev->df_funcs->pmc_start(pe->adev, hwc->conf, 1);  in amdgpu_perf_start()
      79  pe->adev->df_funcs->pmc_start(pe->adev, hwc->conf, 0);  in amdgpu_perf_start()
      92  struct hw_perf_event *hwc = &event->hw;  in amdgpu_perf_read() local
      100  prev = local64_read(&hwc->prev_count);  in amdgpu_perf_read()
      [all …]
|
| /Linux-v5.4/drivers/perf/hisilicon/ |
| D | hisi_uncore_pmu.c |
      128  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_event_init() local
      167  hwc->idx = -1;  in hisi_uncore_pmu_event_init()
      168  hwc->config_base = event->attr.config;  in hisi_uncore_pmu_event_init()
      183  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_enable_event() local
      185  hisi_pmu->ops->write_evtype(hisi_pmu, hwc->idx,  in hisi_uncore_pmu_enable_event()
      188  hisi_pmu->ops->enable_counter_int(hisi_pmu, hwc);  in hisi_uncore_pmu_enable_event()
      189  hisi_pmu->ops->enable_counter(hisi_pmu, hwc);  in hisi_uncore_pmu_enable_event()
      198  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_disable_event() local
      200  hisi_pmu->ops->disable_counter(hisi_pmu, hwc);  in hisi_uncore_pmu_disable_event()
      201  hisi_pmu->ops->disable_counter_int(hisi_pmu, hwc);  in hisi_uncore_pmu_disable_event()
      [all …]
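hisi_uncore_pmu_enable_event() and its disable counterpart never touch hardware directly; they go through a per-PMU ops table (write_evtype, enable_counter, enable_counter_int, ...), so one generic file can drive the DDRC, HHA, and L3C uncore blocks. Here is a minimal model of that indirection; all names below are illustrative and do not reproduce the real hisi_uncore_ops layout.

    #include <stdio.h>
    #include <stdint.h>

    struct pmu;                          /* forward declaration */

    /* Illustrative ops table: each uncore block supplies its own callbacks. */
    struct pmu_ops {
            void (*write_evtype)(struct pmu *p, int idx, uint32_t type);
            void (*enable_counter)(struct pmu *p, int idx);
    };

    struct pmu {
            const struct pmu_ops *ops;
            const char *name;
    };

    static void ddrc_write_evtype(struct pmu *p, int idx, uint32_t type)
    {
            printf("%s: counter %d -> event 0x%x\n", p->name, idx, type);
    }

    static void ddrc_enable_counter(struct pmu *p, int idx)
    {
            printf("%s: enable counter %d\n", p->name, idx);
    }

    static const struct pmu_ops ddrc_ops = {
            .write_evtype   = ddrc_write_evtype,
            .enable_counter = ddrc_enable_counter,
    };

    /* Generic code: works for any block that fills in the ops table. */
    static void pmu_enable_event(struct pmu *p, int idx, uint32_t type)
    {
            p->ops->write_evtype(p, idx, type);
            p->ops->enable_counter(p, idx);
    }

    int main(void)
    {
            struct pmu ddrc = { .ops = &ddrc_ops, .name = "ddrc" };

            pmu_enable_event(&ddrc, 0, 0x3);
            return 0;
    }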
|
| D | hisi_uncore_ddrc_pmu.c |
      47  #define GET_DDRC_EVENTID(hwc) (hwc->config_base & 0x7)  argument
      65  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_read_counter() argument
      68  u32 idx = GET_DDRC_EVENTID(hwc);  in hisi_ddrc_pmu_read_counter()
      79  struct hw_perf_event *hwc, u64 val)  in hisi_ddrc_pmu_write_counter() argument
      81  u32 idx = GET_DDRC_EVENTID(hwc);  in hisi_ddrc_pmu_write_counter()
      122  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_enable_counter() argument
      128  val |= (1 << GET_DDRC_EVENTID(hwc));  in hisi_ddrc_pmu_enable_counter()
      133  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_disable_counter() argument
      139  val &= ~(1 << GET_DDRC_EVENTID(hwc));  in hisi_ddrc_pmu_disable_counter()
      147  struct hw_perf_event *hwc = &event->hw;  in hisi_ddrc_pmu_get_event_idx() local
      [all …]
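The DDRC block enables and disables a counter by setting or clearing one bit per event id in a shared control register, a plain read-modify-write. A small model of that step, with a plain variable standing in for the memory-mapped register:

    #include <stdio.h>
    #include <stdint.h>

    #define GET_DDRC_EVENTID(cfg)  ((cfg) & 0x7)   /* event id doubles as counter index */

    static uint32_t ctrl_reg;   /* stands in for the MMIO control register */

    static void counter_enable(uint32_t config_base)
    {
            uint32_t val = ctrl_reg;               /* readl() in the driver  */
            val |= 1u << GET_DDRC_EVENTID(config_base);
            ctrl_reg = val;                        /* writel() in the driver */
    }

    static void counter_disable(uint32_t config_base)
    {
            uint32_t val = ctrl_reg;
            val &= ~(1u << GET_DDRC_EVENTID(config_base));
            ctrl_reg = val;
    }

    int main(void)
    {
            counter_enable(0x5);
            printf("ctrl=0x%x\n", ctrl_reg);       /* bit 5 set */
            counter_disable(0x5);
            printf("ctrl=0x%x\n", ctrl_reg);       /* back to 0 */
            return 0;
    }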
|
| /Linux-v5.4/arch/x86/events/amd/ |
| D | iommu.c |
      209  struct hw_perf_event *hwc = &event->hw;  in perf_iommu_event_init() local
      227  hwc->conf = event->attr.config;  in perf_iommu_event_init()
      228  hwc->conf1 = event->attr.config1;  in perf_iommu_event_init()
      241  struct hw_perf_event *hwc = &ev->hw;  in perf_iommu_enable_event() local
      242  u8 bank = hwc->iommu_bank;  in perf_iommu_enable_event()
      243  u8 cntr = hwc->iommu_cntr;  in perf_iommu_enable_event()
      246  reg = GET_CSOURCE(hwc);  in perf_iommu_enable_event()
      249  reg = GET_DEVID_MASK(hwc);  in perf_iommu_enable_event()
      250  reg = GET_DEVID(hwc) | (reg << 32);  in perf_iommu_enable_event()
      255  reg = GET_PASID_MASK(hwc);  in perf_iommu_enable_event()
      [all …]
|
| D | ibs.c |
      111  perf_event_set_period(struct hw_perf_event *hwc, u64 min, u64 max, u64 *hw_period)  in perf_event_set_period() argument
      113  s64 left = local64_read(&hwc->period_left);  in perf_event_set_period()
      114  s64 period = hwc->sample_period;  in perf_event_set_period()
      122  local64_set(&hwc->period_left, left);  in perf_event_set_period()
      123  hwc->last_period = period;  in perf_event_set_period()
      129  local64_set(&hwc->period_left, left);  in perf_event_set_period()
      130  hwc->last_period = period;  in perf_event_set_period()
      156  struct hw_perf_event *hwc = &event->hw;  in perf_event_try_update() local
      168  prev_raw_count = local64_read(&hwc->prev_count);  in perf_event_try_update()
      169  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in perf_event_try_update()
      [all …]
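perf_event_try_update() is the lock-free read side that appears all over this listing: snapshot prev_count, read the hardware counter, publish the new snapshot only if no interrupt got there first (local64_cmpxchg), then accumulate the delta. Below is a userspace model of the same loop using C11 atomics; the counter read is faked and the 48-bit width is an assumption for the sketch.

    #include <stdio.h>
    #include <stdint.h>
    #include <stdatomic.h>

    #define COUNTER_MASK 0xffffffffffffULL   /* assumed 48-bit counter width */

    static _Atomic uint64_t prev_count;
    static _Atomic uint64_t event_total;

    /* Stand-in for rdpmcl()/rdmsrl(): pretend the hardware counted up to here. */
    static uint64_t read_hw_counter(void)
    {
            return 123456;
    }

    static void event_update(void)
    {
            uint64_t prev, new;

            do {
                    prev = atomic_load(&prev_count);
                    new = read_hw_counter();
                    /* Retry if an interrupt/NMI updated prev_count under us. */
            } while (!atomic_compare_exchange_weak(&prev_count, &prev, new));

            /* Deltas are computed in the counter's width, then accumulated. */
            atomic_fetch_add(&event_total, (new - prev) & COUNTER_MASK);
    }

    int main(void)
    {
            event_update();
            printf("count = %llu\n",
                   (unsigned long long)atomic_load(&event_total));
            return 0;
    }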
|
| D | uncore.c |
      84  struct hw_perf_event *hwc = &event->hw;  in amd_uncore_read() local
      93  prev = local64_read(&hwc->prev_count);  in amd_uncore_read()
      94  rdpmcl(hwc->event_base_rdpmc, new);  in amd_uncore_read()
      95  local64_set(&hwc->prev_count, new);  in amd_uncore_read()
      103  struct hw_perf_event *hwc = &event->hw;  in amd_uncore_start() local
      106  wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count));  in amd_uncore_start()
      108  hwc->state = 0;  in amd_uncore_start()
      109  wrmsrl(hwc->config_base, (hwc->config | ARCH_PERFMON_EVENTSEL_ENABLE));  in amd_uncore_start()
      115  struct hw_perf_event *hwc = &event->hw;  in amd_uncore_stop() local
      117  wrmsrl(hwc->config_base, hwc->config);  in amd_uncore_stop()
      [all …]
|
| D | power.c |
      49  struct hw_perf_event *hwc = &event->hw;  in event_update() local
      53  prev_pwr_acc = hwc->pwr_acc;  in event_update()
      54  prev_ptsc = hwc->ptsc;  in event_update()
      93  struct hw_perf_event *hwc = &event->hw;  in pmu_event_stop() local
      96  if (!(hwc->state & PERF_HES_STOPPED))  in pmu_event_stop()
      97  hwc->state |= PERF_HES_STOPPED;  in pmu_event_stop()
      100  if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {  in pmu_event_stop()
      106  hwc->state |= PERF_HES_UPTODATE;  in pmu_event_stop()
      112  struct hw_perf_event *hwc = &event->hw;  in pmu_event_add() local
      114  hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;  in pmu_event_add()
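pmu_event_stop()/pmu_event_add() show the small state machine the perf core expects from every driver: PERF_HES_STOPPED means the counter is not running, PERF_HES_UPTODATE means the software count already reflects the hardware. A compact model of those transitions follows; the flag and mode values are the sketch's own, only the ordering of the checks matters.

    #include <stdio.h>

    #define PERF_HES_STOPPED   0x01   /* counter is not counting        */
    #define PERF_HES_UPTODATE  0x02   /* event count reflects hardware  */
    #define PERF_EF_UPDATE     0x01   /* caller wants a final readout   */
    #define PERF_EF_START      0x02   /* add() should also start        */

    struct hw_event { int state; };

    static void hw_read_and_accumulate(void) { /* event_update() in the driver */ }
    static void hw_start_counter(void)       { /* programs the hardware        */ }

    static void event_stop(struct hw_event *hwc, int mode)
    {
            if (!(hwc->state & PERF_HES_STOPPED))
                    hwc->state |= PERF_HES_STOPPED;

            if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
                    hw_read_and_accumulate();
                    hwc->state |= PERF_HES_UPTODATE;
            }
    }

    static void event_add(struct hw_event *hwc, int mode)
    {
            hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
            if (mode & PERF_EF_START) {
                    hwc->state = 0;
                    hw_start_counter();
            }
    }

    int main(void)
    {
            struct hw_event ev = { 0 };

            event_add(&ev, PERF_EF_START);
            event_stop(&ev, PERF_EF_UPDATE);
            printf("state=0x%x\n", ev.state);
            return 0;
    }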
|
| /Linux-v5.4/arch/arc/kernel/ |
| D | perf_event.c |
      119  struct hw_perf_event *hwc, int idx)  in arc_perf_event_update() argument
      121  u64 prev_raw_count = local64_read(&hwc->prev_count);  in arc_perf_event_update()
      129  local64_set(&hwc->prev_count, new_raw_count);  in arc_perf_event_update()
      131  local64_sub(delta, &hwc->period_left);  in arc_perf_event_update()
      169  struct hw_perf_event *hwc = &event->hw;  in arc_pmu_event_init() local
      173  hwc->sample_period = arc_pmu->max_period;  in arc_pmu_event_init()
      174  hwc->last_period = hwc->sample_period;  in arc_pmu_event_init()
      175  local64_set(&hwc->period_left, hwc->sample_period);  in arc_pmu_event_init()
      178  hwc->config = 0;  in arc_pmu_event_init()
      183  hwc->config |= ARC_REG_PCT_CONFIG_KERN;  in arc_pmu_event_init()
      [all …]
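arc_pmu_event_init() shows the usual init split: default the sampling period to the counter's maximum, then fold attribute flags (here, kernel-mode filtering) into hwc->config. A small model of that translation; the bit values and the attribute layout are assumptions made for the sketch.

    #include <stdio.h>
    #include <stdint.h>

    #define CFG_COUNT_KERNEL  (1u << 0)   /* assumed privilege-filter bits */
    #define CFG_COUNT_USER    (1u << 1)

    struct event_attr { int exclude_kernel, exclude_user; uint64_t config; };
    struct hw_event   { uint64_t config; int64_t sample_period, period_left; };

    static void event_init(const struct event_attr *attr, struct hw_event *hwc,
                           int64_t max_period)
    {
            /* No period requested: count as long as the hardware allows. */
            hwc->sample_period = max_period;
            hwc->period_left = hwc->sample_period;

            hwc->config = 0;
            if (!attr->exclude_kernel)
                    hwc->config |= CFG_COUNT_KERNEL;
            if (!attr->exclude_user)
                    hwc->config |= CFG_COUNT_USER;
            hwc->config |= attr->config & 0xff;   /* event selector */
    }

    int main(void)
    {
            struct event_attr attr = { .exclude_user = 1, .config = 0x12 };
            struct hw_event hwc;

            event_init(&attr, &hwc, (1LL << 32) - 1);
            printf("config=0x%llx period=%lld\n",
                   (unsigned long long)hwc.config, (long long)hwc.sample_period);
            return 0;
    }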
|
| /Linux-v5.4/arch/xtensa/kernel/ |
| D | perf_event.c |
      137  struct hw_perf_event *hwc, int idx)  in xtensa_perf_event_update() argument
      143  prev_raw_count = local64_read(&hwc->prev_count);  in xtensa_perf_event_update()
      145  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in xtensa_perf_event_update()
      151  local64_sub(delta, &hwc->period_left);  in xtensa_perf_event_update()
      155  struct hw_perf_event *hwc, int idx)  in xtensa_perf_event_set_period() argument
      163  s64 period = hwc->sample_period;  in xtensa_perf_event_set_period()
      165  left = local64_read(&hwc->period_left);  in xtensa_perf_event_set_period()
      168  local64_set(&hwc->period_left, left);  in xtensa_perf_event_set_period()
      169  hwc->last_period = period;  in xtensa_perf_event_set_period()
      173  local64_set(&hwc->period_left, left);  in xtensa_perf_event_set_period()
      [all …]
|
| /Linux-v5.4/arch/s390/kernel/ |
| D | perf_cpum_cf.c |
      37  static int validate_ctr_version(const struct hw_perf_event *hwc)  in validate_ctr_version() argument
      46  switch (hwc->config_base) {  in validate_ctr_version()
      54  hwc->config > 79) ||  in validate_ctr_version()
      55  (cpuhw->info.csvn >= 6 && hwc->config > 83))  in validate_ctr_version()
      61  if ((cpuhw->info.csvn == 1 && hwc->config > 159) ||  in validate_ctr_version()
      62  (cpuhw->info.csvn == 2 && hwc->config > 175) ||  in validate_ctr_version()
      64  && hwc->config > 255) ||  in validate_ctr_version()
      65  (cpuhw->info.csvn >= 6 && hwc->config > 287))  in validate_ctr_version()
      95  static int validate_ctr_auth(const struct hw_perf_event *hwc)  in validate_ctr_auth() argument
      108  ctrs_state = cpumf_ctr_ctl[hwc->config_base];  in validate_ctr_auth()
      [all …]
|
| D | perf_cpum_sf.c |
      316  static unsigned long sfb_max_limit(struct hw_perf_event *hwc)  in sfb_max_limit() argument
      318  return SAMPL_DIAG_MODE(hwc) ? CPUM_SF_MAX_SDB * CPUM_SF_SDB_DIAG_FACTOR  in sfb_max_limit()
      323  struct hw_perf_event *hwc)  in sfb_pending_allocs() argument
      326  return SFB_ALLOC_REG(hwc);  in sfb_pending_allocs()
      327  if (SFB_ALLOC_REG(hwc) > sfb->num_sdb)  in sfb_pending_allocs()
      328  return SFB_ALLOC_REG(hwc) - sfb->num_sdb;  in sfb_pending_allocs()
      333  struct hw_perf_event *hwc)  in sfb_has_pending_allocs() argument
      335  return sfb_pending_allocs(sfb, hwc) > 0;  in sfb_has_pending_allocs()
      338  static void sfb_account_allocs(unsigned long num, struct hw_perf_event *hwc)  in sfb_account_allocs() argument
      341  num = min_t(unsigned long, num, sfb_max_limit(hwc) - SFB_ALLOC_REG(hwc));  in sfb_account_allocs()
      [all …]
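The sfb_* helpers above track how many sample-data blocks the driver still owes the sampling buffer: the target lives in SFB_ALLOC_REG(hwc), the pending amount is the target minus what is already allocated, and the target is clamped by a per-mode maximum. A small model of that bookkeeping; the limits and structure layout are made up for the sketch.

    #include <stdio.h>

    #define SFB_MAX_NORMAL  64          /* assumed limits, not the kernel's */
    #define SFB_MAX_DIAG    (64 * 2)

    struct sf_buffer { unsigned long num_sdb; };      /* blocks actually allocated */
    struct hw_event  { unsigned long alloc_target; int diag_mode; };

    static unsigned long sfb_max_limit(const struct hw_event *hwc)
    {
            return hwc->diag_mode ? SFB_MAX_DIAG : SFB_MAX_NORMAL;
    }

    static unsigned long sfb_pending_allocs(const struct sf_buffer *sfb,
                                            const struct hw_event *hwc)
    {
            if (hwc->alloc_target > sfb->num_sdb)
                    return hwc->alloc_target - sfb->num_sdb;
            return 0;
    }

    /* Raise the allocation target by num, never past the per-mode maximum. */
    static void sfb_account_allocs(unsigned long num, struct hw_event *hwc)
    {
            unsigned long room = sfb_max_limit(hwc) - hwc->alloc_target;

            if (num > room)
                    num = room;
            hwc->alloc_target += num;
    }

    int main(void)
    {
            struct sf_buffer sfb = { .num_sdb = 10 };
            struct hw_event hwc = { .alloc_target = 16 };

            sfb_account_allocs(100, &hwc);   /* gets clamped to the limit */
            printf("target=%lu pending=%lu\n",
                   hwc.alloc_target, sfb_pending_allocs(&sfb, &hwc));
            return 0;
    }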
|
| /Linux-v5.4/arch/riscv/kernel/ |
| D | perf_event.c |
      216  struct hw_perf_event *hwc = &event->hw;  in riscv_pmu_read() local
      219  int idx = hwc->idx;  in riscv_pmu_read()
      223  prev_raw_count = local64_read(&hwc->prev_count);  in riscv_pmu_read()
      226  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in riscv_pmu_read()
      253  struct hw_perf_event *hwc = &event->hw;  in riscv_pmu_stop() local
      255  WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);  in riscv_pmu_stop()
      256  hwc->state |= PERF_HES_STOPPED;  in riscv_pmu_stop()
      258  if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {  in riscv_pmu_stop()
      260  hwc->state |= PERF_HES_UPTODATE;  in riscv_pmu_stop()
      269  struct hw_perf_event *hwc = &event->hw;  in riscv_pmu_start() local
      [all …]
|
| /Linux-v5.4/drivers/perf/ |
| D | arm_pmu.c |
      123  struct hw_perf_event *hwc = &event->hw;  in armpmu_event_set_period() local
      124  s64 left = local64_read(&hwc->period_left);  in armpmu_event_set_period()
      125  s64 period = hwc->sample_period;  in armpmu_event_set_period()
      132  local64_set(&hwc->period_left, left);  in armpmu_event_set_period()
      133  hwc->last_period = period;  in armpmu_event_set_period()
      139  local64_set(&hwc->period_left, left);  in armpmu_event_set_period()
      140  hwc->last_period = period;  in armpmu_event_set_period()
      153  local64_set(&hwc->prev_count, (u64)-left);  in armpmu_event_set_period()
      165  struct hw_perf_event *hwc = &event->hw;  in armpmu_event_update() local
      170  prev_raw_count = local64_read(&hwc->prev_count);  in armpmu_event_update()
      [all …]
|
| D | thunderx2_pmu.c |
      247  struct hw_perf_event *hwc = &event->hw;  in init_cntr_base_l3c() local
      250  hwc->config_base = (unsigned long)tx2_pmu->base  in init_cntr_base_l3c()
      252  hwc->event_base = (unsigned long)tx2_pmu->base  in init_cntr_base_l3c()
      259  struct hw_perf_event *hwc = &event->hw;  in init_cntr_base_dmc() local
      261  hwc->config_base = (unsigned long)tx2_pmu->base  in init_cntr_base_dmc()
      264  hwc->event_base = (unsigned long)tx2_pmu->base  in init_cntr_base_dmc()
      271  struct hw_perf_event *hwc = &event->hw;  in uncore_start_event_l3c() local
      275  reg_writel(val, hwc->config_base);  in uncore_start_event_l3c()
      276  local64_set(&hwc->prev_count, 0);  in uncore_start_event_l3c()
      277  reg_writel(0, hwc->event_base);  in uncore_start_event_l3c()
      [all …]
|
| D | qcom_l2_pmu.c |
      341  struct hw_perf_event *hwc = &event->hw;  in l2_cache_event_update() local
      343  u32 idx = hwc->idx;  in l2_cache_event_update()
      346  prev = local64_read(&hwc->prev_count);  in l2_cache_event_update()
      348  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in l2_cache_event_update()
      362  struct hw_perf_event *hwc)  in l2_cache_cluster_set_period() argument
      364  u32 idx = hwc->idx;  in l2_cache_cluster_set_period()
      377  local64_set(&hwc->prev_count, new);  in l2_cache_cluster_set_period()
      384  struct hw_perf_event *hwc = &event->hw;  in l2_cache_get_event_idx() local
      389  if (hwc->config_base == L2CYCLE_CTR_RAW_CODE) {  in l2_cache_get_event_idx()
      406  group = L2_EVT_GROUP(hwc->config_base);  in l2_cache_get_event_idx()
      [all …]
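l2_cache_get_event_idx() (and mipsxx_pmu_alloc_counter() further down) pick a free hardware counter by scanning a used-counter bitmap, with a dedicated slot for special events such as the cycle counter. Here is a minimal model of that allocation; the counter count, slot numbers, and raw code are assumptions for the sketch.

    #include <stdio.h>

    #define NUM_COUNTERS   8      /* general-purpose slots               */
    #define CYCLE_IDX      8      /* dedicated slot for the cycle event  */
    #define CYCLE_RAW_CODE 0xfe   /* assumed raw code for the sketch     */

    static unsigned long used_mask;   /* one bit per hardware counter */

    static int get_event_idx(unsigned int config)
    {
            int idx;

            if (config == CYCLE_RAW_CODE) {
                    if (used_mask & (1ul << CYCLE_IDX))
                            return -1;                /* already claimed */
                    used_mask |= 1ul << CYCLE_IDX;
                    return CYCLE_IDX;
            }

            for (idx = 0; idx < NUM_COUNTERS; idx++) {
                    if (!(used_mask & (1ul << idx))) {
                            used_mask |= 1ul << idx;
                            return idx;
                    }
            }
            return -1;                                /* all counters busy */
    }

    int main(void)
    {
            int cyc = get_event_idx(CYCLE_RAW_CODE);
            int evt = get_event_idx(0x3);

            printf("cycle -> %d, first event -> %d\n", cyc, evt);
            return 0;
    }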
|
| /Linux-v5.4/arch/nds32/kernel/ |
| D | perf_event_cpu.c |
      186  struct hw_perf_event *hwc = &event->hw;  in nds32_pmu_event_set_period() local
      187  s64 left = local64_read(&hwc->period_left);  in nds32_pmu_event_set_period()
      188  s64 period = hwc->sample_period;  in nds32_pmu_event_set_period()
      192  if (unlikely(period != hwc->last_period))  in nds32_pmu_event_set_period()
      193  left = period - (hwc->last_period - left);  in nds32_pmu_event_set_period()
      197  local64_set(&hwc->period_left, left);  in nds32_pmu_event_set_period()
      198  hwc->last_period = period;  in nds32_pmu_event_set_period()
      204  local64_set(&hwc->period_left, left);  in nds32_pmu_event_set_period()
      205  hwc->last_period = period;  in nds32_pmu_event_set_period()
      216  local64_set(&hwc->prev_count, (u64)(-left));  in nds32_pmu_event_set_period()
      [all …]
|
| /Linux-v5.4/arch/arm/kernel/ |
| D | perf_event_xscale.c |
      175  struct hw_perf_event *hwc;  in xscale1pmu_handle_irq() local
      183  hwc = &event->hw;  in xscale1pmu_handle_irq()
      185  perf_sample_data_init(&data, 0, hwc->last_period);  in xscale1pmu_handle_irq()
      208  struct hw_perf_event *hwc = &event->hw;  in xscale1pmu_enable_event() local
      210  int idx = hwc->idx;  in xscale1pmu_enable_event()
      219  evt = (hwc->config_base << XSCALE1_COUNT0_EVT_SHFT) |  in xscale1pmu_enable_event()
      224  evt = (hwc->config_base << XSCALE1_COUNT1_EVT_SHFT) |  in xscale1pmu_enable_event()
      244  struct hw_perf_event *hwc = &event->hw;  in xscale1pmu_disable_event() local
      246  int idx = hwc->idx;  in xscale1pmu_disable_event()
      278  struct hw_perf_event *hwc = &event->hw;  in xscale1pmu_get_event_idx() local
      [all …]
|
| /Linux-v5.4/arch/x86/events/intel/ |
| D | uncore_nhmex.c |
      247  struct hw_perf_event *hwc = &event->hw;  in nhmex_uncore_msr_enable_event() local
      249  if (hwc->idx == UNCORE_PMC_IDX_FIXED)  in nhmex_uncore_msr_enable_event()
      250  wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0);  in nhmex_uncore_msr_enable_event()
      252  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);  in nhmex_uncore_msr_enable_event()
      254  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);  in nhmex_uncore_msr_enable_event()
      352  struct hw_perf_event *hwc = &event->hw;  in nhmex_bbox_hw_config() local
      353  struct hw_perf_event_extra *reg1 = &hwc->extra_reg;  in nhmex_bbox_hw_config()
      354  struct hw_perf_event_extra *reg2 = &hwc->branch_reg;  in nhmex_bbox_hw_config()
      357  ctr = (hwc->config & NHMEX_B_PMON_CTR_MASK) >>  in nhmex_bbox_hw_config()
      359  ev_sel = (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK) >>  in nhmex_bbox_hw_config()
      [all …]
|
| D | p4.c |
      855  static inline int p4_pmu_clear_cccr_ovf(struct hw_perf_event *hwc)  in p4_pmu_clear_cccr_ovf() argument
      860  rdmsrl(hwc->config_base, v);  in p4_pmu_clear_cccr_ovf()
      862  wrmsrl(hwc->config_base, v & ~P4_CCCR_OVF);  in p4_pmu_clear_cccr_ovf()
      873  rdmsrl(hwc->event_base, v);  in p4_pmu_clear_cccr_ovf()
      905  struct hw_perf_event *hwc = &event->hw;  in p4_pmu_disable_event() local
      912  (void)wrmsrl_safe(hwc->config_base,  in p4_pmu_disable_event()
      913  p4_config_unpack_cccr(hwc->config) & ~P4_CCCR_ENABLE & ~P4_CCCR_OVF & ~P4_CCCR_RESERVED);  in p4_pmu_disable_event()
      951  struct hw_perf_event *hwc = &event->hw;  in p4_pmu_enable_event() local
      952  int thread = p4_ht_config_thread(hwc->config);  in p4_pmu_enable_event()
      953  u64 escr_conf = p4_config_unpack_escr(p4_clear_ht_bit(hwc->config));  in p4_pmu_enable_event()
      [all …]
|
| /Linux-v5.4/arch/x86/events/ |
| D | core.c |
      70  struct hw_perf_event *hwc = &event->hw;  in x86_perf_event_update() local
      73  int idx = hwc->idx;  in x86_perf_event_update()
      87  prev_raw_count = local64_read(&hwc->prev_count);  in x86_perf_event_update()
      88  rdpmcl(hwc->event_base_rdpmc, new_raw_count);  in x86_perf_event_update()
      90  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in x86_perf_event_update()
      106  local64_sub(delta, &hwc->period_left);  in x86_perf_event_update()
      300  set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event)  in set_ext_hw_attr() argument
      331  hwc->config |= val;  in set_ext_hw_attr()
      410  struct hw_perf_event *hwc = &event->hw;  in x86_setup_perfctr() local
      414  hwc->sample_period = x86_pmu.max_period;  in x86_setup_perfctr()
      [all …]
|
| /Linux-v5.4/arch/sh/kernel/ |
| D | perf_event.c |
      121  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init() local
      171  hwc->config |= config;  in __hw_perf_event_init()
      177  struct hw_perf_event *hwc, int idx)  in sh_perf_event_update() argument
      196  prev_raw_count = local64_read(&hwc->prev_count);  in sh_perf_event_update()
      199  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in sh_perf_event_update()
      220  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_stop() local
      221  int idx = hwc->idx;  in sh_pmu_stop()
      224  sh_pmu->disable(hwc, idx);  in sh_pmu_stop()
      238  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_start() local
      239  int idx = hwc->idx;  in sh_pmu_start()
      [all …]
|
| /Linux-v5.4/arch/mips/kernel/ |
| D | perf_event_mipsxx.c |
      286  struct hw_perf_event *hwc)  in mipsxx_pmu_alloc_counter() argument
      294  unsigned long cntr_mask = (hwc->event_base >> 8) & 0xffff;  in mipsxx_pmu_alloc_counter()
      371  struct hw_perf_event *hwc,  in mipspmu_event_set_period() argument
      374  u64 left = local64_read(&hwc->period_left);  in mipspmu_event_set_period()
      375  u64 period = hwc->sample_period;  in mipspmu_event_set_period()
      381  local64_set(&hwc->period_left, left);  in mipspmu_event_set_period()
      382  hwc->last_period = period;  in mipspmu_event_set_period()
      387  local64_set(&hwc->period_left, left);  in mipspmu_event_set_period()
      388  hwc->last_period = period;  in mipspmu_event_set_period()
      394  local64_set(&hwc->period_left, left);  in mipspmu_event_set_period()
      [all …]
|