
Searched refs: used_mask (Results 1 – 20 of 20), sorted by relevance

/Linux-v4.19/arch/xtensa/kernel/
perf_event.c
57 unsigned long used_mask[BITS_TO_LONGS(XCHAL_NUM_PERF_COUNTERS)]; member
290 if (__test_and_set_bit(idx, ev->used_mask)) { in xtensa_pmu_add()
291 idx = find_first_zero_bit(ev->used_mask, in xtensa_pmu_add()
296 __set_bit(idx, ev->used_mask); in xtensa_pmu_add()
315 __clear_bit(event->hw.idx, ev->used_mask); in xtensa_pmu_del()
368 for (i = find_first_bit(ev->used_mask, XCHAL_NUM_PERF_COUNTERS); in xtensa_pmu_irq_handler()
370 i = find_next_bit(ev->used_mask, XCHAL_NUM_PERF_COUNTERS, i + 1)) { in xtensa_pmu_irq_handler()
/Linux-v4.19/arch/sh/kernel/
perf_event.c
33 unsigned long used_mask[BITS_TO_LONGS(MAX_HWEVENTS)]; member
260 __clear_bit(event->hw.idx, cpuc->used_mask); in sh_pmu_del()
274 if (__test_and_set_bit(idx, cpuc->used_mask)) { in sh_pmu_add()
275 idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events); in sh_pmu_add()
279 __set_bit(idx, cpuc->used_mask); in sh_pmu_add()
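
The xtensa and sh hits above share one allocation scheme: on add, the event first tries to re-claim the counter index it used last time via __test_and_set_bit(); if that slot is taken it falls back to find_first_zero_bit(), and on del the bit is cleared again. The following is a minimal userspace sketch of that scheme, assuming a made-up NUM_COUNTERS and simplified stand-ins for the kernel's <linux/bitmap.h> helpers; it is an illustration, not the drivers' code.

#include <limits.h>
#include <stdio.h>

#define NUM_COUNTERS     8      /* assumed counter count, not a kernel value */
#define BITS_PER_LONG    ((int)(CHAR_BIT * sizeof(unsigned long)))
#define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

struct cpu_hw_events {
	unsigned long used_mask[BITS_TO_LONGS(NUM_COUNTERS)];
};

/* Simplified, non-atomic stand-ins for the kernel bit helpers. */
static int test_and_set_bit(int nr, unsigned long *mask)
{
	unsigned long bit = 1UL << (nr % BITS_PER_LONG);
	int old = !!(mask[nr / BITS_PER_LONG] & bit);

	mask[nr / BITS_PER_LONG] |= bit;
	return old;
}

static void set_bit(int nr, unsigned long *mask)
{
	mask[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
}

static void clear_bit(int nr, unsigned long *mask)
{
	mask[nr / BITS_PER_LONG] &= ~(1UL << (nr % BITS_PER_LONG));
}

/* First clear bit below size, or size if the bitmap is full. */
static int find_first_zero_bit(const unsigned long *mask, int size)
{
	for (int nr = 0; nr < size; nr++)
		if (!(mask[nr / BITS_PER_LONG] & (1UL << (nr % BITS_PER_LONG))))
			return nr;
	return size;
}

/* Add: prefer the index the event used last time, else take any free one. */
static int pmu_add(struct cpu_hw_events *cpuc, int prev_idx)
{
	int idx = prev_idx;

	if (idx < 0 || test_and_set_bit(idx, cpuc->used_mask)) {
		idx = find_first_zero_bit(cpuc->used_mask, NUM_COUNTERS);
		if (idx == NUM_COUNTERS)
			return -1;      /* all counters busy */
		set_bit(idx, cpuc->used_mask);
	}
	return idx;
}

/* Del: release the counter so the next event can claim it. */
static void pmu_del(struct cpu_hw_events *cpuc, int idx)
{
	clear_bit(idx, cpuc->used_mask);
}

int main(void)
{
	struct cpu_hw_events cpuc = { { 0 } };
	int a = pmu_add(&cpuc, -1);     /* no previous slot: gets counter 0 */
	int b = pmu_add(&cpuc, 0);      /* 0 is busy, falls back to counter 1 */

	printf("a=%d b=%d\n", a, b);
	pmu_del(&cpuc, a);
	printf("reuse after del: %d\n", pmu_add(&cpuc, a));    /* 0 again */
	return 0;
}
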
/Linux-v4.19/drivers/perf/hisilicon/
hisi_uncore_pmu.c
106 unsigned long *used_mask = hisi_pmu->pmu_events.used_mask; in hisi_uncore_pmu_get_event_idx() local
110 idx = find_first_zero_bit(used_mask, num_counters); in hisi_uncore_pmu_get_event_idx()
114 set_bit(idx, used_mask); in hisi_uncore_pmu_get_event_idx()
126 clear_bit(idx, hisi_pmu->pmu_events.used_mask); in hisi_uncore_pmu_clear_event_idx()
335 int enabled = bitmap_weight(hisi_pmu->pmu_events.used_mask, in hisi_uncore_pmu_enable()
hisi_uncore_ddrc_pmu.c
149 unsigned long *used_mask = ddrc_pmu->pmu_events.used_mask; in hisi_ddrc_pmu_get_event_idx() local
154 if (test_bit(idx, used_mask)) in hisi_ddrc_pmu_get_event_idx()
157 set_bit(idx, used_mask); in hisi_ddrc_pmu_get_event_idx()
hisi_uncore_pmu.h
56 DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS);
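
The HiSilicon hits use DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS) instead of the open-coded unsigned long arrays seen in the xtensa, sh, and qcom hits; both spell out the same storage. Below is a rough userspace restatement of the macros involved, with an assumed counter count; the real definitions live in the kernel headers and this is only an approximation.

#include <limits.h>
#include <stdio.h>

#define BITS_PER_LONG              ((int)(CHAR_BIT * sizeof(unsigned long)))
#define BITS_TO_LONGS(bits)        (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
#define DECLARE_BITMAP(name, bits) unsigned long name[BITS_TO_LONGS(bits)]

#define HISI_MAX_COUNTERS 0x10     /* assumed value for illustration */

struct hisi_pmu_hwevents {
	DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS);   /* 16 bits fit in one long */
};

int main(void)
{
	struct hisi_pmu_hwevents ev = { { 0 } };

	printf("used_mask: %d long(s), %zu bytes\n",
	       BITS_TO_LONGS(HISI_MAX_COUNTERS), sizeof(ev.used_mask));
	return 0;
}
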
/Linux-v4.19/drivers/perf/
arm_dsu_pmu.c
96 DECLARE_BITMAP(used_mask, DSU_PMU_MAX_HW_CNTRS);
308 unsigned long *used_mask = hw_events->used_mask; in dsu_pmu_get_event_idx() local
311 if (test_and_set_bit(DSU_PMU_IDX_CYCLE_COUNTER, used_mask)) in dsu_pmu_get_event_idx()
316 idx = find_first_zero_bit(used_mask, dsu_pmu->num_counters); in dsu_pmu_get_event_idx()
319 set_bit(idx, hw_events->used_mask); in dsu_pmu_get_event_idx()
478 clear_bit(idx, hw_events->used_mask); in dsu_pmu_del()
489 if (bitmap_empty(dsu_pmu->hw_events.used_mask, DSU_PMU_MAX_HW_CNTRS)) in dsu_pmu_enable()
536 memset(fake_hw.used_mask, 0, sizeof(fake_hw.used_mask)); in dsu_pmu_validate_group()
arm-cci.c
73 unsigned long *used_mask; member
320 if (test_and_set_bit(CCI400_PMU_CYCLE_CNTR_IDX, hw->used_mask)) in cci400_get_event_idx()
327 if (!test_and_set_bit(idx, hw->used_mask)) in cci400_get_event_idx()
649 for_each_set_bit(i, cci_pmu->hw_events.used_mask, cci_pmu->num_cntrs) { in cci_pmu_sync_counters()
810 if (!test_and_set_bit(idx, hw->used_mask)) in pmu_get_event_idx()
1100 int enabled = bitmap_weight(hw_events->used_mask, cci_pmu->num_cntrs); in cci_pmu_enable()
1224 clear_bit(idx, hw_events->used_mask); in cci_pmu_del()
1263 .used_mask = mask, in validate_group()
1641 cci_pmu->hw_events.used_mask = devm_kcalloc(dev, in cci_pmu_alloc()
1643 sizeof(*cci_pmu->hw_events.used_mask), in cci_pmu_alloc()
[all …]
arm_pmu.c
319 memset(&fake_pmu.used_mask, 0, sizeof(fake_pmu.used_mask)); in validate_group()
458 int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events); in armpmu_enable()
712 int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events); in cpu_pm_pmu_notify()
qcom_l3_pmu.c
167 unsigned long used_mask[BITS_TO_LONGS(L3_NUM_COUNTERS)]; member
572 idx = bitmap_find_free_region(l3pmu->used_mask, L3_NUM_COUNTERS, order); in qcom_l3_cache__event_add()
599 bitmap_release_region(l3pmu->used_mask, hwc->idx, order); in qcom_l3_cache__event_del()
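
qcom_l3_pmu.c differs from the single-bit drivers above: bitmap_find_free_region() hands out 2^order adjacent, naturally aligned counters in one step (for instance a pair for an event counted across two counters), and bitmap_release_region() frees the block as a whole. The sketch below imitates that behaviour with cut-down stand-ins for the two kernel helpers and an assumed counter count.

#include <stdio.h>

#define L3_NUM_COUNTERS 8       /* assumed value for illustration */

/* Find a free, naturally aligned block of 2^order bits and mark it used;
 * returns the first bit of the block, or -1 if no such block is free. */
static int simple_find_free_region(unsigned long *mask, int bits, int order)
{
	int len = 1 << order;

	for (int start = 0; start + len <= bits; start += len) {
		unsigned long region = ((1UL << len) - 1) << start;

		if (!(*mask & region)) {
			*mask |= region;
			return start;
		}
	}
	return -1;
}

/* Release a block previously handed out for the same order. */
static void simple_release_region(unsigned long *mask, int start, int order)
{
	*mask &= ~(((1UL << (1 << order)) - 1) << start);
}

int main(void)
{
	unsigned long used_mask = 0;
	int a = simple_find_free_region(&used_mask, L3_NUM_COUNTERS, 0); /* one counter */
	int b = simple_find_free_region(&used_mask, L3_NUM_COUNTERS, 1); /* aligned pair */

	printf("a=%d b=%d mask=0x%lx\n", a, b, used_mask);      /* a=0 b=2 mask=0xd */
	simple_release_region(&used_mask, b, 1);
	printf("after release: mask=0x%lx\n", used_mask);       /* 0x1 */
	return 0;
}
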
/Linux-v4.19/arch/arm/kernel/
perf_event_xscale.c
280 if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask)) in xscale1pmu_get_event_idx()
285 if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask)) in xscale1pmu_get_event_idx()
288 if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask)) in xscale1pmu_get_event_idx()
298 clear_bit(event->hw.idx, cpuc->used_mask); in xscalepmu_clear_event_idx()
656 if (!test_and_set_bit(XSCALE_COUNTER3, cpuc->used_mask)) in xscale2pmu_get_event_idx()
658 else if (!test_and_set_bit(XSCALE_COUNTER2, cpuc->used_mask)) in xscale2pmu_get_event_idx()
perf_event_v6.c
394 if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask)) in armv6pmu_get_event_idx()
403 if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask)) in armv6pmu_get_event_idx()
406 if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask)) in armv6pmu_get_event_idx()
417 clear_bit(event->hw.idx, cpuc->used_mask); in armv6pmu_clear_event_idx()
perf_event_v7.c
1042 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv7pmu_get_event_idx()
1053 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv7pmu_get_event_idx()
1064 clear_bit(event->hw.idx, cpuc->used_mask); in armv7pmu_clear_event_idx()
1626 if (test_and_set_bit(bit, cpuc->used_mask)) in krait_pmu_get_event_idx()
1632 clear_bit(bit, cpuc->used_mask); in krait_pmu_get_event_idx()
1650 clear_bit(bit, cpuc->used_mask); in krait_pmu_clear_event_idx()
1956 if (test_and_set_bit(bit, cpuc->used_mask)) in scorpion_pmu_get_event_idx()
1962 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_get_event_idx()
1980 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_clear_event_idx()
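
The XScale, ARMv6, and ARMv7 hits show a fixed-priority variant of the same bookkeeping: the cycle event may only claim the dedicated cycle-counter index, and every other event scans the general-purpose counters. A small sketch of that split follows; the index values and the event code are made up for illustration.

#include <stdbool.h>
#include <stdio.h>

enum { IDX_CYCLE_COUNTER = 0, IDX_COUNTER0 = 1, NUM_COUNTERS = 4 };
#define EV_CPU_CYCLES 0x11      /* assumed event code */

/* Non-atomic stand-in for test_and_set_bit() on a single-long mask. */
static bool test_and_set(unsigned long *mask, int nr)
{
	bool old = *mask & (1UL << nr);

	*mask |= 1UL << nr;
	return old;
}

static int get_event_idx(unsigned long *used_mask, int event_code)
{
	/* The cycle event only fits the dedicated cycle counter. */
	if (event_code == EV_CPU_CYCLES)
		return test_and_set(used_mask, IDX_CYCLE_COUNTER) ?
		       -1 : IDX_CYCLE_COUNTER;

	/* Everything else takes the first free general-purpose counter. */
	for (int idx = IDX_COUNTER0; idx < NUM_COUNTERS; idx++)
		if (!test_and_set(used_mask, idx))
			return idx;
	return -1;
}

int main(void)
{
	unsigned long used_mask = 0;

	printf("cycles  -> %d\n", get_event_idx(&used_mask, EV_CPU_CYCLES));  /* 0  */
	printf("cycles  -> %d\n", get_event_idx(&used_mask, EV_CPU_CYCLES));  /* -1 */
	printf("generic -> %d\n", get_event_idx(&used_mask, 0x01));           /* 1  */
	return 0;
}
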
/Linux-v4.19/arch/arc/kernel/
perf_event.c
35 unsigned long used_mask[BITS_TO_LONGS(ARC_PERF_MAX_COUNTERS)]; member
325 __clear_bit(event->hw.idx, pmu_cpu->used_mask); in arc_pmu_del()
339 idx = ffz(pmu_cpu->used_mask[0]); in arc_pmu_add()
343 __set_bit(idx, pmu_cpu->used_mask); in arc_pmu_add()
/Linux-v4.19/arch/arm64/kernel/
perf_event.c
872 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv8pmu_get_single_idx()
888 if (!test_and_set_bit(idx, cpuc->used_mask)) { in armv8pmu_get_chain_idx()
890 if (!test_and_set_bit(idx - 1, cpuc->used_mask)) in armv8pmu_get_chain_idx()
893 clear_bit(idx, cpuc->used_mask); in armv8pmu_get_chain_idx()
908 if (!test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv8pmu_get_event_idx()
926 clear_bit(idx, cpuc->used_mask); in armv8pmu_clear_event_idx()
928 clear_bit(idx - 1, cpuc->used_mask); in armv8pmu_clear_event_idx()
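
The arm64 hits around armv8pmu_get_chain_idx() claim counters in pairs for chained events: the odd counter is taken first, and if the even partner below it turns out to be busy the odd bit is rolled back and the search moves on. A sketch of that claim-and-roll-back step, again with simplified bit helpers and an assumed counter count:

#include <stdbool.h>
#include <stdio.h>

#define NUM_COUNTERS 6          /* assumed: counters 0..5 */

static bool test_and_set(unsigned long *mask, int nr)
{
	bool old = *mask & (1UL << nr);

	*mask |= 1UL << nr;
	return old;
}

static void clear(unsigned long *mask, int nr)
{
	*mask &= ~(1UL << nr);
}

/* Return the odd index of a free even/odd pair, or -1 if none is left. */
static int get_chain_idx(unsigned long *used_mask)
{
	for (int idx = 1; idx < NUM_COUNTERS; idx += 2) {
		if (!test_and_set(used_mask, idx)) {
			if (!test_and_set(used_mask, idx - 1))
				return idx;             /* got both counters */
			clear(used_mask, idx);          /* partner busy: roll back */
		}
	}
	return -1;
}

int main(void)
{
	unsigned long used_mask = 1UL << 0;     /* counter 0 already taken */
	int idx = get_chain_idx(&used_mask);    /* pair (0,1) blocked, (2,3) chosen */

	printf("chain idx = %d, mask = 0x%lx\n", idx, used_mask);   /* 3, 0xd */
	return 0;
}
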
/Linux-v4.19/arch/x86/events/intel/
p4.c
1192 static int p4_next_cntr(int thread, unsigned long *used_mask, in p4_next_cntr() argument
1199 if (j != -1 && !test_bit(j, used_mask)) in p4_next_cntr()
1208 unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)]; in p4_pmu_schedule_events() local
1218 bitmap_zero(used_mask, X86_PMC_IDX_MAX); in p4_pmu_schedule_events()
1248 cntr_idx = p4_next_cntr(thread, used_mask, bind); in p4_pmu_schedule_events()
1282 set_bit(cntr_idx, used_mask); in p4_pmu_schedule_events()
uncore.c
407 unsigned long used_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)]; in uncore_assign_events() local
412 bitmap_zero(used_mask, UNCORE_PMC_IDX_MAX); in uncore_assign_events()
435 if (test_bit(hwc->idx, used_mask)) in uncore_assign_events()
438 __set_bit(hwc->idx, used_mask); in uncore_assign_events()
/Linux-v4.19/include/linux/perf/
arm_pmu.h
59 DECLARE_BITMAP(used_mask, ARMPMU_MAX_HWEVENTS);
/Linux-v4.19/arch/mips/kernel/
perf_event_mipsxx.c
42 unsigned long used_mask[BITS_TO_LONGS(MIPS_MAX_HWEVENTS)]; member
317 !test_and_set_bit(i, cpuc->used_mask)) in mipsxx_pmu_alloc_counter()
520 clear_bit(idx, cpuc->used_mask); in mipspmu_del()
1425 if (test_bit(n, cpuc->used_mask)) { \ in mipsxx_pmu_handle_shared_irq()
/Linux-v4.19/arch/x86/events/
core.c
870 unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)]; in x86_schedule_events() local
875 bitmap_zero(used_mask, X86_PMC_IDX_MAX); in x86_schedule_events()
905 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
908 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
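
In the x86 hits (p4.c, uncore.c, and core.c) used_mask is not per-CPU state but a scratch bitmap local to one scheduling pass: it is zeroed, and the fast path lets each event keep its previously assigned counter unless an earlier event in the same pass already claimed that index, in which case the scheduler falls back to its slow path. The sketch below shows only that fast-path check; the event array, counter count, and the notion of a "slow path" are stand-ins for illustration.

#include <stdbool.h>
#include <stdio.h>

#define NUM_COUNTERS 4          /* assumed counter count */

struct hw_event {
	int idx;                /* counter assigned on the previous schedule, or -1 */
};

/* Returns true if every event can simply keep its previous counter. */
static bool fast_path_assign(const struct hw_event *events, int n)
{
	unsigned long used_mask = 0;    /* scratch mask for this pass only */

	for (int i = 0; i < n; i++) {
		int idx = events[i].idx;

		if (idx < 0 || idx >= NUM_COUNTERS)
			return false;   /* never scheduled before */
		if (used_mask & (1UL << idx))
			return false;   /* collision: let the slow path redo it */
		used_mask |= 1UL << idx;
	}
	return true;
}

int main(void)
{
	struct hw_event ok[]    = { { 0 }, { 2 }, { 3 } };
	struct hw_event clash[] = { { 1 }, { 1 } };

	printf("reuse ok:    %d\n", fast_path_assign(ok, 3));       /* 1 */
	printf("reuse clash: %d\n", fast_path_assign(clash, 2));    /* 0 */
	return 0;
}
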
/Linux-v4.19/mm/
page_alloc.c
5237 nodemask_t used_mask; in build_zonelists() local
5244 nodes_clear(used_mask); in build_zonelists()
5247 while ((node = find_next_best_node(local_node, &used_mask)) >= 0) { in build_zonelists()
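
The mm/page_alloc.c hit is the odd one out: used_mask there is a nodemask_t rather than a counter bitmap. build_zonelists() clears it and then repeatedly asks find_next_best_node() for the closest node not yet in the mask, producing the NUMA fallback order. The sketch below imitates that loop with a hypothetical pick_next_best_node() helper and a made-up distance table; it illustrates the pattern, not the kernel's implementation.

#include <stdio.h>

#define MAX_NUMNODES 4

/* Made-up distances from the local node (node 0) to every node. */
static const int distance[MAX_NUMNODES] = { 10, 20, 20, 30 };

/* Return the closest node not yet in *used_mask and mark it, or -1 once
 * every node has been placed. */
static int pick_next_best_node(unsigned long *used_mask)
{
	int best = -1;

	for (int n = 0; n < MAX_NUMNODES; n++) {
		if (*used_mask & (1UL << n))
			continue;
		if (best < 0 || distance[n] < distance[best])
			best = n;
	}
	if (best >= 0)
		*used_mask |= 1UL << best;
	return best;
}

int main(void)
{
	unsigned long used_mask = 0;    /* stands in for nodes_clear(used_mask) */
	int node;

	/* Emit nodes nearest-first, each exactly once. */
	while ((node = pick_next_best_node(&used_mask)) >= 0)
		printf("fallback node %d (distance %d)\n", node, distance[node]);
	return 0;
}
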