/Linux-v5.4/arch/xtensa/kernel/
  perf_event.c
     54  unsigned long used_mask[BITS_TO_LONGS(XCHAL_NUM_PERF_COUNTERS)];  member
    287  if (__test_and_set_bit(idx, ev->used_mask)) {  in xtensa_pmu_add()
    288  idx = find_first_zero_bit(ev->used_mask,  in xtensa_pmu_add()
    293  __set_bit(idx, ev->used_mask);  in xtensa_pmu_add()
    312  __clear_bit(event->hw.idx, ev->used_mask);  in xtensa_pmu_del()
    365  for (i = find_first_bit(ev->used_mask, XCHAL_NUM_PERF_COUNTERS);  in xtensa_pmu_irq_handler()
    367  i = find_next_bit(ev->used_mask, XCHAL_NUM_PERF_COUNTERS, i + 1)) {  in xtensa_pmu_irq_handler()

/Linux-v5.4/arch/sh/kernel/
  perf_event.c
     30  unsigned long used_mask[BITS_TO_LONGS(MAX_HWEVENTS)];  member
    257  __clear_bit(event->hw.idx, cpuc->used_mask);  in sh_pmu_del()
    271  if (__test_and_set_bit(idx, cpuc->used_mask)) {  in sh_pmu_add()
    272  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);  in sh_pmu_add()
    276  __set_bit(idx, cpuc->used_mask);  in sh_pmu_add()

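The xtensa and sh hits above show the basic per-CPU pattern that recurs through most of this listing: `used_mask` is a bitmap with one bit per hardware counter, the `*_pmu_add()` path claims the event's preferred index with `__test_and_set_bit()` or falls back to `find_first_zero_bit()`, and `*_pmu_del()` releases the bit with `__clear_bit()`. A minimal userspace sketch of that allocation logic, using plain C bit operations instead of the kernel bitmap helpers; the counter count and function names here are illustrative, not taken from any of the drivers above:

```c
#include <stdio.h>

#define NUM_COUNTERS 8UL            /* illustrative counter count */

static unsigned long used_mask;     /* one bit per hardware counter */

/* Claim the preferred index if free, otherwise the first free counter.
 * Returns the claimed index, or -1 if every counter is busy. */
static int counter_add(int preferred)
{
    if (!(used_mask & (1UL << preferred))) {
        used_mask |= 1UL << preferred;
        return preferred;
    }
    for (unsigned long idx = 0; idx < NUM_COUNTERS; idx++) {
        if (!(used_mask & (1UL << idx))) {
            used_mask |= 1UL << idx;
            return (int)idx;
        }
    }
    return -1;                      /* all counters in use */
}

/* Release a previously claimed counter. */
static void counter_del(int idx)
{
    used_mask &= ~(1UL << idx);
}

int main(void)
{
    int a = counter_add(2);
    int b = counter_add(2);         /* preferred slot taken: falls back to counter 0 */

    printf("a=%d b=%d mask=%#lx\n", a, b, used_mask);
    counter_del(a);
    counter_del(b);
    return 0;
}
```
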
/Linux-v5.4/drivers/perf/hisilicon/
  hisi_uncore_pmu.c
    103  unsigned long *used_mask = hisi_pmu->pmu_events.used_mask;  in hisi_uncore_pmu_get_event_idx()  local
    107  idx = find_first_zero_bit(used_mask, num_counters);  in hisi_uncore_pmu_get_event_idx()
    111  set_bit(idx, used_mask);  in hisi_uncore_pmu_get_event_idx()
    123  clear_bit(idx, hisi_pmu->pmu_events.used_mask);  in hisi_uncore_pmu_clear_event_idx()
    323  int enabled = bitmap_weight(hisi_pmu->pmu_events.used_mask,  in hisi_uncore_pmu_enable()

  hisi_uncore_ddrc_pmu.c
    146  unsigned long *used_mask = ddrc_pmu->pmu_events.used_mask;  in hisi_ddrc_pmu_get_event_idx()  local
    151  if (test_bit(idx, used_mask))  in hisi_ddrc_pmu_get_event_idx()
    154  set_bit(idx, used_mask);  in hisi_ddrc_pmu_get_event_idx()

  hisi_uncore_pmu.h
     53  DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS);

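The hisilicon entries show both spellings of the same declaration: hisi_uncore_pmu.h uses `DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS)`, while the xtensa and sh drivers above write the expansion out by hand as `unsigned long used_mask[BITS_TO_LONGS(...)]`. Below is a hedged sketch of how such macros size the backing array: these are simplified userspace re-definitions for illustration, not copies of the kernel headers, and the `HISI_MAX_COUNTERS` value is a placeholder.

```c
#include <stdio.h>
#include <limits.h>

/* Simplified userspace stand-ins for the kernel helpers: round a bit count
 * up to the number of unsigned longs needed to hold it. */
#define BITS_PER_LONG        (sizeof(unsigned long) * CHAR_BIT)
#define BITS_TO_LONGS(nbits) (((nbits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
#define DECLARE_BITMAP(name, nbits) unsigned long name[BITS_TO_LONGS(nbits)]

#define HISI_MAX_COUNTERS 0x10      /* placeholder; the real value lives in hisi_uncore_pmu.h */

static DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS);

int main(void)
{
    printf("%d counters -> %zu unsigned long(s), %zu bytes\n",
           HISI_MAX_COUNTERS,
           sizeof(used_mask) / sizeof(used_mask[0]),
           sizeof(used_mask));
    return 0;
}
```
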
/Linux-v5.4/drivers/perf/
  arm_dsu_pmu.c
     93  DECLARE_BITMAP(used_mask, DSU_PMU_MAX_HW_CNTRS);
    305  unsigned long *used_mask = hw_events->used_mask;  in dsu_pmu_get_event_idx()  local
    308  if (test_and_set_bit(DSU_PMU_IDX_CYCLE_COUNTER, used_mask))  in dsu_pmu_get_event_idx()
    313  idx = find_first_zero_bit(used_mask, dsu_pmu->num_counters);  in dsu_pmu_get_event_idx()
    316  set_bit(idx, hw_events->used_mask);  in dsu_pmu_get_event_idx()
    475  clear_bit(idx, hw_events->used_mask);  in dsu_pmu_del()
    486  if (bitmap_empty(dsu_pmu->hw_events.used_mask, DSU_PMU_MAX_HW_CNTRS))  in dsu_pmu_enable()
    533  memset(fake_hw.used_mask, 0, sizeof(fake_hw.used_mask));  in dsu_pmu_validate_group()

  arm-cci.c
     73  unsigned long *used_mask;  member
    320  if (test_and_set_bit(CCI400_PMU_CYCLE_CNTR_IDX, hw->used_mask))  in cci400_get_event_idx()
    327  if (!test_and_set_bit(idx, hw->used_mask))  in cci400_get_event_idx()
    649  for_each_set_bit(i, cci_pmu->hw_events.used_mask, cci_pmu->num_cntrs) {  in cci_pmu_sync_counters()
    810  if (!test_and_set_bit(idx, hw->used_mask))  in pmu_get_event_idx()
   1100  int enabled = bitmap_weight(hw_events->used_mask, cci_pmu->num_cntrs);  in cci_pmu_enable()
   1224  clear_bit(idx, hw_events->used_mask);  in cci_pmu_del()
   1263  .used_mask = mask,  in validate_group()
   1633  cci_pmu->hw_events.used_mask = devm_kcalloc(dev,  in cci_pmu_alloc()
   1635  sizeof(*cci_pmu->hw_events.used_mask),  in cci_pmu_alloc()
  [all …]

  arm_pmu.c
    320  memset(&fake_pmu.used_mask, 0, sizeof(fake_pmu.used_mask));  in validate_group()
    451  int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);  in armpmu_enable()
    705  int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);  in cpu_pm_pmu_notify()

  qcom_l3_pmu.c
    159  unsigned long used_mask[BITS_TO_LONGS(L3_NUM_COUNTERS)];  member
    557  idx = bitmap_find_free_region(l3pmu->used_mask, L3_NUM_COUNTERS, order);  in qcom_l3_cache__event_add()
    584  bitmap_release_region(l3pmu->used_mask, hwc->idx, order);  in qcom_l3_cache__event_del()

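qcom_l3_pmu.c stands out in this directory: instead of claiming single bits it uses `bitmap_find_free_region()` / `bitmap_release_region()`, which allocate and free a naturally aligned block of 2^order counters in one step (order 0 for a single counter, larger when one event needs several adjacent counters). A small userspace analogue of that aligned-region allocation on a single-word mask; the helper names and the 8-counter limit are made up for the example:

```c
#include <stdio.h>

#define NUM_COUNTERS 8

static unsigned long used_mask;

/* Find and claim a free, naturally aligned region of 2^order counters.
 * Returns the first index of the region, or -1 if none is free. */
static int region_claim(int order)
{
    int span = 1 << order;                  /* span stays well below 64 in this sketch */
    unsigned long region = (1UL << span) - 1;

    for (int idx = 0; idx + span <= NUM_COUNTERS; idx += span) {
        if (!(used_mask & (region << idx))) {
            used_mask |= region << idx;     /* claim all bits of the region at once */
            return idx;
        }
    }
    return -1;
}

/* Release a previously claimed region. */
static void region_release(int idx, int order)
{
    int span = 1 << order;
    unsigned long region = (1UL << span) - 1;

    used_mask &= ~(region << idx);
}

int main(void)
{
    int single = region_claim(0);           /* one counter */
    int pair   = region_claim(1);           /* two adjacent, even-aligned counters */

    printf("single=%d pair=%d mask=%#lx\n", single, pair, used_mask);
    region_release(pair, 1);
    region_release(single, 0);
    return 0;
}
```

Stepping the search index by `span` is what enforces the natural alignment; the kernel helpers behave the same way, which is why a two-counter region always starts on an even index.
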
/Linux-v5.4/arch/arm/kernel/
  perf_event_xscale.c
    280  if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    285  if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    288  if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))  in xscale1pmu_get_event_idx()
    298  clear_bit(event->hw.idx, cpuc->used_mask);  in xscalepmu_clear_event_idx()
    656  if (!test_and_set_bit(XSCALE_COUNTER3, cpuc->used_mask))  in xscale2pmu_get_event_idx()
    658  else if (!test_and_set_bit(XSCALE_COUNTER2, cpuc->used_mask))  in xscale2pmu_get_event_idx()

  perf_event_v6.c
    394  if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))  in armv6pmu_get_event_idx()
    403  if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))  in armv6pmu_get_event_idx()
    406  if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))  in armv6pmu_get_event_idx()
    417  clear_bit(event->hw.idx, cpuc->used_mask);  in armv6pmu_clear_event_idx()

  perf_event_v7.c
   1042  if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))  in armv7pmu_get_event_idx()
   1053  if (!test_and_set_bit(idx, cpuc->used_mask))  in armv7pmu_get_event_idx()
   1064  clear_bit(event->hw.idx, cpuc->used_mask);  in armv7pmu_clear_event_idx()
   1626  if (test_and_set_bit(bit, cpuc->used_mask))  in krait_pmu_get_event_idx()
   1632  clear_bit(bit, cpuc->used_mask);  in krait_pmu_get_event_idx()
   1650  clear_bit(bit, cpuc->used_mask);  in krait_pmu_clear_event_idx()
   1956  if (test_and_set_bit(bit, cpuc->used_mask))  in scorpion_pmu_get_event_idx()
   1962  clear_bit(bit, cpuc->used_mask);  in scorpion_pmu_get_event_idx()
   1980  clear_bit(bit, cpuc->used_mask);  in scorpion_pmu_clear_event_idx()

/Linux-v5.4/arch/nds32/kernel/
  perf_event_cpu.c
    574  if (!test_and_set_bit(idx, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    576  if (!test_and_set_bit(NDS32_IDX_COUNTER0, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    578  if (!test_and_set_bit(NDS32_IDX_COUNTER1, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    581  if (!test_and_set_bit(idx, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    583  else if (!test_and_set_bit(NDS32_IDX_COUNTER1, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    586  (NDS32_IDX_CYCLE_COUNTER, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    589  if (!test_and_set_bit(idx, cpuc->used_mask))  in nds32_pmu_get_event_idx()
    698  int enabled = bitmap_weight(hw_events->used_mask,  in nds32_pmu_enable()
    993  clear_bit(idx, hw_events->used_mask);  in nds32_pmu_del()

/Linux-v5.4/arch/arc/kernel/
  perf_event.c
     49  unsigned long used_mask[BITS_TO_LONGS(ARC_PERF_MAX_COUNTERS)];  member
    352  __clear_bit(event->hw.idx, pmu_cpu->used_mask);  in arc_pmu_del()
    366  idx = ffz(pmu_cpu->used_mask[0]);  in arc_pmu_add()
    370  __set_bit(idx, pmu_cpu->used_mask);  in arc_pmu_add()

/Linux-v5.4/arch/arm64/kernel/
  perf_event.c
    762  if (!test_and_set_bit(idx, cpuc->used_mask))  in armv8pmu_get_single_idx()
    778  if (!test_and_set_bit(idx, cpuc->used_mask)) {  in armv8pmu_get_chain_idx()
    780  if (!test_and_set_bit(idx - 1, cpuc->used_mask))  in armv8pmu_get_chain_idx()
    783  clear_bit(idx, cpuc->used_mask);  in armv8pmu_get_chain_idx()
    798  if (!test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))  in armv8pmu_get_event_idx()
    816  clear_bit(idx, cpuc->used_mask);  in armv8pmu_clear_event_idx()
    818  clear_bit(idx - 1, cpuc->used_mask);  in armv8pmu_clear_event_idx()

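The arm64 hits show the chained-counter case: armv8pmu_get_chain_idx() claims an odd index together with the even index just below it, so a wide count can be built from a pair of adjacent counters, and it rolls the first claim back with clear_bit() when the partner is already taken. A hedged userspace sketch of that claim-pair-or-roll-back logic; the counter count and helper names are illustrative, not the driver's:

```c
#include <stdio.h>

#define NUM_COUNTERS 8              /* illustrative */

static unsigned long used_mask;

/* Claim one counter; returns 1 on success, 0 if it was already in use. */
static int try_claim(int idx)
{
    if (used_mask & (1UL << idx))
        return 0;
    used_mask |= 1UL << idx;
    return 1;
}

/* Claim an odd counter together with the even counter just below it so the
 * pair can be chained.  If the partner is busy, undo the first claim and
 * keep searching.  Returns the odd (high) index, or -1 if no pair is free. */
static int claim_chain(void)
{
    for (int idx = 1; idx < NUM_COUNTERS; idx += 2) {
        if (!try_claim(idx))
            continue;
        if (try_claim(idx - 1))
            return idx;             /* both halves of the pair are ours */
        used_mask &= ~(1UL << idx); /* partner busy: roll back and move on */
    }
    return -1;
}

int main(void)
{
    used_mask |= 1UL << 0;          /* pretend counter 0 is already in use */
    int idx = claim_chain();        /* lands on counters 2 and 3 here */

    printf("chain idx=%d mask=%#lx\n", idx, used_mask);
    return 0;
}
```
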
/Linux-v5.4/arch/x86/events/intel/
  p4.c
   1192  static int p4_next_cntr(int thread, unsigned long *used_mask,  in p4_next_cntr()  argument
   1199  if (j != -1 && !test_bit(j, used_mask))  in p4_next_cntr()
   1208  unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];  in p4_pmu_schedule_events()  local
   1218  bitmap_zero(used_mask, X86_PMC_IDX_MAX);  in p4_pmu_schedule_events()
   1248  cntr_idx = p4_next_cntr(thread, used_mask, bind);  in p4_pmu_schedule_events()
   1282  set_bit(cntr_idx, used_mask);  in p4_pmu_schedule_events()

  uncore.c
    424  unsigned long used_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)];  in uncore_assign_events()  local
    429  bitmap_zero(used_mask, UNCORE_PMC_IDX_MAX);  in uncore_assign_events()
    452  if (test_bit(hwc->idx, used_mask))  in uncore_assign_events()
    455  __set_bit(hwc->idx, used_mask);  in uncore_assign_events()

/Linux-v5.4/include/linux/perf/
  arm_pmu.h
     55  DECLARE_BITMAP(used_mask, ARMPMU_MAX_HWEVENTS);

/Linux-v5.4/arch/mips/kernel/
  perf_event_mipsxx.c
     39  unsigned long used_mask[BITS_TO_LONGS(MIPS_MAX_HWEVENTS)];  member
    308  !test_and_set_bit(i, cpuc->used_mask))  in mipsxx_pmu_alloc_counter()
    502  clear_bit(idx, cpuc->used_mask);  in mipspmu_del()
   1409  if (!test_bit(n, cpuc->used_mask))  in mipsxx_pmu_handle_shared_irq()

/Linux-v5.4/arch/nds32/include/asm/
  pmu.h
     54  unsigned long used_mask[BITS_TO_LONGS(MAX_COUNTERS)];  member

/Linux-v5.4/arch/x86/events/
  core.c
    869  unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];  in x86_schedule_events()  local
    874  bitmap_zero(used_mask, X86_PMC_IDX_MAX);  in x86_schedule_events()
    929  if (test_bit(hwc->idx, used_mask))  in x86_schedule_events()
    932  __set_bit(hwc->idx, used_mask);  in x86_schedule_events()

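In the x86 scheduler (and in the intel p4/uncore variants above), `used_mask` is not a field of a per-CPU structure but a local scratch bitmap: the scheduling function zeroes it, then tries to keep each event on its previously assigned counter, claiming the bit only if nothing else has taken it during this pass, and otherwise falls back to a slower constraint-solving pass. A simplified userspace sketch of that fast-path reuse check; the structure fields and the fallback are illustrative placeholders, not the kernel's data layout:

```c
#include <stdio.h>

struct event { int prev_idx; };     /* previously assigned counter, -1 if none */

/* Fast path: try to keep every event on the counter it used last time.
 * Returns 1 if all events could be reassigned, 0 if a slow path
 * (full constraint solving) would be needed. */
static int fast_schedule(struct event *events, int nr, int *assign)
{
    unsigned long used_mask = 0;    /* local scratch bitmap, valid for one pass only */

    for (int i = 0; i < nr; i++) {
        int idx = events[i].prev_idx;

        if (idx < 0 || (used_mask & (1UL << idx)))
            return 0;               /* never scheduled, or counter already claimed */
        used_mask |= 1UL << idx;
        assign[i] = idx;
    }
    return 1;
}

int main(void)
{
    struct event evs[] = { { 0 }, { 3 }, { 1 } };
    int assign[3];

    if (fast_schedule(evs, 3, assign))
        printf("assigned %d %d %d\n", assign[0], assign[1], assign[2]);
    else
        printf("fall back to slow path\n");
    return 0;
}
```
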
/Linux-v5.4/arch/csky/kernel/
  perf_event.c
     33  unsigned long used_mask[BITS_TO_LONGS(CSKY_PMU_MAX_EVENTS)];  member

/Linux-v5.4/mm/
  page_alloc.c
   5635  nodemask_t used_mask;  in build_zonelists()  local
   5642  nodes_clear(used_mask);  in build_zonelists()
   5645  while ((node = find_next_best_node(local_node, &used_mask)) >= 0) {  in build_zonelists()
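
The last hit is not a PMU at all: in mm/page_alloc.c, `used_mask` is a `nodemask_t` that build_zonelists() clears and then fills as find_next_best_node() hands back NUMA nodes in preference order, so each node enters the zonelist exactly once. A hedged userspace analogue of that greedy selection loop, with a toy distance table standing in for the real NUMA topology:

```c
#include <stdio.h>

#define MAX_NODES 4

/* Toy node-to-node "distance" table; smaller means closer. */
static const int distance[MAX_NODES][MAX_NODES] = {
    { 10, 20, 30, 20 },
    { 20, 10, 20, 30 },
    { 30, 20, 10, 20 },
    { 20, 30, 20, 10 },
};

/* Pick the closest node to 'local' that is not yet in used_mask, mark it
 * used, and return it; return -1 once every node has been taken. */
static int find_next_best_node(int local, unsigned long *used_mask)
{
    int best = -1;

    for (int n = 0; n < MAX_NODES; n++) {
        if (*used_mask & (1UL << n))
            continue;
        if (best < 0 || distance[local][n] < distance[local][best])
            best = n;
    }
    if (best >= 0)
        *used_mask |= 1UL << best;
    return best;
}

int main(void)
{
    int local = 2, node;
    unsigned long used_mask = 0;

    printf("zonelist order for node %d:", local);
    while ((node = find_next_best_node(local, &used_mask)) >= 0)
        printf(" %d", node);
    printf("\n");                   /* prints "2 1 3 0" for this toy table */
    return 0;
}
```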