/Linux-v5.4/arch/sh/kernel/ |
D | perf_event.c |
    39   static atomic_t num_events;  variable
    74   return sh_pmu->num_events;  in perf_num_counters()
    83   if (!atomic_add_unless(&num_events, -1, 1)) {  in hw_perf_event_destroy()
    85   if (atomic_dec_return(&num_events) == 0)  in hw_perf_event_destroy()
    136  if (!atomic_inc_not_zero(&num_events)) {  in __hw_perf_event_init()
    138  if (atomic_read(&num_events) == 0 &&  in __hw_perf_event_init()
    142  atomic_inc(&num_events);  in __hw_perf_event_init()
    272  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);  in sh_pmu_add()
    273  if (idx == sh_pmu->num_events)  in sh_pmu_add()
    375  WARN_ON(_pmu->num_events > MAX_HWEVENTS);  in register_sh_pmu()
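The sh PMU hits above show the usual perf reservation idiom: the first event created reserves the hardware, the last one destroyed releases it, with atomic_inc_not_zero()/atomic_add_unless() as the lock-free fast paths. A minimal userspace sketch of that idiom using C11 atomics (reserve_hw()/release_hw() are hypothetical stand-ins; the kernel also serializes the zero-to-one transition with a mutex, omitted here):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_int num_events;

    static void reserve_hw(void) { puts("hardware reserved"); }  /* hypothetical */
    static void release_hw(void) { puts("hardware released"); }  /* hypothetical */

    /* C11 analogue of the kernel's atomic_add_unless(): add 'a' unless *v == u;
     * returns true if the add happened. */
    static bool add_unless(atomic_int *v, int a, int u)
    {
        int old = atomic_load(v);

        while (old != u) {
            if (atomic_compare_exchange_weak(v, &old, old + a))
                return true;
        }
        return false;
    }

    /* First caller reserves the PMU hardware; later callers just bump the count. */
    static void event_init(void)
    {
        if (add_unless(&num_events, 1, 0))   /* fast path: inc_not_zero */
            return;
        reserve_hw();                        /* slow path: count was zero */
        atomic_fetch_add(&num_events, 1);
    }

    /* Last caller to drop its reference releases the hardware. */
    static void event_destroy(void)
    {
        if (add_unless(&num_events, -1, 1))  /* fast path: not the last user */
            return;
        if (atomic_fetch_sub(&num_events, 1) == 1)
            release_hw();                    /* count just hit zero */
    }

    int main(void)
    {
        event_init();
        event_init();
        event_destroy();
        event_destroy();
        return 0;
    }

The same reserve/release shape recurs in core-fsl-emb.c and the s390 perf_cpum_cf.c/perf_cpum_sf.c hits further down.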
|
D | hw_breakpoint.c |
    35   static struct sh_ubc ubc_dummy = { .num_events = 0 };
    52   for (i = 0; i < sh_ubc->num_events; i++) {  in arch_install_hw_breakpoint()
    61   if (WARN_ONCE(i == sh_ubc->num_events, "Can't find any breakpoint slot"))  in arch_install_hw_breakpoint()
    84   for (i = 0; i < sh_ubc->num_events; i++) {  in arch_uninstall_hw_breakpoint()
    93   if (WARN_ONCE(i == sh_ubc->num_events, "Can't find any breakpoint slot"))  in arch_uninstall_hw_breakpoint()
    267  for (i = 0; i < sh_ubc->num_events; i++) {  in flush_ptrace_hw_breakpoint()
    297  for (i = 0; i < sh_ubc->num_events; i++) {  in hw_breakpoint_handler()
    405  WARN_ON(ubc->num_events > HBP_NUM);  in register_sh_ubc()
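The install/uninstall hits are a fixed-size slot scan: walk slots[0..num_events) for a free (or matching) entry and warn if the loop falls off the end. A minimal sketch of that scan, with an invented slot table standing in for the per-CPU breakpoint array:

    #include <stddef.h>
    #include <stdio.h>

    #define NUM_EVENTS 2                    /* e.g. sh4a_ubc.num_events */

    static const void *slots[NUM_EVENTS];   /* one pointer per hardware channel */

    /* Claim the first free slot; returns the slot index, or -1 if none is left. */
    static int install_bp(const void *bp)
    {
        size_t i;

        for (i = 0; i < NUM_EVENTS; i++) {
            if (!slots[i]) {
                slots[i] = bp;
                return (int)i;
            }
        }
        fprintf(stderr, "Can't find any breakpoint slot\n");  /* kernel uses WARN_ONCE() */
        return -1;
    }

    /* Release the slot that holds 'bp'. */
    static void uninstall_bp(const void *bp)
    {
        size_t i;

        for (i = 0; i < NUM_EVENTS; i++) {
            if (slots[i] == bp) {
                slots[i] = NULL;
                return;
            }
        }
        fprintf(stderr, "Can't find any breakpoint slot\n");
    }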
|
/Linux-v5.4/arch/sh/kernel/cpu/sh4a/ |
D | ubc.c |
    48   for (i = 0; i < sh4a_ubc.num_events; i++)  in sh4a_ubc_enable_all()
    58   for (i = 0; i < sh4a_ubc.num_events; i++)  in sh4a_ubc_disable_all()
    68   for (i = 0; i < sh4a_ubc.num_events; i++)  in sh4a_ubc_active_mask()
    87   .num_events = 2,
    114  for (i = 0; i < sh4a_ubc.num_events; i++) {  in sh4a_ubc_init()
|
D | perf_event.c |
    261  for (i = 0; i < sh4a_pmu.num_events; i++)  in sh4a_pmu_disable_all()
    269  for (i = 0; i < sh4a_pmu.num_events; i++)  in sh4a_pmu_enable_all()
    275  .num_events = 2,
|
/Linux-v5.4/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_events.c |
    527  static struct kfd_event_waiter *alloc_event_waiters(uint32_t num_events)  in alloc_event_waiters() argument
    532  event_waiters = kmalloc_array(num_events,  in alloc_event_waiters()
    536  for (i = 0; (event_waiters) && (i < num_events) ; i++) {  in alloc_event_waiters()
    581  static uint32_t test_event_condition(bool all, uint32_t num_events,  in test_event_condition() argument
    587  for (i = 0; i < num_events; i++) {  in test_event_condition()
    599  return activated_count == num_events ?  in test_event_condition()
    607  static int copy_signaled_event_data(uint32_t num_events,  in copy_signaled_event_data() argument
    617  for (i = 0; i < num_events; i++) {  in copy_signaled_event_data()
    653  static void free_waiters(uint32_t num_events, struct kfd_event_waiter *waiters)  in free_waiters() argument
    657  for (i = 0; i < num_events; i++)  in free_waiters()
    [all …]
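alloc_event_waiters()/test_event_condition() show the common shape for waiting on N user events: allocate an array of num_events waiter slots, then decide wake-up on either "any signaled" or "all signaled". A rough userspace sketch of that check (the waiter struct is invented, and the bool return simplifies the kernel's wait-result enum):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct event_waiter {
        bool activated;                 /* set when the corresponding event fires */
    };

    /* kmalloc_array()-style allocation: one waiter slot per requested event. */
    static struct event_waiter *alloc_event_waiters(uint32_t num_events)
    {
        return calloc(num_events, sizeof(struct event_waiter));
    }

    /* True when the wait condition is met: every event if 'all' is set,
     * otherwise any single activated event is enough. */
    static bool test_event_condition(bool all, uint32_t num_events,
                                     const struct event_waiter *waiters)
    {
        uint32_t i, activated = 0;

        for (i = 0; i < num_events; i++)
            if (waiters[i].activated)
                activated++;

        return all ? activated == num_events : activated > 0;
    }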
|
/Linux-v5.4/arch/powerpc/perf/ |
D | e500-pmu.c |
    83   static int num_events = 128;  variable
    91   if (event_low >= num_events)  in e500_xlate_event()
    125  num_events = 256;  in init_e500_pmu()
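e500_xlate_event() range-checks the low bits of the raw event id against the per-core event count (128 by default, raised to 256 at init time for the larger event set). A minimal sketch of that bound check; the constant and return convention are simplified stand-ins, not the kernel's:

    #include <stdint.h>

    #define EVENT_UNSUPPORTED (-1)      /* stand-in for the driver's error value */

    static int num_events = 128;        /* bumped to 256 on cores with more events */

    /* Accept a raw event code only if it names a real hardware event. */
    static int xlate_event_sketch(uint64_t event_id)
    {
        unsigned int event_low = (unsigned int)event_id;

        if (event_low >= (unsigned int)num_events)
            return EVENT_UNSUPPORTED;

        return (int)event_low;          /* the real code also classifies restricted events */
    }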
|
D | core-fsl-emb.c |
    30   static atomic_t num_events;  variable
    222  if (atomic_read(&num_events)) {  in fsl_emb_pmu_disable()
    444  if (!atomic_add_unless(&num_events, -1, 1)) {  in hw_perf_event_destroy()
    446  if (atomic_dec_return(&num_events) == 0)  in hw_perf_event_destroy()
    569  if (!atomic_inc_not_zero(&num_events)) {  in fsl_emb_pmu_event_init()
    571  if (atomic_read(&num_events) == 0 &&  in fsl_emb_pmu_event_init()
    575  atomic_inc(&num_events);  in fsl_emb_pmu_event_init()
|
D | e6500-pmu.c |
    85   static int num_events = 512;  variable
    91   if (event_low >= num_events ||  in e6500_xlate_event()
|
/Linux-v5.4/arch/s390/kernel/ |
D | perf_cpum_cf.c |
    166  static atomic_t num_events = ATOMIC_INIT(0);  variable
    173  if (!atomic_add_unless(&num_events, -1, 1)) {  in hw_perf_event_destroy()
    175  if (atomic_dec_return(&num_events) == 0)  in hw_perf_event_destroy()
    275  if (!atomic_inc_not_zero(&num_events)) {  in __hw_perf_event_init()
    277  if (atomic_read(&num_events) == 0 && __kernel_cpumcf_begin())  in __hw_perf_event_init()
    280  atomic_inc(&num_events);  in __hw_perf_event_init()
|
D | perf_cpum_sf.c |
    518   static atomic_t num_events;  variable
    586   if (!atomic_add_unless(&num_events, -1, 1)) {  in hw_perf_event_destroy()
    588   if (atomic_dec_return(&num_events) == 0)  in hw_perf_event_destroy()
    763   if (!atomic_inc_not_zero(&num_events)) {  in __hw_perf_event_init()
    765   if (atomic_read(&num_events) == 0 && reserve_pmc_hardware())  in __hw_perf_event_init()
    768   atomic_inc(&num_events);  in __hw_perf_event_init()
    2063  if (!atomic_read(&num_events))  in cpusf_pmu_setup()
|
/Linux-v5.4/arch/sh/kernel/cpu/sh4/ |
D | perf_event.c |
    227  for (i = 0; i < sh7750_pmu.num_events; i++)  in sh7750_pmu_disable_all()
    235  for (i = 0; i < sh7750_pmu.num_events; i++)  in sh7750_pmu_enable_all()
    241  .num_events = 2,
|
/Linux-v5.4/arch/sh/include/asm/ |
D | perf_event.h | 11 unsigned int num_events; member
|
D | hw_breakpoint.h | 31 unsigned int num_events; member
|
/Linux-v5.4/drivers/perf/ |
D | arm_pmu.c |
    451  int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);  in armpmu_enable()
    529  max_events = __oprofile_cpu_pmu->num_events;  in perf_num_counters()
    666  for (idx = 0; idx < armpmu->num_events; idx++) {  in cpu_pm_pmu_setup()
    705  int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);  in cpu_pm_pmu_notify()
    874  pmu->name, pmu->num_events);  in armpmu_register()
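armpmu_enable() counts how many counters are currently claimed (bitmap_weight() over used_mask, bounded by num_events) and leaves the hardware alone when nothing is scheduled. A small sketch of the same check using a plain word-sized mask; start_pmu_hw() is a hypothetical stand-in for the driver's start callback:

    #include <stdio.h>

    #define NUM_EVENTS 6                /* e.g. armpmu->num_events */

    /* Population count limited to the low 'bits' positions of the mask. */
    static int bitmap_weight_sketch(unsigned long mask, unsigned int bits)
    {
        int n = 0;
        unsigned int i;

        for (i = 0; i < bits; i++)
            if (mask & (1UL << i))
                n++;
        return n;
    }

    static void start_pmu_hw(void) { puts("PMU enabled"); }  /* hypothetical */

    static void pmu_enable(unsigned long used_mask)
    {
        int enabled = bitmap_weight_sketch(used_mask, NUM_EVENTS);

        if (!enabled)                   /* no events on this PMU: leave it off */
            return;
        start_pmu_hw();
    }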
|
/Linux-v5.4/drivers/dma/ |
D | imx-sdma.c |
    420   int num_events;  member
    455   .num_events = 32,
    476   .num_events = 48,
    482   .num_events = 48,
    500   .num_events = 48,
    520   .num_events = 48,
    539   .num_events = 48,
    557   .num_events = 48,
    563   .num_events = 48,
    1632  if (sdmac->event_id0 >= sdmac->sdma->drvdata->num_events)  in sdma_config()
    [all …]
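imx-sdma keeps the number of SDMA event lines in per-SoC driver data (32 on the earliest part above, 48 on the later ones) and rejects channel configurations whose event id is out of range. A reduced sketch of that check; the struct and variable names mirror the driver but are trimmed-down illustrations:

    #include <errno.h>

    struct sdma_driver_data_sketch {
        int num_events;                 /* SDMA event lines wired on this SoC */
    };

    static const struct sdma_driver_data_sketch sdma_soc_a = { .num_events = 32 };
    static const struct sdma_driver_data_sketch sdma_soc_b = { .num_events = 48 };

    /* Reject a channel configuration whose event line does not exist on this SoC. */
    static int sdma_check_event(const struct sdma_driver_data_sketch *drvdata,
                                int event_id0)
    {
        if (event_id0 < 0 || event_id0 >= drvdata->num_events)
            return -EINVAL;
        return 0;
    }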
|
/Linux-v5.4/arch/arm/kernel/ |
D | perf_event_xscale.c |
    173  for (idx = 0; idx < cpu_pmu->num_events; ++idx) {  in xscale1pmu_handle_irq()
    383  cpu_pmu->num_events = 3;  in xscale1pmu_init()
    519  for (idx = 0; idx < cpu_pmu->num_events; ++idx) {  in xscale2pmu_handle_irq()
    752  cpu_pmu->num_events = 5;  in xscale2pmu_init()
|
D | perf_event_v6.c |
    326  for (idx = 0; idx < cpu_pmu->num_events; ++idx) {  in armv6pmu_handle_irq()
    504  cpu_pmu->num_events = 3;  in armv6pmu_init()
    555  cpu_pmu->num_events = 3;  in armv6mpcore_pmu_init()
|
/Linux-v5.4/arch/arm64/kernel/ |
D | perf_event.c |
    345  (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
    718  for (idx = 0; idx < cpu_pmu->num_events; ++idx) {  in armv8pmu_handle_irq()
    761  for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; idx ++) {  in armv8pmu_get_single_idx()
    777  for (idx = ARMV8_IDX_COUNTER0 + 1; idx < cpu_pmu->num_events; idx += 2) {  in armv8pmu_get_chain_idx()
    877  u32 idx, nb_cnt = cpu_pmu->num_events;  in armv8pmu_reset()
    979  cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)  in __armv8pmu_probe_pmu()
    983  cpu_pmu->num_events += 1;  in __armv8pmu_probe_pmu()
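Unlike the fixed counts on XScale and ARMv6, arm64 discovers its counter budget at probe time: __armv8pmu_probe_pmu() reads the PMCR_EL0.N field (a 5-bit count of programmable counters) and adds one for the dedicated cycle counter. A small sketch of that decode, assuming the usual field position (bits [15:11]):

    #include <stdint.h>
    #include <stdio.h>

    #define PMCR_N_SHIFT 11u            /* assumed position of PMCR_EL0.N */
    #define PMCR_N_MASK  0x1fu

    /* Derive num_events from a raw PMCR_EL0 value: N programmable counters
     * plus the dedicated cycle counter. */
    static unsigned int pmcr_to_num_events(uint64_t pmcr)
    {
        unsigned int n = (unsigned int)((pmcr >> PMCR_N_SHIFT) & PMCR_N_MASK);

        return n + 1;                   /* + cycle counter */
    }

    int main(void)
    {
        /* Example: a PMU reporting 6 programmable counters. */
        printf("num_events = %u\n", pmcr_to_num_events(6ull << PMCR_N_SHIFT));
        return 0;
    }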
|
/Linux-v5.4/drivers/devfreq/event/ |
D | exynos-ppmu.c |
    36   unsigned int num_events;  member
    514  info->num_events = count;  in of_get_devfreq_events()
    665  size = sizeof(struct devfreq_event_dev *) * info->num_events;  in exynos_ppmu_probe()
    673  for (i = 0; i < info->num_events; i++) {  in exynos_ppmu_probe()
|
/Linux-v5.4/drivers/infiniband/hw/mlx5/ |
D | devx.c |
    1848  static bool is_valid_events_legacy(int num_events, u16 *event_type_num_list,  in is_valid_events_legacy() argument
    1853  for (i = 0; i < num_events; i++) {  in is_valid_events_legacy()
    1868  int num_events, u16 *event_type_num_list,  in is_valid_events() argument
    1883  return is_valid_events_legacy(num_events, event_type_num_list,  in is_valid_events()
    1887  for (i = 0; i < num_events; i++) {  in is_valid_events()
    1933  int num_events;  in UVERBS_HANDLER() local
    1979  num_events = uverbs_attr_ptr_get_array_size(  in UVERBS_HANDLER()
    1983  if (num_events < 0)  in UVERBS_HANDLER()
    1984  return num_events;  in UVERBS_HANDLER()
    1986  if (num_events > MAX_NUM_EVENTS)  in UVERBS_HANDLER()
    [all …]
|
/Linux-v5.4/drivers/iio/accel/ |
D | mma9553.c |
    182  int num_events;  member
    225  data->num_events = MMA9553_EVENTS_INFO_SIZE;  in mma9553_init_events()
    226  for (i = 0; i < data->num_events; i++) {  in mma9553_init_events()
    239  for (i = 0; i < data->num_events; i++)  in mma9553_get_event()
    254  for (i = 0; i < data->num_events; i++)  in mma9553_is_any_event_enabled()
|
/Linux-v5.4/arch/nds32/kernel/ |
D | perf_event_cpu.c |
    250   for (idx = 0; idx < cpu_pmu->num_events; ++idx) {  in nds32_pmu_handle_irq()
    289   return ((idx >= 0) && (idx < cpu_pmu->num_events));  in nds32_pfm_counter_valid()
    670   cpu_pmu->num_events = nds32_read_num_pfm_events();  in device_pmu_init()
    699   nds32_pmu->num_events);  in nds32_pmu_enable()
    1062  nds32_pmu->name, nds32_pmu->num_events);  in nds32_pmu_register()
|
/Linux-v5.4/include/sound/ |
D | soc-topology.h | 195 const struct snd_soc_tplg_widget_events *events, int num_events,
|
/Linux-v5.4/include/linux/perf/ |
D | arm_pmu.h | 99 int num_events; member
|
/Linux-v5.4/drivers/net/wireless/intel/iwlwifi/dvm/ |
D | main.c |
    404   u32 start_idx, u32 num_events,  in iwl_print_cont_event_trace() argument
    430   if (WARN_ON(num_events > capacity - start_idx))  in iwl_print_cont_event_trace()
    431   num_events = capacity - start_idx;  in iwl_print_cont_event_trace()
    437   for (i = 0; i < num_events; i++) {  in iwl_print_cont_event_trace()
    1697  u32 num_events, u32 mode,  in iwl_print_event_log() argument
    1709  if (num_events == 0)  in iwl_print_event_log()
    1737  for (i = 0; i < num_events; i++) {  in iwl_print_event_log()
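iwl_print_cont_event_trace() guards against a bogus firmware-supplied window: if the requested span would run past the end of the event buffer, num_events is clamped to capacity - start_idx before the print loop. A minimal sketch of that clamp with a simplified buffer layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Print num_events entries starting at start_idx, never reading past the
     * end of a buffer that holds 'capacity' entries. */
    static void print_event_window(const uint32_t *events, uint32_t capacity,
                                   uint32_t start_idx, uint32_t num_events)
    {
        uint32_t i;

        if (start_idx >= capacity)
            return;
        if (num_events > capacity - start_idx)  /* the kernel wraps this in WARN_ON() */
            num_events = capacity - start_idx;

        for (i = 0; i < num_events; i++)
            printf("event[%u] = 0x%08x\n", start_idx + i, events[start_idx + i]);
    }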
|