| /Linux-v5.4/arch/x86/oprofile/ |
| D | op_model_ppro.c |
      26  static int num_counters = 2;    variable
      37  for (i = 0; i < num_counters; ++i) {    in ppro_shutdown()
      49  for (i = 0; i < num_counters; i++) {    in ppro_fill_in_addresses()
      96  for (i = 0; i < num_counters; ++i) {    in ppro_setup_ctrs()
     112  for (i = 0; i < num_counters; ++i) {    in ppro_setup_ctrs()
     133  for (i = 0; i < num_counters; ++i) {    in ppro_check_ctrs()
     163  for (i = 0; i < num_counters; ++i) {    in ppro_start()
     178  for (i = 0; i < num_counters; ++i) {    in ppro_stop()
     188  .num_counters = 2,
     218  eax.split.num_counters = 2;    in arch_perfmon_setup_counters()
     [all …]
|
| D | op_model_amd.c |
      42  static int num_counters;    variable
     275  for (i = 0; i < num_counters; ++i) {    in op_mux_switch_ctrl()
     294  for (i = 0; i < num_counters; ++i) {    in op_amd_shutdown()
     306  for (i = 0; i < num_counters; i++) {    in op_amd_fill_in_addresses()
     314  if (num_counters == AMD64_NUM_COUNTERS_CORE) {    in op_amd_fill_in_addresses()
     349  for (i = 0; i < num_counters; ++i) {    in op_amd_setup_ctrs()
     365  for (i = 0; i < num_counters; ++i) {    in op_amd_setup_ctrs()
     387  for (i = 0; i < num_counters; ++i) {    in op_amd_check_ctrs()
     410  for (i = 0; i < num_counters; ++i) {    in op_amd_start()
     430  for (i = 0; i < num_counters; ++i) {    in op_amd_stop()
     [all …]
|
| D | op_model_p4.c |
      37  static unsigned int num_counters = NUM_COUNTERS_NON_HT;    variable
      47  num_counters = NUM_COUNTERS_HT2;    in setup_num_counters()
     384  #define VIRT_CTR(stagger, i) ((i) + ((num_counters) * (stagger)))
     392  for (i = 0; i < num_counters; ++i) {    in p4_shutdown()
     401  for (i = num_counters; i < num_controls; ++i) {    in p4_shutdown()
     416  for (i = 0; i < num_counters; ++i) {    in p4_fill_in_addresses()
     468  if (num_counters == NUM_COUNTERS_NON_HT) {    in p4_fill_in_addresses()
     490  for (i = 0; i < num_counters; ++i) {    in p4_fill_in_addresses()
     585  for (i = 0; i < num_counters; i++) {    in p4_setup_ctrs()
     595  for (i = num_counters; i < num_controls; i++) {    in p4_setup_ctrs()
     [all …]
|
| D | nmi_int.c |
      81  for (i = 0; i < model->num_counters; ++i) {    in nmi_cpu_save_registers()
     145  return virt % model->num_counters;    in op_x86_virt_to_phys()
     206  for (i = 0; i < model->num_counters; ++i) {    in nmi_cpu_save_mpx_registers()
     219  for (i = 0; i < model->num_counters; ++i) {    in nmi_cpu_restore_mpx_registers()
     236  si += model->num_counters;    in nmi_cpu_switch()
     256  return counter_config[model->num_counters].count ? 0 : -EINVAL;    in nmi_multiplex_on()
     318  size_t counters_size = sizeof(struct op_msr) * model->num_counters;    in allocate_msrs()
     367  for (i = 0; i < model->num_counters; ++i) {    in nmi_cpu_restore_registers()
     468  sizeof(struct op_msr) * model->num_counters);    in nmi_setup()
     767  model->num_virt_counters = model->num_counters;    in op_nmi_init()
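
The nmi_int.c hits above all size per-CPU buffers and loops by model->num_counters, and map virtual counters to physical ones with a modulo. A minimal standalone sketch of that pattern follows; struct op_msr_sketch, struct op_model_sketch, and read_counter() are illustrative stand-ins for the kernel's types and rdmsrl(), not the actual oprofile code.

```c
/* Minimal userspace sketch of the per-counter allocation/iteration pattern
 * suggested by the nmi_int.c hits.  All names here are illustrative. */
#include <stdio.h>
#include <stdlib.h>

struct op_msr_sketch {
	unsigned long addr;
	unsigned long long value;
};

struct op_model_sketch {
	int num_counters;
};

/* Stand-in for an MSR read; the real driver reads msrs[i].addr with rdmsrl(). */
static unsigned long long read_counter(unsigned long addr)
{
	return addr * 2ULL;	/* dummy value */
}

int main(void)
{
	struct op_model_sketch model = { .num_counters = 4 };
	struct op_msr_sketch *counters;
	int i;

	/* mirrors: counters_size = sizeof(struct op_msr) * model->num_counters */
	counters = calloc(model.num_counters, sizeof(*counters));
	if (!counters)
		return 1;

	for (i = 0; i < model.num_counters; ++i) {	/* save loop */
		counters[i].addr = 0xc1 + i;
		counters[i].value = read_counter(counters[i].addr);
	}

	/* mirrors op_x86_virt_to_phys(): fold a virtual counter index onto a physical one */
	int virt = 6;
	printf("virt %d -> phys %d\n", virt, virt % model.num_counters);

	free(counters);
	return 0;
}
```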
|
| /Linux-v5.4/drivers/oprofile/ |
| D | oprofile_perf.c |
      31  static int num_counters;    variable
      42  for (id = 0; id < num_counters; ++id)    in op_overflow_handler()
      46  if (id != num_counters)    in op_overflow_handler()
      64  for (i = 0; i < num_counters; ++i) {    in op_perf_setup()
     120  for (event = 0; event < num_counters; ++event) {    in op_perf_start()
     138  for (event = 0; event < num_counters; ++event)    in op_perf_stop()
     146  for (i = 0; i < num_counters; i++) {    in oprofile_perf_create_files()
     261  for (id = 0; id < num_counters; ++id) {    in oprofile_perf_exit()
     282  num_counters = perf_num_counters();    in oprofile_perf_init()
     283  if (num_counters <= 0) {    in oprofile_perf_init()
     [all …]
|
| /Linux-v5.4/arch/mips/oprofile/ |
| D | op_model_mipsxx.c |
     137  unsigned int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_reg_setup()
     166  unsigned int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_cpu_setup()
     193  unsigned int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_cpu_start()
     216  unsigned int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_cpu_stop()
     238  unsigned int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_perfcount_handler()
     346  op_model_mipsxx_ops.num_counters = counters;    in mipsxx_init()
     459  int counters = op_model_mipsxx_ops.num_counters;    in mipsxx_exit()
|
| /Linux-v5.4/arch/x86/events/intel/ |
| D | uncore_snbep.c |
     804  .num_counters = 2,
     961  .num_counters = 4,
    1071  .num_counters = 4,
    1165  .num_counters = 4,
    1173  .num_counters = 4,
    1185  .num_counters = 4,
    1201  .num_counters = 4,
    1210  .num_counters = 3,
    1533  .num_counters = 2,
    1664  .num_counters = 4,
     [all …]
|
| D | uncore_snb.c |
     191  .num_counters = 2,
     209  .num_counters = 2,
     269  .num_counters = 4,
     301  .num_counters = 4,
     328  .num_counters = 1,
     580  .num_counters = 2,
     974  .num_counters = 8,
|
| /Linux-v5.4/kernel/gcov/ |
| D | clang.c |
      76  u32 num_counters;    member
     137  void llvm_gcda_emit_arcs(u32 num_counters, u64 *counters)    in llvm_gcda_emit_arcs() argument
     142  info->num_counters = num_counters;    in llvm_gcda_emit_arcs()
     240  sizeof(fn->counters[0]) * fn->num_counters);    in gcov_info_reset()
     293  for (i = 0; i < sfn_ptr->num_counters; i++)    in gcov_info_add()
     311  cv_size = fn->num_counters * sizeof(fn->counters[0]);    in gcov_fn_info_dup()
     476  pos += store_gcov_u32(buffer, pos, fi_ptr->num_counters * 2);    in convert_to_gcda()
     477  for (i = 0; i < fi_ptr->num_counters; i++)    in convert_to_gcda()
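
The gcov_info_add() hit suggests that one function record's counters are accumulated into another's, element by element up to num_counters. A standalone sketch of that merge under those assumptions; the struct layout here is illustrative, not the kernel's gcov structures.

```c
/* Sketch of the element-wise counter merge pattern from the gcov_info_add() hit. */
#include <stdint.h>
#include <stdio.h>

struct gcov_fn_sketch {
	uint32_t num_counters;
	uint64_t counters[8];	/* fixed size only for this example */
};

static void merge_counters(struct gcov_fn_sketch *dst,
			   const struct gcov_fn_sketch *src)
{
	uint32_t i;

	/* add the source counters into the destination, slot by slot */
	for (i = 0; i < src->num_counters; i++)
		dst->counters[i] += src->counters[i];
}

int main(void)
{
	struct gcov_fn_sketch a = { .num_counters = 3, .counters = { 1, 2, 3 } };
	struct gcov_fn_sketch b = { .num_counters = 3, .counters = { 10, 20, 30 } };

	merge_counters(&a, &b);
	printf("%llu %llu %llu\n",
	       (unsigned long long)a.counters[0],
	       (unsigned long long)a.counters[1],
	       (unsigned long long)a.counters[2]);
	return 0;
}
```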
|
| /Linux-v5.4/arch/powerpc/oprofile/ |
| D | op_model_fsl_emb.c |
      24  static int num_counters;    variable
     262  for (i = 0;i < num_counters;i++) {    in fsl_emb_cpu_setup()
     279  num_counters = num_ctrs;    in fsl_emb_reg_setup()
     286  for (i = 0; i < num_counters; ++i)    in fsl_emb_reg_setup()
     298  for (i = 0; i < num_counters; ++i) {    in fsl_emb_start()
     351  for (i = 0; i < num_counters; ++i) {    in fsl_emb_handle_interrupt()
|
| D | common.c |
      56  op_per_cpu_rc = model->reg_setup(ctr, &sys, model->num_counters);    in op_powerpc_setup()
     154  for (i = 0; i < model->num_counters; ++i) {    in op_powerpc_create_files()
     225  model->num_counters = cur_cpu_spec->num_pmcs;    in oprofile_arch_init()
|
| D | op_model_cell.c |
     179  static int num_counters;    variable
     479  for (i = 0; i < num_counters; i++)    in cell_virtual_cntr()
     498  for (i = 0; i < num_counters; i++) {    in cell_virtual_cntr()
     527  for (i = 0; i < num_counters; i++) {    in cell_virtual_cntr()
     735  num_counters = 1; /* Only support one SPU event at a time */    in cell_reg_setup_spu_events()
     756  num_counters = num_ctrs;    in cell_reg_setup_ppu()
     809  for (i = 0; i < num_counters; ++i) {    in cell_reg_setup_ppu()
     825  for (i = 0; i < num_counters; ++i) {    in cell_reg_setup_ppu()
     921  for (i = 0; i < num_counters; ++i) {    in cell_cpu_setup()
    1409  for (i = 0; i < num_counters; ++i) {    in cell_global_start_ppu()
     [all …]
|
| /Linux-v5.4/drivers/perf/ |
| D | arm_dsu_pmu.c |
     120  s8 num_counters;    member
     244  return (idx < dsu_pmu->num_counters) ||    in dsu_pmu_counter_valid()
     313  idx = find_first_zero_bit(used_mask, dsu_pmu->num_counters);    in dsu_pmu_get_event_idx()
     314  if (idx >= dsu_pmu->num_counters)    in dsu_pmu_get_event_idx()
     601  dsu_pmu->num_counters = -1;    in dsu_pmu_alloc()
     639  u64 num_counters;    in dsu_pmu_probe_pmu() local
     642  num_counters = (__dsu_pmu_read_pmcr() >> CLUSTERPMCR_N_SHIFT) &    in dsu_pmu_probe_pmu()
     645  if (WARN_ON(num_counters > 31))    in dsu_pmu_probe_pmu()
     646  num_counters = 31;    in dsu_pmu_probe_pmu()
     647  dsu_pmu->num_counters = num_counters;    in dsu_pmu_probe_pmu()
     [all …]
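
The dsu_pmu_probe_pmu() and dsu_pmu_get_event_idx() hits show two recurring uses of num_counters: deriving it from a control-register field (with a clamp to 31) and allocating a free counter index from a used-counter bitmask. A standalone sketch under those assumptions; the register field values and the find_first_zero() helper are illustrative, not the driver's.

```c
/* Sketch: derive a counter count from a register field, then pick a free index. */
#include <stdint.h>
#include <stdio.h>

#define PMCR_N_SHIFT	11	/* illustrative field position */
#define PMCR_N_MASK	0x1f

/* simplified stand-in for find_first_zero_bit() */
static int find_first_zero(uint32_t mask, int nbits)
{
	for (int i = 0; i < nbits; i++)
		if (!(mask & (1u << i)))
			return i;
	return nbits;	/* no free slot */
}

int main(void)
{
	uint32_t pmcr = 6 << PMCR_N_SHIFT;	/* pretend the register reports 6 counters */
	int num_counters = (pmcr >> PMCR_N_SHIFT) & PMCR_N_MASK;

	if (num_counters > 31)			/* mirrors the WARN_ON() clamp */
		num_counters = 31;

	uint32_t used_mask = 0x7;		/* counters 0..2 already taken */
	int idx = find_first_zero(used_mask, num_counters);

	if (idx >= num_counters)
		printf("no free counter\n");
	else
		printf("allocated counter %d of %d\n", idx, num_counters);
	return 0;
}
```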
|
| D | arm_smmuv3_pmu.c |
     109  unsigned int num_counters;    member
     278  unsigned int num_ctrs = smmu_pmu->num_counters;    in smmu_pmu_apply_event_filter()
     307  unsigned int num_ctrs = smmu_pmu->num_counters;    in smmu_pmu_get_event_idx()
     376  if (++group_num_events > smmu_pmu->num_counters)    in smmu_pmu_event_init()
     387  if (++group_num_events > smmu_pmu->num_counters)    in smmu_pmu_event_init()
     617  for_each_set_bit(idx, (unsigned long *)&ovsr, smmu_pmu->num_counters) {    in smmu_pmu_handle_irq()
     698  u64 counter_present_mask = GENMASK_ULL(smmu_pmu->num_counters - 1, 0);    in smmu_pmu_reset()
     784  smmu_pmu->num_counters = FIELD_GET(SMMU_PMCG_CFGR_NCTR, cfgr) + 1;    in smmu_pmu_probe()
     829  &res_0->start, smmu_pmu->num_counters,    in smmu_pmu_probe()
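
The smmu_pmu_reset() hit builds a counter-present mask from the counter count with GENMASK_ULL(num_counters - 1, 0). A small plain-C sketch of the equivalent mask computation, assuming only that num_counters is the number of implemented counters.

```c
/* Sketch: turn a counter count into a bitmask with one bit per counter. */
#include <stdint.h>
#include <stdio.h>

static uint64_t counter_present_mask(unsigned int num_counters)
{
	if (num_counters == 0)
		return 0;
	if (num_counters >= 64)		/* avoid an undefined 64-bit shift */
		return ~0ULL;

	return (1ULL << num_counters) - 1;	/* same as GENMASK_ULL(n - 1, 0) */
}

int main(void)
{
	printf("4 counters  -> mask 0x%llx\n",
	       (unsigned long long)counter_present_mask(4));
	printf("64 counters -> mask 0x%llx\n",
	       (unsigned long long)counter_present_mask(64));
	return 0;
}
```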
|
| D | qcom_l2_pmu.c |
     155  int num_counters;    member
     386  int num_ctrs = cluster->l2cache_pmu->num_counters - 1;    in l2_cache_get_event_idx()
     430  int num_counters = cluster->l2cache_pmu->num_counters;    in l2_cache_handle_irq() local
     438  for_each_set_bit(idx, cluster->used_counters, num_counters) {    in l2_cache_handle_irq()
     968  l2cache_pmu->num_counters = get_num_counters();    in l2_cache_pmu_probe()
     975  l2_cycle_ctr_idx = l2cache_pmu->num_counters - 1;    in l2_cache_pmu_probe()
     976  l2_counter_present_mask = GENMASK(l2cache_pmu->num_counters - 2, 0) |    in l2_cache_pmu_probe()
|
| /Linux-v5.4/drivers/perf/hisilicon/ |
| D | hisi_uncore_pmu.c |
      92  return counters <= hisi_pmu->num_counters;    in hisi_validate_event_group()
      97  return idx >= 0 && idx < hisi_pmu->num_counters;    in hisi_uncore_pmu_counter_valid()
     104  u32 num_counters = hisi_pmu->num_counters;    in hisi_uncore_pmu_get_event_idx() local
     107  idx = find_first_zero_bit(used_mask, num_counters);    in hisi_uncore_pmu_get_event_idx()
     108  if (idx == num_counters)    in hisi_uncore_pmu_get_event_idx()
     324  hisi_pmu->num_counters);    in hisi_uncore_pmu_enable()
|
| /Linux-v5.4/net/bridge/netfilter/ |
| D | ebtables.c |
     979  if (repl->num_counters) {    in do_replace_finish()
     980  unsigned long size = repl->num_counters * sizeof(*counterstmp);    in do_replace_finish()
    1006  if (repl->num_counters && repl->num_counters != t->private->nentries) {    in do_replace_finish()
    1021  if (repl->num_counters)    in do_replace_finish()
    1033  if (repl->num_counters &&    in do_replace_finish()
    1035  repl->num_counters * sizeof(struct ebt_counter))) {    in do_replace_finish()
    1092  if (tmp.num_counters >= INT_MAX / sizeof(struct ebt_counter))    in do_replace()
    1250  unsigned int num_counters,    in do_update_counters() argument
    1257  if (num_counters == 0)    in do_update_counters()
    1260  tmp = vmalloc(array_size(num_counters, sizeof(*tmp)));    in do_update_counters()
     [all …]
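
The do_replace() hit at source line 1092 rejects a user-supplied num_counters before the num_counters * sizeof(struct ebt_counter) multiplication can overflow. A standalone sketch of that guard; struct counter_sketch is illustrative and not the ebtables counter layout.

```c
/* Sketch: bounds-check a user-supplied count before sizing an allocation. */
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

struct counter_sketch {
	unsigned long long packets;
	unsigned long long bytes;
};

static void *alloc_counters(unsigned int num_counters)
{
	/* same shape as the kernel check: bail out if the product would overflow */
	if (num_counters >= INT_MAX / sizeof(struct counter_sketch))
		return NULL;

	return calloc(num_counters, sizeof(struct counter_sketch));
}

int main(void)
{
	void *ok = alloc_counters(1024);
	void *bad = alloc_counters(UINT_MAX);	/* would overflow, so it is rejected */

	printf("ok=%p bad=%p\n", ok, bad);
	free(ok);
	return 0;
}
```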
|
| /Linux-v5.4/arch/powerpc/include/asm/ |
| D | oprofile_impl.h |
      46  int num_counters);
      56  int num_counters;    member
|
| /Linux-v5.4/arch/x86/events/amd/ |
| D | uncore.c |
      44  int num_counters;    member
     136  for (i = 0; i < uncore->num_counters; i++) {    in amd_uncore_add()
     145  for (i = 0; i < uncore->num_counters; i++) {    in amd_uncore_add()
     175  for (i = 0; i < uncore->num_counters; i++) {    in amd_uncore_del()
     338  uncore_nb->num_counters = num_counters_nb;    in amd_uncore_cpu_up_prepare()
     352  uncore_llc->num_counters = num_counters_llc;    in amd_uncore_cpu_up_prepare()
|
| D | core.c |
     377  for (i = 0; i < x86_pmu.num_counters; i++) {    in __amd_put_nb_event_constraints()
     444  for_each_set_bit(idx, c->idxmsk, x86_pmu.num_counters) {    in __amd_get_nb_event_constraints()
     487  for (i = 0; i < x86_pmu.num_counters; i++) {    in amd_alloc_nb()
     610  for (idx = 0; idx < x86_pmu.num_counters; idx++) {    in amd_pmu_disable_all()
     889  .num_counters = AMD64_NUM_COUNTERS,
     944  x86_pmu.num_counters = AMD64_NUM_COUNTERS_CORE;    in amd_core_pmu_init()
|
| /Linux-v5.4/arch/x86/events/ |
| D | core.c |
     151  for (i = 0; i < x86_pmu.num_counters; i++) {    in reserve_pmc_hardware()
     156  for (i = 0; i < x86_pmu.num_counters; i++) {    in reserve_pmc_hardware()
     167  i = x86_pmu.num_counters;    in reserve_pmc_hardware()
     180  for (i = 0; i < x86_pmu.num_counters; i++) {    in release_pmc_hardware()
     204  for (i = 0; i < x86_pmu.num_counters; i++) {    in check_hw_exists()
     614  for (idx = 0; idx < x86_pmu.num_counters; idx++) {    in x86_pmu_disable_all()
     662  for (idx = 0; idx < x86_pmu.num_counters; idx++) {    in x86_pmu_enable_all()
     939  int gpmax = x86_pmu.num_counters;    in x86_schedule_events()
    1004  max_count = x86_pmu.num_counters + x86_pmu.num_counters_fixed;    in collect_events()
    1355  if (!x86_pmu.num_counters)    in perf_event_print_debug()
     [all …]
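
The collect_events() hit caps an event group at x86_pmu.num_counters + x86_pmu.num_counters_fixed. A minimal sketch of that capacity check; the counter counts and struct are illustrative, not the kernel's x86_pmu definition.

```c
/* Sketch: an event group only fits within the general-purpose plus fixed budget. */
#include <stdio.h>

struct pmu_sketch {
	int num_counters;	/* general-purpose counters */
	int num_counters_fixed;	/* fixed-function counters */
};

static int group_fits(const struct pmu_sketch *pmu, int n_events)
{
	int max_count = pmu->num_counters + pmu->num_counters_fixed;

	return n_events <= max_count;
}

int main(void)
{
	struct pmu_sketch pmu = { .num_counters = 4, .num_counters_fixed = 3 };

	printf("5 events fit: %d\n", group_fits(&pmu, 5));
	printf("9 events fit: %d\n", group_fits(&pmu, 9));
	return 0;
}
```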
|
| /Linux-v5.4/drivers/staging/comedi/drivers/ |
| D | ni_tio.h |
     109  unsigned int num_counters;    member
     124  unsigned int num_counters,
|
| /Linux-v5.4/arch/alpha/oprofile/ |
| D | op_model_ev5.c |
     197  .num_counters = 3,
     207  .num_counters = 3,
|
| D | common.c |
      59  for (i = e = 0; i < model->num_counters; ++i)    in op_axp_setup()
     113  for (i = 0; i < model->num_counters; ++i) {    in op_axp_create_files()
|
| /Linux-v5.4/net/ipv4/netfilter/ |
| D | arp_tables.c |
     878  unsigned int num_counters,    in __do_replace() argument
     889  counters = xt_counters_alloc(num_counters);    in __do_replace()
     907  oldinfo = xt_replace_table(t, num_counters, newinfo, &ret);    in __do_replace()
     930  sizeof(struct xt_counters) * num_counters) != 0) {    in __do_replace()
     959  if (tmp.num_counters >= INT_MAX / sizeof(struct xt_counters))    in do_replace()
     961  if (tmp.num_counters == 0)    in do_replace()
     982  tmp.num_counters, tmp.counters);    in do_replace()
    1019  if (private->number != tmp.num_counters) {    in do_add_counters()
    1053  u32 num_counters;    member
    1217  repl.num_counters = 0;    in translate_compat_table()
     [all …]
|