/Linux-v5.4/tools/perf/lib/ |
D | cpumap.c |
    51   int nr_cpus; in cpu_map__default_new() local
    53   nr_cpus = sysconf(_SC_NPROCESSORS_ONLN); in cpu_map__default_new()
    54   if (nr_cpus < 0) in cpu_map__default_new()
    57   cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int)); in cpu_map__default_new()
    61   for (i = 0; i < nr_cpus; ++i) in cpu_map__default_new()
    64   cpus->nr = nr_cpus; in cpu_map__default_new()
    71   static struct perf_cpu_map *cpu_map__trim_new(int nr_cpus, int *tmp_cpus) in cpu_map__trim_new() argument
    73   size_t payload_size = nr_cpus * sizeof(int); in cpu_map__trim_new()
    77   cpus->nr = nr_cpus; in cpu_map__trim_new()
    88   int nr_cpus = 0; in perf_cpu_map__read() local
    [all …]
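The cpumap.c hits outline how perf builds its default CPU map: query the online CPU count with sysconf(_SC_NPROCESSORS_ONLN), allocate the header plus one int per CPU in a single block, and fill in the identity mapping. A minimal stand-alone sketch of that pattern (the struct below is a simplified stand-in, not the real struct perf_cpu_map):

```c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

/* Simplified stand-in for struct perf_cpu_map: a count plus a
 * flexible array of CPU ids, sized from sysconf() at runtime. */
struct cpu_map {
	int nr;
	int map[];
};

static struct cpu_map *cpu_map_default_new(void)
{
	struct cpu_map *cpus;
	long nr_cpus;
	int i;

	nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);
	if (nr_cpus < 0)
		return NULL;

	/* One allocation: header followed by nr_cpus ints. */
	cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int));
	if (!cpus)
		return NULL;

	for (i = 0; i < nr_cpus; ++i)
		cpus->map[i] = i;
	cpus->nr = nr_cpus;
	return cpus;
}

int main(void)
{
	struct cpu_map *cpus = cpu_map_default_new();

	if (!cpus)
		return 1;
	printf("online cpus: %d\n", cpus->nr);
	free(cpus);
	return 0;
}
```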
|
/Linux-v5.4/tools/testing/selftests/bpf/prog_tests/ |
D | perf_buffer.c |
    22   int err, prog_fd, nr_cpus, i, duration = 0; in test_perf_buffer() local
    33   nr_cpus = libbpf_num_possible_cpus(); in test_perf_buffer()
    34   if (CHECK(nr_cpus < 0, "nr_cpus", "err %d\n", nr_cpus)) in test_perf_buffer()
    66   for (i = 0; i < nr_cpus; i++) { in test_perf_buffer()
    84   if (CHECK(CPU_COUNT(&cpu_seen) != nr_cpus, "seen_cpu_cnt", in test_perf_buffer()
    85   "expect %d, seen %d\n", nr_cpus, CPU_COUNT(&cpu_seen))) in test_perf_buffer()
|
D | l4lb_all.c |
    6    unsigned int nr_cpus = bpf_num_possible_cpus(); in test_l4lb() local
    16   } stats[nr_cpus]; in test_l4lb()
    69   for (i = 0; i < nr_cpus; i++) { in test_l4lb()
|
D | xdp_noinline.c |
    7    unsigned int nr_cpus = bpf_num_possible_cpus(); in test_xdp_noinline() local
    17   } stats[nr_cpus]; in test_xdp_noinline()
    70   for (i = 0; i < nr_cpus; i++) { in test_xdp_noinline()
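Both test_l4lb() and test_xdp_noinline() read counters from per-CPU BPF maps: a single lookup fills one value slot per possible CPU, so user space sizes its buffer from bpf_num_possible_cpus() and sums the slots. A hedged sketch of that read-and-sum step against the public libbpf API (sum_percpu_counter and map_fd are illustrative; the map is assumed to be a per-CPU array or hash created elsewhere):

```c
#include <linux/types.h>
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

/* Illustrative helper: sum one key's counters from a per-CPU BPF map.
 * The kernel writes one value slot per possible CPU, so the buffer
 * must hold nr_cpus entries even if fewer CPUs are online. */
static int sum_percpu_counter(int map_fd, __u32 key, __u64 *total)
{
	int nr_cpus = libbpf_num_possible_cpus();
	int i;

	if (nr_cpus < 0)
		return nr_cpus;

	__u64 values[nr_cpus];

	if (bpf_map_lookup_elem(map_fd, &key, values))
		return -1;

	*total = 0;
	for (i = 0; i < nr_cpus; i++)
		*total += values[i];
	return 0;
}
```

A caller would typically loop over the keys of interest each polling interval and print or diff the totals.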
|
/Linux-v5.4/tools/testing/selftests/bpf/ |
D | test_lru_map.c |
    27   static int nr_cpus; variable
    107  unsigned long long value0[nr_cpus], value1[nr_cpus]; in map_subset()
    138  while (next < nr_cpus) { in sched_next_online()
    161  unsigned long long key, value[nr_cpus]; in test_lru_sanity0()
    171  lru_map_fd = create_map(map_type, map_flags, 2 * nr_cpus); in test_lru_sanity0()
    250  unsigned long long key, end_key, value[nr_cpus]; in test_lru_sanity1()
    326  unsigned long long key, value[nr_cpus]; in test_lru_sanity2()
    434  unsigned long long key, end_key, value[nr_cpus]; in test_lru_sanity3()
    499  unsigned long long key, value[nr_cpus]; in test_lru_sanity4()
    510  3 * tgt_free * nr_cpus); in test_lru_sanity4()
    [all …]
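test_lru_map.c multiplies map capacity by nr_cpus because, in the BPF_F_NO_COMMON_LRU configuration it exercises, each CPU keeps its own LRU list and therefore needs its own share of entries (hence sizes like 2 * nr_cpus and 3 * tgt_free * nr_cpus). A sketch of such a map creation with the classic libbpf helper (create_lru_map and per_cpu_entries are illustrative names, and the key/value sizes are assumptions):

```c
#include <linux/bpf.h>
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

/* Illustrative sketch: with BPF_F_NO_COMMON_LRU, each CPU keeps its
 * own LRU list, so the usable capacity is per CPU and the total
 * max_entries is scaled by the possible-CPU count. */
static int create_lru_map(unsigned int per_cpu_entries)
{
	int nr_cpus = libbpf_num_possible_cpus();

	if (nr_cpus < 0)
		return nr_cpus;

	/* bpf_create_map() is the classic libbpf map-creation helper. */
	return bpf_create_map(BPF_MAP_TYPE_LRU_HASH,
			      sizeof(unsigned long long),  /* key size   */
			      sizeof(unsigned long long),  /* value size */
			      per_cpu_entries * nr_cpus,
			      BPF_F_NO_COMMON_LRU);
}
```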
|
/Linux-v5.4/samples/bpf/ |
D | test_lru_dist.c |
    32   static int nr_cpus; variable
    225  if (next_to_try == nr_cpus) in sched_next_online()
    228  while (next_to_try < nr_cpus) { in sched_next_online()
    321  nr_cpus * lru_size); in test_parallel_lru_dist()
    337  unsigned long long key, value[nr_cpus]; in test_lru_loss0()
    349  map_fd = create_map(map_type, map_flags, 900 * nr_cpus); in test_lru_loss0()
    391  unsigned long long key, value[nr_cpus]; in test_lru_loss1()
    401  map_fd = create_map(map_type, map_flags, 1000 * nr_cpus); in test_lru_loss1()
    429  unsigned long long key, value[nr_cpus]; in do_test_parallel_lru_loss()
    478  nr_cpus * (1000 + 200)); in test_parallel_lru_loss()
    [all …]
|
D | tracex3_user.c |
    21   unsigned int nr_cpus = bpf_num_possible_cpus(); in clear_stats() local
    22   __u64 values[nr_cpus]; in clear_stats()
    78   unsigned int nr_cpus = bpf_num_possible_cpus(); in print_hist() local
    80   long values[nr_cpus]; in print_hist()
    90   for (i = 0; i < nr_cpus; i++) in print_hist()
|
D | sampleip_user.c |
    28   static int nr_cpus; variable
    49   for (i = 0; i < nr_cpus; i++) { in sampling_start()
    68   for (i = 0; i < nr_cpus; i++) in sampling_end()
    166  nr_cpus = sysconf(_SC_NPROCESSORS_CONF); in main()
    167  pmu_fd = malloc(nr_cpus * sizeof(int)); in main()
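sampleip_user.c uses the other common sizing source, sysconf(_SC_NPROCESSORS_CONF), to allocate one perf event file descriptor per configured CPU and open an event on each with pid = -1. A self-contained sketch of that per-CPU fd array (the software CPU-clock event and 99 Hz sampling rate are illustrative choices; opening the events may require a permissive perf_event_paranoid setting or root):

```c
#include <linux/perf_event.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/syscall.h>
#include <unistd.h>

/* perf_event_open() has no glibc wrapper, so call it via syscall(). */
static long perf_event_open(struct perf_event_attr *attr, pid_t pid,
			    int cpu, int group_fd, unsigned long flags)
{
	return syscall(__NR_perf_event_open, attr, pid, cpu, group_fd, flags);
}

int main(void)
{
	int nr_cpus = sysconf(_SC_NPROCESSORS_CONF);
	struct perf_event_attr attr = {
		.type = PERF_TYPE_SOFTWARE,
		.size = sizeof(attr),
		.config = PERF_COUNT_SW_CPU_CLOCK,
		.sample_freq = 99,	/* illustrative sampling rate */
		.freq = 1,
	};
	int *pmu_fd, i;

	if (nr_cpus <= 0)
		return 1;
	/* One fd per configured CPU, mirroring the pmu_fd array above. */
	pmu_fd = malloc(nr_cpus * sizeof(int));
	if (!pmu_fd)
		return 1;

	for (i = 0; i < nr_cpus; i++) {
		pmu_fd[i] = perf_event_open(&attr, -1 /* any pid */, i, -1, 0);
		if (pmu_fd[i] < 0)
			fprintf(stderr, "cpu %d: perf_event_open failed\n", i);
	}

	for (i = 0; i < nr_cpus; i++)
		if (pmu_fd[i] >= 0)
			close(pmu_fd[i]);
	free(pmu_fd);
	return 0;
}
```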
|
D | xdp_monitor_user.c |
    151  unsigned int nr_cpus = bpf_num_possible_cpus(); in map_collect_record() local
    152  struct datarec values[nr_cpus]; in map_collect_record()
    168  for (i = 0; i < nr_cpus; i++) { in map_collect_record()
    188  unsigned int nr_cpus = bpf_num_possible_cpus(); in map_collect_record_u64() local
    189  struct u64rec values[nr_cpus]; in map_collect_record_u64()
    202  for (i = 0; i < nr_cpus; i++) { in map_collect_record_u64()
    298  unsigned int nr_cpus = bpf_num_possible_cpus(); in stats_print() local
    319  for (i = 0; i < nr_cpus; i++) { in stats_print()
    344  for (i = 0; i < nr_cpus; i++) { in stats_print()
    370  for (i = 0; i < nr_cpus; i++) { in stats_print()
    [all …]
|
D | lwt_len_hist_user.c |
    33   unsigned int nr_cpus = bpf_num_possible_cpus(); in main() local
    35   uint64_t values[nr_cpus], sum, max_value = 0, data[MAX_INDEX] = {}; in main()
    56   for (i = 0; i < nr_cpus; i++) in main()
|
D | tracex6_user.c |
    75   int i, status, nr_cpus = sysconf(_SC_NPROCESSORS_CONF); in test_perf_event_array() local
    76   pid_t pid[nr_cpus]; in test_perf_event_array()
    81   for (i = 0; i < nr_cpus; i++) { in test_perf_event_array()
    90   for (i = 0; i < nr_cpus; i++) { in test_perf_event_array()
|
D | xdp_redirect_cpu_user.c |
    167  unsigned int nr_cpus = bpf_num_possible_cpus(); in map_collect_percpu() local
    168  struct datarec values[nr_cpus]; in map_collect_percpu()
    183  for (i = 0; i < nr_cpus; i++) { in map_collect_percpu()
    199  unsigned int nr_cpus = bpf_num_possible_cpus(); in alloc_record_per_cpu() local
    203  size = sizeof(struct datarec) * nr_cpus; in alloc_record_per_cpu()
    207  fprintf(stderr, "Mem alloc error (nr_cpus:%u)\n", nr_cpus); in alloc_record_per_cpu()
    300  unsigned int nr_cpus = bpf_num_possible_cpus(); in stats_print() local
    321  for (i = 0; i < nr_cpus; i++) { in stats_print()
    349  for (i = 0; i < nr_cpus; i++) { in stats_print()
    386  for (i = 0; i < nr_cpus; i++) { in stats_print()
    [all …]
|
D | xdp_rxq_info_user.c |
    199  unsigned int nr_cpus = bpf_num_possible_cpus(); in alloc_record_per_cpu() local
    203  size = sizeof(struct datarec) * nr_cpus; in alloc_record_per_cpu()
    207  fprintf(stderr, "Mem alloc error (nr_cpus:%u)\n", nr_cpus); in alloc_record_per_cpu()
    265  unsigned int nr_cpus = bpf_num_possible_cpus(); in map_collect_percpu() local
    266  struct datarec values[nr_cpus]; in map_collect_percpu()
    280  for (i = 0; i < nr_cpus; i++) { in map_collect_percpu()
    346  unsigned int nr_cpus = bpf_num_possible_cpus(); in stats_print() local
    369  for (i = 0; i < nr_cpus; i++) { in stats_print()
    403  for (i = 0; i < nr_cpus; i++) { in stats_print()
|
D | xdp_redirect_map_user.c |
    65   unsigned int nr_cpus = bpf_num_possible_cpus(); in poll_stats() local
    66   __u64 values[nr_cpus], prev[nr_cpus]; in poll_stats()
    77   for (i = 0; i < nr_cpus; i++) in poll_stats()
|
D | xdp_redirect_user.c |
    65   unsigned int nr_cpus = bpf_num_possible_cpus(); in poll_stats() local
    66   __u64 values[nr_cpus], prev[nr_cpus]; in poll_stats()
    77   for (i = 0; i < nr_cpus; i++) in poll_stats()
|
D | xdp1_user.c |
    46   unsigned int nr_cpus = bpf_num_possible_cpus(); in poll_stats() local
    47   __u64 values[nr_cpus], prev[UINT8_MAX] = { 0 }; in poll_stats()
    59   for (i = 0; i < nr_cpus; i++) in poll_stats()
|
/Linux-v5.4/arch/mips/loongson64/common/ |
D | env.c |
    142  loongson_sysconf.nr_cpus = ecpu->nr_cpus; in prom_init_env()
    145  if (ecpu->nr_cpus > NR_CPUS || ecpu->nr_cpus == 0) in prom_init_env()
    146  loongson_sysconf.nr_cpus = NR_CPUS; in prom_init_env()
    147  loongson_sysconf.nr_nodes = (loongson_sysconf.nr_cpus + in prom_init_env()
|
/Linux-v5.4/virt/kvm/arm/vgic/ |
D | vgic-debug.c |
    26   int nr_cpus; member
    45   ++iter->vcpu_id < iter->nr_cpus) in iter_next()
    58   int nr_cpus = atomic_read(&kvm->online_vcpus); in iter_init() local
    62   iter->nr_cpus = nr_cpus; in iter_init()
    78   iter->vcpu_id == iter->nr_cpus && in end_of_vgic()
    234  if (iter->vcpu_id < iter->nr_cpus) in vgic_debug_show()
|
/Linux-v5.4/tools/perf/util/ |
D | svghelper.c |
    700  int *pos, int nr_cpus) in scan_thread_topology() argument
    709  for_each_set_bit(thr, cpumask_bits(&t->sib_thr[i]), nr_cpus) in scan_thread_topology()
    715  static void scan_core_topology(int *map, struct topology *t, int nr_cpus) in scan_core_topology() argument
    722  for_each_set_bit(cpu, cpumask_bits(&t->sib_core[i]), nr_cpus) in scan_core_topology()
    723  scan_thread_topology(map, t, cpu, &pos, nr_cpus); in scan_core_topology()
    726  static int str_to_bitmap(char *s, cpumask_t *b, int nr_cpus) in str_to_bitmap() argument
    739  if (c >= nr_cpus) { in str_to_bitmap()
    754  int i, nr_cpus; in svg_build_topology_map() local
    758  nr_cpus = min(env->nr_cpus_online, MAX_NR_CPUS); in svg_build_topology_map()
    774  if (str_to_bitmap(sib_core, &t.sib_core[i], nr_cpus)) { in svg_build_topology_map()
    [all …]
|
D | env.c |
    233  int cpu, nr_cpus; in perf_env__read_cpu_topology_map() local
    241  nr_cpus = env->nr_cpus_avail; in perf_env__read_cpu_topology_map()
    242  if (nr_cpus == -1) in perf_env__read_cpu_topology_map()
    245  env->cpu = calloc(nr_cpus, sizeof(env->cpu[0])); in perf_env__read_cpu_topology_map()
    249  for (cpu = 0; cpu < nr_cpus; ++cpu) { in perf_env__read_cpu_topology_map()
    255  env->nr_cpus_avail = nr_cpus; in perf_env__read_cpu_topology_map()
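perf_env__read_cpu_topology_map() allocates one record per available CPU with calloc() and fills it from the CPU topology. A rough userspace sketch of the same idea, reading the per-CPU sysfs topology files directly (the record layout and helper are simplified for illustration and are not perf's actual code):

```c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

/* Simplified per-CPU topology record, one entry per configured CPU. */
struct cpu_topo {
	int core_id;
	int socket_id;
};

/* Read a single integer from a per-CPU sysfs topology file. */
static int read_topo_file(long cpu, const char *name)
{
	char path[128];
	FILE *f;
	int val = -1;

	snprintf(path, sizeof(path),
		 "/sys/devices/system/cpu/cpu%ld/topology/%s", cpu, name);
	f = fopen(path, "r");
	if (!f)
		return -1;
	if (fscanf(f, "%d", &val) != 1)
		val = -1;
	fclose(f);
	return val;
}

int main(void)
{
	long nr_cpus = sysconf(_SC_NPROCESSORS_CONF);
	struct cpu_topo *topo;
	long cpu;

	if (nr_cpus <= 0)
		return 1;
	topo = calloc(nr_cpus, sizeof(*topo));
	if (!topo)
		return 1;

	for (cpu = 0; cpu < nr_cpus; cpu++) {
		topo[cpu].core_id = read_topo_file(cpu, "core_id");
		topo[cpu].socket_id = read_topo_file(cpu, "physical_package_id");
		printf("cpu%ld: core %d, socket %d\n", cpu,
		       topo[cpu].core_id, topo[cpu].socket_id);
	}
	free(topo);
	return 0;
}
```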
|
/Linux-v5.4/drivers/pci/controller/ |
D | pcie-iproc-msi.c |
    98   int nr_cpus; member
    197  return (hwirq % msi->nr_cpus); in hwirq_to_cpu()
    252  msi->nr_cpus, 0); in iproc_msi_irq_domain_alloc()
    254  bitmap_set(msi->bitmap, hwirq, msi->nr_cpus); in iproc_msi_irq_domain_alloc()
    282  bitmap_clear(msi->bitmap, hwirq, msi->nr_cpus); in iproc_msi_irq_domain_free()
    470  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) { in iproc_msi_irq_free()
    482  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) { in iproc_msi_irq_setup()
    534  msi->nr_cpus = num_possible_cpus(); in iproc_msi_init()
    548  if (msi->nr_irqs < msi->nr_cpus) { in iproc_msi_init()
    554  if (msi->nr_irqs % msi->nr_cpus != 0) { in iproc_msi_init()
    [all …]
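The pcie-iproc-msi.c hits show a fixed geometry: MSI vectors are reserved in blocks of nr_cpus, hwirq % nr_cpus names the owning CPU, and per-CPU setup/teardown walks the vector space with a stride of nr_cpus, which is why nr_irqs must be at least nr_cpus and a multiple of it. A toy userspace model of just that indexing (not the driver code):

```c
#include <stdio.h>

/* Model of the iproc MSI layout: vectors come in blocks of nr_cpus,
 * one hwirq per CPU, so hwirq % nr_cpus identifies the CPU. */
static int hwirq_to_cpu(int hwirq, int nr_cpus)
{
	return hwirq % nr_cpus;
}

int main(void)
{
	int nr_cpus = 4, nr_irqs = 16;	/* illustrative geometry */
	int cpu, hwirq;

	/* The driver rejects geometries that cannot be split evenly. */
	if (nr_irqs < nr_cpus || nr_irqs % nr_cpus != 0) {
		fprintf(stderr, "nr_irqs must be a multiple of nr_cpus\n");
		return 1;
	}

	for (cpu = 0; cpu < nr_cpus; cpu++) {
		printf("cpu%d owns hwirqs:", cpu);
		/* Same stride the driver uses in setup/free loops. */
		for (hwirq = cpu; hwirq < nr_irqs; hwirq += nr_cpus)
			printf(" %d", hwirq);
		printf("\n");
	}
	return 0;
}
```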
|
/Linux-v5.4/tools/perf/arch/arm/util/ |
D | auxtrace.c |
    21   int ret, i, nr_cpus = sysconf(_SC_NPROCESSORS_CONF); in find_all_arm_spe_pmus() local
    25   arm_spe_pmus = zalloc(sizeof(struct perf_pmu *) * nr_cpus); in find_all_arm_spe_pmus()
    32   for (i = 0; i < nr_cpus; i++) { in find_all_arm_spe_pmus()
|
/Linux-v5.4/tools/testing/selftests/vm/ |
D | userfaultfd.c |
    62   static unsigned long nr_cpus, nr_pages, nr_pages_per_cpu, page_size; variable
    586  pthread_t locking_threads[nr_cpus]; in stress()
    587  pthread_t uffd_threads[nr_cpus]; in stress()
    588  pthread_t background_threads[nr_cpus]; in stress()
    592  for (cpu = 0; cpu < nr_cpus; cpu++) { in stress()
    611  for (cpu = 0; cpu < nr_cpus; cpu++) in stress()
    629  for (cpu = 0; cpu < nr_cpus; cpu++) in stress()
    633  for (cpu = 0; cpu < nr_cpus; cpu++) { in stress()
    1041 unsigned long userfaults[nr_cpus]; in userfaultfd_stress()
    1073 pipefd = malloc(sizeof(int) * nr_cpus * 2); in userfaultfd_stress()
    [all …]
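The userfaultfd stress test fans out locking, uffd, and background threads, one of each per CPU, and joins them all at the end. A stripped-down sketch of that per-CPU pthread fan-out (the worker body is a placeholder for the real page-fault work):

```c
#include <pthread.h>
#include <stdio.h>
#include <unistd.h>

/* Placeholder worker: the real test does userfaultfd page-fault
 * handling and memory-pressure work per CPU here. */
static void *worker(void *arg)
{
	long cpu = (long)arg;

	printf("worker for cpu %ld\n", cpu);
	return NULL;
}

int main(void)
{
	long nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);
	long cpu;

	if (nr_cpus <= 0)
		return 1;

	/* One thread per CPU, mirroring the VLAs in stress(). */
	pthread_t threads[nr_cpus];

	for (cpu = 0; cpu < nr_cpus; cpu++)
		if (pthread_create(&threads[cpu], NULL, worker, (void *)cpu))
			return 1;
	for (cpu = 0; cpu < nr_cpus; cpu++)
		pthread_join(threads[cpu], NULL);
	return 0;
}
```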
|
/Linux-v5.4/tools/virtio/virtio-trace/ |
D | trace-agent.c |
    31   int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF); in get_total_cpus() local
    33   if (nr_cpus <= 0) { in get_total_cpus()
    36   } else if (nr_cpus > MAX_CPUS) { in get_total_cpus()
    41   return nr_cpus; in get_total_cpus()
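trace-agent.c adds bounds checking around the same sysconf() query; note that _SC_NPROCESSORS_CONF counts configured CPUs, offline ones included, whereas _SC_NPROCESSORS_ONLN (used by the perf cpumap code above) counts only CPUs currently online. A sketch of that bounds-checked query (the MAX_CPUS value here is illustrative; the agent defines its own limit):

```c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

#define MAX_CPUS 256	/* illustrative cap on supported CPUs */

/* Bail out on sysconf() failure and refuse implausibly large counts. */
static int get_total_cpus(void)
{
	int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF);

	if (nr_cpus <= 0) {
		fprintf(stderr, "could not read the number of CPUs\n");
		exit(EXIT_FAILURE);
	} else if (nr_cpus > MAX_CPUS) {
		fprintf(stderr, "too many CPUs: %d (limit %d)\n",
			nr_cpus, MAX_CPUS);
		exit(EXIT_FAILURE);
	}
	return nr_cpus;
}

int main(void)
{
	printf("total cpus: %d\n", get_total_cpus());
	return 0;
}
```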
|
/Linux-v5.4/tools/testing/selftests/rcutorture/bin/ |
D | functions.sh |
    63   if test "$3" -gt "$nr_cpus"
    65   echo $nr_cpus
|