/Linux-v4.19/tools/perf/util/

  cpumap.c
      23  int nr_cpus;    in cpu_map__default_new() local
      25  nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);    in cpu_map__default_new()
      26  if (nr_cpus < 0)    in cpu_map__default_new()
      29  cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int));    in cpu_map__default_new()
      32  for (i = 0; i < nr_cpus; ++i)    in cpu_map__default_new()
      35  cpus->nr = nr_cpus;    in cpu_map__default_new()
      42  static struct cpu_map *cpu_map__trim_new(int nr_cpus, int *tmp_cpus)    in cpu_map__trim_new() argument
      44  size_t payload_size = nr_cpus * sizeof(int);    in cpu_map__trim_new()
      48  cpus->nr = nr_cpus;    in cpu_map__trim_new()
      59  int nr_cpus = 0;    in cpu_map__read() local
      [all …]

  env.c
      71  int cpu, nr_cpus;    in perf_env__read_cpu_topology_map() local
      79  nr_cpus = env->nr_cpus_avail;    in perf_env__read_cpu_topology_map()
      80  if (nr_cpus == -1)    in perf_env__read_cpu_topology_map()
      83  env->cpu = calloc(nr_cpus, sizeof(env->cpu[0]));    in perf_env__read_cpu_topology_map()
      87  for (cpu = 0; cpu < nr_cpus; ++cpu) {    in perf_env__read_cpu_topology_map()
      92  env->nr_cpus_avail = nr_cpus;    in perf_env__read_cpu_topology_map()
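
The cpumap.c hits above show the usual user-space way of sizing per-CPU data in perf: query the online CPU count with sysconf(), check for failure, then allocate one slot per CPU. A minimal stand-alone sketch of that idiom (not the actual perf code; note that _SC_NPROCESSORS_CONF, used by other files later in this list, counts configured rather than currently online CPUs):

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

int main(void)
{
	long nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);
	int *cpus;
	long i;

	if (nr_cpus < 0) {
		perror("sysconf");
		return EXIT_FAILURE;
	}

	/* one int per CPU, much as cpu_map__default_new() sizes its payload */
	cpus = malloc(nr_cpus * sizeof(*cpus));
	if (!cpus)
		return EXIT_FAILURE;

	for (i = 0; i < nr_cpus; i++)
		cpus[i] = (int)i;

	printf("%ld online CPUs\n", nr_cpus);
	free(cpus);
	return 0;
}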

/Linux-v4.19/tools/testing/selftests/bpf/

  test_lru_map.c
      28  static int nr_cpus;    variable
      46  unsigned long long value0[nr_cpus], value1[nr_cpus];    in map_subset()
      77  while (next < nr_cpus) {    in sched_next_online()
      100  unsigned long long key, value[nr_cpus];    in test_lru_sanity0()
      110  lru_map_fd = create_map(map_type, map_flags, 2 * nr_cpus);    in test_lru_sanity0()
      188  unsigned long long key, end_key, value[nr_cpus];    in test_lru_sanity1()
      264  unsigned long long key, value[nr_cpus];    in test_lru_sanity2()
      371  unsigned long long key, end_key, value[nr_cpus];    in test_lru_sanity3()
      436  unsigned long long key, value[nr_cpus];    in test_lru_sanity4()
      447  3 * tgt_free * nr_cpus);    in test_lru_sanity4()
      [all …]
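
The sched_next_online() hits in the LRU tests reflect a pinning pattern: walk CPU ids up to nr_cpus and move the calling task to the first CPU that accepts an affinity mask. A hedged sketch of that pattern, reusing the test's names but with a simplified signature (the real helper in test_lru_map.c takes a pointer to the next CPU to try):

#define _GNU_SOURCE
#include <sched.h>
#include <unistd.h>

static int nr_cpus;

/* Pin the task to the first CPU at or after next_cpu; return it, or -1 if none accepts. */
static int sched_next_online(int pid, int next_cpu)
{
	cpu_set_t cpuset;

	while (next_cpu < nr_cpus) {
		CPU_ZERO(&cpuset);
		CPU_SET(next_cpu, &cpuset);
		if (!sched_setaffinity(pid, sizeof(cpuset), &cpuset))
			return next_cpu;	/* pinned */
		next_cpu++;			/* CPU offline or not allowed, try the next one */
	}
	return -1;
}

int main(void)
{
	nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF);
	return sched_next_online(0, 0) < 0;
}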

/Linux-v4.19/samples/bpf/

  test_lru_dist.c
      35  static int nr_cpus;    variable
      228  if (next_to_try == nr_cpus)    in sched_next_online()
      231  while (next_to_try < nr_cpus) {    in sched_next_online()
      324  nr_cpus * lru_size);    in test_parallel_lru_dist()
      340  unsigned long long key, value[nr_cpus];    in test_lru_loss0()
      352  map_fd = create_map(map_type, map_flags, 900 * nr_cpus);    in test_lru_loss0()
      394  unsigned long long key, value[nr_cpus];    in test_lru_loss1()
      404  map_fd = create_map(map_type, map_flags, 1000 * nr_cpus);    in test_lru_loss1()
      432  unsigned long long key, value[nr_cpus];    in do_test_parallel_lru_loss()
      481  nr_cpus * (1000 + 200));    in test_parallel_lru_loss()
      [all …]

  tracex3_user.c
      26  unsigned int nr_cpus = bpf_num_possible_cpus();    in clear_stats() local
      27  __u64 values[nr_cpus];    in clear_stats()
      83  unsigned int nr_cpus = bpf_num_possible_cpus();    in print_hist() local
      85  long values[nr_cpus];    in print_hist()
      95  for (i = 0; i < nr_cpus; i++)    in print_hist()

  sampleip_user.c
      32  static int nr_cpus;    variable
      53  for (i = 0; i < nr_cpus; i++) {    in sampling_start()
      72  for (i = 0; i < nr_cpus; i++)    in sampling_end()
      165  nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in main()
      166  pmu_fd = malloc(nr_cpus * sizeof(int));    in main()

  xdp1_user.c
      36  unsigned int nr_cpus = bpf_num_possible_cpus();    in poll_stats() local
      38  __u64 values[nr_cpus], prev[nr_keys][nr_cpus];    in poll_stats()
      51  for (i = 0; i < nr_cpus; i++)    in poll_stats()

  xdp_redirect_user.c
      45  unsigned int nr_cpus = bpf_num_possible_cpus();    in poll_stats() local
      46  __u64 values[nr_cpus], prev[nr_cpus];    in poll_stats()
      57  for (i = 0; i < nr_cpus; i++)    in poll_stats()

  xdp_redirect_map_user.c
      45  unsigned int nr_cpus = bpf_num_possible_cpus();    in poll_stats() local
      46  __u64 values[nr_cpus], prev[nr_cpus];    in poll_stats()
      57  for (i = 0; i < nr_cpus; i++)    in poll_stats()

  xdp_monitor_user.c
      151  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_record() local
      152  struct datarec values[nr_cpus];    in map_collect_record()
      168  for (i = 0; i < nr_cpus; i++) {    in map_collect_record()
      188  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_record_u64() local
      189  struct u64rec values[nr_cpus];    in map_collect_record_u64()
      202  for (i = 0; i < nr_cpus; i++) {    in map_collect_record_u64()
      298  unsigned int nr_cpus = bpf_num_possible_cpus();    in stats_print() local
      319  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      344  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      370  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      [all …]

  lwt_len_hist_user.c
      33  unsigned int nr_cpus = bpf_num_possible_cpus();    in main() local
      35  uint64_t values[nr_cpus], sum, max_value = 0, data[MAX_INDEX] = {};    in main()
      56  for (i = 0; i < nr_cpus; i++)    in main()

  xdp_redirect_cpu_user.c
      135  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_percpu() local
      136  struct datarec values[nr_cpus];    in map_collect_percpu()
      151  for (i = 0; i < nr_cpus; i++) {    in map_collect_percpu()
      167  unsigned int nr_cpus = bpf_num_possible_cpus();    in alloc_record_per_cpu() local
      171  size = sizeof(struct datarec) * nr_cpus;    in alloc_record_per_cpu()
      175  fprintf(stderr, "Mem alloc error (nr_cpus:%u)\n", nr_cpus);    in alloc_record_per_cpu()
      268  unsigned int nr_cpus = bpf_num_possible_cpus();    in stats_print() local
      289  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      317  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      354  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      [all …]

  tracex6_user.c
      75  int i, status, nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in test_perf_event_array() local
      76  pid_t pid[nr_cpus];    in test_perf_event_array()
      81  for (i = 0; i < nr_cpus; i++) {    in test_perf_event_array()
      90  for (i = 0; i < nr_cpus; i++) {    in test_perf_event_array()

  xdp_rxq_info_user.c
      184  unsigned int nr_cpus = bpf_num_possible_cpus();    in alloc_record_per_cpu() local
      188  size = sizeof(struct datarec) * nr_cpus;    in alloc_record_per_cpu()
      192  fprintf(stderr, "Mem alloc error (nr_cpus:%u)\n", nr_cpus);    in alloc_record_per_cpu()
      250  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_percpu() local
      251  struct datarec values[nr_cpus];    in map_collect_percpu()
      265  for (i = 0; i < nr_cpus; i++) {    in map_collect_percpu()
      331  unsigned int nr_cpus = bpf_num_possible_cpus();    in stats_print() local
      354  for (i = 0; i < nr_cpus; i++) {    in stats_print()
      388  for (i = 0; i < nr_cpus; i++) {    in stats_print()

  xdp_tx_iptunnel_user.c
      42  unsigned int nr_cpus = bpf_num_possible_cpus();    in poll_stats() local
      44  __u64 values[nr_cpus], prev[nr_protos][nr_cpus];    in poll_stats()
      57  for (i = 0; i < nr_cpus; i++)    in poll_stats()

  tracex2_user.c
      43  unsigned int nr_cpus = bpf_num_possible_cpus();    in print_hist_for_pid() local
      45  long values[nr_cpus];    in print_hist_for_pid()
      60  for (i = 0; i < nr_cpus; i++)    in print_hist_for_pid()
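
Almost every sample above sizes a user-space buffer with bpf_num_possible_cpus() before reading a per-CPU BPF map: one bpf_map_lookup_elem() on a BPF_MAP_TYPE_PERCPU_* map fills one slot per possible CPU, and the slots are then summed (or compared against a previous snapshot). A hedged sketch of that shared idiom; the header paths follow the samples' build and may need adjusting, and map_fd/key are assumed to come from the caller:

#include <linux/types.h>
#include <bpf/bpf.h>	/* bpf_map_lookup_elem(); path as used by the samples' build */
#include "bpf_util.h"	/* bpf_num_possible_cpus(); lives under selftests/bpf */

/* Sum one per-CPU counter; map_fd must refer to a BPF_MAP_TYPE_PERCPU_* map. */
static __u64 sum_percpu_value(int map_fd, __u32 key)
{
	unsigned int nr_cpus = bpf_num_possible_cpus();
	__u64 values[nr_cpus];		/* one slot per possible CPU */
	__u64 sum = 0;
	unsigned int i;

	if (bpf_map_lookup_elem(map_fd, &key, values))
		return 0;		/* key absent or lookup failed */

	for (i = 0; i < nr_cpus; i++)
		sum += values[i];	/* aggregate the per-CPU counters */

	return sum;
}

Possible (not online) CPUs matter here because the kernel allocates per-CPU map storage for every possible CPU; sizing the buffer from _SC_NPROCESSORS_ONLN could under-allocate on systems with offline CPUs.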

/Linux-v4.19/arch/mips/loongson64/common/

  env.c
      146  loongson_sysconf.nr_cpus = ecpu->nr_cpus;    in prom_init_env()
      149  if (ecpu->nr_cpus > NR_CPUS || ecpu->nr_cpus == 0)    in prom_init_env()
      150  loongson_sysconf.nr_cpus = NR_CPUS;    in prom_init_env()
      151  loongson_sysconf.nr_nodes = (loongson_sysconf.nr_cpus +    in prom_init_env()

/Linux-v4.19/virt/kvm/arm/vgic/

  vgic-debug.c
      37  int nr_cpus;    member
      56  ++iter->vcpu_id < iter->nr_cpus)    in iter_next()
      69  int nr_cpus = atomic_read(&kvm->online_vcpus);    in iter_init() local
      73  iter->nr_cpus = nr_cpus;    in iter_init()
      89  iter->vcpu_id == iter->nr_cpus &&    in end_of_vgic()
      245  if (iter->vcpu_id < iter->nr_cpus)    in vgic_debug_show()

/Linux-v4.19/drivers/pci/controller/

  pcie-iproc-msi.c
      98  int nr_cpus;    member
      197  return (hwirq % msi->nr_cpus);    in hwirq_to_cpu()
      252  msi->nr_cpus, 0);    in iproc_msi_irq_domain_alloc()
      254  bitmap_set(msi->bitmap, hwirq, msi->nr_cpus);    in iproc_msi_irq_domain_alloc()
      282  bitmap_clear(msi->bitmap, hwirq, msi->nr_cpus);    in iproc_msi_irq_domain_free()
      470  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {    in iproc_msi_irq_free()
      482  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {    in iproc_msi_irq_setup()
      534  msi->nr_cpus = num_possible_cpus();    in iproc_msi_init()
      548  if (msi->nr_irqs < msi->nr_cpus) {    in iproc_msi_init()
      554  if (msi->nr_irqs % msi->nr_cpus != 0) {    in iproc_msi_init()
      [all …]
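
The pcie-iproc-msi.c hits outline how the driver stripes MSI vectors across CPUs: nr_irqs must be a multiple of nr_cpus, the owning CPU of a vector is hwirq % nr_cpus, and each CPU walks its own vectors with a stride of nr_cpus. A small user-space illustration of that arithmetic only (constants are made up for the example; this is not driver code):

#include <stdio.h>

#define NR_IRQS	16	/* the driver requires nr_irqs to be a multiple of nr_cpus */
#define NR_CPUS	4

static int hwirq_to_cpu(int hwirq)
{
	return hwirq % NR_CPUS;	/* same mapping as iproc's hwirq_to_cpu() */
}

int main(void)
{
	int cpu, i;

	/* Each CPU services every NR_CPUS-th vector, starting at its own index. */
	for (cpu = 0; cpu < NR_CPUS; cpu++) {
		printf("cpu%d:", cpu);
		for (i = cpu; i < NR_IRQS; i += NR_CPUS)
			printf(" hwirq %d (owner cpu %d)", i, hwirq_to_cpu(i));
		printf("\n");
	}
	return 0;
}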

/Linux-v4.19/tools/perf/arch/arm/util/

  auxtrace.c
      19  int ret, i, nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in find_all_arm_spe_pmus() local
      23  arm_spe_pmus = zalloc(sizeof(struct perf_pmu *) * nr_cpus);    in find_all_arm_spe_pmus()
      30  for (i = 0; i < nr_cpus; i++) {    in find_all_arm_spe_pmus()

/Linux-v4.19/tools/testing/selftests/vm/

  userfaultfd.c
      76  static unsigned long nr_cpus, nr_pages, nr_pages_per_cpu, page_size;    variable
      567  pthread_t locking_threads[nr_cpus];    in stress()
      568  pthread_t uffd_threads[nr_cpus];    in stress()
      569  pthread_t background_threads[nr_cpus];    in stress()
      573  for (cpu = 0; cpu < nr_cpus; cpu++) {    in stress()
      592  for (cpu = 0; cpu < nr_cpus; cpu++)    in stress()
      608  for (cpu = 0; cpu < nr_cpus; cpu++) {    in stress()
      626  for (cpu = 0; cpu < nr_cpus; cpu++)    in stress()
      1021  unsigned long userfaults[nr_cpus];    in userfaultfd_stress()
      1053  pipefd = malloc(sizeof(int) * nr_cpus * 2);    in userfaultfd_stress()
      [all …]
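
The userfaultfd.c stress() hits show the one-thread-per-CPU pattern used throughout these tests: thread handles live in variable-length arrays sized nr_cpus, one loop creates the workers and another joins them. A hedged, self-contained sketch of that pattern with a stand-in worker body (the real test runs locking, uffd-reader and background threads; build with -pthread):

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

static unsigned long nr_cpus;

static void *worker(void *arg)
{
	unsigned long cpu = (unsigned long)arg;

	/* the real test faults pages, reads userfaultfd events, etc. */
	printf("worker for cpu %lu\n", cpu);
	return NULL;
}

int main(void)
{
	long n = sysconf(_SC_NPROCESSORS_ONLN);
	unsigned long cpu;

	if (n <= 0)
		return EXIT_FAILURE;
	nr_cpus = (unsigned long)n;

	pthread_t threads[nr_cpus];	/* VLA sized by the CPU count, as in stress() */

	for (cpu = 0; cpu < nr_cpus; cpu++)
		if (pthread_create(&threads[cpu], NULL, worker, (void *)cpu))
			return EXIT_FAILURE;
	for (cpu = 0; cpu < nr_cpus; cpu++)
		pthread_join(threads[cpu], NULL);
	return 0;
}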

/Linux-v4.19/tools/testing/selftests/rcutorture/bin/

  functions.sh
      76  if test "$3" -gt "$nr_cpus"
      78  echo $nr_cpus

/Linux-v4.19/tools/virtio/virtio-trace/

  trace-agent.c
      33  int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF);    in get_total_cpus() local
      35  if (nr_cpus <= 0) {    in get_total_cpus()
      38  } else if (nr_cpus > MAX_CPUS) {    in get_total_cpus()
      43  return nr_cpus;    in get_total_cpus()

/Linux-v4.19/tools/perf/bench/

  numa.c
      122  int nr_cpus;    member
      273  for (cpu = 0; cpu < g->p.nr_cpus; cpu++)    in bind_to_cpu()
      276  BUG_ON(target_cpu < 0 || target_cpu >= g->p.nr_cpus);    in bind_to_cpu()
      288  int cpus_per_node = g->p.nr_cpus / nr_numa_nodes();    in bind_to_node()
      293  BUG_ON(cpus_per_node * nr_numa_nodes() != g->p.nr_cpus);    in bind_to_node()
      302  for (cpu = 0; cpu < g->p.nr_cpus; cpu++)    in bind_to_node()
      308  BUG_ON(cpu_stop > g->p.nr_cpus);    in bind_to_node()
      532  BUG_ON(step <= 0 || step >= g->p.nr_cpus);    in parse_setup_cpu_list()
      544  BUG_ON(bind_len <= 0 || bind_len > g->p.nr_cpus);    in parse_setup_cpu_list()
      557  if (bind_cpu_0 >= g->p.nr_cpus || bind_cpu_1 >= g->p.nr_cpus) {    in parse_setup_cpu_list()
      [all …]

/Linux-v4.19/arch/s390/appldata/

  appldata_os.c
      68  u32 nr_cpus; /* number of (virtual) CPUs */    member
      138  os_data->nr_cpus = j;    in appldata_get_os_data()
      141  (os_data->nr_cpus * sizeof(struct appldata_os_per_cpu));    in appldata_get_os_data()