/Linux-v5.4/kernel/irq/

affinity.c
    103  unsigned ncpus;  member
    112  return ln->ncpus - rn->ncpus;  in ncpus_cmp_func()
    139  node_vectors[n].ncpus = UINT_MAX;  in alloc_nodes_vectors()
    143  unsigned ncpus;  in alloc_nodes_vectors() local
    146  ncpus = cpumask_weight(nmsk);  in alloc_nodes_vectors()
    148  if (!ncpus)  in alloc_nodes_vectors()
    150  remaining_ncpus += ncpus;  in alloc_nodes_vectors()
    151  node_vectors[n].ncpus = ncpus;  in alloc_nodes_vectors()
    228  unsigned nvectors, ncpus;  in alloc_nodes_vectors() local
    230  if (node_vectors[n].ncpus == UINT_MAX)  in alloc_nodes_vectors()
    [all …]
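The affinity.c hits come from spreading interrupt vectors across NUMA nodes in proportion to how many CPUs each node contributes. A minimal userspace sketch of that proportional split, with made-up node sizes and vector counts (not the kernel's actual alloc_nodes_vectors() logic):

    #include <stdio.h>

    #define NNODES 4

    int main(void)
    {
        unsigned int ncpus[NNODES] = { 8, 8, 4, 4 };    /* assumed per-node CPU counts */
        unsigned int nvecs = 12, remaining_ncpus = 0, remaining_vecs = nvecs;

        for (int n = 0; n < NNODES; n++)
            remaining_ncpus += ncpus[n];

        for (int n = 0; n < NNODES; n++) {
            unsigned int v;

            if (!ncpus[n])
                continue;       /* the kernel likewise skips CPU-less nodes (line 148 above) */
            /* round up so every node with CPUs gets at least its fair share */
            v = (remaining_vecs * ncpus[n] + remaining_ncpus - 1) / remaining_ncpus;
            printf("node %d: %u cpus -> %u vectors\n", n, ncpus[n], v);
            remaining_ncpus -= ncpus[n];
            remaining_vecs -= v;
        }
        return 0;
    }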
/Linux-v5.4/tools/testing/selftests/rcutorture/bin/

cpus2use.sh
    17  ncpus=`grep '^processor' /proc/cpuinfo | wc -l`
    19  awk -v ncpus=$ncpus '{ print ncpus * ($7 + $NF) / 100 }'`
    20  awk -v ncpus=$ncpus -v idlecpus=$idlecpus < /dev/null '
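cpus2use.sh derives ncpus by counting "processor" lines in /proc/cpuinfo and then estimates how many of those CPUs are idle from utilization figures. A C equivalent of just the counting step, with sysconf() used as an assumed stand-in for the grep | wc pipeline:

    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        long ncpus = sysconf(_SC_NPROCESSORS_ONLN);

        if (ncpus < 1)
            ncpus = 1;          /* fall back to a single CPU on error */
        printf("%ld\n", ncpus);
        return 0;
    }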
kvm-build.sh
    37  ncpus=`cpus2use.sh`
    38  make -j$ncpus $TORTURE_KMAKE_ARG > $resdir/Make.out 2>&1
/Linux-v5.4/arch/x86/include/asm/trace/

hyperv.h
    16  __field(unsigned int, ncpus)
    21  TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
    27  __entry->ncpus, __entry->mm,
    64  __field(unsigned int, ncpus)
    67  TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
    71  __entry->ncpus, __entry->vector)
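Both Hyper-V trace events record cpumask_weight(cpus), i.e. the number of set bits in the target CPU mask, in their ncpus field. A rough stand-alone illustration of that bit counting over a fixed-size bitmap (GCC/Clang __builtin_popcountl; not the kernel's cpumask implementation):

    #include <stdio.h>

    #define NR_WORDS 4          /* room for 4 * 64 CPUs in this sketch */

    static unsigned int mask_weight(const unsigned long *mask)
    {
        unsigned int w = 0;

        for (int i = 0; i < NR_WORDS; i++)
            w += (unsigned int)__builtin_popcountl(mask[i]);
        return w;
    }

    int main(void)
    {
        unsigned long mask[NR_WORDS] = { 0xffUL, 0x0UL, 0x1UL, 0x0UL };

        printf("ncpus = %u\n", mask_weight(mask));      /* 8 + 1 = 9 */
        return 0;
    }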
/Linux-v5.4/tools/perf/util/

counts.c
    8  struct perf_counts *perf_counts__new(int ncpus, int nthreads)  in perf_counts__new() argument
    15  values = xyarray__new(ncpus, nthreads, sizeof(struct perf_counts_values));  in perf_counts__new()
    23  values = xyarray__new(ncpus, nthreads, sizeof(bool));  in perf_counts__new()
    56  int perf_evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads)  in perf_evsel__alloc_counts() argument
    58  evsel->counts = perf_counts__new(ncpus, nthreads);  in perf_evsel__alloc_counts()
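perf_counts__new() sizes its storage as an xyarray with ncpus by nthreads slots. A self-contained sketch of that (cpu, thread)-indexed layout, using an invented counts_2d type rather than libperf's xyarray API:

    #include <stdio.h>
    #include <stdlib.h>

    struct counts_2d {
        int ncpus, nthreads;
        unsigned long long *vals;       /* ncpus * nthreads counters */
    };

    static struct counts_2d *counts_2d__new(int ncpus, int nthreads)
    {
        struct counts_2d *c = malloc(sizeof(*c));

        if (!c)
            return NULL;
        c->ncpus = ncpus;
        c->nthreads = nthreads;
        c->vals = calloc((size_t)ncpus * nthreads, sizeof(*c->vals));
        if (!c->vals) {
            free(c);
            return NULL;
        }
        return c;
    }

    /* one counter slot per (cpu, thread) pair */
    static unsigned long long *counts_2d__entry(struct counts_2d *c, int cpu, int thread)
    {
        return &c->vals[(size_t)cpu * c->nthreads + thread];
    }

    int main(void)
    {
        struct counts_2d *c = counts_2d__new(4, 2);

        if (!c)
            return 1;
        *counts_2d__entry(c, 3, 1) = 42;
        printf("%llu\n", *counts_2d__entry(c, 3, 1));
        free(c->vals);
        free(c);
        return 0;
    }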
stat.c
    148  int ncpus, int nthreads)  in perf_evsel__alloc_prev_raw_counts() argument
    152  counts = perf_counts__new(ncpus, nthreads);  in perf_evsel__alloc_prev_raw_counts()
    176  int ncpus = perf_evsel__nr_cpus(evsel);  in perf_evsel__alloc_stats() local
    180  perf_evsel__alloc_counts(evsel, ncpus, nthreads) < 0 ||  in perf_evsel__alloc_stats()
    181  (alloc_raw && perf_evsel__alloc_prev_raw_counts(evsel, ncpus, nthreads) < 0))  in perf_evsel__alloc_stats()
    336  int ncpus = perf_evsel__nr_cpus(counter);  in process_counter_maps() local
    343  for (cpu = 0; cpu < ncpus; cpu++) {  in process_counter_maps()
counts.h
    38  struct perf_counts *perf_counts__new(int ncpus, int nthreads);
    42  int perf_evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads);
cputopo.c
    180  long ncpus;  in cpu_topology__new() local
    185  ncpus = cpu__max_present_cpu();  in cpu_topology__new()
    194  nr = (u32)(ncpus & UINT_MAX);  in cpu_topology__new()
/Linux-v5.4/arch/powerpc/platforms/powermac/

smp.c
    269  int i, ncpus;  in smp_psurge_probe() local
    299  ncpus = 4;  in smp_psurge_probe()
    311  ncpus = 2;  in smp_psurge_probe()
    325  if (ncpus > NR_CPUS)  in smp_psurge_probe()
    326  ncpus = NR_CPUS;  in smp_psurge_probe()
    327  for (i = 1; i < ncpus ; ++i)  in smp_psurge_probe()
    566  static void __init smp_core99_setup_i2c_hwsync(int ncpus)  in smp_core99_setup_i2c_hwsync() argument
    698  static void __init smp_core99_setup(int ncpus)  in smp_core99_setup() argument
    706  smp_core99_setup_i2c_hwsync(ncpus);  in smp_core99_setup()
    754  for (i = 1; i < ncpus; ++i)  in smp_core99_setup()
    [all …]
/Linux-v5.4/arch/powerpc/kernel/

crash.c
    107  unsigned int ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */  in crash_kexec_prepare_cpus() local
    114  ncpus = num_present_cpus() - 1;  in crash_kexec_prepare_cpus()
    126  while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0))  in crash_kexec_prepare_cpus()
    131  if (atomic_read(&cpus_in_crash) >= ncpus) {  in crash_kexec_prepare_cpus()
    137  ncpus - atomic_read(&cpus_in_crash));  in crash_kexec_prepare_cpus()
    168  while (atomic_read(&cpus_in_crash) < ncpus)  in crash_kexec_prepare_cpus()
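crash_kexec_prepare_cpus() spins with a millisecond budget until the other ncpus CPUs have bumped an atomic counter on their way into the crash path. A userspace sketch of that wait-with-timeout pattern, with C11 atomics and usleep() standing in for atomic_read() and the kernel's per-iteration delay (thread setup omitted):

    #include <stdatomic.h>
    #include <stdio.h>
    #include <unistd.h>

    static atomic_int cpus_in_crash;    /* each CPU would increment this on entry */

    static int wait_for_cpus(int ncpus, int msecs)
    {
        while (atomic_load(&cpus_in_crash) < ncpus && --msecs > 0)
            usleep(1000);               /* roughly 1 ms per iteration */

        if (atomic_load(&cpus_in_crash) >= ncpus)
            return 0;
        printf("still missing %d CPUs\n", ncpus - atomic_load(&cpus_in_crash));
        return -1;
    }

    int main(void)
    {
        /* nobody else checks in, so this times out after ~10 ms */
        return wait_for_cpus(1, 10) ? 1 : 0;
    }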
/Linux-v5.4/tools/perf/lib/

evsel.c
    42  int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads)  in perf_evsel__alloc_fd() argument
    44  evsel->fd = xyarray__new(ncpus, nthreads, sizeof(int));  in perf_evsel__alloc_fd()
    48  for (cpu = 0; cpu < ncpus; cpu++) {  in perf_evsel__alloc_fd()
    234  int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads)  in perf_evsel__alloc_id() argument
    236  if (ncpus == 0 || nthreads == 0)  in perf_evsel__alloc_id()
    242  evsel->sample_id = xyarray__new(ncpus, nthreads, sizeof(struct perf_sample_id));  in perf_evsel__alloc_id()
    246  evsel->id = zalloc(ncpus * nthreads * sizeof(u64));  in perf_evsel__alloc_id()
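perf_evsel__alloc_fd() allocates an ncpus x nthreads table of file descriptors and walks it to mark every slot as not yet opened, while perf_evsel__alloc_id() rejects ncpus == 0 || nthreads == 0 before sizing its arrays. A plain-C sketch of that allocate-and-poison step, using a flat array instead of libperf's xyarray:

    #include <stdlib.h>

    /* returns NULL on bad dimensions or allocation failure */
    static int *alloc_fd_table(int ncpus, int nthreads)
    {
        int *fds;

        if (ncpus <= 0 || nthreads <= 0)
            return NULL;
        fds = malloc((size_t)ncpus * nthreads * sizeof(*fds));
        if (!fds)
            return NULL;
        for (int cpu = 0; cpu < ncpus; cpu++)
            for (int thread = 0; thread < nthreads; thread++)
                fds[cpu * nthreads + thread] = -1;      /* no event opened yet */
        return fds;
    }

    int main(void)
    {
        int *fds = alloc_fd_table(4, 2);

        free(fds);
        return !fds;
    }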
/Linux-v5.4/arch/mips/kernel/

crash.c
    59  unsigned int ncpus;  in crash_kexec_prepare_cpus() local
    64  ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */  in crash_kexec_prepare_cpus()
    75  while ((cpumask_weight(&cpus_in_crash) < ncpus) && (--msecs > 0)) {  in crash_kexec_prepare_cpus()
/Linux-v5.4/drivers/clk/mvebu/

clk-cpu.c
    171  int ncpus = 0;  in of_cpu_clk_setup() local
    185  ncpus++;  in of_cpu_clk_setup()
    187  cpuclk = kcalloc(ncpus, sizeof(*cpuclk), GFP_KERNEL);  in of_cpu_clk_setup()
    191  clks = kcalloc(ncpus, sizeof(*clks), GFP_KERNEL);  in of_cpu_clk_setup()
    236  while(ncpus--)  in of_cpu_clk_setup()
    237  kfree(cpuclk[ncpus].clk_name);  in of_cpu_clk_setup()
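of_cpu_clk_setup() first counts CPU nodes to get ncpus, sizes its kcalloc() arrays from that, and on failure unwinds with while (ncpus--), freeing the per-CPU names it had built. A userspace sketch of that count / allocate / unwind shape, with no device-tree API and invented clk_name contents:

    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
        int ncpus = 0;
        char **clk_name;

        /* first pass: count CPUs (stands in for the device-tree walk) */
        for (int cpu = 0; cpu < 4; cpu++)
            ncpus++;

        clk_name = calloc(ncpus, sizeof(*clk_name));
        if (!clk_name)
            return 1;

        for (int i = 0; i < ncpus; i++) {
            clk_name[i] = malloc(16);
            if (clk_name[i])
                snprintf(clk_name[i], 16, "cpu%d-clk", i);
        }

        /* teardown mirrors the driver's "while (ncpus--) kfree(...)" loop */
        while (ncpus--)
            free(clk_name[ncpus]);
        free(clk_name);
        return 0;
    }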
/Linux-v5.4/arch/sparc/kernel/

setup_32.c
    401  int i, ncpus, err;  in topology_init() local
    407  ncpus = 0;  in topology_init()
    408  while (!cpu_find_by_instance(ncpus, NULL, NULL))  in topology_init()
    409  ncpus++;  in topology_init()
    410  ncpus_probed = ncpus;  in topology_init()
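topology_init() on sparc32 counts CPUs by probing instance 0, 1, 2, ... until cpu_find_by_instance() fails, then records the total in ncpus_probed. The same probe-until-failure loop in isolation, with a made-up probe_instance() standing in for the firmware lookup:

    #include <stdio.h>

    static int probe_instance(int instance)
    {
        return instance < 2 ? 0 : -1;   /* pretend exactly two CPUs exist */
    }

    int main(void)
    {
        int ncpus = 0;

        while (!probe_instance(ncpus))
            ncpus++;
        printf("ncpus_probed = %d\n", ncpus);
        return 0;
    }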
ds.c
    479  static int dr_cpu_size_response(int ncpus)  in dr_cpu_size_response() argument
    483  (sizeof(struct dr_cpu_resp_entry) * ncpus));  in dr_cpu_size_response()
    487  u64 handle, int resp_len, int ncpus,  in dr_cpu_init_response() argument
    502  tag->num_records = ncpus;  in dr_cpu_init_response()
    511  BUG_ON(i != ncpus);  in dr_cpu_init_response()
    514  static void dr_cpu_mark(struct ds_data *resp, int cpu, int ncpus,  in dr_cpu_mark() argument
    524  for (i = 0; i < ncpus; i++) {  in dr_cpu_mark()
    537  int resp_len, ncpus, cpu;  in dr_cpu_configure() local
    540  ncpus = cpumask_weight(mask);  in dr_cpu_configure()
    541  resp_len = dr_cpu_size_response(ncpus);  in dr_cpu_configure()
    [all …]
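dr_cpu_size_response() sizes the reply buffer as a fixed header plus one response record per CPU in the request mask (ncpus = cpumask_weight(mask)). A small illustration of that arithmetic, with illustrative struct layouts rather than the real sparc DS message formats:

    #include <stdint.h>
    #include <stdio.h>

    struct resp_hdr   { uint64_t req_num; uint32_t num_records; uint32_t pad; };
    struct resp_entry { uint32_t cpu; uint32_t result; };

    /* header once, then one fixed-size record per CPU */
    static size_t resp_size(int ncpus)
    {
        return sizeof(struct resp_hdr) + sizeof(struct resp_entry) * (size_t)ncpus;
    }

    int main(void)
    {
        printf("%zu bytes for 8 cpus\n", resp_size(8));
        return 0;
    }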
sun4m_smp.c
    177  register int ncpus = SUN4M_NCPUS;  in sun4m_cross_call() local
    196  for (i = 0; i < ncpus; i++) {  in sun4m_cross_call()
    217  } while (++i < ncpus);  in sun4m_cross_call()
    225  } while (++i < ncpus);  in sun4m_cross_call()
/Linux-v5.4/drivers/xen/

mcelog.c
    58  static uint32_t ncpus;  variable
    240  for (i = 0; i < ncpus; i++)  in convert_log()
    243  if (unlikely(i == ncpus)) {  in convert_log()
    368  ncpus = mc_op.u.mc_physcpuinfo.ncpus;  in bind_virq_for_mce()
    369  g_physinfo = kcalloc(ncpus, sizeof(struct mcinfo_logical_cpu),  in bind_virq_for_mce()
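In convert_log(), the driver scans its g_physinfo array of ncpus per-CPU records and treats falling off the end (i == ncpus) as "CPU not found". A stand-alone version of that sentinel-style linear search, with an invented struct rather than Xen's mcinfo_logical_cpu:

    #include <stdint.h>
    #include <stdio.h>

    struct phys_cpu { uint32_t apicid; };

    /* returns the matching index, or -1 when i reaches ncpus without a hit */
    static int find_cpu(const struct phys_cpu *info, uint32_t ncpus, uint32_t apicid)
    {
        uint32_t i;

        for (i = 0; i < ncpus; i++)
            if (info[i].apicid == apicid)
                break;
        if (i == ncpus)
            return -1;
        return (int)i;
    }

    int main(void)
    {
        struct phys_cpu cpus[2] = { { 0 }, { 4 } };

        printf("%d %d\n", find_cpu(cpus, 2, 4), find_cpu(cpus, 2, 9));  /* 1 -1 */
        return 0;
    }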
/Linux-v5.4/tools/perf/lib/include/internal/

evsel.h
    53  int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads);
    59  int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads);
/Linux-v5.4/arch/xtensa/kernel/

smp.c
    93  unsigned int ncpus = get_core_count();  in smp_init_cpus() local
    96  pr_info("%s: Core Count = %d\n", __func__, ncpus);  in smp_init_cpus()
    99  if (ncpus > NR_CPUS) {  in smp_init_cpus()
    100  ncpus = NR_CPUS;  in smp_init_cpus()
    101  pr_info("%s: limiting core count by %d\n", __func__, ncpus);  in smp_init_cpus()
    104  for (i = 0; i < ncpus; ++i)  in smp_init_cpus()
/Linux-v5.4/drivers/gpu/drm/i915/selftests/

i915_request.c
    438  unsigned int ncpus = num_online_cpus();  in mock_breadcrumbs_smoketest() local
    449  threads = kmalloc_array(ncpus, sizeof(*threads), GFP_KERNEL);  in mock_breadcrumbs_smoketest()
    470  for (n = 0; n < ncpus; n++) {  in mock_breadcrumbs_smoketest()
    475  ncpus = n;  in mock_breadcrumbs_smoketest()
    484  for (n = 0; n < ncpus; n++) {  in mock_breadcrumbs_smoketest()
    496  ncpus);  in mock_breadcrumbs_smoketest()
    1106  unsigned int ncpus = num_online_cpus();  in live_breadcrumbs_smoketest() local
    1133  threads = kcalloc(ncpus * I915_NUM_ENGINES,  in live_breadcrumbs_smoketest()
    1179  for (n = 0; n < ncpus; n++) {  in live_breadcrumbs_smoketest()
    1191  threads[id * ncpus + n] = tsk;  in live_breadcrumbs_smoketest()
    [all …]
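The breadcrumbs smoketests start one worker per online CPU and, if spawning fails partway through, shrink ncpus to the number of workers that actually started so the later wait and cleanup loops stay in bounds. A pthread sketch of that pattern, with the kernel threads and i915 details replaced by trivial workers:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <unistd.h>

    static void *worker(void *arg)
    {
        (void)arg;
        return NULL;
    }

    int main(void)
    {
        long online = sysconf(_SC_NPROCESSORS_ONLN);
        unsigned int ncpus = online > 0 ? (unsigned int)online : 1;
        pthread_t *threads = calloc(ncpus, sizeof(*threads));
        unsigned int n;

        if (!threads)
            return 1;
        for (n = 0; n < ncpus; n++) {
            if (pthread_create(&threads[n], NULL, worker, NULL)) {
                ncpus = n;      /* only join the threads that actually exist */
                break;
            }
        }
        for (n = 0; n < ncpus; n++)
            pthread_join(threads[n], NULL);
        printf("ran %u workers\n", ncpus);
        free(threads);
        return 0;
    }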
/Linux-v5.4/kernel/locking/

test-ww_mutex.c
    328  static int test_cycle(unsigned int ncpus)  in test_cycle() argument
    333  for (n = 2; n <= ncpus + 1; n++) {  in test_cycle()
    583  int ncpus = num_online_cpus();  in test_ww_mutex_init() local
    606  ret = test_cycle(ncpus);  in test_ww_mutex_init()
    610  ret = stress(16, 2*ncpus, STRESS_INORDER);  in test_ww_mutex_init()
    614  ret = stress(16, 2*ncpus, STRESS_REORDER);  in test_ww_mutex_init()
    618  ret = stress(4095, hweight32(STRESS_ALL)*ncpus, STRESS_ALL);  in test_ww_mutex_init()
/Linux-v5.4/arch/x86/kernel/

kvmclock.c
    226  unsigned long ncpus;  in kvmclock_init_mem() local
    234  ncpus = num_possible_cpus() - HVC_BOOT_ARRAY_SIZE;  in kvmclock_init_mem()
    235  order = get_order(ncpus * sizeof(*hvclock_mem));  in kvmclock_init_mem()
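kvmclock_init_mem() turns ncpus * sizeof(*hvclock_mem) into a page allocation order via get_order(). A sketch of what that order computation amounts to, assuming 4 KiB pages (the real get_order() is an arch helper, not this loop):

    #include <stdio.h>

    #define PAGE_SIZE 4096UL

    /* smallest order such that (1 << order) pages cover "size" bytes */
    static unsigned int get_order_sketch(unsigned long size)
    {
        unsigned long pages = (size + PAGE_SIZE - 1) / PAGE_SIZE;
        unsigned int order = 0;

        while ((1UL << order) < pages)
            order++;
        return order;
    }

    int main(void)
    {
        unsigned long ncpus = 64, elem = 64;    /* e.g. 64 per-CPU entries of 64 bytes */

        /* 64 * 64 = 4096 bytes, i.e. one page, so order 0 */
        printf("order = %u\n", get_order_sketch(ncpus * elem));
        return 0;
    }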
/Linux-v5.4/drivers/misc/sgi-gru/

grukservices.c
    144  int ctxnum, ncpus;  in gru_load_kernel_context() local
    162  ncpus = uv_blade_nr_possible_cpus(blade_id);  in gru_load_kernel_context()
    164  GRU_NUM_KERNEL_CBR * ncpus + bs->bs_async_cbrs);  in gru_load_kernel_context()
    166  GRU_NUM_KERNEL_DSR_BYTES * ncpus +  in gru_load_kernel_context()
    362  int ncpus;  in gru_lock_async_resource() local
    365  ncpus = uv_blade_nr_possible_cpus(blade_id);  in gru_lock_async_resource()
    367  *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE;  in gru_lock_async_resource()
    369  *dsr = bs->kernel_dsr + ncpus * GRU_NUM_KERNEL_DSR_BYTES;  in gru_lock_async_resource()
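The GRU code sizes its kernel context for one CBR/DSR slice per possible CPU on the blade and then locates the async resources right after them, at base + ncpus * stride. A generic sketch of that per-CPU slicing of one shared buffer, with invented sizes and names:

    #include <stdio.h>
    #include <stdlib.h>

    #define SLICE_BYTES 256     /* assumed per-CPU slice size */

    int main(void)
    {
        int ncpus = 8;
        unsigned char *base = calloc(ncpus, SLICE_BYTES);

        if (!base)
            return 1;
        for (int cpu = 0; cpu < ncpus; cpu++) {
            unsigned char *slice = base + cpu * SLICE_BYTES;

            slice[0] = (unsigned char)cpu;      /* each CPU touches only its slice */
        }
        /* anything shared beyond the per-CPU slices would start at base + ncpus * SLICE_BYTES */
        printf("cpu 5 slice starts at offset %d\n", 5 * SLICE_BYTES);
        free(base);
        return 0;
    }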
/Linux-v5.4/tools/perf/bench/

futex-wake.c
    46  static unsigned int ncpus, threads_starting, nthreads = 0;  variable
    144  nthreads = ncpus;  in bench_futex_wake()
/Linux-v5.4/arch/x86/platform/uv/

uv_time.c
    51  int ncpus;  member
    168  head->ncpus = uv_blade_nr_possible_cpus(bid);  in uv_rtc_allocate_timers()
    187  for (c = 0; c < head->ncpus; c++) {  in uv_rtc_find_next_timer()
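uv_rtc_find_next_timer() walks the head's ncpus per-CPU expiry slots to pick the soonest armed timer. A minimal version of that scan over an array of expiry times, with UNARMED as an assumed "not armed" sentinel:

    #include <stdint.h>
    #include <stdio.h>

    #define UNARMED UINT64_MAX

    int main(void)
    {
        uint64_t expires[] = { UNARMED, 1200, 900, UNARMED };
        int ncpus = 4, best = -1;

        /* keep the CPU whose armed timer expires first */
        for (int c = 0; c < ncpus; c++)
            if (expires[c] != UNARMED &&
                (best < 0 || expires[c] < expires[best]))
                best = c;
        printf("next timer on cpu %d\n", best);     /* cpu 2 */
        return 0;
    }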