
Searched refs: nr_cpu_ids (Results 1 – 25 of 203), sorted by relevance


/Linux-v4.19/kernel/
smp.c
159 if ((unsigned)cpu >= nr_cpu_ids || !cpu_online(cpu)) { in generic_exec_single()
374 for (cpu = cpumask_first_and(nodemask, mask); cpu < nr_cpu_ids; in smp_call_function_any()
424 if (cpu >= nr_cpu_ids) in smp_call_function_many()
433 if (next_cpu >= nr_cpu_ids) { in smp_call_function_many()
533 if (nr_cpus > 0 && nr_cpus < nr_cpu_ids) in nrcpus()
534 nr_cpu_ids = nr_cpus; in nrcpus()
553 unsigned int nr_cpu_ids __read_mostly = NR_CPUS;
554 EXPORT_SYMBOL(nr_cpu_ids);
559 nr_cpu_ids = find_last_bit(cpumask_bits(cpu_possible_mask),NR_CPUS) + 1; in setup_nr_cpu_ids()
791 if (cpu >= nr_cpu_ids || !cpu_online(cpu)) in smp_call_on_cpu()
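
These kernel/smp.c hits cover both sides of nr_cpu_ids: it is defined and exported here (defaulting to NR_CPUS, narrowed by the "nr_cpus=" boot parameter in nrcpus() and by setup_nr_cpu_ids(), which sets it to the highest bit set in cpu_possible_mask plus one), and it serves as the upper bound when validating a target CPU before a cross-CPU call. A minimal sketch of that validation pattern, with a hypothetical helper name:

    #include <linux/cpumask.h>
    #include <linux/errno.h>

    /* Reject CPU numbers outside [0, nr_cpu_ids) or CPUs that are not
     * online, mirroring the checks seen in generic_exec_single() and
     * smp_call_on_cpu(). */
    static int check_target_cpu(unsigned int cpu)
    {
            if (cpu >= nr_cpu_ids || !cpu_online(cpu))
                    return -ENXIO;
            return 0;
    }
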
/Linux-v4.19/arch/arm/mach-spear/
platsmp.c
100 if (ncores > nr_cpu_ids) { in spear13xx_smp_init_cpus()
102 ncores, nr_cpu_ids); in spear13xx_smp_init_cpus()
103 ncores = nr_cpu_ids; in spear13xx_smp_init_cpus()
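
The SpEAr hit above, and the BCM63xx, OMAP4, arm64 and x86 results further down, all share one pattern: if the hardware reports more cores than the kernel has CPU IDs for, warn and clip the count to nr_cpu_ids. A minimal sketch of that clamp, with a hypothetical function name:

    #include <linux/cpumask.h>
    #include <linux/printk.h>

    /* Clip a hardware-reported core count to the configured CPU ID space,
     * as the platform smp_init_cpus() hooks in these results do. */
    static unsigned int clamp_ncores(unsigned int ncores)
    {
            if (ncores > nr_cpu_ids) {
                    pr_warn("SMP: %u cores greater than maximum (%u), clipping\n",
                            ncores, nr_cpu_ids);
                    ncores = nr_cpu_ids;
            }
            return ncores;
    }
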
/Linux-v4.19/arch/powerpc/kernel/
paca.c
198 paca_nr_cpu_ids = nr_cpu_ids; in allocate_paca_ptrs()
200 paca_ptrs_size = sizeof(struct paca_struct *) * nr_cpu_ids; in allocate_paca_ptrs()
241 new_ptrs_size = sizeof(struct paca_struct *) * nr_cpu_ids; in free_unused_pacas()
246 paca_nr_cpu_ids = nr_cpu_ids; in free_unused_pacas()
259 paca_ptrs_size + paca_struct_size, nr_cpu_ids); in free_unused_pacas()
setup-common.c
346 if (cpumask_next(cpu_id, cpu_online_mask) >= nr_cpu_ids) in show_cpuinfo()
358 if ((*pos) < nr_cpu_ids) in c_start()
462 cpu_to_phys_id = __va(memblock_alloc(nr_cpu_ids * sizeof(u32), in smp_setup_cpu_maps()
464 memset(cpu_to_phys_id, 0, nr_cpu_ids * sizeof(u32)); in smp_setup_cpu_maps()
491 for (j = 0; j < nthreads && cpu < nr_cpu_ids; j++) { in smp_setup_cpu_maps()
508 if (cpu >= nr_cpu_ids) { in smp_setup_cpu_maps()
544 if (maxcpus > nr_cpu_ids) { in smp_setup_cpu_maps()
548 maxcpus, nr_cpu_ids); in smp_setup_cpu_maps()
549 maxcpus = nr_cpu_ids; in smp_setup_cpu_maps()
859 memblock_free(__pa(cpu_to_phys_id), nr_cpu_ids * sizeof(u32)); in smp_setup_pacas()
/Linux-v4.19/arch/arm/mach-bcm/
bcm63xx_smp.c
65 if (ncores > nr_cpu_ids) { in scu_a9_enable()
67 ncores, nr_cpu_ids); in scu_a9_enable()
68 ncores = nr_cpu_ids; in scu_a9_enable()
/Linux-v4.19/include/linux/
cpumask.h
33 #define cpumask_pr_args(maskp) nr_cpu_ids, cpumask_bits(maskp)
36 #define nr_cpu_ids 1U macro
38 extern unsigned int nr_cpu_ids;
44 #define nr_cpumask_bits nr_cpu_ids
243 (cpu) < nr_cpu_ids;)
255 (cpu) < nr_cpu_ids;)
291 (cpu) < nr_cpu_ids;)
895 nr_cpu_ids); in cpumap_print_to_pagebuf()
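
These include/linux/cpumask.h hits are the definition site: on UP builds nr_cpu_ids is the constant 1U, on SMP it is an extern set at boot, and nr_cpumask_bits aliases it so the for_each_cpu*() iterators and cpumask_pr_args() stop at the last possible CPU rather than at NR_CPUS. A minimal sketch of the iteration and printing idioms built on that bound, with a hypothetical function name:

    #include <linux/cpumask.h>
    #include <linux/printk.h>

    /* for_each_possible_cpu() terminates once the iterator
     * reaches a value >= nr_cpu_ids. */
    static void report_possible_cpus(void)
    {
            unsigned int cpu;

            for_each_possible_cpu(cpu)
                    pr_info("cpu%u is possible\n", cpu);

            /* %*pbl prints a cpumask as a list; cpumask_pr_args()
             * expands to "nr_cpu_ids, cpumask_bits(mask)". */
            pr_info("possible: %*pbl\n", cpumask_pr_args(cpu_possible_mask));
    }
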
/Linux-v4.19/kernel/irq/
cpuhotplug.c
38 if (cpumask_any_but(m, cpu) < nr_cpu_ids && in irq_needs_fixup()
39 cpumask_any_and(m, cpu_online_mask) >= nr_cpu_ids) { in irq_needs_fixup()
112 if (cpumask_any_and(affinity, cpu_online_mask) >= nr_cpu_ids) { in migrate_one_irq()
ipi.c
70 if (next < nr_cpu_ids) in irq_reserve_ipi()
72 if (next < nr_cpu_ids) { in irq_reserve_ipi()
167 if (!data || !ipimask || cpu >= nr_cpu_ids) in ipi_get_hwirq()
197 if (cpu >= nr_cpu_ids) in ipi_send_verify()
migration.c
29 if (cpumask_any_and(desc->pending_mask, cpu_online_mask) >= nr_cpu_ids) { in irq_fixup_move_pending()
77 if (cpumask_any_and(desc->pending_mask, cpu_online_mask) < nr_cpu_ids) { in irq_move_masked_irq()
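
The irq hotplug and migration hits lean on the convention that cpumask_any_and() and cpumask_any_but() return a value >= nr_cpu_ids when nothing matches, so "result >= nr_cpu_ids" reads as "the mask contains no (online) CPU". A minimal sketch of that test, with a hypothetical helper name:

    #include <linux/cpumask.h>
    #include <linux/types.h>

    /* True if the affinity mask has no currently online CPU;
     * cpumask_any_and() returns >= nr_cpu_ids for an empty intersection. */
    static bool affinity_has_no_online_cpu(const struct cpumask *affinity)
    {
            return cpumask_any_and(affinity, cpu_online_mask) >= nr_cpu_ids;
    }
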
/Linux-v4.19/kernel/sched/
cpupri.c
101 if (cpumask_any_and(&p->cpus_allowed, vec->mask) >= nr_cpu_ids) in cpupri_find()
115 if (cpumask_any(lowest_mask) >= nr_cpu_ids) in cpupri_find()
215 cp->cpu_to_pri = kcalloc(nr_cpu_ids, sizeof(int), GFP_KERNEL); in cpupri_init()
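
The cpupri.c hits show the same "empty mask" test plus an allocation sized by nr_cpu_ids rather than NR_CPUS, so a table indexed by CPU number only covers IDs that can actually exist. A minimal sketch of that allocation pattern, with a hypothetical function name:

    #include <linux/cpumask.h>
    #include <linux/slab.h>

    /* One slot per possible CPU ID instead of NR_CPUS slots,
     * as cpupri_init() does for its cpu_to_pri table. */
    static int *alloc_cpu_indexed_table(void)
    {
            return kcalloc(nr_cpu_ids, sizeof(int), GFP_KERNEL);
    }
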
/Linux-v4.19/arch/arm/kernel/
devtree.c
155 if (WARN(cpuidx > nr_cpu_ids, "DT /cpu %u nodes greater than " in arm_dt_init_cpu_maps()
157 cpuidx, nr_cpu_ids)) { in arm_dt_init_cpu_maps()
158 cpuidx = nr_cpu_ids; in arm_dt_init_cpu_maps()
/Linux-v4.19/arch/powerpc/include/asm/
cputhreads.h
55 if (cpu < nr_cpu_ids) in cpu_thread_mask_to_cores()
64 return nr_cpu_ids >> threads_shift; in cpu_nr_cores()
/Linux-v4.19/arch/x86/xen/
smp_pv.c
154 for (i = 0; i < nr_cpu_ids; i++) { in xen_fill_possible_map()
173 for (i = 0; i < nr_cpu_ids; i++) { in xen_filter_cpu_maps()
194 nr_cpu_ids = nr_cpu_ids - subtract; in xen_filter_cpu_maps()
270 for (cpu = nr_cpu_ids - 1; !cpu_possible(cpu); cpu--) in xen_pv_smp_prepare_cpus()
/Linux-v4.19/arch/arm/mach-omap2/
omap-smp.c
301 if (ncores > nr_cpu_ids) { in omap4_smp_init_cpus()
303 ncores, nr_cpu_ids); in omap4_smp_init_cpus()
304 ncores = nr_cpu_ids; in omap4_smp_init_cpus()
/Linux-v4.19/drivers/perf/
arm_pmu_platform.c
88 cpu = nr_cpu_ids; in pmu_parse_irq_affinity()
125 if (nr_cpu_ids != 1 && !pmu_has_irq_affinity(pdev->dev.of_node)) { in pmu_parse_irqs()
145 if (cpu >= nr_cpu_ids) in pmu_parse_irqs()
/Linux-v4.19/drivers/base/
cpu.c
256 if (total_cpus && nr_cpu_ids < total_cpus) { in print_cpus_offline()
260 if (nr_cpu_ids == total_cpus-1) in print_cpus_offline()
261 n += snprintf(&buf[n], len - n, "%u", nr_cpu_ids); in print_cpus_offline()
264 nr_cpu_ids, total_cpus-1); in print_cpus_offline()
400 if (cpu < nr_cpu_ids && cpu_possible(cpu)) in get_cpu_device()
/Linux-v4.19/arch/x86/kernel/
smpboot.c
1183 if (def_to_bigsmp && nr_cpu_ids > 8) { in smp_sanity_check()
1204 nr_cpu_ids = 8; in smp_sanity_check()
1235 c->cpu_index = nr_cpu_ids; in smp_cpu_index_default()
1349 __max_logical_packages = DIV_ROUND_UP(nr_cpu_ids, ncpus); in calculate_max_logical_packages()
1431 if (possible > nr_cpu_ids) { in prefill_possible_map()
1433 possible, nr_cpu_ids); in prefill_possible_map()
1434 possible = nr_cpu_ids; in prefill_possible_map()
1446 nr_cpu_ids = possible; in prefill_possible_map()
/Linux-v4.19/arch/x86/events/intel/
cstate.c
316 if (cpu >= nr_cpu_ids) in cstate_pmu_event_init()
385 if (target < nr_cpu_ids) { in cstate_cpu_exit()
396 if (target < nr_cpu_ids) { in cstate_cpu_exit()
415 if (has_cstate_core && target >= nr_cpu_ids) in cstate_cpu_init()
424 if (has_cstate_pkg && target >= nr_cpu_ids) in cstate_cpu_init()
/Linux-v4.19/arch/s390/kernel/
smp.c
721 for (i = 0; (i < info->combined) && (cpu < nr_cpu_ids); i++) { in __smp_rescan_cpus()
740 if (cpu >= nr_cpu_ids) in __smp_rescan_cpus()
836 if (base + i < nr_cpu_ids) in __cpu_up()
920 sclp_max = (sclp.max_cores * sclp_max) ?: nr_cpu_ids; in smp_fill_possible_mask()
921 possible = setup_possible_cpus ?: nr_cpu_ids; in smp_fill_possible_mask()
923 for (cpu = 0; cpu < possible && cpu < nr_cpu_ids; cpu++) in smp_fill_possible_mask()
1016 if (cpu + i >= nr_cpu_ids || !cpu_present(cpu + i)) in cpu_configure_store()
1031 if (cpu + i >= nr_cpu_ids || !cpu_present(cpu + i)) in cpu_configure_store()
/Linux-v4.19/arch/powerpc/sysdev/xive/
common.c
467 num = min_t(int, cpumask_weight(mask), nr_cpu_ids); in xive_find_target_in_mask()
472 for (i = 0; i < first && cpu < nr_cpu_ids; i++) in xive_find_target_in_mask()
476 if (WARN_ON(cpu >= nr_cpu_ids)) in xive_find_target_in_mask()
497 if (cpu >= nr_cpu_ids) in xive_find_target_in_mask()
574 target >= nr_cpu_ids)) in xive_irq_startup()
689 if (cpumask_any_and(cpumask, cpu_online_mask) >= nr_cpu_ids) in xive_irq_set_affinity()
713 if (WARN_ON(target >= nr_cpu_ids)) in xive_irq_set_affinity()
/Linux-v4.19/arch/arm64/include/asm/
smp_plat.h
53 for (cpu = 0; cpu < nr_cpu_ids; cpu++) in get_logical_index()
/Linux-v4.19/arch/arm64/kernel/
smp.c
589 for (i = 0; i < nr_cpu_ids; i++) in acpi_parse_and_init_cpus()
668 if (cpu_count > nr_cpu_ids) in smp_init_cpus()
670 cpu_count, nr_cpu_ids); in smp_init_cpus()
684 for (i = 1; i < nr_cpu_ids; i++) { in smp_init_cpus()
/Linux-v4.19/drivers/infiniband/hw/hfi1/
affinity.c
115 if (cpu >= nr_cpu_ids) /* empty */ in cpu_mask_set_get_first()
302 if (ret_cpu >= nr_cpu_ids) { in per_cpu_affinity_get()
338 if (max_cpu >= nr_cpu_ids) in per_cpu_affinity_put_max()
394 if (cpu >= nr_cpu_ids) { /* empty */ in _dev_comp_vect_cpu_get()
648 if (cpumask_first(local_mask) >= nr_cpu_ids) in hfi1_dev_affinity_init()
709 if (curr_cpu >= nr_cpu_ids) in hfi1_dev_affinity_init()
1201 if (cpu >= nr_cpu_ids) /* empty */ in hfi1_get_proc_affinity()
/Linux-v4.19/arch/x86/kernel/apic/
apic_common.c
31 if (mps_cpu < nr_cpu_ids && cpu_present(mps_cpu)) in default_cpu_present_to_apicid()
/Linux-v4.19/lib/
flex_proportions.c
170 #define PROP_BATCH (8*(1+ilog2(nr_cpu_ids)))
208 if (val < (nr_cpu_ids * PROP_BATCH)) in fprop_reflect_period_percpu()
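
The flex_proportions.c hits scale the per-CPU batch size with the logarithm of the CPU count and compare accumulated error against nr_cpu_ids * PROP_BATCH. A minimal sketch of that scaling, with a hypothetical function name:

    #include <linux/cpumask.h>
    #include <linux/log2.h>

    /* Batch grows logarithmically with the number of CPU IDs,
     * matching lib/flex_proportions.c's PROP_BATCH. */
    static inline long prop_batch_size(void)
    {
            return 8 * (1 + ilog2(nr_cpu_ids));
    }
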
