Searched refs: threads_per_core (Results 1 – 24 of 24) sorted by relevance

/Linux-v5.10/arch/powerpc/include/asm/
cputhreads.h
23 extern int threads_per_core;
28 #define threads_per_core 1 macro
52 for (i = 0; i < NR_CPUS; i += threads_per_core) { in cpu_thread_mask_to_cores()
83 return cpu & (threads_per_core - 1); in cpu_thread_in_core()
93 return cpu & ~(threads_per_core - 1); in cpu_first_thread_sibling()
98 return cpu | (threads_per_core - 1); in cpu_last_thread_sibling()
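The cputhreads.h helpers above assume threads_per_core is a power of two, so mapping a logical CPU to its thread slot and sibling range reduces to bit masking. A minimal user-space sketch of the same masks (the SMT width and CPU id below are made-up example values, not taken from the listing):

#include <stdio.h>

static int threads_per_core = 8;                  /* example: SMT8 core */

static int cpu_thread_in_core(int cpu)       { return cpu & (threads_per_core - 1); }
static int cpu_first_thread_sibling(int cpu) { return cpu & ~(threads_per_core - 1); }
static int cpu_last_thread_sibling(int cpu)  { return cpu | (threads_per_core - 1); }

int main(void)
{
        int cpu = 13;                             /* hypothetical logical CPU id */

        /* cpu 13 is thread 5 of the core covering CPUs 8..15 */
        printf("cpu %d: thread %d, siblings %d..%d\n", cpu,
               cpu_thread_in_core(cpu),
               cpu_first_thread_sibling(cpu),
               cpu_last_thread_sibling(cpu));
        return 0;
}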
/Linux-v5.10/arch/powerpc/platforms/powernv/
subcore.c
152 for (i = cpu + 1; i < cpu + threads_per_core; i++) in wait_for_sync_step()
197 for (i = cpu + 1; i < cpu + threads_per_core; i++) in unsplit_core()
316 threads_per_subcore = threads_per_core / subcores_per_core; in cpu_update_split_mode()
421 if (setup_max_cpus % threads_per_core) in subcore_init()
idle.c
318 unsigned long core_thread_mask = (1UL << threads_per_core) - 1; in power7_idle_insn()
369 == threads_per_core) in power7_idle_insn()
610 unsigned long core_thread_mask = (1UL << threads_per_core) - 1; in power9_idle_stop()
823 int need_awake = threads_per_core; in pnv_power9_force_smt4_catch()
826 cpu0 = cpu & ~(threads_per_core - 1); in pnv_power9_force_smt4_catch()
827 for (thr = 0; thr < threads_per_core; ++thr) { in pnv_power9_force_smt4_catch()
833 for (thr = 0; thr < threads_per_core; ++thr) { in pnv_power9_force_smt4_catch()
843 for (thr = 0; thr < threads_per_core; ++thr) { in pnv_power9_force_smt4_catch()
852 for (thr = 0; thr < threads_per_core; ++thr) { in pnv_power9_force_smt4_catch()
869 cpu0 = cpu & ~(threads_per_core - 1); in pnv_power9_force_smt4_release()
[all …]
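subcore.c above divides each core's threads among subcores (threads_per_subcore = threads_per_core / subcores_per_core), and idle.c builds a mask with one bit per hardware thread of the core. A small sketch of that arithmetic, with subcores_per_core chosen as an example value:

#include <stdio.h>

int main(void)
{
        int threads_per_core = 8;
        int subcores_per_core = 2;               /* example: split SMT8 into 2x SMT4 */
        int threads_per_subcore = threads_per_core / subcores_per_core;

        /* one bit per thread of the core, as in power7_idle_insn()/power9_idle_stop() */
        unsigned long core_thread_mask = (1UL << threads_per_core) - 1;

        printf("threads_per_subcore=%d core_thread_mask=0x%lx\n",
               threads_per_subcore, core_thread_mask);
        return 0;
}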
/Linux-v5.10/drivers/virt/nitro_enclaves/
ne_misc_dev.c
543 cpumask_set_cpu(cpu, ne_enclave->threads_per_core[core_id]); in ne_set_enclave_threads_per_core()
575 for_each_cpu(cpu, ne_enclave->threads_per_core[i]) in ne_get_cpu_from_cpu_pool()
594 *vcpu_id = cpumask_any(ne_enclave->threads_per_core[core_id]); in ne_get_cpu_from_cpu_pool()
659 if (cpumask_test_cpu(vcpu_id, ne_enclave->threads_per_core[i])) in ne_check_cpu_in_cpu_pool()
1049 for_each_cpu(cpu, ne_enclave->threads_per_core[i]) in ne_start_enclave_ioctl()
1369 for_each_cpu(cpu, ne_enclave->threads_per_core[i]) in ne_enclave_remove_all_vcpu_id_entries()
1373 free_cpumask_var(ne_enclave->threads_per_core[i]); in ne_enclave_remove_all_vcpu_id_entries()
1378 kfree(ne_enclave->threads_per_core); in ne_enclave_remove_all_vcpu_id_entries()
1575 ne_enclave->threads_per_core = kcalloc(ne_enclave->nr_parent_vm_cores, in ne_create_vm_ioctl()
1576 sizeof(*ne_enclave->threads_per_core), GFP_KERNEL); in ne_create_vm_ioctl()
[all …]
ne_misc_dev.h
80 cpumask_var_t *threads_per_core; member
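In the Nitro Enclaves driver, threads_per_core is not the global count but an array of cpumask_var_t, one mask per parent-VM core: ne_create_vm_ioctl() kcallocs the array, ne_set_enclave_threads_per_core() sets the bit for each assigned CPU, and ne_enclave_remove_all_vcpu_id_entries() frees each mask and then the array. A rough user-space analogue of that per-core bitmap bookkeeping (sizes and names here are illustrative only):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
        int nr_parent_vm_cores = 4;              /* example value */
        int threads_per_core = 2;

        /* one bitmap per core, standing in for cpumask_var_t */
        uint64_t *core_mask = calloc(nr_parent_vm_cores, sizeof(*core_mask));
        if (!core_mask)
                return 1;

        int cpu = 5;                             /* hypothetical CPU handed to the enclave */
        int core_id = cpu / threads_per_core;    /* cpu 5 -> core 2 */

        /* set bit 'cpu' in that core's mask, mirroring cpumask_set_cpu() */
        core_mask[core_id] |= 1ULL << cpu;

        /* membership check, mirroring cpumask_test_cpu() */
        if (core_mask[core_id] & (1ULL << cpu))
                printf("cpu %d recorded in core %d, mask 0x%llx\n",
                       cpu, core_id, (unsigned long long)core_mask[core_id]);

        free(core_mask);
        return 0;
}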
/Linux-v5.10/arch/powerpc/perf/
imc-pmu.c
588 int nid, rc = 0, core_id = (cpu / threads_per_core); in core_imc_mem_init()
626 int core_id = (cpu / threads_per_core); in is_core_imc_mem_inited()
705 core_id = cpu / threads_per_core; in ppc_core_imc_cpu_offline()
763 core_id = event->cpu / threads_per_core; in core_imc_counters_release()
831 core_id = event->cpu / threads_per_core; in core_imc_event_init()
1124 core_id = smp_processor_id() / threads_per_core; in thread_imc_event_add()
1159 core_id = smp_processor_id() / threads_per_core; in thread_imc_event_del()
1198 int core_id = (cpu_id / threads_per_core); in trace_imc_mem_alloc()
1350 int core_id = smp_processor_id() / threads_per_core; in trace_imc_event_add()
1400 int core_id = smp_processor_id() / threads_per_core; in trace_imc_event_del()
[all …]
hv-24x7.c
1721 if (threads_per_core == 8) in hv_24x7_init()
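Across imc-pmu.c above, the per-core counter state is always reached through the same index, core_id = cpu / threads_per_core. A toy version of that per-core indexing (topology sizes are assumptions for illustration):

#include <stdio.h>

int main(void)
{
        int threads_per_core = 8;
        int nr_cores = 4;                        /* example machine: 32 logical CPUs */
        int core_refcount[4] = { 0 };            /* stand-in for per-core IMC state */

        for (int cpu = 0; cpu < nr_cores * threads_per_core; cpu++)
                core_refcount[cpu / threads_per_core]++;

        for (int core = 0; core < nr_cores; core++)
                printf("core %d referenced by %d threads\n", core, core_refcount[core]);
        return 0;
}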
/Linux-v5.10/arch/powerpc/platforms/85xx/
smp.c
288 if (threads_per_core == 2) { in smp_85xx_kick_cpu()
319 } else if (threads_per_core == 1) { in smp_85xx_kick_cpu()
326 } else if (threads_per_core > 2) { in smp_85xx_kick_cpu()
/Linux-v5.10/arch/powerpc/kvm/
e500mc.c
353 if (threads_per_core == 2) in kvmppc_core_init_vm_e500mc()
364 if (threads_per_core == 2) in kvmppc_core_destroy_vm_e500mc()
400 kvmppc_init_lpid(KVMPPC_NR_LPIDS/threads_per_core); in kvmppc_e500mc_init()
e500.h
299 if (threads_per_core == 2) in get_thread_specific_lpid()
book3s_hv_builtin.c
333 for (t = 1; t < threads_per_core; ++t) { in kvmhv_commence_exit()
book3s_hv.c
2595 for (i = 0; i < threads_per_core; ++i) in radix_flush_cpu()
4811 for (cpu = 0; cpu < nr_cpu_ids; cpu += threads_per_core) { in kvmppc_alloc_host_rm_ops()
5637 int first_cpu = i * threads_per_core; in kvm_init_subcore_bitmap()
5651 for (j = 0; j < threads_per_core; j++) { in kvm_init_subcore_bitmap()
/Linux-v5.10/arch/powerpc/kernel/
setup_64.c
133 smt_enabled_at_boot = threads_per_core; in check_smt_enabled()
138 smt_enabled_at_boot = threads_per_core; in check_smt_enabled()
148 min(threads_per_core, smt); in check_smt_enabled()
158 smt_enabled_at_boot = threads_per_core; in check_smt_enabled()
setup-common.c
393 int threads_per_core, threads_per_subcore, threads_shift __read_mostly; variable
395 EXPORT_SYMBOL_GPL(threads_per_core);
404 threads_per_core = tpc; in cpu_init_thread_core_maps()
smp.c
836 for (i = first_thread; i < first_thread + threads_per_core; i++) { in init_cpu_l1_cache_map()
1111 if (threads_per_core > 1 && secondaries_inhibited() && in __cpu_up()
1377 for (i = first_thread; i < first_thread + threads_per_core; i++) in add_cpu_to_masks()
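setup-common.c is where the PowerPC threads_per_core variable is actually defined and exported; cpu_init_thread_core_maps() records the thread count reported by firmware, and the callers above (setup_64.c, smp.c) only read it. The kernel also keeps threads_shift so a core number can be derived by shifting; a sketch of that relationship, assuming a power-of-two thread count as the cputhreads.h masks require:

#include <stdio.h>

int main(void)
{
        int tpc = 8;                             /* example count passed to cpu_init_thread_core_maps() */
        int threads_per_core = tpc;
        int threads_shift = 0;

        while ((1 << threads_shift) < threads_per_core)
                threads_shift++;                 /* threads_shift = log2(threads_per_core) */

        /* cpu >> threads_shift gives the core number without a division */
        printf("threads_per_core=%d threads_shift=%d core(cpu 13)=%d\n",
               threads_per_core, threads_shift, 13 >> threads_shift);
        return 0;
}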
/Linux-v5.10/arch/powerpc/sysdev/
fsl_rcpm.c
148 if (threads_per_core == 2) { in rcpm_v2_cpu_die()
160 if (threads_per_core == 1) in rcpm_v2_cpu_die()
/Linux-v5.10/arch/ia64/kernel/
setup.c
727 if (c->threads_per_core > 1 || c->cores_per_socket > 1) in show_cpuinfo()
839 c->threads_per_core = c->cores_per_socket = c->num_log = 1; in identify_cpu()
844 if (c->threads_per_core > smp_num_siblings) in identify_cpu()
845 smp_num_siblings = c->threads_per_core; in identify_cpu()
smpboot.c
581 if (cpu_data(cpu)->threads_per_core == 1 && in remove_siblinginfo()
739 if (cpu_data(cpu)->threads_per_core == 1 && in __cpu_up()
814 c->threads_per_core = info.overview_tpc; in identify_siblings()
topology.c
145 if (cpu_data(cpu)->threads_per_core <= 1 && in cache_shared_cpu_map_setup()
/Linux-v5.10/arch/powerpc/platforms/pseries/
lpar.c
206 vcpu_associativity = kcalloc(num_possible_cpus() / threads_per_core, in init_cpu_associativity()
208 pcpu_associativity = kcalloc(NR_CPUS_H / threads_per_core, in init_cpu_associativity()
231 assoc = &cpu_assoc[(int)(cpu / threads_per_core) * VPHN_ASSOC_BUFSIZE]; in __get_cpu_associativity()
/Linux-v5.10/arch/ia64/include/asm/
processor.h
214 unsigned char threads_per_core; /* Threads per core */ member
/Linux-v5.10/tools/power/x86/turbostat/
turbostat.c
232 topo.nodes_per_pkg * topo.cores_per_node * topo.threads_per_core) + \
233 ((node_no) * topo.cores_per_node * topo.threads_per_core) + \
234 ((core_no) * topo.threads_per_core) + \
406 int threads_per_core; member
438 topo.threads_per_core; ++thread_no) { in for_all_cpus()
2815 topo.threads_per_core; ++thread_no) { in for_all_cpus_2()
5466 topo.threads_per_core = max_siblings; in topology_probe()
5494 int num_threads = topo.threads_per_core * num_cores; in allocate_counters()
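turbostat flattens the (package, node, core, thread) topology into one array index; the expression at turbostat.c lines 232–234 above is the tail of that row-major computation. A standalone sketch of the same arithmetic with made-up topology sizes:

#include <stdio.h>

struct topo {
        int nodes_per_pkg;
        int cores_per_node;
        int threads_per_core;
};

/* row-major index: packages vary slowest, threads fastest */
static int cpu_index(const struct topo *t, int pkg, int node, int core, int thread)
{
        return pkg  * t->nodes_per_pkg * t->cores_per_node * t->threads_per_core +
               node * t->cores_per_node * t->threads_per_core +
               core * t->threads_per_core +
               thread;
}

int main(void)
{
        struct topo topo = { .nodes_per_pkg = 1, .cores_per_node = 4, .threads_per_core = 2 };

        /* package 1, node 0, core 2, thread 1 -> index 13 on this example topology */
        printf("index = %d\n", cpu_index(&topo, 1, 0, 2, 1));
        return 0;
}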
/Linux-v5.10/arch/powerpc/mm/
numa.c
575 for (i = 0; i < threads_per_core; i++) { in verify_cpu_node_mapping()
/Linux-v5.10/drivers/cpufreq/
powernv-cpufreq.c
835 for (i = 0; i < threads_per_core; i++) in powernv_cpufreq_cpu_init()