Lines Matching refs: c — uses of the struct cpuinfo_x86 *c argument in the Linux kernel's arch/x86/kernel/cpu/amd.c

90 static void init_amd_k5(struct cpuinfo_x86 *c) in init_amd_k5() argument
102 if (c->x86_model == 9 || c->x86_model == 10) { in init_amd_k5()
109 static void init_amd_k6(struct cpuinfo_x86 *c) in init_amd_k6() argument
115 if (c->x86_model < 6) { in init_amd_k6()
117 if (c->x86_model == 0) { in init_amd_k6()
118 clear_cpu_cap(c, X86_FEATURE_APIC); in init_amd_k6()
119 set_cpu_cap(c, X86_FEATURE_PGE); in init_amd_k6()
124 if (c->x86_model == 6 && c->x86_stepping == 1) { in init_amd_k6()
153 if (c->x86_model < 8 || in init_amd_k6()
154 (c->x86_model == 8 && c->x86_stepping < 8)) { in init_amd_k6()
173 if ((c->x86_model == 8 && c->x86_stepping > 7) || in init_amd_k6()
174 c->x86_model == 9 || c->x86_model == 13) { in init_amd_k6()
195 if (c->x86_model == 10) { in init_amd_k6()
203 static void init_amd_k7(struct cpuinfo_x86 *c) in init_amd_k7() argument
213 if (c->x86_model >= 6 && c->x86_model <= 10) { in init_amd_k7()
214 if (!cpu_has(c, X86_FEATURE_XMM)) { in init_amd_k7()
217 set_cpu_cap(c, X86_FEATURE_XMM); in init_amd_k7()
226 if ((c->x86_model == 8 && c->x86_stepping >= 1) || (c->x86_model > 8)) { in init_amd_k7()
236 if (!c->cpu_index) in init_amd_k7()
244 if ((c->x86_model == 6) && ((c->x86_stepping == 0) || in init_amd_k7()
245 (c->x86_stepping == 1))) in init_amd_k7()
249 if ((c->x86_model == 7) && (c->x86_stepping == 0)) in init_amd_k7()
259 if (((c->x86_model == 6) && (c->x86_stepping >= 2)) || in init_amd_k7()
260 ((c->x86_model == 7) && (c->x86_stepping >= 1)) || in init_amd_k7()
261 (c->x86_model > 7)) in init_amd_k7()
262 if (cpu_has(c, X86_FEATURE_MP)) in init_amd_k7()
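
Several of the K7 checks above encode "model.stepping >= 8.1" as a chain like (c->x86_model == 8 && c->x86_stepping >= 1) || (c->x86_model > 8). The same predicate reads as one integer comparison on a packed (model, stepping) pair; a compilable illustration (the helper name is made up, not a kernel API):

#include <stdio.h>
#include <stdbool.h>

/* Hypothetical helper: pack (model, stepping) so that ordinary integer
 * comparison matches the chained model/stepping checks in the listing.
 * Valid because x86_stepping always fits in well under 8 bits. */
static bool model_stepping_at_least(unsigned int model, unsigned int stepping,
				    unsigned int min_model, unsigned int min_stepping)
{
	return (model << 8 | stepping) >= (min_model << 8 | min_stepping);
}

int main(void)
{
	/* Equivalent to "(model == 8 && stepping >= 1) || (model > 8)". */
	printf("%d\n", model_stepping_at_least(8, 1, 8, 1));	/* 1 */
	printf("%d\n", model_stepping_at_least(8, 0, 8, 1));	/* 0 */
	printf("%d\n", model_stepping_at_least(9, 0, 8, 1));	/* 1 */
	return 0;
}
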
305 static void legacy_fixup_core_id(struct cpuinfo_x86 *c) in legacy_fixup_core_id() argument
309 if (c->x86 >= 0x17) in legacy_fixup_core_id()
312 cus_per_node = c->x86_max_cores / nodes_per_socket; in legacy_fixup_core_id()
313 c->cpu_core_id %= cus_per_node; in legacy_fixup_core_id()
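
legacy_fixup_core_id() above makes pre-Fam17h core ids node-relative: on multi-node packages the id reported via CPUID is socket-wide, so it is reduced modulo the compute units per node. A minimal compilable model (nodes_per_socket is a file-scope kernel variable; here it is passed in):

#include <stdio.h>

/* Models legacy_fixup_core_id(): pre-Fam17h multi-node packages report
 * socket-wide core ids, so reduce them to be relative to the node. */
static unsigned int fixup_core_id(unsigned int family, unsigned int core_id,
				  unsigned int max_cores,
				  unsigned int nodes_per_socket)
{
	unsigned int cus_per_node;

	if (family >= 0x17)	/* Zen already reports node-relative ids */
		return core_id;

	cus_per_node = max_cores / nodes_per_socket;
	return core_id % cus_per_node;
}

int main(void)
{
	/* Fam 15h, 8 cores per socket across 2 nodes: core 5 -> core 1. */
	printf("core id %u\n", fixup_core_id(0x15, 5, 8, 2));
	return 0;
}
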
317 static void amd_get_topology_early(struct cpuinfo_x86 *c) in amd_get_topology_early() argument
319 if (cpu_has(c, X86_FEATURE_TOPOEXT)) in amd_get_topology_early()
329 static void amd_get_topology(struct cpuinfo_x86 *c) in amd_get_topology() argument
343 if (c->x86 == 0x15) in amd_get_topology()
344 c->cu_id = ebx & 0xff; in amd_get_topology()
346 if (c->x86 >= 0x17) { in amd_get_topology()
347 c->cpu_core_id = ebx & 0xff; in amd_get_topology()
350 c->x86_max_cores /= smp_num_siblings; in amd_get_topology()
357 err = detect_extended_topology(c); in amd_get_topology()
359 c->x86_coreid_bits = get_count_order(c->x86_max_cores); in amd_get_topology()
361 cacheinfo_amd_init_llc_id(c, cpu, node_id); in amd_get_topology()
363 } else if (cpu_has(c, X86_FEATURE_NODEID_MSR)) { in amd_get_topology()
374 set_cpu_cap(c, X86_FEATURE_AMD_DCM); in amd_get_topology()
375 legacy_fixup_core_id(c); in amd_get_topology()
383 static void amd_detect_cmp(struct cpuinfo_x86 *c) in amd_detect_cmp() argument
388 bits = c->x86_coreid_bits; in amd_detect_cmp()
390 c->cpu_core_id = c->initial_apicid & ((1 << bits)-1); in amd_detect_cmp()
392 c->phys_proc_id = c->initial_apicid >> bits; in amd_detect_cmp()
394 per_cpu(cpu_llc_id, cpu) = c->phys_proc_id; in amd_detect_cmp()
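
The amd_detect_cmp() lines above show how the compute topology is derived from the initial APIC ID: the low x86_coreid_bits select the core within a package, and the remaining high bits select the physical package. A compilable sketch of just that arithmetic, using a stand-in struct with only the fields referenced above (field names follow the kernel's cpuinfo_x86; everything else is illustrative):

#include <stdio.h>

/* Stand-in for the handful of cpuinfo_x86 fields used above. */
struct cpuinfo_stub {
	unsigned int initial_apicid;
	unsigned int x86_coreid_bits;
	unsigned int cpu_core_id;
	unsigned int phys_proc_id;
};

/* Mirrors the amd_detect_cmp() arithmetic shown in the listing:
 * low coreid bits -> core id, remaining high bits -> package id. */
static void detect_cmp(struct cpuinfo_stub *c)
{
	unsigned int bits = c->x86_coreid_bits;

	c->cpu_core_id  = c->initial_apicid & ((1u << bits) - 1);
	c->phys_proc_id = c->initial_apicid >> bits;
}

int main(void)
{
	/* APIC ID 0x13 with 2 coreid bits: core 3 of package 4. */
	struct cpuinfo_stub c = { .initial_apicid = 0x13, .x86_coreid_bits = 2 };

	detect_cmp(&c);
	printf("core %u, package %u\n", c.cpu_core_id, c.phys_proc_id);
	return 0;
}
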
409 static void srat_detect_node(struct cpuinfo_x86 *c) in srat_detect_node() argument
414 unsigned apicid = c->apicid; in srat_detect_node()
426 x86_cpuinit.fixup_cpu_id(c, node); in srat_detect_node()
448 int ht_nodeid = c->initial_apicid; in srat_detect_node()
460 static void early_init_amd_mc(struct cpuinfo_x86 *c) in early_init_amd_mc() argument
466 if (c->extended_cpuid_level < 0x80000008) in early_init_amd_mc()
471 c->x86_max_cores = (ecx & 0xff) + 1; in early_init_amd_mc()
478 while ((1 << bits) < c->x86_max_cores) in early_init_amd_mc()
482 c->x86_coreid_bits = bits; in early_init_amd_mc()
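
early_init_amd_mc() above derives the core count and core-id width from CPUID leaf 0x80000008 ECX. Only the lines referencing c appear in the listing; the ECX[15:12] ApicIdCoreIdSize read between them is filled in from the mainline function. A compilable model:

#include <stdio.h>

/* Models early_init_amd_mc(): derive the core count and the number of
 * APIC-ID bits used for the core id from CPUID 0x80000008 ECX. */
static void init_mc(unsigned int ecx,
		    unsigned int *max_cores, unsigned int *coreid_bits)
{
	unsigned int bits;

	*max_cores = (ecx & 0xff) + 1;	/* ECX[7:0] = NC (cores - 1) */

	bits = (ecx >> 12) & 0xf;	/* ECX[15:12] = ApicIdCoreIdSize */
	if (bits == 0) {
		/* Older parts report 0: use ceil(log2(max_cores)). */
		while ((1u << bits) < *max_cores)
			bits++;
	}
	*coreid_bits = bits;
}

int main(void)
{
	unsigned int cores, bits;

	init_mc(0x0005, &cores, &bits);	/* NC=5 -> 6 cores, 3 id bits */
	printf("%u cores, %u coreid bits\n", cores, bits);
	return 0;
}
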
486 static void bsp_init_amd(struct cpuinfo_x86 *c) in bsp_init_amd() argument
490 if (c->x86 >= 0xf) { in bsp_init_amd()
508 if (cpu_has(c, X86_FEATURE_CONSTANT_TSC)) { in bsp_init_amd()
510 if (c->x86 > 0x10 || in bsp_init_amd()
511 (c->x86 == 0x10 && c->x86_model >= 0x2)) { in bsp_init_amd()
520 if (c->x86 == 0x15) { in bsp_init_amd()
535 if (cpu_has(c, X86_FEATURE_MWAITX)) in bsp_init_amd()
552 c->x86 >= 0x15 && c->x86 <= 0x17) { in bsp_init_amd()
555 switch (c->x86) { in bsp_init_amd()
573 static void early_detect_mem_encrypt(struct cpuinfo_x86 *c) in early_detect_mem_encrypt() argument
589 if (cpu_has(c, X86_FEATURE_SME) || cpu_has(c, X86_FEATURE_SEV)) { in early_detect_mem_encrypt()
600 c->x86_phys_bits -= (cpuid_ebx(0x8000001f) >> 6) & 0x3f; in early_detect_mem_encrypt()
612 clear_cpu_cap(c, X86_FEATURE_SME); in early_detect_mem_encrypt()
614 clear_cpu_cap(c, X86_FEATURE_SEV); in early_detect_mem_encrypt()
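
The x86_phys_bits adjustment above is the SME/SEV address-space cost: CPUID 0x8000001f EBX[11:6] enumerates how many physical address bits memory encryption consumes, and early_detect_mem_encrypt() subtracts them. A compilable model of the field extraction:

#include <stdio.h>

/* Models the x86_phys_bits adjustment in early_detect_mem_encrypt():
 * CPUID 0x8000001f EBX[11:6] gives the physical address bit reduction
 * imposed by memory encryption (SME/SEV). */
static unsigned int adjust_phys_bits(unsigned int phys_bits, unsigned int ebx)
{
	return phys_bits - ((ebx >> 6) & 0x3f);
}

int main(void)
{
	/* e.g. 48 physical bits, EBX[11:6] = 5 -> 43 usable bits. */
	printf("%u\n", adjust_phys_bits(48, 5u << 6));
	return 0;
}
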
618 static void early_init_amd(struct cpuinfo_x86 *c) in early_init_amd() argument
623 early_init_amd_mc(c); in early_init_amd()
626 if (c->x86 == 6) in early_init_amd()
627 set_cpu_cap(c, X86_FEATURE_K7); in early_init_amd()
630 if (c->x86 >= 0xf) in early_init_amd()
631 set_cpu_cap(c, X86_FEATURE_K8); in early_init_amd()
633 rdmsr_safe(MSR_AMD64_PATCH_LEVEL, &c->microcode, &dummy); in early_init_amd()
639 if (c->x86_power & (1 << 8)) { in early_init_amd()
640 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_amd()
641 set_cpu_cap(c, X86_FEATURE_NONSTOP_TSC); in early_init_amd()
645 if (c->x86_power & BIT(12)) in early_init_amd()
646 set_cpu_cap(c, X86_FEATURE_ACC_POWER); in early_init_amd()
649 set_cpu_cap(c, X86_FEATURE_SYSCALL32); in early_init_amd()
652 if (c->x86 == 5) in early_init_amd()
653 if (c->x86_model == 13 || c->x86_model == 9 || in early_init_amd()
654 (c->x86_model == 8 && c->x86_stepping >= 8)) in early_init_amd()
655 set_cpu_cap(c, X86_FEATURE_K6_MTRR); in early_init_amd()
665 if (c->x86 > 0x16) in early_init_amd()
666 set_cpu_cap(c, X86_FEATURE_EXTD_APICID); in early_init_amd()
667 else if (c->x86 >= 0xf) { in early_init_amd()
673 set_cpu_cap(c, X86_FEATURE_EXTD_APICID); in early_init_amd()
683 set_cpu_cap(c, X86_FEATURE_VMMCALL); in early_init_amd()
686 if (c->x86 == 0x16 && c->x86_model <= 0xf) in early_init_amd()
695 if (cpu_has_amd_erratum(c, amd_erratum_400)) in early_init_amd()
696 set_cpu_bug(c, X86_BUG_AMD_E400); in early_init_amd()
698 early_detect_mem_encrypt(c); in early_init_amd()
701 if (c->x86 == 0x15 && in early_init_amd()
702 (c->x86_model >= 0x10 && c->x86_model <= 0x6f) && in early_init_amd()
703 !cpu_has(c, X86_FEATURE_TOPOEXT)) { in early_init_amd()
708 set_cpu_cap(c, X86_FEATURE_TOPOEXT); in early_init_amd()
714 amd_get_topology_early(c); in early_init_amd()
717 static void init_amd_k8(struct cpuinfo_x86 *c) in init_amd_k8() argument
725 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_amd_k8()
732 if (c->x86_model < 0x14 && cpu_has(c, X86_FEATURE_LAHF_LM)) { in init_amd_k8()
733 clear_cpu_cap(c, X86_FEATURE_LAHF_LM); in init_amd_k8()
740 if (!c->x86_model_id[0]) in init_amd_k8()
741 strcpy(c->x86_model_id, "Hammer"); in init_amd_k8()
753 set_cpu_bug(c, X86_BUG_SWAPGS_FENCE); in init_amd_k8()
756 static void init_amd_gh(struct cpuinfo_x86 *c) in init_amd_gh() argument
760 if (c == &boot_cpu_data) in init_amd_gh()
787 if (cpu_has_amd_erratum(c, amd_erratum_383)) in init_amd_gh()
788 set_cpu_bug(c, X86_BUG_AMD_TLB_MMATCH); in init_amd_gh()
793 static void init_amd_ln(struct cpuinfo_x86 *c) in init_amd_ln() argument
802 static void init_amd_bd(struct cpuinfo_x86 *c) in init_amd_bd() argument
810 if ((c->x86_model >= 0x02) && (c->x86_model < 0x20)) { in init_amd_bd()
818 static void init_amd_zn(struct cpuinfo_x86 *c) in init_amd_zn() argument
820 set_cpu_cap(c, X86_FEATURE_ZEN); in init_amd_zn()
825 if (c->x86_model <= 1 && c->x86_stepping <= 1) in init_amd_zn()
826 set_cpu_cap(c, X86_FEATURE_CPB); in init_amd_zn()
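
Assembled from its two fragments above, init_amd_zn() is the whole Zen bring-up: tag the synthetic ZEN feature, then force-set CPB on early parts (model <= 1, stepping <= 1) where CPUID omits it; mainline's comment attributes this to erratum 1076. A compilable model with a toy capability mask standing in for the kernel's feature arrays:

#include <stdio.h>

/* Toy capability mask standing in for the kernel's per-CPU feature bits. */
enum { FEAT_ZEN = 1u << 0, FEAT_CPB = 1u << 1 };

struct cpu_stub {
	unsigned int x86_model;
	unsigned int x86_stepping;
	unsigned int caps;
};

/* Models init_amd_zn(): tag the CPU as Zen, then force-set CPB on
 * early parts where CPUID fails to enumerate it. */
static void init_zn(struct cpu_stub *c)
{
	c->caps |= FEAT_ZEN;
	if (c->x86_model <= 1 && c->x86_stepping <= 1)
		c->caps |= FEAT_CPB;
}

int main(void)
{
	struct cpu_stub c = { .x86_model = 1, .x86_stepping = 1 };

	init_zn(&c);
	printf("CPB forced: %s\n", (c.caps & FEAT_CPB) ? "yes" : "no");
	return 0;
}
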
829 static void init_amd(struct cpuinfo_x86 *c) in init_amd() argument
831 early_init_amd(c); in init_amd()
837 clear_cpu_cap(c, 0*32+31); in init_amd()
839 if (c->x86 >= 0x10) in init_amd()
840 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_amd()
843 c->apicid = hard_smp_processor_id(); in init_amd()
846 if (c->x86 < 6) in init_amd()
847 clear_cpu_cap(c, X86_FEATURE_MCE); in init_amd()
849 switch (c->x86) { in init_amd()
850 case 4: init_amd_k5(c); break; in init_amd()
851 case 5: init_amd_k6(c); break; in init_amd()
852 case 6: init_amd_k7(c); break; in init_amd()
853 case 0xf: init_amd_k8(c); break; in init_amd()
854 case 0x10: init_amd_gh(c); break; in init_amd()
855 case 0x12: init_amd_ln(c); break; in init_amd()
856 case 0x15: init_amd_bd(c); break; in init_amd()
857 case 0x17: init_amd_zn(c); break; in init_amd()
864 if ((c->x86 >= 6) && (!cpu_has(c, X86_FEATURE_XSAVEERPTR))) in init_amd()
865 set_cpu_bug(c, X86_BUG_FXSAVE_LEAK); in init_amd()
867 cpu_detect_cache_sizes(c); in init_amd()
869 amd_detect_cmp(c); in init_amd()
870 amd_get_topology(c); in init_amd()
871 srat_detect_node(c); in init_amd()
873 init_amd_cacheinfo(c); in init_amd()
875 if (cpu_has(c, X86_FEATURE_XMM2)) { in init_amd()
897 set_cpu_cap(c, X86_FEATURE_LFENCE_RDTSC); in init_amd()
900 set_cpu_cap(c, X86_FEATURE_MFENCE_RDTSC); in init_amd()
908 if (c->x86 > 0x11) in init_amd()
909 set_cpu_cap(c, X86_FEATURE_ARAT); in init_amd()
912 if (!cpu_has(c, X86_FEATURE_3DNOWPREFETCH)) in init_amd()
913 if (cpu_has(c, X86_FEATURE_3DNOW) || cpu_has(c, X86_FEATURE_LM)) in init_amd()
914 set_cpu_cap(c, X86_FEATURE_3DNOWPREFETCH); in init_amd()
917 if (!cpu_has(c, X86_FEATURE_XENPV)) in init_amd()
918 set_cpu_bug(c, X86_BUG_SYSRET_SS_ATTRS); in init_amd()
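
The switch in init_amd() above is the per-family dispatch: common setup runs before and after it, and one case per family number (4 through 0x17) applies that family's quirks, with no default so unknown families simply skip the hook. A compilable model of the shape (family hooks reduced to two stubs):

#include <stdio.h>

struct cpu_stub { unsigned int x86; };

/* Stubs standing in for the per-family init_amd_*() fixups. */
static void fixup_k8(struct cpu_stub *c)  { (void)c; puts("Fam 0xf quirks"); }
static void fixup_zen(struct cpu_stub *c) { (void)c; puts("Fam 0x17 quirks"); }

/* Models the init_amd() dispatch: one case per family, no default,
 * so families without known quirks fall straight through. */
static void dispatch(struct cpu_stub *c)
{
	switch (c->x86) {
	case 0xf:  fixup_k8(c);  break;
	case 0x17: fixup_zen(c); break;
	}
}

int main(void)
{
	struct cpu_stub c = { .x86 = 0x17 };

	dispatch(&c);
	return 0;
}
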
922 static unsigned int amd_size_cache(struct cpuinfo_x86 *c, unsigned int size) in amd_size_cache() argument
925 if (c->x86 == 6) { in amd_size_cache()
927 if (c->x86_model == 3 && c->x86_stepping == 0) in amd_size_cache()
930 if (c->x86_model == 4 && in amd_size_cache()
931 (c->x86_stepping == 0 || c->x86_stepping == 1)) in amd_size_cache()
938 static void cpu_detect_tlb_amd(struct cpuinfo_x86 *c) in cpu_detect_tlb_amd() argument
943 if (c->x86 < 0xf) in cpu_detect_tlb_amd()
946 if (c->extended_cpuid_level < 0x80000006) in cpu_detect_tlb_amd()
958 if (c->x86 == 0xf) { in cpu_detect_tlb_amd()
975 if (c->x86 == 0x15 && c->x86_model <= 0x1f) { in cpu_detect_tlb_amd()