Lines Matching +full:0 +full:x86 (the hits below are from the AMD CPU setup code, arch/x86/kernel/cpu/amd.c, in the Linux kernel)

34 	u32 gprs[8] = { 0 };  in rdmsrl_amd_safe()
37 WARN_ONCE((boot_cpu_data.x86 != 0xf), in rdmsrl_amd_safe()
41 gprs[7] = 0x9c5a203a; in rdmsrl_amd_safe()
45 *p = gprs[0] | ((u64)gprs[2] << 32); in rdmsrl_amd_safe()
52 u32 gprs[8] = { 0 }; in wrmsrl_amd_safe()
54 WARN_ONCE((boot_cpu_data.x86 != 0xf), in wrmsrl_amd_safe()
57 gprs[0] = (u32)val; in wrmsrl_amd_safe()
60 gprs[7] = 0x9c5a203a; in wrmsrl_amd_safe()
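
The two helpers above, rdmsrl_amd_safe() and wrmsrl_amd_safe(), are the K8 northbridge MSR accessors. A minimal sketch of the read side, reconstructed around the quoted lines; the statements not quoted above are assumptions based on the mainline source:

/* Sketch: the AMD "passcode" 0x9c5a203a goes in %edi so rdmsr_safe_regs()
 * can access K8 NB-only MSRs without faulting. */
static int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
{
	u32 gprs[8] = { 0 };
	int err;

	WARN_ONCE((boot_cpu_data.x86 != 0xf),
		  "%s should only be used on K8!\n", __func__);

	gprs[1] = msr;		/* %ecx: MSR number */
	gprs[7] = 0x9c5a203a;	/* %edi: AMD passcode */

	err = rdmsr_safe_regs(gprs);

	*p = gprs[0] | ((u64)gprs[2] << 32);	/* EDX:EAX -> 64-bit value */

	return err;
}

The write side mirrors this, splitting the 64-bit value into gprs[0] and gprs[2] before calling wrmsr_safe_regs().
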
93 * of the Elan at 0x000df000. Unfortunately, one of the Linux in init_amd_k5()
97 #define CBAR (0xfffc) /* Configuration Base Address (32-bit) */ in init_amd_k5()
98 #define CBAR_ENB (0x80000000) in init_amd_k5()
99 #define CBAR_KEY (0X000000CB) in init_amd_k5()
102 outl(0 | CBAR_KEY, CBAR); in init_amd_k5()
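
The init_amd_k5() hits are the Elan CBAR aliasing workaround: if the BIOS left the Configuration Base Address register enabled, it is rewritten with the unlock key and a zero base so the Elan registers stop aliasing an address a Linux ISA probe touches. A sketch around the quoted lines; the model gate is an assumption from the mainline source:

#define CBAR		(0xfffc)	/* Configuration Base Address (32-bit) */
#define CBAR_ENB	(0x80000000)
#define CBAR_KEY	(0X000000CB)

	if (c->x86_model == 9 || c->x86_model == 10) {	/* assumed model gate */
		if (inl(CBAR) & CBAR_ENB)
			outl(0 | CBAR_KEY, CBAR);
	}
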
115 if (c->x86_model == 0) { in init_amd_k6()
158 if ((l&0x0000FFFF) == 0) { in init_amd_k6()
160 l = (1<<0)|((mbytes/4)<<1); in init_amd_k6()
179 if ((l&0xFFFF0000) == 0) { in init_amd_k6()
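
The init_amd_k6() hits are the K6 write-allocation setup: MSR_K6_WHCR is only reprogrammed when the relevant field still reads zero. A sketch of the older-style path matching the first two hits; the surrounding gating and the message text are assumptions from the mainline source:

	rdmsr(MSR_K6_WHCR, l, h);
	if ((l & 0x0000FFFF) == 0) {		/* write allocation not enabled yet */
		unsigned long flags;

		l = (1<<0) | ((mbytes/4)<<1);	/* enable bit + size in 4 MB units */
		local_irq_save(flags);
		wbinvd();			/* flush caches before changing WHCR */
		wrmsr(MSR_K6_WHCR, l, h);
		local_irq_restore(flags);
		pr_info("Enabling old style K6 write allocation for %d Mb\n", mbytes);
	}

Later K6 steppings use the upper half of WHCR instead, which is what the third hit ((l&0xFFFF0000) == 0) checks.
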
207 * Bit 15 of Athlon specific MSR 15, needs to be 0 in init_amd_k7()
226 if ((l & 0xfff00000) != 0x20000000) { in init_amd_k7()
228 l, ((l & 0x000fffff)|0x20000000)); in init_amd_k7()
229 wrmsr(MSR_K7_CLK_CTL, (l & 0x000fffff)|0x20000000, h); in init_amd_k7()
242 if ((c->x86_model == 6) && ((c->x86_stepping == 0) || in init_amd_k7()
247 if ((c->x86_model == 7) && (c->x86_stepping == 0)) in init_amd_k7()
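
The CLK_CTL hits in init_amd_k7() follow AMD's guidance that Athlons from model 8 stepping 1 onward are more stable with CLK_CTL programmed to 0x200xxxxx instead of 0x600xxxxx. A sketch around the quoted lines; the model/stepping gate is an assumption from the mainline source:

	if ((c->x86_model == 8 && c->x86_stepping >= 1) || (c->x86_model > 8)) {
		rdmsr(MSR_K7_CLK_CTL, l, h);
		if ((l & 0xfff00000) != 0x20000000) {
			/* keep the low 20 bits, force the 0x200xxxxx pattern */
			pr_info("CPU: CLK_CTL MSR was %x. Reprogramming to %x\n",
				l, ((l & 0x000fffff) | 0x20000000));
			wrmsr(MSR_K7_CLK_CTL, (l & 0x000fffff) | 0x20000000, h);
		}
	}
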
284 for (i = apicid - 1; i >= 0; i--) { in nearby_node()
361 c->x86 >= 0x19 && snp_probe_rmptable_info()) { in bsp_determine_snp()
375 if (c->x86 > 0x10 || in bsp_init_amd()
376 (c->x86 == 0x10 && c->x86_model >= 0x2)) { in bsp_init_amd()
385 if (c->x86 == 0x15) { in bsp_init_amd()
389 cpuid = cpuid_edx(0x80000005); in bsp_init_amd()
390 assoc = cpuid >> 16 & 0xff; in bsp_init_amd()
405 c->x86 >= 0x15 && c->x86 <= 0x17) { in bsp_init_amd()
408 switch (c->x86) { in bsp_init_amd()
409 case 0x15: bit = 54; break; in bsp_init_amd()
410 case 0x16: bit = 33; break; in bsp_init_amd()
411 case 0x17: bit = 10; break; in bsp_init_amd()
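
The bit = 54/33/10 selection in bsp_init_amd() picks the per-family LS_CFG bit used to implement Speculative Store Bypass Disable on CPUs without the architectural SSBD interface. A sketch of that branch; the outer guard and the caching of the MSR base value are assumptions from the mainline source:

	if (!boot_cpu_has(X86_FEATURE_AMD_SSBD) &&
	    !boot_cpu_has(X86_FEATURE_VIRT_SSBD) &&
	    c->x86 >= 0x15 && c->x86 <= 0x17) {
		unsigned int bit;

		switch (c->x86) {
		case 0x15: bit = 54; break;
		case 0x16: bit = 33; break;
		case 0x17: bit = 10; break;
		default: return;
		}

		/* cache the LS_CFG base value; if the read faults, leave SSBD off */
		if (!rdmsrl_safe(MSR_AMD64_LS_CFG, &x86_amd_ls_cfg_base)) {
			setup_force_cpu_cap(X86_FEATURE_LS_CFG_SSBD);
			setup_force_cpu_cap(X86_FEATURE_SSBD);
			x86_amd_ls_cfg_ssbd_mask = 1ULL << bit;
		}
	}
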
428 switch (c->x86) { in bsp_init_amd()
429 case 0x17: in bsp_init_amd()
431 case 0x00 ... 0x2f: in bsp_init_amd()
432 case 0x50 ... 0x5f: in bsp_init_amd()
435 case 0x30 ... 0x4f: in bsp_init_amd()
436 case 0x60 ... 0x7f: in bsp_init_amd()
437 case 0x90 ... 0x91: in bsp_init_amd()
438 case 0xa0 ... 0xaf: in bsp_init_amd()
446 case 0x19: in bsp_init_amd()
448 case 0x00 ... 0x0f: in bsp_init_amd()
449 case 0x20 ... 0x5f: in bsp_init_amd()
452 case 0x10 ... 0x1f: in bsp_init_amd()
453 case 0x60 ... 0xaf: in bsp_init_amd()
461 case 0x1a: in bsp_init_amd()
463 case 0x00 ... 0x2f: in bsp_init_amd()
464 case 0x40 ... 0x4f: in bsp_init_amd()
465 case 0x60 ... 0x7f: in bsp_init_amd()
481 WARN_ONCE(1, "Family 0x%x, model: 0x%x??\n", c->x86, c->x86_model); in bsp_init_amd()
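
The long run of model-range cases in bsp_init_amd() is the Zen generation detection: each family/model window force-sets one synthetic X86_FEATURE_ZENn flag, and anything unrecognized reaches the WARN_ONCE() in the last hit. A sketch of the family 0x17 arm; the 0x19 and 0x1a arms follow the same shape with ZEN3/ZEN4 and ZEN5:

	switch (c->x86) {
	case 0x17:
		switch (c->x86_model) {
		case 0x00 ... 0x2f:
		case 0x50 ... 0x5f:
			setup_force_cpu_cap(X86_FEATURE_ZEN1);
			break;
		case 0x30 ... 0x4f:
		case 0x60 ... 0x7f:
		case 0x90 ... 0x91:
		case 0xa0 ... 0xaf:
			setup_force_cpu_cap(X86_FEATURE_ZEN2);
			break;
		default:
			goto warn;	/* unknown model, warn once */
		}
		break;
	/* case 0x19 and case 0x1a are handled analogously */
	}
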
513 c->x86_phys_bits -= (cpuid_ebx(0x8000001f) >> 6) & 0x3f; in early_detect_mem_encrypt()
540 if (c->x86 >= 0xf) in early_init_amd()
566 if (c->x86 == 5) in early_init_amd()
574 * >= 0x10, but even old K8s came out of reset with version 0x10. So, we in early_init_amd()
579 if (c->x86 > 0x16) in early_init_amd()
581 else if (c->x86 >= 0xf) { in early_init_amd()
585 val = read_pci_config(0, 24, 0, 0x68); in early_init_amd()
586 if ((val >> 17 & 0x3) == 0x3) in early_init_amd()
600 if (c->x86 == 0x16 && c->x86_model <= 0xf) in early_init_amd()
606 if (c->x86 == 0x17 && boot_cpu_has(X86_FEATURE_AMD_IBPB)) in early_init_amd()
608 else if (c->x86 >= 0x19 && !wrmsrl_safe(MSR_IA32_PRED_CMD, PRED_CMD_SBPB)) { in early_init_amd()
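
The last two early_init_amd() hits probe for the SRSO mitigation capability: Zen1/Zen2 (family 0x17) parts with AMD_IBPB get IBPB_BRTYPE implicitly, while family 0x19 and later are tested by attempting a Selective Branch Predictor Barrier write. A sketch; the outer guard is an assumption from the mainline source:

	if (!cpu_has(c, X86_FEATURE_IBPB_BRTYPE)) {
		if (c->x86 == 0x17 && boot_cpu_has(X86_FEATURE_AMD_IBPB))
			setup_force_cpu_cap(X86_FEATURE_IBPB_BRTYPE);
		else if (c->x86 >= 0x19 &&
			 !wrmsrl_safe(MSR_IA32_PRED_CMD, PRED_CMD_SBPB)) {
			/* the SBPB write did not fault, so both are usable */
			setup_force_cpu_cap(X86_FEATURE_IBPB_BRTYPE);
			setup_force_cpu_cap(X86_FEATURE_SBPB);
		}
	}
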
622 if ((level >= 0x0f48 && level < 0x0f50) || level >= 0x0f58) in init_amd_k8()
627 * (model = 0x14) and later actually support it. in init_amd_k8()
630 if (c->x86_model < 0x14 && cpu_has(c, X86_FEATURE_LAHF_LM)) { in init_amd_k8()
632 if (!rdmsrl_amd_safe(0xc001100d, &value)) { in init_amd_k8()
634 wrmsrl_amd_safe(0xc001100d, value); in init_amd_k8()
638 if (!c->x86_model_id[0]) in init_amd_k8()
659 if (c->x86_model > 0x41 || in init_amd_k8()
660 (c->x86_model == 0x41 && c->x86_stepping >= 0x2)) in init_amd_k8()
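
In init_amd_k8(), the 0xc001100d accesses implement the fix for AMD erratum #110: some BIOSes force the LAHF_LM CPUID bit on pre-revision-D K8s, so the kernel clears the feature and drops the override, using the amd-safe MSR helpers quoted at the top of this listing. A sketch; the exact bit cleared is an assumption from the mainline source:

	if (c->x86_model < 0x14 && cpu_has(c, X86_FEATURE_LAHF_LM)) {
		clear_cpu_cap(c, X86_FEATURE_LAHF_LM);
		if (!rdmsrl_amd_safe(0xc001100d, &value)) {
			value &= ~BIT_64(32);	/* drop the forced LAHF/SAHF enable */
			wrmsrl_amd_safe(0xc001100d, value);
		}
	}
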
703 if (c->x86_model > 0x2 || in init_amd_gh()
704 (c->x86_model == 0x2 && c->x86_stepping >= 0x1)) in init_amd_gh()
729 return 0; in rdrand_cmdline()
737 * suspend/resume is done by arch/x86/power/cpu.c, which is in clear_rdrand_cpuid_bit()
783 if ((c->x86_model >= 0x02) && (c->x86_model < 0x20)) { in init_amd_bd()
784 if (!rdmsrl_safe(MSR_F15H_IC_CFG, &value) && !(value & 0x1E)) { in init_amd_bd()
785 value |= 0x1E; in init_amd_bd()
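
The init_amd_bd() hits disable the instruction-cache way access filter on family 0x15 models 0x02-0x1f, where it hurts performance on some workloads: bits [4:1] of MSR_F15H_IC_CFG are set if they are not already. A sketch around the quoted lines:

	if ((c->x86_model >= 0x02) && (c->x86_model < 0x20)) {
		if (!rdmsrl_safe(MSR_F15H_IC_CFG, &value) && !(value & 0x1E)) {
			value |= 0x1E;		/* set the four filter-disable bits */
			wrmsrl_safe(MSR_F15H_IC_CFG, value);
		}
	}
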
799 AMD_CPU_DESC(0x17, 0x1, 0x2, 0x0800126e),
800 AMD_CPU_DESC(0x17, 0x31, 0x0, 0x08301052),
868 u32 good_rev = 0; in cpu_has_zenbleed_microcode()
871 case 0x30 ... 0x3f: good_rev = 0x0830107b; break; in cpu_has_zenbleed_microcode()
872 case 0x60 ... 0x67: good_rev = 0x0860010c; break; in cpu_has_zenbleed_microcode()
873 case 0x68 ... 0x6f: good_rev = 0x08608107; break; in cpu_has_zenbleed_microcode()
874 case 0x70 ... 0x7f: good_rev = 0x08701033; break; in cpu_has_zenbleed_microcode()
875 case 0xa0 ... 0xaf: good_rev = 0x08a00009; break; in cpu_has_zenbleed_microcode()
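
cpu_has_zenbleed_microcode() maps each affected family 0x17 model range to the first microcode revision carrying the Zenbleed fix and compares it with the loaded revision. A sketch of the whole helper; the final comparison is an assumption from the mainline source:

static bool cpu_has_zenbleed_microcode(void)
{
	u32 good_rev = 0;

	switch (boot_cpu_data.x86_model) {
	case 0x30 ... 0x3f: good_rev = 0x0830107b; break;
	case 0x60 ... 0x67: good_rev = 0x0860010c; break;
	case 0x68 ... 0x6f: good_rev = 0x08608107; break;
	case 0x70 ... 0x7f: good_rev = 0x08701033; break;
	case 0xa0 ... 0xaf: good_rev = 0x08a00009; break;
	default:
		return false;	/* unknown model range */
	}

	/* fixed only if the loaded microcode is at least the known-good revision */
	if (boot_cpu_data.microcode < good_rev)
		return false;

	return true;
}
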
914 * Zen3 (Fam19 model < 0x10) parts are not susceptible to in init_amd_zen3()
943 clear_cpu_cap(c, 0*32+31); in init_amd()
945 if (c->x86 >= 0x10) in init_amd()
953 if (c->x86 < 6) in init_amd()
956 switch (c->x86) { in init_amd()
960 case 0xf: init_amd_k8(c); break; in init_amd()
961 case 0x10: init_amd_gh(c); break; in init_amd()
962 case 0x12: init_amd_ln(c); break; in init_amd()
963 case 0x15: init_amd_bd(c); break; in init_amd()
964 case 0x16: init_amd_jg(c); break; in init_amd()
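
The switch above is init_amd()'s per-family dispatch; the K5/K6/K7 cases are not hits for this search but complete the picture (reconstructed from the mainline source):

	switch (c->x86) {
	case 4:    init_amd_k5(c); break;
	case 5:    init_amd_k6(c); break;
	case 6:    init_amd_k7(c); break;
	case 0xf:  init_amd_k8(c); break;
	case 0x10: init_amd_gh(c); break;
	case 0x12: init_amd_ln(c); break;
	case 0x15: init_amd_bd(c); break;
	case 0x16: init_amd_jg(c); break;
	}
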
971 if (c->x86 >= 0x17) in init_amd()
989 if ((c->x86 >= 6) && (!cpu_has(c, X86_FEATURE_XSAVEERPTR))) in init_amd()
1021 * Family 0x12 and above processors have APIC timer in init_amd()
1024 if (c->x86 > 0x11) in init_amd()
1042 (boot_cpu_has(X86_FEATURE_ZEN1) && c->x86_model > 0x2f)) in init_amd()
1066 if (c->x86 == 6) { in amd_size_cache()
1068 if (c->x86_model == 3 && c->x86_stepping == 0) in amd_size_cache()
1072 (c->x86_stepping == 0 || c->x86_stepping == 1)) in amd_size_cache()
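
The amd_size_cache() hits implement the L2 size override for AMD errata T13: early Duron (model 3 stepping 0) and Thunderbird (model 4 steppings 0/1) parts report the wrong cache size. A sketch of the whole helper, reconstructed from the mainline source:

static unsigned int amd_size_cache(struct cpuinfo_x86 *c, unsigned int size)
{
	/* AMD errata T13 */
	if (c->x86 == 6) {
		/* Duron Rev A0 */
		if (c->x86_model == 3 && c->x86_stepping == 0)
			size = 64;
		/* Tbird rev A1/A2 */
		if (c->x86_model == 4 &&
		    (c->x86_stepping == 0 || c->x86_stepping == 1))
			size = 256;
	}
	return size;
}
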
1082 u16 mask = 0xfff; in cpu_detect_tlb_amd()
1084 if (c->x86 < 0xf) in cpu_detect_tlb_amd()
1087 if (c->extended_cpuid_level < 0x80000006) in cpu_detect_tlb_amd()
1090 cpuid(0x80000006, &eax, &ebx, &ecx, &edx); in cpu_detect_tlb_amd()
1097 * characteristics from the CPUID function 0x80000005 instead. in cpu_detect_tlb_amd()
1099 if (c->x86 == 0xf) { in cpu_detect_tlb_amd()
1100 cpuid(0x80000005, &eax, &ebx, &ecx, &edx); in cpu_detect_tlb_amd()
1101 mask = 0xff; in cpu_detect_tlb_amd()
1106 tlb_lld_2m[ENTRIES] = (cpuid_eax(0x80000005) >> 16) & 0xff; in cpu_detect_tlb_amd()
1116 if (c->x86 == 0x15 && c->x86_model <= 0x1f) { in cpu_detect_tlb_amd()
1119 cpuid(0x80000005, &eax, &ebx, &ecx, &edx); in cpu_detect_tlb_amd()
1120 tlb_lli_2m[ENTRIES] = eax & 0xff; in cpu_detect_tlb_amd()
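
cpu_detect_tlb_amd() reads the L2 TLB sizes from CPUID leaf 0x80000006, but K8 (family 0xf) has no 2M/4M entries there, so it falls back to the L1 leaf 0x80000005 and narrows the field mask from 0xfff to 0xff; family 0x15 models up to 0x1f instead hard-code 1024 ITLB 2M entries as an erratum workaround. A sketch of the mask/fallback logic, with the variable declarations and the 4K handling omitted:

	cpuid(0x80000006, &eax, &ebx, &ecx, &edx);

	/* K8: 2M/4M info only exists in the L1 leaf, whose fields are 8 bits wide */
	if (c->x86 == 0xf) {
		cpuid(0x80000005, &eax, &ebx, &ecx, &edx);
		mask = 0xff;
	}

	if (!((eax >> 16) & mask))	/* L2 DTLB disabled: fall back to L1 */
		tlb_lld_2m[ENTRIES] = (cpuid_eax(0x80000005) >> 16) & 0xff;
	else
		tlb_lld_2m[ENTRIES] = (eax >> 16) & mask;

	if (!(eax & mask)) {		/* L2 ITLB disabled */
		if (c->x86 == 0x15 && c->x86_model <= 0x1f) {
			tlb_lli_2m[ENTRIES] = 1024;	/* erratum workaround */
		} else {
			cpuid(0x80000005, &eax, &ebx, &ecx, &edx);
			tlb_lli_2m[ENTRIES] = eax & 0xff;
		}
	} else {
		tlb_lli_2m[ENTRIES] = eax & mask;
	}
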
1177 wrmsr(amd_msr_dr_addr_masks[dr], mask, 0); in amd_set_dr_addr_mask()
1184 return 0; in amd_get_dr_addr_mask()
1187 return 0; in amd_get_dr_addr_mask()