Lines Matching refs:cpi
372 #define CPI_FAMILY_XTD(cpi) BITX((cpi)->cpi_std[1].cp_eax, 27, 20) argument
373 #define CPI_MODEL_XTD(cpi) BITX((cpi)->cpi_std[1].cp_eax, 19, 16) argument
374 #define CPI_TYPE(cpi) BITX((cpi)->cpi_std[1].cp_eax, 13, 12) argument
375 #define CPI_FAMILY(cpi) BITX((cpi)->cpi_std[1].cp_eax, 11, 8) argument
376 #define CPI_STEP(cpi) BITX((cpi)->cpi_std[1].cp_eax, 3, 0) argument
377 #define CPI_MODEL(cpi) BITX((cpi)->cpi_std[1].cp_eax, 7, 4) argument
379 #define CPI_FEATURES_EDX(cpi) ((cpi)->cpi_std[1].cp_edx) argument
380 #define CPI_FEATURES_ECX(cpi) ((cpi)->cpi_std[1].cp_ecx) argument
381 #define CPI_FEATURES_XTD_EDX(cpi) ((cpi)->cpi_extd[1].cp_edx) argument
382 #define CPI_FEATURES_XTD_ECX(cpi) ((cpi)->cpi_extd[1].cp_ecx) argument
383 #define CPI_FEATURES_7_0_EBX(cpi) ((cpi)->cpi_std[7].cp_ebx) argument
385 #define CPI_BRANDID(cpi) BITX((cpi)->cpi_std[1].cp_ebx, 7, 0) argument
386 #define CPI_CHUNKS(cpi) BITX((cpi)->cpi_std[1].cp_ebx, 15, 7) argument
387 #define CPI_CPU_COUNT(cpi) BITX((cpi)->cpi_std[1].cp_ebx, 23, 16) argument
388 #define CPI_APIC_ID(cpi) BITX((cpi)->cpi_std[1].cp_ebx, 31, 24) argument
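
The macros above carve fixed bit fields out of the cached CPUID leaf-1 registers. A minimal stand-alone sketch of the same extraction, assuming BITX is the usual inclusive bit-range helper (its real definition is not part of this listing) and using an illustrative EAX signature:

#include <stdint.h>
#include <stdio.h>

/* assumed BITX: extract bits high..low inclusive (not the kernel's definition) */
#define	BITX(v, high, low) (((v) >> (low)) & ((1u << ((high) - (low) + 1)) - 1))

int
main(void)
{
	uint32_t eax = 0x000306c3;	/* illustrative leaf-1 EAX value */

	printf("family 0x%x base-model 0x%x step 0x%x\n",
	    (unsigned)BITX(eax, 11, 8),		/* CPI_FAMILY -> 0x6 */
	    (unsigned)BITX(eax, 7, 4),		/* CPI_MODEL  -> 0xc */
	    (unsigned)BITX(eax, 3, 0));		/* CPI_STEP   -> 0x3 */
	return (0);
}
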
423 #define IS_LEGACY_P6(cpi) ( \ argument
424 cpi->cpi_family == 6 && \
425 (cpi->cpi_model == 1 || \
426 cpi->cpi_model == 3 || \
427 cpi->cpi_model == 5 || \
428 cpi->cpi_model == 6 || \
429 cpi->cpi_model == 7 || \
430 cpi->cpi_model == 8 || \
431 cpi->cpi_model == 0xA || \
432 cpi->cpi_model == 0xB) \
436 #define IS_NEW_F6(cpi) ((cpi->cpi_family == 6) && !IS_LEGACY_P6(cpi)) argument
439 #define IS_EXTENDED_MODEL_INTEL(cpi) (cpi->cpi_family == 0x6 || \ argument
440 cpi->cpi_family >= 0xf)
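
IS_LEGACY_P6 enumerates the original P6 models, and IS_NEW_F6 is simply "family 6 but not one of those". A hypothetical stand-alone rephrasing of the same test (not the kernel macros themselves):

#include <stdbool.h>
#include <stdio.h>

static bool
is_new_f6(unsigned family, unsigned model)
{
	static const unsigned legacy[] = { 1, 3, 5, 6, 7, 8, 0xa, 0xb };
	unsigned i;

	if (family != 6)
		return (false);
	for (i = 0; i < sizeof (legacy) / sizeof (legacy[0]); i++)
		if (model == legacy[i])
			return (false);		/* legacy P6 core */
	return (true);
}

int
main(void)
{
	/* model 0xa is a legacy P6; model 0x3c is a "new" family-6 part */
	printf("%d %d\n", is_new_f6(6, 0xa), is_new_f6(6, 0x3c));	/* 0 1 */
	return (0);
}
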
454 #define MWAIT_SUPPORTED(cpi) ((cpi)->cpi_std[1].cp_ecx & CPUID_INTC_ECX_MON) argument
455 #define MWAIT_INT_ENABLE(cpi) ((cpi)->cpi_std[5].cp_ecx & 0x2) argument
456 #define MWAIT_EXTENSION(cpi) ((cpi)->cpi_std[5].cp_ecx & 0x1) argument
457 #define MWAIT_SIZE_MIN(cpi) BITX((cpi)->cpi_std[5].cp_eax, 15, 0) argument
458 #define MWAIT_SIZE_MAX(cpi) BITX((cpi)->cpi_std[5].cp_ebx, 15, 0) argument
462 #define MWAIT_NUM_SUBC_STATES(cpi, c_state) \ argument
463 BITX((cpi)->cpi_std[5].cp_edx, c_state + 3, c_state)
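
Leaf 5 (MONITOR/MWAIT) carries the monitor-line sizes in EAX/EBX, the extension and interrupt-break-event bits in ECX, and 4-bit sub-C-state counts packed into EDX, which MWAIT_NUM_SUBC_STATES indexes by bit offset. A decode sketch over assumed, illustrative register values (not read from hardware):

#include <stdint.h>
#include <stdio.h>

#define	BITX(v, high, low) (((v) >> (low)) & ((1u << ((high) - (low) + 1)) - 1))

int
main(void)
{
	uint32_t eax = 0x40, ebx = 0x40, ecx = 0x3, edx = 0x00002220;

	printf("monitor line min/max: %u/%u bytes\n",
	    (unsigned)BITX(eax, 15, 0), (unsigned)BITX(ebx, 15, 0));
	printf("extensions: %u, interrupt break-event: %u\n",
	    (unsigned)(ecx & 0x1), (unsigned)((ecx >> 1) & 0x1));
	/* 4-bit sub-state count at bit offset c, as MWAIT_NUM_SUBC_STATES does */
	for (unsigned c = 0; c <= 12; c += 4)
		printf("C%u sub-states: %u\n", c / 4,
		    (unsigned)BITX(edx, c + 3, c));
	return (0);
}
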
589 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_free_space() local
592 ASSERT(cpi != NULL); in cpuid_free_space()
593 ASSERT(cpi != &cpuid_info0); in cpuid_free_space()
598 for (i = 1; i < cpi->cpi_std_4_size; i++) in cpuid_free_space()
599 kmem_free(cpi->cpi_std_4[i], sizeof (struct cpuid_regs)); in cpuid_free_space()
600 if (cpi->cpi_std_4_size > 0) in cpuid_free_space()
601 kmem_free(cpi->cpi_std_4, in cpuid_free_space()
602 cpi->cpi_std_4_size * sizeof (struct cpuid_regs *)); in cpuid_free_space()
604 kmem_free(cpi, sizeof (*cpi)); in cpuid_free_space()
738 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_intel_getids() local
740 for (i = 1; i < cpi->cpi_ncpu_per_chip; i <<= 1) in cpuid_intel_getids()
743 cpi->cpi_chipid = cpi->cpi_apicid >> chipid_shift; in cpuid_intel_getids()
744 cpi->cpi_clogid = cpi->cpi_apicid & ((1 << chipid_shift) - 1); in cpuid_intel_getids()
752 if (cpi->cpi_ncore_per_chip == 1) in cpuid_intel_getids()
753 ncpu_per_core = cpi->cpi_ncpu_per_chip; in cpuid_intel_getids()
754 else if (cpi->cpi_ncore_per_chip > 1) in cpuid_intel_getids()
755 ncpu_per_core = cpi->cpi_ncpu_per_chip / in cpuid_intel_getids()
756 cpi->cpi_ncore_per_chip; in cpuid_intel_getids()
780 cpi->cpi_coreid = cpi->cpi_apicid >> coreid_shift; in cpuid_intel_getids()
781 cpi->cpi_pkgcoreid = cpi->cpi_clogid >> coreid_shift; in cpuid_intel_getids()
786 cpi->cpi_coreid = cpi->cpi_chipid; in cpuid_intel_getids()
787 cpi->cpi_pkgcoreid = 0; in cpuid_intel_getids()
789 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_intel_getids()
790 cpi->cpi_compunitid = cpi->cpi_coreid; in cpuid_intel_getids()
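
cpuid_intel_getids() derives chipid_shift as the smallest power-of-two shift covering cpi_ncpu_per_chip (the loop body is elided in this listing) and then splits the APIC id with it. A stand-alone sketch with assumed values:

#include <stdio.h>

int
main(void)
{
	unsigned ncpu_per_chip = 8, apicid = 0x1a;	/* illustrative values */
	unsigned chipid_shift = 0, i;

	/* smallest shift that covers ncpu_per_chip */
	for (i = 1; i < ncpu_per_chip; i <<= 1)
		chipid_shift++;				/* -> 3 */

	printf("chipid %u clogid %u\n",
	    apicid >> chipid_shift,			/* -> 3 */
	    apicid & ((1u << chipid_shift) - 1));	/* -> 2 */
	return (0);
}
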
799 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_amd_getids() local
825 cpi->cpi_coreid = cpu->cpu_id; in cpuid_amd_getids()
826 cpi->cpi_compunitid = cpu->cpu_id; in cpuid_amd_getids()
828 if (cpi->cpi_xmaxeax >= 0x80000008) { in cpuid_amd_getids()
830 coreidsz = BITX((cpi)->cpi_extd[8].cp_ecx, 15, 12); in cpuid_amd_getids()
836 cpi->cpi_ncore_per_chip = in cpuid_amd_getids()
837 BITX((cpi)->cpi_extd[8].cp_ecx, 7, 0) + 1; in cpuid_amd_getids()
840 for (i = 1; i < cpi->cpi_ncore_per_chip; i <<= 1) in cpuid_amd_getids()
847 cpi->cpi_ncore_per_chip = 1; in cpuid_amd_getids()
851 cpi->cpi_clogid = cpi->cpi_pkgcoreid = in cpuid_amd_getids()
852 cpi->cpi_apicid & ((1<<coreidsz) - 1); in cpuid_amd_getids()
853 cpi->cpi_ncpu_per_chip = cpi->cpi_ncore_per_chip; in cpuid_amd_getids()
857 cpi->cpi_xmaxeax >= 0x8000001e) { in cpuid_amd_getids()
858 cp = &cpi->cpi_extd[0x1e]; in cpuid_amd_getids()
862 cpi->cpi_procnodes_per_pkg = BITX(cp->cp_ecx, 10, 8) + 1; in cpuid_amd_getids()
863 cpi->cpi_procnodeid = BITX(cp->cp_ecx, 7, 0); in cpuid_amd_getids()
864 cpi->cpi_cores_per_compunit = BITX(cp->cp_ebx, 15, 8) + 1; in cpuid_amd_getids()
865 cpi->cpi_compunitid = BITX(cp->cp_ebx, 7, 0) in cpuid_amd_getids()
866 + (cpi->cpi_ncore_per_chip / cpi->cpi_cores_per_compunit) in cpuid_amd_getids()
867 * (cpi->cpi_procnodeid / cpi->cpi_procnodes_per_pkg); in cpuid_amd_getids()
868 } else if (cpi->cpi_family == 0xf || cpi->cpi_family >= 0x11) { in cpuid_amd_getids()
869 cpi->cpi_procnodeid = (cpi->cpi_apicid >> coreidsz) & 7; in cpuid_amd_getids()
870 } else if (cpi->cpi_family == 0x10) { in cpuid_amd_getids()
876 if ((cpi->cpi_model < 8) || BITX(nb_caps_reg, 29, 29) == 0) { in cpuid_amd_getids()
878 cpi->cpi_procnodeid = BITX(cpi->cpi_apicid, 5, in cpuid_amd_getids()
886 cpi->cpi_procnodes_per_pkg = 2; in cpuid_amd_getids()
888 first_half = (cpi->cpi_pkgcoreid <= in cpuid_amd_getids()
889 (cpi->cpi_ncore_per_chip/2 - 1)); in cpuid_amd_getids()
891 if (cpi->cpi_apicid == cpi->cpi_pkgcoreid) { in cpuid_amd_getids()
893 cpi->cpi_procnodeid = (first_half ? 0 : 1); in cpuid_amd_getids()
898 node2_1 = BITX(cpi->cpi_apicid, 5, 4) << 1; in cpuid_amd_getids()
908 cpi->cpi_procnodeid = node2_1 + in cpuid_amd_getids()
911 cpi->cpi_procnodeid = node2_1 + in cpuid_amd_getids()
916 cpi->cpi_procnodeid = 0; in cpuid_amd_getids()
919 cpi->cpi_chipid = in cpuid_amd_getids()
920 cpi->cpi_procnodeid / cpi->cpi_procnodes_per_pkg; in cpuid_amd_getids()
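
For parts with leaf 0x8000001e, the compute-unit id from EBX[7:0] is offset by compute-units-per-chip times the package index (procnodeid / procnodes_per_pkg), and the chip id is that same quotient. A worked sketch with assumed, illustrative topology values:

#include <stdio.h>

int
main(void)
{
	unsigned unit_id = 1;		/* assumed leaf 0x8000001e EBX[7:0] */
	unsigned node_id = 3;		/* assumed leaf 0x8000001e ECX[7:0] */
	unsigned nodes_per_pkg = 2;	/* assumed ECX[10:8] + 1 */
	unsigned ncore_per_chip = 8, cores_per_compunit = 2;

	unsigned compunitid = unit_id +
	    (ncore_per_chip / cores_per_compunit) * (node_id / nodes_per_pkg);
	unsigned chipid = node_id / nodes_per_pkg;

	printf("compunitid %u chipid %u\n", compunitid, chipid);	/* 5, 1 */
	return (0);
}
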
948 struct cpuid_info *cpi; in cpuid_pass1() local
965 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass1()
966 ASSERT(cpi != NULL); in cpuid_pass1()
967 cp = &cpi->cpi_std[0]; in cpuid_pass1()
969 cpi->cpi_maxeax = __cpuid_insn(cp); in cpuid_pass1()
971 uint32_t *iptr = (uint32_t *)cpi->cpi_vendorstr; in cpuid_pass1()
975 *(char *)&cpi->cpi_vendorstr[12] = '\0'; in cpuid_pass1()
978 cpi->cpi_vendor = _cpuid_vendorstr_to_vendorcode(cpi->cpi_vendorstr); in cpuid_pass1()
979 x86_vendor = cpi->cpi_vendor; /* for compatibility */ in cpuid_pass1()
984 if (cpi->cpi_maxeax > CPI_MAXEAX_MAX) in cpuid_pass1()
985 cpi->cpi_maxeax = CPI_MAXEAX_MAX; in cpuid_pass1()
986 if (cpi->cpi_maxeax < 1) in cpuid_pass1()
989 cp = &cpi->cpi_std[1]; in cpuid_pass1()
996 cpi->cpi_model = CPI_MODEL(cpi); in cpuid_pass1()
997 cpi->cpi_family = CPI_FAMILY(cpi); in cpuid_pass1()
999 if (cpi->cpi_family == 0xf) in cpuid_pass1()
1000 cpi->cpi_family += CPI_FAMILY_XTD(cpi); in cpuid_pass1()
1008 switch (cpi->cpi_vendor) { in cpuid_pass1()
1010 if (IS_EXTENDED_MODEL_INTEL(cpi)) in cpuid_pass1()
1011 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1014 if (CPI_FAMILY(cpi) == 0xf) in cpuid_pass1()
1015 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1018 if (cpi->cpi_model == 0xf) in cpuid_pass1()
1019 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1023 cpi->cpi_step = CPI_STEP(cpi); in cpuid_pass1()
1024 cpi->cpi_brandid = CPI_BRANDID(cpi); in cpuid_pass1()
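
Per the switch above, the extended family is only added when the base family is 0xf, and the extended model is folded in as a high nibble only in the vendor-specific cases shown. A worked decode of two illustrative leaf-1 EAX values under those rules:

/*
 * EAX = 0x000306c3 (Intel-style):
 *   base family = bits 11:8 = 0x6 (not 0xf, so ext family bits are ignored)
 *   ext model = bits 19:16 = 0x3, base model = bits 7:4 = 0xc
 *     -> model = 0xc + (0x3 << 4) = 0x3c;  stepping = bits 3:0 = 0x3
 *
 * EAX = 0x00100f42 (AMD-style):
 *   base family = 0xf -> family = 0xf + ext family 0x1 = 0x10
 *   ext model folded in only because CPI_FAMILY == 0xf:
 *     model = 0x4 + (0x0 << 4) = 0x4;  stepping = 0x2
 */
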
1035 cpi->cpi_pabits = cpi->cpi_vabits = 32; in cpuid_pass1()
1037 switch (cpi->cpi_vendor) { in cpuid_pass1()
1039 if (cpi->cpi_family == 5) in cpuid_pass1()
1041 else if (IS_LEGACY_P6(cpi)) { in cpuid_pass1()
1047 if (cpi->cpi_model < 3 && cpi->cpi_step < 3) in cpuid_pass1()
1049 } else if (IS_NEW_F6(cpi) || cpi->cpi_family == 0xf) { in cpuid_pass1()
1058 } else if (cpi->cpi_family > 0xf) in cpuid_pass1()
1064 if (cpi->cpi_maxeax < 5) in cpuid_pass1()
1072 if (cpi->cpi_family == 0xf && cpi->cpi_model == 0xe) { in cpuid_pass1()
1074 cpi->cpi_model = 0xc; in cpuid_pass1()
1077 if (cpi->cpi_family == 5) { in cpuid_pass1()
1090 if (cpi->cpi_model == 0) { in cpuid_pass1()
1100 if (cpi->cpi_model < 6) in cpuid_pass1()
1108 if (cpi->cpi_family >= 0xf) in cpuid_pass1()
1114 if (cpi->cpi_maxeax < 5) in cpuid_pass1()
1133 if (cpi->cpi_family == 5 && cpi->cpi_model == 4 && in cpuid_pass1()
1134 (cpi->cpi_step == 2 || cpi->cpi_step == 3)) in cpuid_pass1()
1141 if (cpi->cpi_family == 6) in cpuid_pass1()
1230 platform_cpuid_mangle(cpi->cpi_vendor, 1, cp); in cpuid_pass1()
1236 if (cpi->cpi_vendor == X86_VENDOR_Intel && cpi->cpi_maxeax >= 7) { in cpuid_pass1()
1238 ecp = &cpi->cpi_std[7]; in cpuid_pass1()
1356 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1361 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1366 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1387 cpi->cpi_mwait.support |= MWAIT_SUPPORT; in cpuid_pass1()
1420 cpi->cpi_pabits = 36; in cpuid_pass1()
1432 cpi->cpi_ncpu_per_chip = CPI_CPU_COUNT(cpi); in cpuid_pass1()
1433 if (cpi->cpi_ncpu_per_chip > 1) in cpuid_pass1()
1436 cpi->cpi_ncpu_per_chip = 1; in cpuid_pass1()
1444 switch (cpi->cpi_vendor) { in cpuid_pass1()
1446 if (IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf) in cpuid_pass1()
1450 if (cpi->cpi_family > 5 || in cpuid_pass1()
1451 (cpi->cpi_family == 5 && cpi->cpi_model >= 1)) in cpuid_pass1()
1471 cp = &cpi->cpi_extd[0]; in cpuid_pass1()
1473 cpi->cpi_xmaxeax = __cpuid_insn(cp); in cpuid_pass1()
1476 if (cpi->cpi_xmaxeax & 0x80000000) { in cpuid_pass1()
1478 if (cpi->cpi_xmaxeax > CPI_XMAXEAX_MAX) in cpuid_pass1()
1479 cpi->cpi_xmaxeax = CPI_XMAXEAX_MAX; in cpuid_pass1()
1481 switch (cpi->cpi_vendor) { in cpuid_pass1()
1484 if (cpi->cpi_xmaxeax < 0x80000001) in cpuid_pass1()
1486 cp = &cpi->cpi_extd[1]; in cpuid_pass1()
1490 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_pass1()
1491 cpi->cpi_family == 5 && in cpuid_pass1()
1492 cpi->cpi_model == 6 && in cpuid_pass1()
1493 cpi->cpi_step == 6) { in cpuid_pass1()
1504 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000001, cp); in cpuid_pass1()
1529 if ((cpi->cpi_vendor == X86_VENDOR_AMD) && in cpuid_pass1()
1530 (cpi->cpi_std[1].cp_edx & CPUID_INTC_EDX_FXSR) && in cpuid_pass1()
1540 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_pass1()
1585 switch (cpi->cpi_vendor) { in cpuid_pass1()
1587 if (cpi->cpi_maxeax >= 4) { in cpuid_pass1()
1588 cp = &cpi->cpi_std[4]; in cpuid_pass1()
1592 platform_cpuid_mangle(cpi->cpi_vendor, 4, cp); in cpuid_pass1()
1596 if (cpi->cpi_xmaxeax < 0x80000008) in cpuid_pass1()
1598 cp = &cpi->cpi_extd[8]; in cpuid_pass1()
1601 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000008, cp); in cpuid_pass1()
1607 cpi->cpi_pabits = BITX(cp->cp_eax, 7, 0); in cpuid_pass1()
1608 cpi->cpi_vabits = BITX(cp->cp_eax, 15, 8); in cpuid_pass1()
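
Leaf 0x80000008 EAX packs the physical address width in bits 7:0 and the virtual width in bits 15:8, which is exactly what the two lines above extract.

/*
 * Example decode of an (illustrative) leaf 0x80000008 EAX value 0x00003030:
 *   cpi_pabits = EAX[7:0]  = 0x30 = 48 physical address bits
 *   cpi_vabits = EAX[15:8] = 0x30 = 48 virtual address bits
 */
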
1617 switch (cpi->cpi_vendor) { in cpuid_pass1()
1619 if (cpi->cpi_maxeax < 4) { in cpuid_pass1()
1620 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1623 cpi->cpi_ncore_per_chip = in cpuid_pass1()
1624 BITX((cpi)->cpi_std[4].cp_eax, 31, 26) + 1; in cpuid_pass1()
1628 if (cpi->cpi_xmaxeax < 0x80000008) { in cpuid_pass1()
1629 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1640 cpi->cpi_ncore_per_chip = in cpuid_pass1()
1641 BITX((cpi)->cpi_extd[8].cp_ecx, 7, 0) + 1; in cpuid_pass1()
1645 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1652 switch (cpi->cpi_vendor) { in cpuid_pass1()
1654 if (cpi->cpi_maxeax >= 7) { in cpuid_pass1()
1655 cp = &cpi->cpi_extd[7]; in cpuid_pass1()
1665 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1671 if (cpi->cpi_ncore_per_chip > 1) { in cpuid_pass1()
1679 if (cpi->cpi_ncpu_per_chip == cpi->cpi_ncore_per_chip) { in cpuid_pass1()
1683 cpi->cpi_apicid = CPI_APIC_ID(cpi); in cpuid_pass1()
1684 cpi->cpi_procnodes_per_pkg = 1; in cpuid_pass1()
1685 cpi->cpi_cores_per_compunit = 1; in cpuid_pass1()
1691 cpi->cpi_chipid = -1; in cpuid_pass1()
1692 cpi->cpi_clogid = 0; in cpuid_pass1()
1693 cpi->cpi_coreid = cpu->cpu_id; in cpuid_pass1()
1694 cpi->cpi_pkgcoreid = 0; in cpuid_pass1()
1695 if (cpi->cpi_vendor == X86_VENDOR_AMD) in cpuid_pass1()
1696 cpi->cpi_procnodeid = BITX(cpi->cpi_apicid, 3, 0); in cpuid_pass1()
1698 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_pass1()
1699 } else if (cpi->cpi_ncpu_per_chip > 1) { in cpuid_pass1()
1700 if (cpi->cpi_vendor == X86_VENDOR_Intel) in cpuid_pass1()
1702 else if (cpi->cpi_vendor == X86_VENDOR_AMD) in cpuid_pass1()
1709 cpi->cpi_coreid = cpi->cpi_chipid; in cpuid_pass1()
1710 cpi->cpi_pkgcoreid = 0; in cpuid_pass1()
1711 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_pass1()
1712 cpi->cpi_compunitid = cpi->cpi_chipid; in cpuid_pass1()
1719 cpi->cpi_chiprev = _cpuid_chiprev(cpi->cpi_vendor, cpi->cpi_family, in cpuid_pass1()
1720 cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1721 cpi->cpi_chiprevstr = _cpuid_chiprevstr(cpi->cpi_vendor, in cpuid_pass1()
1722 cpi->cpi_family, cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1723 cpi->cpi_socket = _cpuid_skt(cpi->cpi_vendor, cpi->cpi_family, in cpuid_pass1()
1724 cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1727 cpi->cpi_pass = 1; in cpuid_pass1()
1747 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass2() local
1749 ASSERT(cpi->cpi_pass == 1); in cpuid_pass2()
1751 if (cpi->cpi_maxeax < 1) in cpuid_pass2()
1754 if ((nmax = cpi->cpi_maxeax + 1) > NMAX_CPI_STD) in cpuid_pass2()
1759 for (n = 2, cp = &cpi->cpi_std[2]; n < nmax; n++, cp++) { in cpuid_pass2()
1781 platform_cpuid_mangle(cpi->cpi_vendor, n, cp); in cpuid_pass2()
1793 cpi->cpi_ncache = sizeof (*cp) * in cpuid_pass2()
1795 if (cpi->cpi_ncache == 0) in cpuid_pass2()
1797 cpi->cpi_ncache--; /* skip count byte */ in cpuid_pass2()
1804 if (cpi->cpi_ncache > (sizeof (*cp) - 1)) in cpuid_pass2()
1805 cpi->cpi_ncache = sizeof (*cp) - 1; in cpuid_pass2()
1807 dp = cpi->cpi_cacheinfo; in cpuid_pass2()
1847 if (!(cpi->cpi_mwait.support & MWAIT_SUPPORT)) in cpuid_pass2()
1854 mwait_size = (size_t)MWAIT_SIZE_MAX(cpi); in cpuid_pass2()
1864 cpi->cpi_mwait.mon_min = (size_t)MWAIT_SIZE_MIN(cpi); in cpuid_pass2()
1865 cpi->cpi_mwait.mon_max = mwait_size; in cpuid_pass2()
1866 if (MWAIT_EXTENSION(cpi)) { in cpuid_pass2()
1867 cpi->cpi_mwait.support |= MWAIT_EXTENSIONS; in cpuid_pass2()
1868 if (MWAIT_INT_ENABLE(cpi)) in cpuid_pass2()
1869 cpi->cpi_mwait.support |= in cpuid_pass2()
1879 if (cpi->cpi_maxeax >= 0xB && cpi->cpi_vendor == X86_VENDOR_Intel) { in cpuid_pass2()
1920 cpi->cpi_apicid = x2apic_id; in cpuid_pass2()
1921 cpi->cpi_ncpu_per_chip = ncpu_per_chip; in cpuid_pass2()
1922 cpi->cpi_ncore_per_chip = ncpu_per_chip / in cpuid_pass2()
1924 cpi->cpi_chipid = x2apic_id >> chipid_shift; in cpuid_pass2()
1925 cpi->cpi_clogid = x2apic_id & ((1 << chipid_shift) - 1); in cpuid_pass2()
1926 cpi->cpi_coreid = x2apic_id >> coreid_shift; in cpuid_pass2()
1927 cpi->cpi_pkgcoreid = cpi->cpi_clogid >> coreid_shift; in cpuid_pass2()
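
With leaf 0xB, the shift widths come from the leaf's sub-leaves (that derivation is elided in this listing) and the x2APIC id is then split much as in the classic path. A sketch with assumed shift values:

#include <stdio.h>

int
main(void)
{
	/* assumed shift widths, as would come from leaf 0xb sub-leaves */
	unsigned coreid_shift = 1, chipid_shift = 4;
	unsigned x2apic_id = 0x1a;
	unsigned clogid = x2apic_id & ((1u << chipid_shift) - 1);

	printf("chipid %u clogid %u coreid %u pkgcoreid %u\n",
	    x2apic_id >> chipid_shift,		/* 1   */
	    clogid,				/* 0xa */
	    x2apic_id >> coreid_shift,		/* 0xd */
	    clogid >> coreid_shift);		/* 5   */
	return (0);
}
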
1937 if (cpi->cpi_maxeax >= 0xD) { in cpuid_pass2()
1955 cpi->cpi_xsave.xsav_hw_features_low = cp->cp_eax; in cpuid_pass2()
1956 cpi->cpi_xsave.xsav_hw_features_high = cp->cp_edx; in cpuid_pass2()
1957 cpi->cpi_xsave.xsav_max_size = cp->cp_ecx; in cpuid_pass2()
1963 if (cpi->cpi_xsave.xsav_hw_features_low & XFEATURE_AVX) { in cpuid_pass2()
1975 cpi->cpi_xsave.ymm_size = cp->cp_eax; in cpuid_pass2()
1976 cpi->cpi_xsave.ymm_offset = cp->cp_ebx; in cpuid_pass2()
1982 xsave_state_size = cpi->cpi_xsave.xsav_max_size; in cpuid_pass2()
1988 cpu->cpu_id, cpi->cpi_xsave.xsav_hw_features_low, in cpuid_pass2()
1989 cpi->cpi_xsave.xsav_hw_features_high, in cpuid_pass2()
1990 (int)cpi->cpi_xsave.xsav_max_size, in cpuid_pass2()
1991 (int)cpi->cpi_xsave.ymm_size, in cpuid_pass2()
1992 (int)cpi->cpi_xsave.ymm_offset); in cpuid_pass2()
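
Leaf 0xD sub-leaf 0 reports the supported XSAVE feature mask in EDX:EAX and the largest save-area size in ECX; sub-leaf 2 gives the AVX (YMM) state size and offset, which is what the fields above capture. A worked decode over assumed, typical-looking values:

/*
 * Assumed (illustrative) leaf 0xd results:
 *   sub-leaf 0: EAX = 0x7 (x87|SSE|AVX), EDX = 0, ECX = 0x340
 *     -> xsav_hw_features_low = 0x7, xsav_max_size = 832 bytes
 *   sub-leaf 2: EAX = 0x100, EBX = 0x240
 *     -> ymm_size = 256 bytes, ymm_offset = 576 bytes
 */
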
2027 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2029 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2031 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2033 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2035 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2037 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2039 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2051 if ((cpi->cpi_xmaxeax & 0x80000000) == 0) in cpuid_pass2()
2054 if ((nmax = cpi->cpi_xmaxeax - 0x80000000 + 1) > NMAX_CPI_EXTD) in cpuid_pass2()
2060 iptr = (void *)cpi->cpi_brandstr; in cpuid_pass2()
2061 for (n = 2, cp = &cpi->cpi_extd[2]; n < nmax; cp++, n++) { in cpuid_pass2()
2064 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000000 + n, cp); in cpuid_pass2()
2078 switch (cpi->cpi_vendor) { in cpuid_pass2()
2086 if (cpi->cpi_family < 6 || in cpuid_pass2()
2087 (cpi->cpi_family == 6 && in cpuid_pass2()
2088 cpi->cpi_model < 1)) in cpuid_pass2()
2096 switch (cpi->cpi_vendor) { in cpuid_pass2()
2103 if (cpi->cpi_family < 6 || in cpuid_pass2()
2104 cpi->cpi_family == 6 && in cpuid_pass2()
2105 cpi->cpi_model < 1) in cpuid_pass2()
2112 if (cpi->cpi_family == 6 && in cpuid_pass2()
2113 cpi->cpi_model == 3 && in cpuid_pass2()
2114 cpi->cpi_step == 0) { in cpuid_pass2()
2124 if (cpi->cpi_family != 6) in cpuid_pass2()
2131 if (cpi->cpi_model == 7 || in cpuid_pass2()
2132 cpi->cpi_model == 8) in cpuid_pass2()
2141 if (cpi->cpi_model == 9 && cpi->cpi_step == 1) in cpuid_pass2()
2159 cpi->cpi_pass = 2; in cpuid_pass2()
2163 intel_cpubrand(const struct cpuid_info *cpi) in intel_cpubrand() argument
2168 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5) in intel_cpubrand()
2171 switch (cpi->cpi_family) { in intel_cpubrand()
2175 switch (cpi->cpi_model) { in intel_cpubrand()
2190 cp = &cpi->cpi_std[2]; /* cache info */ in intel_cpubrand()
2235 return (cpi->cpi_model == 5 ? in intel_cpubrand()
2238 return (cpi->cpi_model == 5 ? in intel_cpubrand()
2249 if (cpi->cpi_brandid != 0) { in intel_cpubrand()
2278 sgn = (cpi->cpi_family << 8) | in intel_cpubrand()
2279 (cpi->cpi_model << 4) | cpi->cpi_step; in intel_cpubrand()
2282 if (brand_tbl[i].bt_bid == cpi->cpi_brandid) in intel_cpubrand()
2285 if (sgn == 0x6b1 && cpi->cpi_brandid == 3) in intel_cpubrand()
2287 if (sgn < 0xf13 && cpi->cpi_brandid == 0xb) in intel_cpubrand()
2289 if (sgn < 0xf13 && cpi->cpi_brandid == 0xe) in intel_cpubrand()
2299 amd_cpubrand(const struct cpuid_info *cpi) in amd_cpubrand() argument
2302 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5) in amd_cpubrand()
2305 switch (cpi->cpi_family) { in amd_cpubrand()
2307 switch (cpi->cpi_model) { in amd_cpubrand()
2326 switch (cpi->cpi_model) { in amd_cpubrand()
2342 return ((cpi->cpi_extd[6].cp_ecx >> 16) >= 256 ? in amd_cpubrand()
2351 if (cpi->cpi_family == 0xf && cpi->cpi_model == 5 && in amd_cpubrand()
2352 cpi->cpi_brandid != 0) { in amd_cpubrand()
2353 switch (BITX(cpi->cpi_brandid, 7, 5)) { in amd_cpubrand()
2369 cyrix_cpubrand(struct cpuid_info *cpi, uint_t type) in cyrix_cpubrand() argument
2372 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5 || in cyrix_cpubrand()
2395 if (cpi->cpi_family == 4 && cpi->cpi_model == 9) in cyrix_cpubrand()
2397 else if (cpi->cpi_family == 5) { in cyrix_cpubrand()
2398 switch (cpi->cpi_model) { in cyrix_cpubrand()
2406 } else if (cpi->cpi_family == 6) { in cyrix_cpubrand()
2407 switch (cpi->cpi_model) { in cyrix_cpubrand()
2431 fabricate_brandstr(struct cpuid_info *cpi) in fabricate_brandstr() argument
2435 switch (cpi->cpi_vendor) { in fabricate_brandstr()
2437 brand = intel_cpubrand(cpi); in fabricate_brandstr()
2440 brand = amd_cpubrand(cpi); in fabricate_brandstr()
2443 brand = cyrix_cpubrand(cpi, x86_type); in fabricate_brandstr()
2446 if (cpi->cpi_family == 5 && cpi->cpi_model == 0) in fabricate_brandstr()
2450 if (cpi->cpi_family == 5) in fabricate_brandstr()
2451 switch (cpi->cpi_model) { in fabricate_brandstr()
2466 if (cpi->cpi_family == 5 && in fabricate_brandstr()
2467 (cpi->cpi_model == 0 || cpi->cpi_model == 2)) in fabricate_brandstr()
2471 if (cpi->cpi_family == 5 && cpi->cpi_model == 0) in fabricate_brandstr()
2475 if (cpi->cpi_family == 5 && cpi->cpi_model == 4) in fabricate_brandstr()
2484 (void) strcpy((char *)cpi->cpi_brandstr, brand); in fabricate_brandstr()
2491 (void) snprintf(cpi->cpi_brandstr, sizeof (cpi->cpi_brandstr), in fabricate_brandstr()
2492 "%s %d.%d.%d", cpi->cpi_vendorstr, cpi->cpi_family, in fabricate_brandstr()
2493 cpi->cpi_model, cpi->cpi_step); in fabricate_brandstr()
2511 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass3() local
2513 ASSERT(cpi->cpi_pass == 2); in cpuid_pass3()
2523 cpi->cpi_ncpu_shr_last_cache = 1; in cpuid_pass3()
2524 cpi->cpi_last_lvl_cacheid = cpu->cpu_id; in cpuid_pass3()
2526 if (cpi->cpi_maxeax >= 4 && cpi->cpi_vendor == X86_VENDOR_Intel) { in cpuid_pass3()
2545 cpi->cpi_ncpu_shr_last_cache = in cpuid_pass3()
2549 cpi->cpi_std_4_size = size = i; in cpuid_pass3()
2557 cpi->cpi_std_4 = in cpuid_pass3()
2559 cpi->cpi_std_4[0] = &cpi->cpi_std[4]; in cpuid_pass3()
2569 cp = cpi->cpi_std_4[i] = in cpuid_pass3()
2585 for (i = 1; i < cpi->cpi_ncpu_shr_last_cache; i <<= 1) in cpuid_pass3()
2587 cpi->cpi_last_lvl_cacheid = cpi->cpi_apicid >> shft; in cpuid_pass3()
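
cpuid_pass3() turns the number of logical CPUs sharing the last-level cache (taken from leaf 4) into a covering shift and applies it to the APIC id. A stand-alone sketch with an assumed sharing count:

#include <stdio.h>

int
main(void)
{
	unsigned ncpu_shr_last_cache = 16;	/* assumed sharing count */
	unsigned apicid = 0x1a, shft = 0, i;

	for (i = 1; i < ncpu_shr_last_cache; i <<= 1)
		shft++;				/* smallest covering shift: 4 */
	printf("last-level cacheid %u\n", apicid >> shft);	/* -> 1 */
	return (0);
}
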
2593 if ((cpi->cpi_xmaxeax & 0x80000000) == 0) { in cpuid_pass3()
2594 fabricate_brandstr(cpi); in cpuid_pass3()
2602 if (cpi->cpi_brandstr[0]) { in cpuid_pass3()
2603 size_t maxlen = sizeof (cpi->cpi_brandstr); in cpuid_pass3()
2606 dst = src = (char *)cpi->cpi_brandstr; in cpuid_pass3()
2649 while (--dst > cpi->cpi_brandstr) in cpuid_pass3()
2655 fabricate_brandstr(cpi); in cpuid_pass3()
2657 cpi->cpi_pass = 3; in cpuid_pass3()
2669 struct cpuid_info *cpi; in cpuid_pass4() local
2674 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass4()
2676 ASSERT(cpi->cpi_pass == 3); in cpuid_pass4()
2678 if (cpi->cpi_maxeax >= 1) { in cpuid_pass4()
2679 uint32_t *edx = &cpi->cpi_support[STD_EDX_FEATURES]; in cpuid_pass4()
2680 uint32_t *ecx = &cpi->cpi_support[STD_ECX_FEATURES]; in cpuid_pass4()
2681 uint32_t *ebx = &cpi->cpi_support[STD_EBX_FEATURES]; in cpuid_pass4()
2683 *edx = CPI_FEATURES_EDX(cpi); in cpuid_pass4()
2684 *ecx = CPI_FEATURES_ECX(cpi); in cpuid_pass4()
2685 *ebx = CPI_FEATURES_7_0_EBX(cpi); in cpuid_pass4()
2800 if (cpi->cpi_xmaxeax < 0x80000001) in cpuid_pass4()
2803 switch (cpi->cpi_vendor) { in cpuid_pass4()
2817 edx = &cpi->cpi_support[AMD_EDX_FEATURES]; in cpuid_pass4()
2818 ecx = &cpi->cpi_support[AMD_ECX_FEATURES]; in cpuid_pass4()
2820 *edx = CPI_FEATURES_XTD_EDX(cpi); in cpuid_pass4()
2821 *ecx = CPI_FEATURES_XTD_ECX(cpi); in cpuid_pass4()
2826 switch (cpi->cpi_vendor) { in cpuid_pass4()
2873 switch (cpi->cpi_vendor) { in cpuid_pass4()
2904 cpi->cpi_support[TM_EDX_FEATURES] = cp.cp_edx; in cpuid_pass4()
2912 cpi->cpi_pass = 4; in cpuid_pass4()
2928 struct cpuid_info *cpi; in cpuid_insn() local
2933 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_insn()
2941 if (cp->cp_eax <= cpi->cpi_maxeax && cp->cp_eax < NMAX_CPI_STD) in cpuid_insn()
2942 xcp = &cpi->cpi_std[cp->cp_eax]; in cpuid_insn()
2943 else if (cp->cp_eax >= 0x80000000 && cp->cp_eax <= cpi->cpi_xmaxeax && in cpuid_insn()
2945 xcp = &cpi->cpi_extd[cp->cp_eax - 0x80000000]; in cpuid_insn()
3011 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_syscall32_insn() local
3013 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_syscall32_insn()
3014 cpi->cpi_xmaxeax >= 0x80000001 && in cpuid_syscall32_insn()
3015 (CPI_FEATURES_XTD_EDX(cpi) & CPUID_AMD_EDX_SYSC)) in cpuid_syscall32_insn()
3025 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_getidstr() local
3035 return (snprintf(s, n, fmt_ht, cpi->cpi_chipid, in cpuid_getidstr()
3036 cpi->cpi_vendorstr, cpi->cpi_std[1].cp_eax, in cpuid_getidstr()
3037 cpi->cpi_family, cpi->cpi_model, in cpuid_getidstr()
3038 cpi->cpi_step, cpu->cpu_type_info.pi_clock)); in cpuid_getidstr()
3040 cpi->cpi_vendorstr, cpi->cpi_std[1].cp_eax, in cpuid_getidstr()
3041 cpi->cpi_family, cpi->cpi_model, in cpuid_getidstr()
3042 cpi->cpi_step, cpu->cpu_type_info.pi_clock)); in cpuid_getidstr()
3140 struct cpuid_info *cpi; in cpuid_getsocketstr() local
3143 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_getsocketstr()
3147 socketstr = _cpuid_sktstr(cpi->cpi_vendor, cpi->cpi_family, in cpuid_getsocketstr()
3148 cpi->cpi_model, cpi->cpi_step); in cpuid_getsocketstr()
3227 struct cpuid_info *cpi; in cpuid_have_cr8access() local
3230 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_have_cr8access()
3231 if (cpi->cpi_vendor == X86_VENDOR_AMD && cpi->cpi_maxeax >= 1 && in cpuid_have_cr8access()
3232 (CPI_FEATURES_XTD_ECX(cpi) & CPUID_AMD_ECX_CR8D) != 0) in cpuid_have_cr8access()
3252 struct cpuid_info *cpi; in cpuid_get_addrsize() local
3256 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_get_addrsize()
3261 *pabits = cpi->cpi_pabits; in cpuid_get_addrsize()
3263 *vabits = cpi->cpi_vabits; in cpuid_get_addrsize()
3276 struct cpuid_info *cpi; in cpuid_get_dtlb_nent() local
3281 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_get_dtlb_nent()
3288 if (cpi->cpi_xmaxeax >= 0x80000006) { in cpuid_get_dtlb_nent()
3289 struct cpuid_regs *cp = &cpi->cpi_extd[6]; in cpuid_get_dtlb_nent()
3323 if (cpi->cpi_xmaxeax >= 0x80000005) { in cpuid_get_dtlb_nent()
3324 struct cpuid_regs *cp = &cpi->cpi_extd[5]; in cpuid_get_dtlb_nent()
3352 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_opteron_erratum() local
3359 if (cpi->cpi_vendor != X86_VENDOR_AMD || in cpuid_opteron_erratum()
3360 cpi->cpi_family == 4 || cpi->cpi_family == 5 || in cpuid_opteron_erratum()
3361 cpi->cpi_family == 6) in cpuid_opteron_erratum()
3365 eax = cpi->cpi_std[1].cp_eax; in cpuid_opteron_erratum()
3405 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3411 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3415 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3432 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3436 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3448 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3458 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3518 return (cpi->cpi_family < 0x10 || cpi->cpi_family == 0x11); in cpuid_opteron_erratum()
3522 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3528 if (CPI_FAMILY(cpi) == 0xf) { in cpuid_opteron_erratum()
3544 return (cpi->cpi_family == 0x10 && cpi->cpi_model < 4); in cpuid_opteron_erratum()
3552 return (cpi->cpi_family == 0x10 || cpi->cpi_family == 0x12); in cpuid_opteron_erratum()
3570 struct cpuid_info *cpi; in osvw_opteron_erratum() local
3576 cpi = cpu->cpu_m.mcpu_cpi; in osvw_opteron_erratum()
3580 osvwfeature = cpi->cpi_extd[1].cp_ecx & CPUID_AMD_ECX_OSVW; in osvw_opteron_erratum()
3584 (cpi->cpi_extd[1].cp_ecx & CPUID_AMD_ECX_OSVW)); in osvw_opteron_erratum()
3809 intel_cpuid_4_cache_info(struct cachetab *ct, struct cpuid_info *cpi) in intel_cpuid_4_cache_info() argument
3814 for (i = 0; i < cpi->cpi_std_4_size; i++) { in intel_cpuid_4_cache_info()
3815 level = CPI_CACHE_LVL(cpi->cpi_std_4[i]); in intel_cpuid_4_cache_info()
3818 ct->ct_assoc = CPI_CACHE_WAYS(cpi->cpi_std_4[i]) + 1; in intel_cpuid_4_cache_info()
3820 CPI_CACHE_COH_LN_SZ(cpi->cpi_std_4[i]) + 1; in intel_cpuid_4_cache_info()
3822 (CPI_CACHE_PARTS(cpi->cpi_std_4[i]) + 1) * in intel_cpuid_4_cache_info()
3824 (cpi->cpi_std_4[i]->cp_ecx + 1); in intel_cpuid_4_cache_info()
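
The size of a leaf-4 cache level works out to ways x partitions x line size x sets, each field reported minus one, which is what the expression above multiplies together. A worked example with assumed field values:

/*
 * Assumed (illustrative) fields for one cpi_std_4[] entry:
 *   CPI_CACHE_WAYS + 1       = 8
 *   CPI_CACHE_PARTS + 1      = 1
 *   CPI_CACHE_COH_LN_SZ + 1  = 64 bytes
 *   cp_ecx + 1 (sets)        = 512
 *   ct_size = 8 * 1 * 64 * 512 = 262144 bytes (256 KB)
 */
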
3843 intel_walk_cacheinfo(struct cpuid_info *cpi, in intel_walk_cacheinfo() argument
3851 if ((dp = cpi->cpi_cacheinfo) == NULL) in intel_walk_cacheinfo()
3853 for (i = 0; i < cpi->cpi_ncache; i++, dp++) { in intel_walk_cacheinfo()
3861 if (*dp == 0x49 && cpi->cpi_maxeax >= 0x4 && in intel_walk_cacheinfo()
3862 intel_cpuid_4_cache_info(&des_49_ct, cpi) == 1) { in intel_walk_cacheinfo()
3892 cyrix_walk_cacheinfo(struct cpuid_info *cpi, in cyrix_walk_cacheinfo() argument
3899 if ((dp = cpi->cpi_cacheinfo) == NULL) in cyrix_walk_cacheinfo()
3901 for (i = 0; i < cpi->cpi_ncache; i++, dp++) { in cyrix_walk_cacheinfo()
4038 amd_cache_info(struct cpuid_info *cpi, dev_info_t *devi) in amd_cache_info() argument
4042 if (cpi->cpi_xmaxeax < 0x80000005) in amd_cache_info()
4044 cp = &cpi->cpi_extd[5]; in amd_cache_info()
4061 switch (cpi->cpi_vendor) { in amd_cache_info()
4064 if (cpi->cpi_family >= 5) { in amd_cache_info()
4104 if (cpi->cpi_xmaxeax < 0x80000006) in amd_cache_info()
4106 cp = &cpi->cpi_extd[6]; in amd_cache_info()
4144 x86_which_cacheinfo(struct cpuid_info *cpi) in x86_which_cacheinfo() argument
4146 switch (cpi->cpi_vendor) { in x86_which_cacheinfo()
4148 if (cpi->cpi_maxeax >= 2) in x86_which_cacheinfo()
4156 if (cpi->cpi_family > 5 || in x86_which_cacheinfo()
4157 (cpi->cpi_family == 5 && cpi->cpi_model >= 1)) in x86_which_cacheinfo()
4161 if (cpi->cpi_family >= 5) in x86_which_cacheinfo()
4176 if (cpi->cpi_xmaxeax >= 0x80000005) in x86_which_cacheinfo()
4178 else if (cpi->cpi_vendor == X86_VENDOR_Cyrix) in x86_which_cacheinfo()
4180 else if (cpi->cpi_maxeax >= 2) in x86_which_cacheinfo()
4189 struct cpuid_info *cpi) in cpuid_set_cpu_properties() argument
4221 "vendor-id", cpi->cpi_vendorstr); in cpuid_set_cpu_properties()
4223 if (cpi->cpi_maxeax == 0) { in cpuid_set_cpu_properties()
4231 "family", CPI_FAMILY(cpi)); in cpuid_set_cpu_properties()
4233 "cpu-model", CPI_MODEL(cpi)); in cpuid_set_cpu_properties()
4235 "stepping-id", CPI_STEP(cpi)); in cpuid_set_cpu_properties()
4238 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4248 "type", CPI_TYPE(cpi)); in cpuid_set_cpu_properties()
4251 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4254 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4262 "ext-family", CPI_FAMILY_XTD(cpi)); in cpuid_set_cpu_properties()
4265 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4267 create = IS_EXTENDED_MODEL_INTEL(cpi); in cpuid_set_cpu_properties()
4270 create = CPI_FAMILY(cpi) == 0xf; in cpuid_set_cpu_properties()
4278 "ext-model", CPI_MODEL_XTD(cpi)); in cpuid_set_cpu_properties()
4281 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4286 create = cpi->cpi_xmaxeax >= 0x80000001; in cpuid_set_cpu_properties()
4294 "generation", BITX((cpi)->cpi_extd[1].cp_eax, 11, 8)); in cpuid_set_cpu_properties()
4297 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4303 create = cpi->cpi_family > 6 || in cpuid_set_cpu_properties()
4304 (cpi->cpi_family == 6 && cpi->cpi_model >= 8); in cpuid_set_cpu_properties()
4307 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4313 if (create && cpi->cpi_brandid != 0) { in cpuid_set_cpu_properties()
4315 "brand-id", cpi->cpi_brandid); in cpuid_set_cpu_properties()
4319 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4324 create = IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4327 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4335 "chunks", CPI_CHUNKS(cpi)); in cpuid_set_cpu_properties()
4337 "apic-id", cpi->cpi_apicid); in cpuid_set_cpu_properties()
4338 if (cpi->cpi_chipid >= 0) { in cpuid_set_cpu_properties()
4340 "chip#", cpi->cpi_chipid); in cpuid_set_cpu_properties()
4342 "clog#", cpi->cpi_clogid); in cpuid_set_cpu_properties()
4348 "cpuid-features", CPI_FEATURES_EDX(cpi)); in cpuid_set_cpu_properties()
4352 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4354 create = IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4357 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4365 "cpuid-features-ecx", CPI_FEATURES_ECX(cpi)); in cpuid_set_cpu_properties()
4368 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4374 create = cpi->cpi_xmaxeax >= 0x80000001; in cpuid_set_cpu_properties()
4382 "ext-cpuid-features", CPI_FEATURES_XTD_EDX(cpi)); in cpuid_set_cpu_properties()
4384 "ext-cpuid-features-ecx", CPI_FEATURES_XTD_ECX(cpi)); in cpuid_set_cpu_properties()
4394 "brand-string", cpi->cpi_brandstr); in cpuid_set_cpu_properties()
4399 switch (x86_which_cacheinfo(cpi)) { in cpuid_set_cpu_properties()
4401 intel_walk_cacheinfo(cpi, cpu_devi, add_cacheent_props); in cpuid_set_cpu_properties()
4404 cyrix_walk_cacheinfo(cpi, cpu_devi, add_cacheent_props); in cpuid_set_cpu_properties()
4407 amd_cache_info(cpi, cpu_devi); in cpuid_set_cpu_properties()
4459 amd_l2cacheinfo(struct cpuid_info *cpi, struct l2info *l2i) in amd_l2cacheinfo() argument
4466 if (cpi->cpi_xmaxeax < 0x80000006) in amd_l2cacheinfo()
4468 cp = &cpi->cpi_extd[6]; in amd_l2cacheinfo()
4490 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in getl2cacheinfo() local
4498 switch (x86_which_cacheinfo(cpi)) { in getl2cacheinfo()
4500 intel_walk_cacheinfo(cpi, l2i, intel_l2cinfo); in getl2cacheinfo()
4503 cyrix_walk_cacheinfo(cpi, l2i, intel_l2cinfo); in getl2cacheinfo()
4506 amd_l2cacheinfo(cpi, l2i); in getl2cacheinfo()
4610 struct cpuid_info *cpi; in cpuid_deep_cstates_supported() local
4615 cpi = CPU->cpu_m.mcpu_cpi; in cpuid_deep_cstates_supported()
4620 switch (cpi->cpi_vendor) { in cpuid_deep_cstates_supported()
4622 if (cpi->cpi_xmaxeax < 0x80000007) in cpuid_deep_cstates_supported()
4700 struct cpuid_info *cpi; in cpuid_arat_supported() local
4706 cpi = CPU->cpu_m.mcpu_cpi; in cpuid_arat_supported()
4708 switch (cpi->cpi_vendor) { in cpuid_arat_supported()
4714 if (cpi->cpi_maxeax >= 6) { in cpuid_arat_supported()
4732 struct cpuid_info *cpi = cp->cpu_m.mcpu_cpi; in cpuid_iepb_supported() local
4746 if ((cpi->cpi_vendor != X86_VENDOR_Intel) || (cpi->cpi_maxeax < 6)) in cpuid_iepb_supported()
4766 struct cpuid_info *cpi = CPU->cpu_m.mcpu_cpi; in cpuid_deadline_tsc_supported() local
4772 switch (cpi->cpi_vendor) { in cpuid_deadline_tsc_supported()
4774 if (cpi->cpi_maxeax >= 1) { in cpuid_deadline_tsc_supported()