Lines Matching refs:cpi (cross-reference hits for the struct cpuid_info pointer cpi: source line number, matching code, enclosing definition)

375 #define	CPI_FAMILY_XTD(cpi)	BITX((cpi)->cpi_std[1].cp_eax, 27, 20)	argument
376 #define	CPI_MODEL_XTD(cpi)	BITX((cpi)->cpi_std[1].cp_eax, 19, 16)	argument
377 #define	CPI_TYPE(cpi)		BITX((cpi)->cpi_std[1].cp_eax, 13, 12)	argument
378 #define	CPI_FAMILY(cpi)		BITX((cpi)->cpi_std[1].cp_eax, 11, 8)	argument
379 #define	CPI_STEP(cpi)		BITX((cpi)->cpi_std[1].cp_eax, 3, 0)	argument
380 #define	CPI_MODEL(cpi)		BITX((cpi)->cpi_std[1].cp_eax, 7, 4)	argument
382 #define	CPI_FEATURES_EDX(cpi)		((cpi)->cpi_std[1].cp_edx)	argument
383 #define	CPI_FEATURES_ECX(cpi)		((cpi)->cpi_std[1].cp_ecx)	argument
384 #define	CPI_FEATURES_XTD_EDX(cpi)	((cpi)->cpi_extd[1].cp_edx)	argument
385 #define	CPI_FEATURES_XTD_ECX(cpi)	((cpi)->cpi_extd[1].cp_ecx)	argument
386 #define	CPI_FEATURES_7_0_EBX(cpi)	((cpi)->cpi_std[7].cp_ebx)	argument
388 #define	CPI_BRANDID(cpi)	BITX((cpi)->cpi_std[1].cp_ebx, 7, 0)	argument
389 #define	CPI_CHUNKS(cpi)		BITX((cpi)->cpi_std[1].cp_ebx, 15, 7)	argument
390 #define	CPI_CPU_COUNT(cpi)	BITX((cpi)->cpi_std[1].cp_ebx, 23, 16)	argument
391 #define	CPI_APIC_ID(cpi)	BITX((cpi)->cpi_std[1].cp_ebx, 31, 24)	argument
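
These CPI_* macros pick apart CPUID leaf 1 (cpi_std[1]). A minimal user-space sketch of the same field extraction, assuming GCC/Clang's <cpuid.h> and a BITX helper written here only to match the (high, low) argument order the kernel macros use:

    #include <stdio.h>
    #include <cpuid.h>

    /* Stand-in for the kernel's BITX(u, h, l): extract bits h..l of u. */
    #define BITX(u, h, l)   (((u) >> (l)) & ((1u << ((h) - (l) + 1)) - 1))

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return (1);

        printf("family     %u\n", BITX(eax, 11, 8));    /* CPI_FAMILY */
        printf("ext-family %u\n", BITX(eax, 27, 20));   /* CPI_FAMILY_XTD */
        printf("model      %u\n", BITX(eax, 7, 4));     /* CPI_MODEL */
        printf("ext-model  %u\n", BITX(eax, 19, 16));   /* CPI_MODEL_XTD */
        printf("stepping   %u\n", BITX(eax, 3, 0));     /* CPI_STEP */
        printf("brand-id   %u\n", BITX(ebx, 7, 0));     /* CPI_BRANDID */
        printf("apic-id    %u\n", BITX(ebx, 31, 24));   /* CPI_APIC_ID */
        return (0);
    }
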
426 #define IS_LEGACY_P6(cpi) ( \ argument
427 cpi->cpi_family == 6 && \
428 (cpi->cpi_model == 1 || \
429 cpi->cpi_model == 3 || \
430 cpi->cpi_model == 5 || \
431 cpi->cpi_model == 6 || \
432 cpi->cpi_model == 7 || \
433 cpi->cpi_model == 8 || \
434 cpi->cpi_model == 0xA || \
435 cpi->cpi_model == 0xB) \
439 #define IS_NEW_F6(cpi) ((cpi->cpi_family == 6) && !IS_LEGACY_P6(cpi)) argument
442 #define IS_EXTENDED_MODEL_INTEL(cpi) (cpi->cpi_family == 0x6 || \ argument
443 cpi->cpi_family >= 0xf)
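
IS_EXTENDED_MODEL_INTEL() decides when the extended-model field counts; cpuid_pass1() (source lines 999 to 1022 below) folds the extended family and model into the values kept in cpi_family and cpi_model. A hedged stand-alone version of that arithmetic, using the leaf 1 layout from the macros above:

    /*
     * Sketch of the family/model fix-up done in cpuid_pass1(): a base
     * family of 0xf means "add the extended family field"; the extended
     * model is folded in for Intel when the family is 0x6 or 0xf, and
     * for AMD only when the base family field is 0xf.
     */
    static void
    display_family_model(unsigned int eax, int is_intel,
        unsigned int *family, unsigned int *model)
    {
        unsigned int base_family = (eax >> 8) & 0xf;    /* CPI_FAMILY */
        unsigned int base_model  = (eax >> 4) & 0xf;    /* CPI_MODEL */
        unsigned int ext_family  = (eax >> 20) & 0xff;  /* CPI_FAMILY_XTD */
        unsigned int ext_model   = (eax >> 16) & 0xf;   /* CPI_MODEL_XTD */

        *family = base_family;
        *model = base_model;

        if (base_family == 0xf)
            *family += ext_family;

        if (is_intel ? (base_family == 0x6 || base_family == 0xf) :
            (base_family == 0xf))
            *model += ext_model << 4;
    }
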
457 #define MWAIT_SUPPORTED(cpi) ((cpi)->cpi_std[1].cp_ecx & CPUID_INTC_ECX_MON) argument
458 #define MWAIT_INT_ENABLE(cpi) ((cpi)->cpi_std[5].cp_ecx & 0x2) argument
459 #define MWAIT_EXTENSION(cpi) ((cpi)->cpi_std[5].cp_ecx & 0x1) argument
460 #define MWAIT_SIZE_MIN(cpi) BITX((cpi)->cpi_std[5].cp_eax, 15, 0) argument
461 #define MWAIT_SIZE_MAX(cpi) BITX((cpi)->cpi_std[5].cp_ebx, 15, 0) argument
465 #define MWAIT_NUM_SUBC_STATES(cpi, c_state) \ argument
466 BITX((cpi)->cpi_std[5].cp_edx, c_state + 3, c_state)
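
Leaf 5 (cpi_std[5]) describes MONITOR/MWAIT: EAX/EBX carry the minimum and maximum monitor-line sizes, ECX the extension and break-on-interrupt flags, and EDX packs a 4-bit sub C-state count per C-state. As I read MWAIT_NUM_SUBC_STATES, its c_state argument is the starting bit of the nibble, i.e. the C-state number times four. A hedged user-space reader, assuming GCC/Clang's <cpuid.h>:

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx, c;

        /* MWAIT_SUPPORTED: the MONITOR/MWAIT flag is CPUID.1 ECX bit 3. */
        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx) || !(ecx & (1u << 3)))
            return (1);

        /* Leaf 5: MONITOR/MWAIT parameters. */
        if (!__get_cpuid(5, &eax, &ebx, &ecx, &edx))
            return (1);

        printf("monitor line: min %u, max %u bytes\n",
            eax & 0xffff, ebx & 0xffff);        /* MWAIT_SIZE_MIN/MAX */
        printf("extensions %s, break on interrupt %s\n",
            (ecx & 0x1) ? "yes" : "no",         /* MWAIT_EXTENSION */
            (ecx & 0x2) ? "yes" : "no");        /* MWAIT_INT_ENABLE */

        /* One 4-bit sub C-state count per C-state, C0 in EDX bits 3:0. */
        for (c = 0; c < 8; c++)
            printf("C%u sub C-states: %u\n", c, (edx >> (c * 4)) & 0xf);
        return (0);
    }
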
592 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_free_space() local
595 ASSERT(cpi != NULL); in cpuid_free_space()
596 ASSERT(cpi != &cpuid_info0); in cpuid_free_space()
601 for (i = 1; i < cpi->cpi_std_4_size; i++) in cpuid_free_space()
602 kmem_free(cpi->cpi_std_4[i], sizeof (struct cpuid_regs)); in cpuid_free_space()
603 if (cpi->cpi_std_4_size > 0) in cpuid_free_space()
604 kmem_free(cpi->cpi_std_4, in cpuid_free_space()
605 cpi->cpi_std_4_size * sizeof (struct cpuid_regs *)); in cpuid_free_space()
607 kmem_free(cpi, sizeof (*cpi)); in cpuid_free_space()
741 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_intel_getids() local
743 for (i = 1; i < cpi->cpi_ncpu_per_chip; i <<= 1) in cpuid_intel_getids()
746 cpi->cpi_chipid = cpi->cpi_apicid >> chipid_shift; in cpuid_intel_getids()
747 cpi->cpi_clogid = cpi->cpi_apicid & ((1 << chipid_shift) - 1); in cpuid_intel_getids()
755 if (cpi->cpi_ncore_per_chip == 1) in cpuid_intel_getids()
756 ncpu_per_core = cpi->cpi_ncpu_per_chip; in cpuid_intel_getids()
757 else if (cpi->cpi_ncore_per_chip > 1) in cpuid_intel_getids()
758 ncpu_per_core = cpi->cpi_ncpu_per_chip / in cpuid_intel_getids()
759 cpi->cpi_ncore_per_chip; in cpuid_intel_getids()
783 cpi->cpi_coreid = cpi->cpi_apicid >> coreid_shift; in cpuid_intel_getids()
784 cpi->cpi_pkgcoreid = cpi->cpi_clogid >> coreid_shift; in cpuid_intel_getids()
789 cpi->cpi_coreid = cpi->cpi_chipid; in cpuid_intel_getids()
790 cpi->cpi_pkgcoreid = 0; in cpuid_intel_getids()
792 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_intel_getids()
793 cpi->cpi_compunitid = cpi->cpi_coreid; in cpuid_intel_getids()
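
cpuid_intel_getids() splits the initial APIC ID into a chip ID and a chip-local logical ID; the `for (i = 1; i < n; i <<= 1)` loop is simply ceil(log2(n)). A small hedged illustration of that decomposition (the input values are made-up examples):

    #include <stdio.h>

    /* ceil(log2(n)), written as the same doubling loop used above. */
    static unsigned int
    ceil_log2(unsigned int n)
    {
        unsigned int i, shift = 0;

        for (i = 1; i < n; i <<= 1)
            shift++;
        return (shift);
    }

    int
    main(void)
    {
        unsigned int apicid = 0x15;         /* hypothetical initial APIC ID */
        unsigned int ncpu_per_chip = 6;     /* hypothetical CPUs per package */
        unsigned int shift = ceil_log2(ncpu_per_chip);

        printf("chipid %u, clogid %u\n",
            apicid >> shift,                /* cpi_chipid */
            apicid & ((1u << shift) - 1));  /* cpi_clogid */
        return (0);
    }
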
802 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_amd_getids() local
828 cpi->cpi_coreid = cpu->cpu_id; in cpuid_amd_getids()
829 cpi->cpi_compunitid = cpu->cpu_id; in cpuid_amd_getids()
831 if (cpi->cpi_xmaxeax >= 0x80000008) { in cpuid_amd_getids()
833 coreidsz = BITX((cpi)->cpi_extd[8].cp_ecx, 15, 12); in cpuid_amd_getids()
839 cpi->cpi_ncore_per_chip = in cpuid_amd_getids()
840 BITX((cpi)->cpi_extd[8].cp_ecx, 7, 0) + 1; in cpuid_amd_getids()
843 for (i = 1; i < cpi->cpi_ncore_per_chip; i <<= 1) in cpuid_amd_getids()
850 cpi->cpi_ncore_per_chip = 1; in cpuid_amd_getids()
854 cpi->cpi_clogid = cpi->cpi_pkgcoreid = in cpuid_amd_getids()
855 cpi->cpi_apicid & ((1<<coreidsz) - 1); in cpuid_amd_getids()
856 cpi->cpi_ncpu_per_chip = cpi->cpi_ncore_per_chip; in cpuid_amd_getids()
860 cpi->cpi_xmaxeax >= 0x8000001e) { in cpuid_amd_getids()
861 cp = &cpi->cpi_extd[0x1e]; in cpuid_amd_getids()
865 cpi->cpi_procnodes_per_pkg = BITX(cp->cp_ecx, 10, 8) + 1; in cpuid_amd_getids()
866 cpi->cpi_procnodeid = BITX(cp->cp_ecx, 7, 0); in cpuid_amd_getids()
867 cpi->cpi_cores_per_compunit = BITX(cp->cp_ebx, 15, 8) + 1; in cpuid_amd_getids()
868 cpi->cpi_compunitid = BITX(cp->cp_ebx, 7, 0) in cpuid_amd_getids()
869 + (cpi->cpi_ncore_per_chip / cpi->cpi_cores_per_compunit) in cpuid_amd_getids()
870 * (cpi->cpi_procnodeid / cpi->cpi_procnodes_per_pkg); in cpuid_amd_getids()
871 } else if (cpi->cpi_family == 0xf || cpi->cpi_family >= 0x11) { in cpuid_amd_getids()
872 cpi->cpi_procnodeid = (cpi->cpi_apicid >> coreidsz) & 7; in cpuid_amd_getids()
873 } else if (cpi->cpi_family == 0x10) { in cpuid_amd_getids()
879 if ((cpi->cpi_model < 8) || BITX(nb_caps_reg, 29, 29) == 0) { in cpuid_amd_getids()
881 cpi->cpi_procnodeid = BITX(cpi->cpi_apicid, 5, in cpuid_amd_getids()
889 cpi->cpi_procnodes_per_pkg = 2; in cpuid_amd_getids()
891 first_half = (cpi->cpi_pkgcoreid <= in cpuid_amd_getids()
892 (cpi->cpi_ncore_per_chip/2 - 1)); in cpuid_amd_getids()
894 if (cpi->cpi_apicid == cpi->cpi_pkgcoreid) { in cpuid_amd_getids()
896 cpi->cpi_procnodeid = (first_half ? 0 : 1); in cpuid_amd_getids()
901 node2_1 = BITX(cpi->cpi_apicid, 5, 4) << 1; in cpuid_amd_getids()
911 cpi->cpi_procnodeid = node2_1 + in cpuid_amd_getids()
914 cpi->cpi_procnodeid = node2_1 + in cpuid_amd_getids()
919 cpi->cpi_procnodeid = 0; in cpuid_amd_getids()
922 cpi->cpi_chipid = in cpuid_amd_getids()
923 cpi->cpi_procnodeid / cpi->cpi_procnodes_per_pkg; in cpuid_amd_getids()
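
On AMD, cpuid_amd_getids() takes the core count from leaf 0x80000008 ECX[7:0] + 1 and the core-ID width from ECX[15:12], falling back to ceil(log2(core count)) when that field reads zero, then masks the initial APIC ID with it to get the package-local core ID. A hedged user-space sketch of that part, assuming GCC/Clang's <cpuid.h>:

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx;
        unsigned int apicid, ncores, coreidsz, i;

        if (!__get_cpuid(0x80000008, &eax, &ebx, &ecx, &edx))
            return (1);

        ncores = (ecx & 0xff) + 1;          /* ECX[7:0] + 1 cores per package */
        coreidsz = (ecx >> 12) & 0xf;       /* ECX[15:12] ApicIdCoreIdSize */
        if (coreidsz == 0) {
            /* Older parts report 0: fall back to ceil(log2(ncores)). */
            for (i = 1; i < ncores; i <<= 1)
                coreidsz++;
        }

        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return (1);
        apicid = (ebx >> 24) & 0xff;        /* CPI_APIC_ID: initial APIC ID */

        printf("apicid %u, pkgcoreid %u of %u cores\n",
            apicid, apicid & ((1u << coreidsz) - 1), ncores);
        return (0);
    }
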
951 struct cpuid_info *cpi; in cpuid_pass1() local
968 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass1()
969 ASSERT(cpi != NULL); in cpuid_pass1()
970 cp = &cpi->cpi_std[0]; in cpuid_pass1()
972 cpi->cpi_maxeax = __cpuid_insn(cp); in cpuid_pass1()
974 uint32_t *iptr = (uint32_t *)cpi->cpi_vendorstr; in cpuid_pass1()
978 *(char *)&cpi->cpi_vendorstr[12] = '\0'; in cpuid_pass1()
981 cpi->cpi_vendor = _cpuid_vendorstr_to_vendorcode(cpi->cpi_vendorstr); in cpuid_pass1()
982 x86_vendor = cpi->cpi_vendor; /* for compatibility */ in cpuid_pass1()
987 if (cpi->cpi_maxeax > CPI_MAXEAX_MAX) in cpuid_pass1()
988 cpi->cpi_maxeax = CPI_MAXEAX_MAX; in cpuid_pass1()
989 if (cpi->cpi_maxeax < 1) in cpuid_pass1()
992 cp = &cpi->cpi_std[1]; in cpuid_pass1()
999 cpi->cpi_model = CPI_MODEL(cpi); in cpuid_pass1()
1000 cpi->cpi_family = CPI_FAMILY(cpi); in cpuid_pass1()
1002 if (cpi->cpi_family == 0xf) in cpuid_pass1()
1003 cpi->cpi_family += CPI_FAMILY_XTD(cpi); in cpuid_pass1()
1011 switch (cpi->cpi_vendor) { in cpuid_pass1()
1013 if (IS_EXTENDED_MODEL_INTEL(cpi)) in cpuid_pass1()
1014 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1017 if (CPI_FAMILY(cpi) == 0xf) in cpuid_pass1()
1018 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1021 if (cpi->cpi_model == 0xf) in cpuid_pass1()
1022 cpi->cpi_model += CPI_MODEL_XTD(cpi) << 4; in cpuid_pass1()
1026 cpi->cpi_step = CPI_STEP(cpi); in cpuid_pass1()
1027 cpi->cpi_brandid = CPI_BRANDID(cpi); in cpuid_pass1()
1038 cpi->cpi_pabits = cpi->cpi_vabits = 32; in cpuid_pass1()
1040 switch (cpi->cpi_vendor) { in cpuid_pass1()
1042 if (cpi->cpi_family == 5) in cpuid_pass1()
1044 else if (IS_LEGACY_P6(cpi)) { in cpuid_pass1()
1050 if (cpi->cpi_model < 3 && cpi->cpi_step < 3) in cpuid_pass1()
1052 } else if (IS_NEW_F6(cpi) || cpi->cpi_family == 0xf) { in cpuid_pass1()
1061 } else if (cpi->cpi_family > 0xf) in cpuid_pass1()
1067 if (cpi->cpi_maxeax < 5) in cpuid_pass1()
1075 if (cpi->cpi_family == 0xf && cpi->cpi_model == 0xe) { in cpuid_pass1()
1077 cpi->cpi_model = 0xc; in cpuid_pass1()
1080 if (cpi->cpi_family == 5) { in cpuid_pass1()
1093 if (cpi->cpi_model == 0) { in cpuid_pass1()
1103 if (cpi->cpi_model < 6) in cpuid_pass1()
1111 if (cpi->cpi_family >= 0xf) in cpuid_pass1()
1117 if (cpi->cpi_maxeax < 5) in cpuid_pass1()
1136 if (cpi->cpi_family == 5 && cpi->cpi_model == 4 && in cpuid_pass1()
1137 (cpi->cpi_step == 2 || cpi->cpi_step == 3)) in cpuid_pass1()
1144 if (cpi->cpi_family == 6) in cpuid_pass1()
1233 platform_cpuid_mangle(cpi->cpi_vendor, 1, cp); in cpuid_pass1()
1239 if (cpi->cpi_vendor == X86_VENDOR_Intel && cpi->cpi_maxeax >= 7) { in cpuid_pass1()
1241 ecp = &cpi->cpi_std[7]; in cpuid_pass1()
1365 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1370 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1375 if (cpi->cpi_std[7].cp_ebx & in cpuid_pass1()
1396 cpi->cpi_mwait.support |= MWAIT_SUPPORT; in cpuid_pass1()
1429 cpi->cpi_pabits = 36; in cpuid_pass1()
1441 cpi->cpi_ncpu_per_chip = CPI_CPU_COUNT(cpi); in cpuid_pass1()
1442 if (cpi->cpi_ncpu_per_chip > 1) in cpuid_pass1()
1445 cpi->cpi_ncpu_per_chip = 1; in cpuid_pass1()
1453 switch (cpi->cpi_vendor) { in cpuid_pass1()
1455 if (IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf) in cpuid_pass1()
1459 if (cpi->cpi_family > 5 || in cpuid_pass1()
1460 (cpi->cpi_family == 5 && cpi->cpi_model >= 1)) in cpuid_pass1()
1480 cp = &cpi->cpi_extd[0]; in cpuid_pass1()
1482 cpi->cpi_xmaxeax = __cpuid_insn(cp); in cpuid_pass1()
1485 if (cpi->cpi_xmaxeax & 0x80000000) { in cpuid_pass1()
1487 if (cpi->cpi_xmaxeax > CPI_XMAXEAX_MAX) in cpuid_pass1()
1488 cpi->cpi_xmaxeax = CPI_XMAXEAX_MAX; in cpuid_pass1()
1490 switch (cpi->cpi_vendor) { in cpuid_pass1()
1493 if (cpi->cpi_xmaxeax < 0x80000001) in cpuid_pass1()
1495 cp = &cpi->cpi_extd[1]; in cpuid_pass1()
1499 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_pass1()
1500 cpi->cpi_family == 5 && in cpuid_pass1()
1501 cpi->cpi_model == 6 && in cpuid_pass1()
1502 cpi->cpi_step == 6) { in cpuid_pass1()
1513 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000001, cp); in cpuid_pass1()
1538 if ((cpi->cpi_vendor == X86_VENDOR_AMD) && in cpuid_pass1()
1539 (cpi->cpi_std[1].cp_edx & CPUID_INTC_EDX_FXSR) && in cpuid_pass1()
1549 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_pass1()
1598 switch (cpi->cpi_vendor) { in cpuid_pass1()
1600 if (cpi->cpi_maxeax >= 4) { in cpuid_pass1()
1601 cp = &cpi->cpi_std[4]; in cpuid_pass1()
1605 platform_cpuid_mangle(cpi->cpi_vendor, 4, cp); in cpuid_pass1()
1609 if (cpi->cpi_xmaxeax < 0x80000008) in cpuid_pass1()
1611 cp = &cpi->cpi_extd[8]; in cpuid_pass1()
1614 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000008, cp); in cpuid_pass1()
1620 cpi->cpi_pabits = BITX(cp->cp_eax, 7, 0); in cpuid_pass1()
1621 cpi->cpi_vabits = BITX(cp->cp_eax, 15, 8); in cpuid_pass1()
1630 switch (cpi->cpi_vendor) { in cpuid_pass1()
1632 if (cpi->cpi_maxeax < 4) { in cpuid_pass1()
1633 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1636 cpi->cpi_ncore_per_chip = in cpuid_pass1()
1637 BITX((cpi)->cpi_std[4].cp_eax, 31, 26) + 1; in cpuid_pass1()
1641 if (cpi->cpi_xmaxeax < 0x80000008) { in cpuid_pass1()
1642 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1653 cpi->cpi_ncore_per_chip = in cpuid_pass1()
1654 BITX((cpi)->cpi_extd[8].cp_ecx, 7, 0) + 1; in cpuid_pass1()
1658 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1665 switch (cpi->cpi_vendor) { in cpuid_pass1()
1667 if (cpi->cpi_maxeax >= 7) { in cpuid_pass1()
1668 cp = &cpi->cpi_extd[7]; in cpuid_pass1()
1678 cpi->cpi_ncore_per_chip = 1; in cpuid_pass1()
1684 if (cpi->cpi_ncore_per_chip > 1) { in cpuid_pass1()
1692 if (cpi->cpi_ncpu_per_chip == cpi->cpi_ncore_per_chip) { in cpuid_pass1()
1696 cpi->cpi_apicid = CPI_APIC_ID(cpi); in cpuid_pass1()
1697 cpi->cpi_procnodes_per_pkg = 1; in cpuid_pass1()
1698 cpi->cpi_cores_per_compunit = 1; in cpuid_pass1()
1704 cpi->cpi_chipid = -1; in cpuid_pass1()
1705 cpi->cpi_clogid = 0; in cpuid_pass1()
1706 cpi->cpi_coreid = cpu->cpu_id; in cpuid_pass1()
1707 cpi->cpi_pkgcoreid = 0; in cpuid_pass1()
1708 if (cpi->cpi_vendor == X86_VENDOR_AMD) in cpuid_pass1()
1709 cpi->cpi_procnodeid = BITX(cpi->cpi_apicid, 3, 0); in cpuid_pass1()
1711 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_pass1()
1712 } else if (cpi->cpi_ncpu_per_chip > 1) { in cpuid_pass1()
1713 if (cpi->cpi_vendor == X86_VENDOR_Intel) in cpuid_pass1()
1715 else if (cpi->cpi_vendor == X86_VENDOR_AMD) in cpuid_pass1()
1722 cpi->cpi_coreid = cpi->cpi_chipid; in cpuid_pass1()
1723 cpi->cpi_pkgcoreid = 0; in cpuid_pass1()
1724 cpi->cpi_procnodeid = cpi->cpi_chipid; in cpuid_pass1()
1725 cpi->cpi_compunitid = cpi->cpi_chipid; in cpuid_pass1()
1732 cpi->cpi_chiprev = _cpuid_chiprev(cpi->cpi_vendor, cpi->cpi_family, in cpuid_pass1()
1733 cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1734 cpi->cpi_chiprevstr = _cpuid_chiprevstr(cpi->cpi_vendor, in cpuid_pass1()
1735 cpi->cpi_family, cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1736 cpi->cpi_socket = _cpuid_skt(cpi->cpi_vendor, cpi->cpi_family, in cpuid_pass1()
1737 cpi->cpi_model, cpi->cpi_step); in cpuid_pass1()
1740 cpi->cpi_pass = 1; in cpuid_pass1()
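
cpuid_pass1() begins with leaf 0 (cpi_std[0]), which returns the highest standard leaf in EAX and the 12-byte vendor string split across EBX, EDX and ECX in that order (the iptr copy at lines 974 to 978 above). A hedged user-space equivalent:

    #include <stdio.h>
    #include <string.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int maxeax, ebx, ecx, edx;
        char vendor[13];

        /* Leaf 0: EAX = highest standard leaf, EBX/EDX/ECX = vendor string. */
        __get_cpuid(0, &maxeax, &ebx, &ecx, &edx);
        memcpy(vendor + 0, &ebx, 4);
        memcpy(vendor + 4, &edx, 4);
        memcpy(vendor + 8, &ecx, 4);
        vendor[12] = '\0';

        printf("max standard leaf 0x%x, vendor \"%s\"\n", maxeax, vendor);
        return (0);
    }
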
1760 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass2() local
1762 ASSERT(cpi->cpi_pass == 1); in cpuid_pass2()
1764 if (cpi->cpi_maxeax < 1) in cpuid_pass2()
1767 if ((nmax = cpi->cpi_maxeax + 1) > NMAX_CPI_STD) in cpuid_pass2()
1772 for (n = 2, cp = &cpi->cpi_std[2]; n < nmax; n++, cp++) { in cpuid_pass2()
1794 platform_cpuid_mangle(cpi->cpi_vendor, n, cp); in cpuid_pass2()
1806 cpi->cpi_ncache = sizeof (*cp) * in cpuid_pass2()
1808 if (cpi->cpi_ncache == 0) in cpuid_pass2()
1810 cpi->cpi_ncache--; /* skip count byte */ in cpuid_pass2()
1817 if (cpi->cpi_ncache > (sizeof (*cp) - 1)) in cpuid_pass2()
1818 cpi->cpi_ncache = sizeof (*cp) - 1; in cpuid_pass2()
1820 dp = cpi->cpi_cacheinfo; in cpuid_pass2()
1860 if (!(cpi->cpi_mwait.support & MWAIT_SUPPORT)) in cpuid_pass2()
1867 mwait_size = (size_t)MWAIT_SIZE_MAX(cpi); in cpuid_pass2()
1877 cpi->cpi_mwait.mon_min = (size_t)MWAIT_SIZE_MIN(cpi); in cpuid_pass2()
1878 cpi->cpi_mwait.mon_max = mwait_size; in cpuid_pass2()
1879 if (MWAIT_EXTENSION(cpi)) { in cpuid_pass2()
1880 cpi->cpi_mwait.support |= MWAIT_EXTENSIONS; in cpuid_pass2()
1881 if (MWAIT_INT_ENABLE(cpi)) in cpuid_pass2()
1882 cpi->cpi_mwait.support |= in cpuid_pass2()
1892 if (cpi->cpi_maxeax >= 0xB && cpi->cpi_vendor == X86_VENDOR_Intel) { in cpuid_pass2()
1933 cpi->cpi_apicid = x2apic_id; in cpuid_pass2()
1934 cpi->cpi_ncpu_per_chip = ncpu_per_chip; in cpuid_pass2()
1935 cpi->cpi_ncore_per_chip = ncpu_per_chip / in cpuid_pass2()
1937 cpi->cpi_chipid = x2apic_id >> chipid_shift; in cpuid_pass2()
1938 cpi->cpi_clogid = x2apic_id & ((1 << chipid_shift) - 1); in cpuid_pass2()
1939 cpi->cpi_coreid = x2apic_id >> coreid_shift; in cpuid_pass2()
1940 cpi->cpi_pkgcoreid = cpi->cpi_clogid >> coreid_shift; in cpuid_pass2()
1950 if (cpi->cpi_maxeax >= 0xD) { in cpuid_pass2()
1968 cpi->cpi_xsave.xsav_hw_features_low = cp->cp_eax; in cpuid_pass2()
1969 cpi->cpi_xsave.xsav_hw_features_high = cp->cp_edx; in cpuid_pass2()
1970 cpi->cpi_xsave.xsav_max_size = cp->cp_ecx; in cpuid_pass2()
1976 if (cpi->cpi_xsave.xsav_hw_features_low & XFEATURE_AVX) { in cpuid_pass2()
1988 cpi->cpi_xsave.ymm_size = cp->cp_eax; in cpuid_pass2()
1989 cpi->cpi_xsave.ymm_offset = cp->cp_ebx; in cpuid_pass2()
1995 xsave_state_size = cpi->cpi_xsave.xsav_max_size; in cpuid_pass2()
2001 cpu->cpu_id, cpi->cpi_xsave.xsav_hw_features_low, in cpuid_pass2()
2002 cpi->cpi_xsave.xsav_hw_features_high, in cpuid_pass2()
2003 (int)cpi->cpi_xsave.xsav_max_size, in cpuid_pass2()
2004 (int)cpi->cpi_xsave.ymm_size, in cpuid_pass2()
2005 (int)cpi->cpi_xsave.ymm_offset); in cpuid_pass2()
2040 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2042 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2044 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2046 CPI_FEATURES_ECX(cpi) &= in cpuid_pass2()
2048 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2050 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2052 CPI_FEATURES_7_0_EBX(cpi) &= in cpuid_pass2()
2064 if ((cpi->cpi_xmaxeax & 0x80000000) == 0) in cpuid_pass2()
2067 if ((nmax = cpi->cpi_xmaxeax - 0x80000000 + 1) > NMAX_CPI_EXTD) in cpuid_pass2()
2073 iptr = (void *)cpi->cpi_brandstr; in cpuid_pass2()
2074 for (n = 2, cp = &cpi->cpi_extd[2]; n < nmax; cp++, n++) { in cpuid_pass2()
2077 platform_cpuid_mangle(cpi->cpi_vendor, 0x80000000 + n, cp); in cpuid_pass2()
2091 switch (cpi->cpi_vendor) { in cpuid_pass2()
2099 if (cpi->cpi_family < 6 || in cpuid_pass2()
2100 (cpi->cpi_family == 6 && in cpuid_pass2()
2101 cpi->cpi_model < 1)) in cpuid_pass2()
2109 switch (cpi->cpi_vendor) { in cpuid_pass2()
2116 if (cpi->cpi_family < 6 || in cpuid_pass2()
2117 cpi->cpi_family == 6 && in cpuid_pass2()
2118 cpi->cpi_model < 1) in cpuid_pass2()
2125 if (cpi->cpi_family == 6 && in cpuid_pass2()
2126 cpi->cpi_model == 3 && in cpuid_pass2()
2127 cpi->cpi_step == 0) { in cpuid_pass2()
2137 if (cpi->cpi_family != 6) in cpuid_pass2()
2144 if (cpi->cpi_model == 7 || in cpuid_pass2()
2145 cpi->cpi_model == 8) in cpuid_pass2()
2154 if (cpi->cpi_model == 9 && cpi->cpi_step == 1) in cpuid_pass2()
2172 cpi->cpi_pass = 2; in cpuid_pass2()
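
Late in cpuid_pass2() the listing reads XSAVE leaf 0xD: subleaf 0 returns the supported state-component mask in EDX:EAX and the largest save-area size in ECX (the xsav_* fields above), and when AVX state is supported subleaf 2 gives the YMM component's size and offset. A hedged reader for those fields, assuming GCC/Clang's <cpuid.h>:

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (__get_cpuid_max(0, 0) < 0xd)
            return (1);

        /* Leaf 0xD, subleaf 0: XSAVE feature mask and save-area sizes. */
        __cpuid_count(0xd, 0, eax, ebx, ecx, edx);
        printf("xsave features 0x%08x%08x, max save area %u bytes\n",
            edx, eax, ecx);

        if (eax & 0x4) {    /* XFEATURE_AVX: state component 2 */
            /* Subleaf 2: size and offset of the YMM state component. */
            __cpuid_count(0xd, 2, eax, ebx, ecx, edx);
            printf("ymm state: %u bytes at offset %u\n", eax, ebx);
        }
        return (0);
    }
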
2176 intel_cpubrand(const struct cpuid_info *cpi) in intel_cpubrand() argument
2181 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5) in intel_cpubrand()
2184 switch (cpi->cpi_family) { in intel_cpubrand()
2188 switch (cpi->cpi_model) { in intel_cpubrand()
2203 cp = &cpi->cpi_std[2]; /* cache info */ in intel_cpubrand()
2248 return (cpi->cpi_model == 5 ? in intel_cpubrand()
2251 return (cpi->cpi_model == 5 ? in intel_cpubrand()
2262 if (cpi->cpi_brandid != 0) { in intel_cpubrand()
2291 sgn = (cpi->cpi_family << 8) | in intel_cpubrand()
2292 (cpi->cpi_model << 4) | cpi->cpi_step; in intel_cpubrand()
2295 if (brand_tbl[i].bt_bid == cpi->cpi_brandid) in intel_cpubrand()
2298 if (sgn == 0x6b1 && cpi->cpi_brandid == 3) in intel_cpubrand()
2300 if (sgn < 0xf13 && cpi->cpi_brandid == 0xb) in intel_cpubrand()
2302 if (sgn < 0xf13 && cpi->cpi_brandid == 0xe) in intel_cpubrand()
2312 amd_cpubrand(const struct cpuid_info *cpi) in amd_cpubrand() argument
2315 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5) in amd_cpubrand()
2318 switch (cpi->cpi_family) { in amd_cpubrand()
2320 switch (cpi->cpi_model) { in amd_cpubrand()
2339 switch (cpi->cpi_model) { in amd_cpubrand()
2355 return ((cpi->cpi_extd[6].cp_ecx >> 16) >= 256 ? in amd_cpubrand()
2364 if (cpi->cpi_family == 0xf && cpi->cpi_model == 5 && in amd_cpubrand()
2365 cpi->cpi_brandid != 0) { in amd_cpubrand()
2366 switch (BITX(cpi->cpi_brandid, 7, 5)) { in amd_cpubrand()
2382 cyrix_cpubrand(struct cpuid_info *cpi, uint_t type) in cyrix_cpubrand() argument
2385 cpi->cpi_maxeax < 1 || cpi->cpi_family < 5 || in cyrix_cpubrand()
2408 if (cpi->cpi_family == 4 && cpi->cpi_model == 9) in cyrix_cpubrand()
2410 else if (cpi->cpi_family == 5) { in cyrix_cpubrand()
2411 switch (cpi->cpi_model) { in cyrix_cpubrand()
2419 } else if (cpi->cpi_family == 6) { in cyrix_cpubrand()
2420 switch (cpi->cpi_model) { in cyrix_cpubrand()
2444 fabricate_brandstr(struct cpuid_info *cpi) in fabricate_brandstr() argument
2448 switch (cpi->cpi_vendor) { in fabricate_brandstr()
2450 brand = intel_cpubrand(cpi); in fabricate_brandstr()
2453 brand = amd_cpubrand(cpi); in fabricate_brandstr()
2456 brand = cyrix_cpubrand(cpi, x86_type); in fabricate_brandstr()
2459 if (cpi->cpi_family == 5 && cpi->cpi_model == 0) in fabricate_brandstr()
2463 if (cpi->cpi_family == 5) in fabricate_brandstr()
2464 switch (cpi->cpi_model) { in fabricate_brandstr()
2479 if (cpi->cpi_family == 5 && in fabricate_brandstr()
2480 (cpi->cpi_model == 0 || cpi->cpi_model == 2)) in fabricate_brandstr()
2484 if (cpi->cpi_family == 5 && cpi->cpi_model == 0) in fabricate_brandstr()
2488 if (cpi->cpi_family == 5 && cpi->cpi_model == 4) in fabricate_brandstr()
2497 (void) strcpy((char *)cpi->cpi_brandstr, brand); in fabricate_brandstr()
2504 (void) snprintf(cpi->cpi_brandstr, sizeof (cpi->cpi_brandstr), in fabricate_brandstr()
2505 "%s %d.%d.%d", cpi->cpi_vendorstr, cpi->cpi_family, in fabricate_brandstr()
2506 cpi->cpi_model, cpi->cpi_step); in fabricate_brandstr()
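
fabricate_brandstr() is only the fallback; when the hardware provides one, pass 2 copies the 48-byte brand string that extended leaves 0x80000002 through 0x80000004 return, 16 bytes per leaf in EAX/EBX/ECX/EDX (the iptr loop at lines 2073 to 2074 above). A hedged user-space version:

    #include <stdio.h>
    #include <string.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int regs[12];
        unsigned int leaf;
        char brand[49];

        /* Leaves 0x80000002..4 each return 16 bytes of the brand string. */
        for (leaf = 0; leaf < 3; leaf++) {
            if (!__get_cpuid(0x80000002 + leaf, &regs[leaf * 4 + 0],
                &regs[leaf * 4 + 1], &regs[leaf * 4 + 2],
                &regs[leaf * 4 + 3]))
                return (1);    /* brand-string leaves not implemented */
        }

        memcpy(brand, regs, 48);
        brand[48] = '\0';
        printf("brand string: \"%s\"\n", brand);
        return (0);
    }
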
2524 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass3() local
2526 ASSERT(cpi->cpi_pass == 2); in cpuid_pass3()
2536 cpi->cpi_ncpu_shr_last_cache = 1; in cpuid_pass3()
2537 cpi->cpi_last_lvl_cacheid = cpu->cpu_id; in cpuid_pass3()
2539 if (cpi->cpi_maxeax >= 4 && cpi->cpi_vendor == X86_VENDOR_Intel) { in cpuid_pass3()
2558 cpi->cpi_ncpu_shr_last_cache = in cpuid_pass3()
2562 cpi->cpi_std_4_size = size = i; in cpuid_pass3()
2570 cpi->cpi_std_4 = in cpuid_pass3()
2572 cpi->cpi_std_4[0] = &cpi->cpi_std[4]; in cpuid_pass3()
2582 cp = cpi->cpi_std_4[i] = in cpuid_pass3()
2598 for (i = 1; i < cpi->cpi_ncpu_shr_last_cache; i <<= 1) in cpuid_pass3()
2600 cpi->cpi_last_lvl_cacheid = cpi->cpi_apicid >> shft; in cpuid_pass3()
2606 if ((cpi->cpi_xmaxeax & 0x80000000) == 0) { in cpuid_pass3()
2607 fabricate_brandstr(cpi); in cpuid_pass3()
2615 if (cpi->cpi_brandstr[0]) { in cpuid_pass3()
2616 size_t maxlen = sizeof (cpi->cpi_brandstr); in cpuid_pass3()
2619 dst = src = (char *)cpi->cpi_brandstr; in cpuid_pass3()
2662 while (--dst > cpi->cpi_brandstr) in cpuid_pass3()
2668 fabricate_brandstr(cpi); in cpuid_pass3()
2670 cpi->cpi_pass = 3; in cpuid_pass3()
2682 struct cpuid_info *cpi; in cpuid_pass4() local
2687 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_pass4()
2689 ASSERT(cpi->cpi_pass == 3); in cpuid_pass4()
2691 if (cpi->cpi_maxeax >= 1) { in cpuid_pass4()
2692 uint32_t *edx = &cpi->cpi_support[STD_EDX_FEATURES]; in cpuid_pass4()
2693 uint32_t *ecx = &cpi->cpi_support[STD_ECX_FEATURES]; in cpuid_pass4()
2694 uint32_t *ebx = &cpi->cpi_support[STD_EBX_FEATURES]; in cpuid_pass4()
2696 *edx = CPI_FEATURES_EDX(cpi); in cpuid_pass4()
2697 *ecx = CPI_FEATURES_ECX(cpi); in cpuid_pass4()
2698 *ebx = CPI_FEATURES_7_0_EBX(cpi); in cpuid_pass4()
2824 if (cpi->cpi_xmaxeax < 0x80000001) in cpuid_pass4()
2827 switch (cpi->cpi_vendor) { in cpuid_pass4()
2841 edx = &cpi->cpi_support[AMD_EDX_FEATURES]; in cpuid_pass4()
2842 ecx = &cpi->cpi_support[AMD_ECX_FEATURES]; in cpuid_pass4()
2844 *edx = CPI_FEATURES_XTD_EDX(cpi); in cpuid_pass4()
2845 *ecx = CPI_FEATURES_XTD_ECX(cpi); in cpuid_pass4()
2850 switch (cpi->cpi_vendor) { in cpuid_pass4()
2897 switch (cpi->cpi_vendor) { in cpuid_pass4()
2928 cpi->cpi_support[TM_EDX_FEATURES] = cp.cp_edx; in cpuid_pass4()
2936 cpi->cpi_pass = 4; in cpuid_pass4()
2952 struct cpuid_info *cpi; in cpuid_insn() local
2957 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_insn()
2965 if (cp->cp_eax <= cpi->cpi_maxeax && cp->cp_eax < NMAX_CPI_STD) in cpuid_insn()
2966 xcp = &cpi->cpi_std[cp->cp_eax]; in cpuid_insn()
2967 else if (cp->cp_eax >= 0x80000000 && cp->cp_eax <= cpi->cpi_xmaxeax && in cpuid_insn()
2969 xcp = &cpi->cpi_extd[cp->cp_eax - 0x80000000]; in cpuid_insn()
3035 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_syscall32_insn() local
3037 if (cpi->cpi_vendor == X86_VENDOR_AMD && in cpuid_syscall32_insn()
3038 cpi->cpi_xmaxeax >= 0x80000001 && in cpuid_syscall32_insn()
3039 (CPI_FEATURES_XTD_EDX(cpi) & CPUID_AMD_EDX_SYSC)) in cpuid_syscall32_insn()
3049 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_getidstr() local
3059 return (snprintf(s, n, fmt_ht, cpi->cpi_chipid, in cpuid_getidstr()
3060 cpi->cpi_vendorstr, cpi->cpi_std[1].cp_eax, in cpuid_getidstr()
3061 cpi->cpi_family, cpi->cpi_model, in cpuid_getidstr()
3062 cpi->cpi_step, cpu->cpu_type_info.pi_clock)); in cpuid_getidstr()
3064 cpi->cpi_vendorstr, cpi->cpi_std[1].cp_eax, in cpuid_getidstr()
3065 cpi->cpi_family, cpi->cpi_model, in cpuid_getidstr()
3066 cpi->cpi_step, cpu->cpu_type_info.pi_clock)); in cpuid_getidstr()
3164 struct cpuid_info *cpi; in cpuid_getsocketstr() local
3167 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_getsocketstr()
3171 socketstr = _cpuid_sktstr(cpi->cpi_vendor, cpi->cpi_family, in cpuid_getsocketstr()
3172 cpi->cpi_model, cpi->cpi_step); in cpuid_getsocketstr()
3251 struct cpuid_info *cpi; in cpuid_have_cr8access()
3254 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_have_cr8access()
3255 if (cpi->cpi_vendor == X86_VENDOR_AMD && cpi->cpi_maxeax >= 1 && in cpuid_have_cr8access()
3256 (CPI_FEATURES_XTD_ECX(cpi) & CPUID_AMD_ECX_CR8D) != 0) in cpuid_have_cr8access()
3276 struct cpuid_info *cpi; in cpuid_get_addrsize() local
3280 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_get_addrsize()
3285 *pabits = cpi->cpi_pabits; in cpuid_get_addrsize()
3287 *vabits = cpi->cpi_vabits; in cpuid_get_addrsize()
3300 struct cpuid_info *cpi; in cpuid_get_dtlb_nent() local
3305 cpi = cpu->cpu_m.mcpu_cpi; in cpuid_get_dtlb_nent()
3312 if (cpi->cpi_xmaxeax >= 0x80000006) { in cpuid_get_dtlb_nent()
3313 struct cpuid_regs *cp = &cpi->cpi_extd[6]; in cpuid_get_dtlb_nent()
3347 if (cpi->cpi_xmaxeax >= 0x80000005) { in cpuid_get_dtlb_nent()
3348 struct cpuid_regs *cp = &cpi->cpi_extd[5]; in cpuid_get_dtlb_nent()
3376 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in cpuid_opteron_erratum() local
3383 if (cpi->cpi_vendor != X86_VENDOR_AMD || in cpuid_opteron_erratum()
3384 cpi->cpi_family == 4 || cpi->cpi_family == 5 || in cpuid_opteron_erratum()
3385 cpi->cpi_family == 6) in cpuid_opteron_erratum()
3389 eax = cpi->cpi_std[1].cp_eax; in cpuid_opteron_erratum()
3429 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3435 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3439 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3456 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3460 return (cpi->cpi_family <= 0x11); in cpuid_opteron_erratum()
3472 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3482 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3542 return (cpi->cpi_family < 0x10 || cpi->cpi_family == 0x11); in cpuid_opteron_erratum()
3546 return (cpi->cpi_family < 0x10); in cpuid_opteron_erratum()
3552 if (CPI_FAMILY(cpi) == 0xf) { in cpuid_opteron_erratum()
3568 return (cpi->cpi_family == 0x10 && cpi->cpi_model < 4); in cpuid_opteron_erratum()
3576 return (cpi->cpi_family == 0x10 || cpi->cpi_family == 0x12); in cpuid_opteron_erratum()
3594 struct cpuid_info *cpi; in osvw_opteron_erratum() local
3600 cpi = cpu->cpu_m.mcpu_cpi; in osvw_opteron_erratum()
3604 osvwfeature = cpi->cpi_extd[1].cp_ecx & CPUID_AMD_ECX_OSVW; in osvw_opteron_erratum()
3608 (cpi->cpi_extd[1].cp_ecx & CPUID_AMD_ECX_OSVW)); in osvw_opteron_erratum()
3833 intel_cpuid_4_cache_info(struct cachetab *ct, struct cpuid_info *cpi) in intel_cpuid_4_cache_info() argument
3838 for (i = 0; i < cpi->cpi_std_4_size; i++) { in intel_cpuid_4_cache_info()
3839 level = CPI_CACHE_LVL(cpi->cpi_std_4[i]); in intel_cpuid_4_cache_info()
3842 ct->ct_assoc = CPI_CACHE_WAYS(cpi->cpi_std_4[i]) + 1; in intel_cpuid_4_cache_info()
3844 CPI_CACHE_COH_LN_SZ(cpi->cpi_std_4[i]) + 1; in intel_cpuid_4_cache_info()
3846 (CPI_CACHE_PARTS(cpi->cpi_std_4[i]) + 1) * in intel_cpuid_4_cache_info()
3848 (cpi->cpi_std_4[i]->cp_ecx + 1); in intel_cpuid_4_cache_info()
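
intel_cpuid_4_cache_info() computes each cache's size from deterministic cache leaf 4 as (ways + 1) * (partitions + 1) * (line size + 1) * (sets + 1), with ECX holding the set count. A hedged stand-alone walk over the subleaves (Intel only; the bit positions are from the SDM rather than this listing), assuming GCC/Clang's <cpuid.h>:

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx, i;

        if (__get_cpuid_max(0, 0) < 4)
            return (1);

        for (i = 0; ; i++) {
            __cpuid_count(4, i, eax, ebx, ecx, edx);
            if ((eax & 0x1f) == 0)      /* cache type 0: no more caches */
                break;

            unsigned int level = (eax >> 5) & 0x7;
            unsigned int ways  = ((ebx >> 22) & 0x3ff) + 1;
            unsigned int parts = ((ebx >> 12) & 0x3ff) + 1;
            unsigned int line  = (ebx & 0xfff) + 1;
            unsigned int sets  = ecx + 1;

            printf("L%u cache: %u KB, %u-way, %u-byte lines\n",
                level, ways * parts * line * sets / 1024, ways, line);
        }
        return (0);
    }
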
3867 intel_walk_cacheinfo(struct cpuid_info *cpi, in intel_walk_cacheinfo() argument
3875 if ((dp = cpi->cpi_cacheinfo) == NULL) in intel_walk_cacheinfo()
3877 for (i = 0; i < cpi->cpi_ncache; i++, dp++) { in intel_walk_cacheinfo()
3885 if (*dp == 0x49 && cpi->cpi_maxeax >= 0x4 && in intel_walk_cacheinfo()
3886 intel_cpuid_4_cache_info(&des_49_ct, cpi) == 1) { in intel_walk_cacheinfo()
3916 cyrix_walk_cacheinfo(struct cpuid_info *cpi, in cyrix_walk_cacheinfo() argument
3923 if ((dp = cpi->cpi_cacheinfo) == NULL) in cyrix_walk_cacheinfo()
3925 for (i = 0; i < cpi->cpi_ncache; i++, dp++) { in cyrix_walk_cacheinfo()
4062 amd_cache_info(struct cpuid_info *cpi, dev_info_t *devi) in amd_cache_info() argument
4066 if (cpi->cpi_xmaxeax < 0x80000005) in amd_cache_info()
4068 cp = &cpi->cpi_extd[5]; in amd_cache_info()
4085 switch (cpi->cpi_vendor) { in amd_cache_info()
4088 if (cpi->cpi_family >= 5) { in amd_cache_info()
4128 if (cpi->cpi_xmaxeax < 0x80000006) in amd_cache_info()
4130 cp = &cpi->cpi_extd[6]; in amd_cache_info()
4168 x86_which_cacheinfo(struct cpuid_info *cpi) in x86_which_cacheinfo() argument
4170 switch (cpi->cpi_vendor) { in x86_which_cacheinfo()
4172 if (cpi->cpi_maxeax >= 2) in x86_which_cacheinfo()
4180 if (cpi->cpi_family > 5 || in x86_which_cacheinfo()
4181 (cpi->cpi_family == 5 && cpi->cpi_model >= 1)) in x86_which_cacheinfo()
4185 if (cpi->cpi_family >= 5) in x86_which_cacheinfo()
4200 if (cpi->cpi_xmaxeax >= 0x80000005) in x86_which_cacheinfo()
4202 else if (cpi->cpi_vendor == X86_VENDOR_Cyrix) in x86_which_cacheinfo()
4204 else if (cpi->cpi_maxeax >= 2) in x86_which_cacheinfo()
4213 struct cpuid_info *cpi) in cpuid_set_cpu_properties() argument
4245 "vendor-id", cpi->cpi_vendorstr); in cpuid_set_cpu_properties()
4247 if (cpi->cpi_maxeax == 0) { in cpuid_set_cpu_properties()
4255 "family", CPI_FAMILY(cpi)); in cpuid_set_cpu_properties()
4257 "cpu-model", CPI_MODEL(cpi)); in cpuid_set_cpu_properties()
4259 "stepping-id", CPI_STEP(cpi)); in cpuid_set_cpu_properties()
4262 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4272 "type", CPI_TYPE(cpi)); in cpuid_set_cpu_properties()
4275 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4278 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4286 "ext-family", CPI_FAMILY_XTD(cpi)); in cpuid_set_cpu_properties()
4289 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4291 create = IS_EXTENDED_MODEL_INTEL(cpi); in cpuid_set_cpu_properties()
4294 create = CPI_FAMILY(cpi) == 0xf; in cpuid_set_cpu_properties()
4302 "ext-model", CPI_MODEL_XTD(cpi)); in cpuid_set_cpu_properties()
4305 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4310 create = cpi->cpi_xmaxeax >= 0x80000001; in cpuid_set_cpu_properties()
4318 "generation", BITX((cpi)->cpi_extd[1].cp_eax, 11, 8)); in cpuid_set_cpu_properties()
4321 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4327 create = cpi->cpi_family > 6 || in cpuid_set_cpu_properties()
4328 (cpi->cpi_family == 6 && cpi->cpi_model >= 8); in cpuid_set_cpu_properties()
4331 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4337 if (create && cpi->cpi_brandid != 0) { in cpuid_set_cpu_properties()
4339 "brand-id", cpi->cpi_brandid); in cpuid_set_cpu_properties()
4343 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4348 create = IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4351 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4359 "chunks", CPI_CHUNKS(cpi)); in cpuid_set_cpu_properties()
4361 "apic-id", cpi->cpi_apicid); in cpuid_set_cpu_properties()
4362 if (cpi->cpi_chipid >= 0) { in cpuid_set_cpu_properties()
4364 "chip#", cpi->cpi_chipid); in cpuid_set_cpu_properties()
4366 "clog#", cpi->cpi_clogid); in cpuid_set_cpu_properties()
4372 "cpuid-features", CPI_FEATURES_EDX(cpi)); in cpuid_set_cpu_properties()
4376 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4378 create = IS_NEW_F6(cpi) || cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4381 create = cpi->cpi_family >= 0xf; in cpuid_set_cpu_properties()
4389 "cpuid-features-ecx", CPI_FEATURES_ECX(cpi)); in cpuid_set_cpu_properties()
4392 switch (cpi->cpi_vendor) { in cpuid_set_cpu_properties()
4398 create = cpi->cpi_xmaxeax >= 0x80000001; in cpuid_set_cpu_properties()
4406 "ext-cpuid-features", CPI_FEATURES_XTD_EDX(cpi)); in cpuid_set_cpu_properties()
4408 "ext-cpuid-features-ecx", CPI_FEATURES_XTD_ECX(cpi)); in cpuid_set_cpu_properties()
4418 "brand-string", cpi->cpi_brandstr); in cpuid_set_cpu_properties()
4423 switch (x86_which_cacheinfo(cpi)) { in cpuid_set_cpu_properties()
4425 intel_walk_cacheinfo(cpi, cpu_devi, add_cacheent_props); in cpuid_set_cpu_properties()
4428 cyrix_walk_cacheinfo(cpi, cpu_devi, add_cacheent_props); in cpuid_set_cpu_properties()
4431 amd_cache_info(cpi, cpu_devi); in cpuid_set_cpu_properties()
4483 amd_l2cacheinfo(struct cpuid_info *cpi, struct l2info *l2i) in amd_l2cacheinfo() argument
4490 if (cpi->cpi_xmaxeax < 0x80000006) in amd_l2cacheinfo()
4492 cp = &cpi->cpi_extd[6]; in amd_l2cacheinfo()
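
amd_l2cacheinfo() reads extended leaf 0x80000006; on AMD, ECX packs the L2 size in KB in bits 31:16, an encoded associativity in bits 15:12 and the line size in bits 7:0 (bit positions taken from the AMD manual, hedged accordingly). A minimal reader, assuming GCC/Clang's <cpuid.h>:

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(0x80000006, &eax, &ebx, &ecx, &edx))
            return (1);

        printf("L2: %u KB, assoc code 0x%x, %u-byte lines\n",
            ecx >> 16,            /* ECX[31:16] size in KB */
            (ecx >> 12) & 0xf,    /* ECX[15:12] encoded associativity */
            ecx & 0xff);          /* ECX[7:0]  line size in bytes */
        return (0);
    }
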
4514 struct cpuid_info *cpi = cpu->cpu_m.mcpu_cpi; in getl2cacheinfo() local
4522 switch (x86_which_cacheinfo(cpi)) { in getl2cacheinfo()
4524 intel_walk_cacheinfo(cpi, l2i, intel_l2cinfo); in getl2cacheinfo()
4527 cyrix_walk_cacheinfo(cpi, l2i, intel_l2cinfo); in getl2cacheinfo()
4530 amd_l2cacheinfo(cpi, l2i); in getl2cacheinfo()
4634 struct cpuid_info *cpi; in cpuid_deep_cstates_supported() local
4639 cpi = CPU->cpu_m.mcpu_cpi; in cpuid_deep_cstates_supported()
4644 switch (cpi->cpi_vendor) { in cpuid_deep_cstates_supported()
4646 if (cpi->cpi_xmaxeax < 0x80000007) in cpuid_deep_cstates_supported()
4724 struct cpuid_info *cpi; in cpuid_arat_supported() local
4730 cpi = CPU->cpu_m.mcpu_cpi; in cpuid_arat_supported()
4732 switch (cpi->cpi_vendor) { in cpuid_arat_supported()
4738 if (cpi->cpi_maxeax >= 6) { in cpuid_arat_supported()
4756 struct cpuid_info *cpi = cp->cpu_m.mcpu_cpi; in cpuid_iepb_supported() local
4770 if ((cpi->cpi_vendor != X86_VENDOR_Intel) || (cpi->cpi_maxeax < 6)) in cpuid_iepb_supported()
4790 struct cpuid_info *cpi = CPU->cpu_m.mcpu_cpi; in cpuid_deadline_tsc_supported() local
4796 switch (cpi->cpi_vendor) { in cpuid_deadline_tsc_supported()
4798 if (cpi->cpi_maxeax >= 1) { in cpuid_deadline_tsc_supported()
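
The last two routines, cpuid_arat_supported() and cpuid_deadline_tsc_supported(), show up here only through their vendor and maxeax guards; the feature bits themselves are tested on lines that do not mention cpi. For reference, a hedged user-space check of the two features as I understand them (ARAT is leaf 6 EAX bit 2, the TSC-deadline timer is leaf 1 ECX bit 24):

    #include <stdio.h>
    #include <cpuid.h>

    int
    main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            printf("TSC-deadline timer: %s\n",
                (ecx & (1u << 24)) ? "yes" : "no");

        if (__get_cpuid(6, &eax, &ebx, &ecx, &edx))
            printf("always-running APIC timer (ARAT): %s\n",
                (eax & (1u << 2)) ? "yes" : "no");
        return (0);
    }
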