/freebsd/sys/amd64/amd64/ |
H A D | initcpu.c |
    279 uint32_t cr4; in initializecpu() local
    282 cr4 = rcr4(); in initializecpu()
    284 cr4 |= CR4_FXSR | CR4_XMM; in initializecpu()
    288 cr4 |= CR4_FSGSBASE; in initializecpu()
    291 cr4 |= CR4_PKE; in initializecpu()
    313 cr4 |= CR4_SMEP; in initializecpu()
    315 cr4 |= CR4_SMAP; in initializecpu()
    318 load_cr4(cr4); in initializecpu()
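The initcpu.c hits above are the usual CR4 read-modify-write: read the register with rcr4(), OR in the feature bits the CPU actually supports, and commit everything with one load_cr4(). A minimal sketch of that pattern follows; the has_* parameters are hypothetical stand-ins for the CPUID-derived checks that initializecpu() really performs, and the headers named are the ones that provide rcr4()/load_cr4() and the CR4_* masks.

    #include <sys/types.h>
    #include <machine/cpufunc.h>    /* rcr4(), load_cr4() */
    #include <machine/specialreg.h> /* CR4_* bit definitions */

    /*
     * Sketch only: enable CR4 features in one shot, in the style of
     * initializecpu().  The has_* flags stand in for the real CPUID checks.
     */
    static void
    cr4_enable_features(int has_fsgsbase, int has_smep, int has_smap)
    {
            uint64_t cr4;

            cr4 = rcr4();
            cr4 |= CR4_FXSR | CR4_XMM;      /* FXSAVE/FXRSTOR and SSE exceptions */
            if (has_fsgsbase)
                    cr4 |= CR4_FSGSBASE;    /* RD/WRFSBASE, RD/WRGSBASE in user mode */
            if (has_smep)
                    cr4 |= CR4_SMEP;        /* supervisor may not execute user pages */
            if (has_smap)
                    cr4 |= CR4_SMAP;        /* supervisor may not touch user pages */
            load_cr4(cr4);                  /* single write commits all bits */
    }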
|
H A D | locore.S |
    118 movq %cr4,%rax
    137 movl %cr4,%eax
    139 movl %eax,%cr4
    150 movl %cr4,%eax /* finally safe to switch bit */
    152 movl %eax,%cr4
    172 movq %rax,%cr4 /* re-enable paging controls */
|
H A D | mpboot.S |
    95 mov %cr4, %eax
    100 1: mov %eax, %cr4
|
H A D | xen-locore.S |
    132 movl %cr4, %eax
    134 movl %eax, %cr4
|
H A D | fpu.c |
    359 uint64_t cr4; in fpuinit() local
    368 cr4 = rcr4(); in fpuinit()
    379 cr4 &= ~CR4_PKE; in fpuinit()
    383 load_cr4(cr4 | CR4_XSAVE); in fpuinit()
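The fpu.c hits show the same idiom during FPU bring-up: fpuinit() keeps CR4.PKE clear at this stage and sets CR4.OSXSAVE (CR4_XSAVE), without which the XSAVE/XRSTOR/XGETBV instructions fault with #UD. A sketch of just that CR4 handling; the use_xsave decision is passed in because the surrounding probing logic is not reproduced here.

    /*
     * Sketch only: the CR4 adjustments visible in fpuinit().  Protection
     * keys stay off here; OSXSAVE is enabled when the XSAVE path is used.
     */
    static void
    fpu_cr4_setup(int use_xsave)
    {
            uint64_t cr4;

            cr4 = rcr4();
            cr4 &= ~CR4_PKE;                /* PKE is not enabled at this point */
            if (use_xsave)
                    cr4 |= CR4_XSAVE;       /* CR4.OSXSAVE: XSAVE family usable */
            load_cr4(cr4);
    }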
|
H A D | cpu_switch.S |
    328 movq %cr4,%rax
    449 movq %rax,%cr4
|
/freebsd/sys/i386/i386/ |
H A D | mpboot.S |
    78 movl %cr4,%eax
    88 3: movl %eax,%cr4
    95 movl %cr4, %eax
    97 movl %eax, %cr4
|
H A D | swtch.S |
    360 movl %cr4,%eax
    414 movl %eax,%cr4
|
/freebsd/contrib/arm-optimized-routines/math/aarch64/sve/ |
H A D | sv_sincospif_common.h |
    53 svfloat32_t cr4 = svmul_x (pt, cr2, cr2); in sv_sincospif_inline() local
    73 svfloat32_t cp = svmla_x (pg, cp23, cr4, cp45); in sv_sincospif_inline()
    76 cp = svmla_x (pg, cp01, cr4, cp); in sv_sincospif_inline()
|
H A D | sv_sincospi_common.h |
    61 svfloat64_t cr4 = svmul_x (pt, cr2, cr2); in sv_sincospi_inline() local
    73 pt, sv_lw_pw_horner_9_f64_x (pg, cr2, cr4, &(d->c0), &(d->c1)), cr); in sv_sincospi_inline()
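In these two sincospi headers (and the AdvSIMD variants further down), cr4 is not the x86 control register: it is cr2 squared, i.e. cr^4, the even power fed to a pairwise-Horner polynomial evaluation of the cosine term. A scalar sketch of that scheme for ten coefficients, matching the shape of the _9_ helpers; the real code works on SVE/Neon vectors and uses fused multiply-adds.

    /*
     * Sketch only: pairwise Horner evaluation of c0 + c1*z + ... + c9*z^9,
     * given z (= r*r, "cr2") and z2 (= z*z, "cr4").  Adjacent coefficients
     * are paired with z first, then the pairs are folded with z2, which
     * halves the serial dependency chain versus plain Horner in z.
     */
    static double
    pw_horner_9(double z, double z2, const double c[10])
    {
            double p01 = c[0] + c[1] * z;
            double p23 = c[2] + c[3] * z;
            double p45 = c[4] + c[5] * z;
            double p67 = c[6] + c[7] * z;
            double p89 = c[8] + c[9] * z;
            double p;

            p = p89;
            p = p67 + z2 * p;
            p = p45 + z2 * p;
            p = p23 + z2 * p;
            p = p01 + z2 * p;
            return p;
    }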
|
/freebsd/stand/efi/loader/arch/i386/ |
H A D | amd64_tramp.S |
    47 movl %cr4, %eax /* PAE may be disabled, enable it. */
    49 movl %eax, %cr4
|
/freebsd/contrib/arm-optimized-routines/math/aarch64/advsimd/ |
H A D | v_sincospif_common.h |
    46 float32x4_t cr4 = vmulq_f32 (cr2, cr2); in v_sincospif_inline() local
    49 float32x4_t cc = vmulq_f32 (v_pw_horner_5_f32 (cr2, cr4, d->poly), cr); in v_sincospif_inline()
|
H A D | v_sincospi_common.h |
    52 float64x2_t cr4 = vmulq_f64 (cr2, cr2); in v_sincospi_inline() local
    55 float64x2_t cc = vmulq_f64 (v_pw_horner_9_f64 (cr2, cr4, d->poly), cr); in v_sincospi_inline()
|
/freebsd/stand/i386/libi386/ |
H A D | amd64_tramp.S |
    84 movl %cr4, %eax
    86 movl %eax, %cr4
|
/freebsd/sys/contrib/xen/hvm/ |
H A D | hvm_vcpu.h |
    42 uint32_t cr4; member
    106 uint64_t cr4; member
|
/freebsd/sys/amd64/acpica/ |
H A D | acpi_wakecode.S |
    153 mov %cr4, %eax
    160 1: mov %eax, %cr4
|
/freebsd/sys/amd64/vmm/ |
H A D | x86.c |
    80 uint64_t cr4; in x86_emulate_cpuid() local
    344 VM_REG_GUEST_CR4, &cr4); in x86_emulate_cpuid()
    348 if (cr4 & CR4_XSAVE) in x86_emulate_cpuid()
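The x86.c hits are the CPUID-emulation side of CR4.OSXSAVE: CPUID.01H:ECX[27] (OSXSAVE) must reflect the guest's own CR4 setting, so x86_emulate_cpuid() fetches VM_REG_GUEST_CR4 and recomputes the bit. A hedged sketch of that adjustment; the bit mask is defined locally here rather than using whatever constant the vmm code actually uses.

    #define CPUID01_ECX_OSXSAVE     (1u << 27)      /* CPUID.01H:ECX bit 27 */

    /*
     * Sketch only: mirror the guest's CR4.OSXSAVE into the OSXSAVE feature
     * bit returned for CPUID leaf 1, as the emulation above does.
     */
    static void
    cpuid_reflect_osxsave(uint64_t guest_cr4, uint32_t *ecx)
    {
            *ecx &= ~CPUID01_ECX_OSXSAVE;
            if (guest_cr4 & CR4_XSAVE)
                    *ecx |= CPUID01_ECX_OSXSAVE;
    }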
|
/freebsd/sys/x86/x86/ |
H A D | x86_mem.c |
    324 u_long cr0, cr4; in x86_mrstoreone() local
    331 cr4 = rcr4(); in x86_mrstoreone()
    332 load_cr4(cr4 & ~CR4_PGE); in x86_mrstoreone()
    417 load_cr4(cr4); in x86_mrstoreone()
|
/freebsd/sys/amd64/include/ |
H A D | cpufunc.h |
    484 uint64_t cr4; in invltlb_glob() local
    486 cr4 = rcr4(); in invltlb_glob()
    487 load_cr4(cr4 & ~CR4_PGE); in invltlb_glob()
    495 load_cr4(cr4 | CR4_PGE); in invltlb_glob()
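invltlb_glob(), shown above, relies on the architectural rule that a CR4 write which changes PGE invalidates the whole TLB, global entries included, which a plain CR3 reload would not do; x86_mrstoreone() uses the same trick around its MTRR update. A minimal sketch of the toggle; the real inline may add serialization around the pair of writes.

    /*
     * Sketch only: flush every TLB entry, including global pages, by
     * clearing and re-setting CR4.PGE, in the style of invltlb_glob().
     */
    static void
    tlb_flush_global(void)
    {
            uint64_t cr4;

            cr4 = rcr4();
            load_cr4(cr4 & ~CR4_PGE);       /* PGE 1->0: full TLB invalidation */
            load_cr4(cr4 | CR4_PGE);        /* re-enable global pages */
    }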
|
/freebsd/sys/amd64/vmm/intel/ |
H A D | vmcs.c |
    344 u_long cr0, cr4, efer; in vmcs_init() local
    374 cr4 = vmm_get_host_cr4() | CR4_VMXE; in vmcs_init()
    375 if ((error = vmwrite(VMCS_HOST_CR4, cr4)) != 0) in vmcs_init()
|
H A D | vmx.h | 174 u_long vmx_fix_cr4(u_long cr4);
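vmx_fix_cr4() exists because, once VMX is enabled, CR4 may only hold values allowed by the IA32_VMX_CR4_FIXED0/FIXED1 MSRs: bits set in FIXED0 must be 1 and bits clear in FIXED1 must be 0, which is also what forces CR4_VMXE on in the vmcs.c hit above. A sketch of that conventional fix-up rule, with the MSR values passed in rather than read here; it is not a copy of the FreeBSD implementation.

    /*
     * Sketch only: constrain a CR4 value to what the VMX capability MSRs
     * allow.  fixed0/fixed1 stand for rdmsr(IA32_VMX_CR4_FIXED0/FIXED1).
     */
    static u_long
    cr4_apply_vmx_fixed_bits(u_long cr4, u_long fixed0, u_long fixed1)
    {
            cr4 |= fixed0;          /* bits that must be 1 while VMX is on */
            cr4 &= fixed1;          /* bits clear in fixed1 must be 0 */
            return (cr4);
    }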
|
/freebsd/sys/amd64/vmm/amd/ |
H A D | vmcb.c |
    190 *retval = state->cr4; in vmcb_read()
    300 state->cr4 = val; in vmcb_write()
|
/freebsd/sys/i386/acpica/ |
H A D | acpi_wakecode.S | 156 mov %eax, %cr4
|
/freebsd/sys/contrib/xen/arch-x86/hvm/ |
H A D | save.h |
    83 uint64_t cr4; member
    197 uint64_t cr4; member
|
/freebsd/sys/contrib/xen/ |
H A D | vm_event.h | 211 uint64_t cr4; member
|