Searched refs: X86_CR0_PG (Results 1 – 16 of 16) sorted by relevance
/linux/arch/x86/platform/pvh/
  head.S
    162  mov $(X86_CR0_PG | X86_CR0_PE), %eax
    201  or $(X86_CR0_PG | X86_CR0_PE), %eax
    213  and $~X86_CR0_PG, %eax

/linux/arch/x86/include/uapi/asm/
  processor-flags.h
    72   #define X86_CR0_PG _BITUL(X86_CR0_PG_BIT)  macro
    179  X86_CR0_PG)

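Note: the hit at line 72 is the definition site. The standalone C sketch below reproduces that chain for illustration; the _BITUL() expansion is assumed to match include/uapi/linux/const.h (a shifted unsigned long), while PG being CR0 bit 31 and PE being bit 0 are architectural facts.

    #include <stdio.h>

    #define _BITUL(x)      (1UL << (x))  /* assumed expansion, as in const.h */
    #define X86_CR0_PE_BIT 0             /* Protection Enable */
    #define X86_CR0_PE     _BITUL(X86_CR0_PE_BIT)
    #define X86_CR0_PG_BIT 31            /* Paging */
    #define X86_CR0_PG     _BITUL(X86_CR0_PG_BIT)

    int main(void)
    {
        printf("X86_CR0_PG = 0x%lx\n", X86_CR0_PG); /* 0x80000000 */
        return 0;
    }
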
/linux/arch/x86/realmode/rm/
  trampoline_64.S
    92   movl $(CR0_STATE & ~X86_CR0_PG), %eax
    201  movl $(CR0_STATE & ~X86_CR0_PG), %eax

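Note: both trampoline hits load CR0 with everything in CR0_STATE except the paging bit, leaving the CPU in protected mode with paging off. A hedged sketch of what that operand evaluates to, assuming CR0_STATE carries the usual default bits (PE | MP | ET | NE | WP | AM | PG) from arch/x86/include/asm/processor-flags.h:

    #include <stdio.h>

    #define X86_CR0_PE (1UL << 0)
    #define X86_CR0_MP (1UL << 1)
    #define X86_CR0_ET (1UL << 4)
    #define X86_CR0_NE (1UL << 5)
    #define X86_CR0_WP (1UL << 16)
    #define X86_CR0_AM (1UL << 18)
    #define X86_CR0_PG (1UL << 31)

    /* Assumed expansion of CR0_STATE (asm/processor-flags.h). */
    #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | X86_CR0_NE | \
                       X86_CR0_WP | X86_CR0_AM | X86_CR0_PG)

    int main(void)
    {
        /* The immediate the trampoline loads: everything but paging. */
        printf("0x%08lx\n", CR0_STATE & ~X86_CR0_PG); /* 0x00050033 */
        return 0;
    }
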
  reboot.S
    33   andl $~X86_CR0_PG, %eax

/linux/arch/x86/kernel/
  relocate_kernel_32.S
    117  andl $~(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
    194  orl $X86_CR0_PG, %eax

  head_32.S
    153  movl $(CR0_STATE & ~X86_CR0_PG),%eax

  relocate_kernel_64.S
    138  orl $(X86_CR0_PG | X86_CR0_PE), %eax

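Note: the assembly hits above share one read-modify-write idiom on CR0: read the register, AND out or OR in X86_CR0_PG, write it back. A plain-C rendering of that idiom on an ordinary variable (in kernel C the read_cr0()/write_cr0() helpers stand in for the mov instructions):

    #include <assert.h>

    #define X86_CR0_PE (1UL << 0)   /* Protection Enable */
    #define X86_CR0_PG (1UL << 31)  /* Paging */

    int main(void)
    {
        unsigned long cr0 = X86_CR0_PE | X86_CR0_PG; /* stand-in for "mov %cr0, %eax" */

        cr0 &= ~X86_CR0_PG;             /* reboot.S line 33: paging off */
        assert(!(cr0 & X86_CR0_PG));

        cr0 |= X86_CR0_PG | X86_CR0_PE; /* relocate_kernel_64.S line 138: back on */
        assert(cr0 & X86_CR0_PG);
        return 0;
    }
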
/linux/tools/testing/selftests/kvm/x86_64/
  set_sregs_test.c

/linux/arch/x86/boot/compressed/
  mem_encrypt.S
    282  movl $(X86_CR0_PG | X86_CR0_PE), %ecx /* Enable Paging and Protected mode */

/linux/arch/x86/kvm/vmx/
  nested.h
    272  fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);  in nested_guest_cr0_valid()

  vmx.c
    153   (KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
    3312  old_cr0_pg = kvm_read_cr0_bits(vcpu, X86_CR0_PG);  in vmx_set_cr0()
    3336  if (!old_cr0_pg && (cr0 & X86_CR0_PG))  in vmx_set_cr0()
    3338  else if (old_cr0_pg && !(cr0 & X86_CR0_PG))  in vmx_set_cr0()
    3368  if (!(cr0 & X86_CR0_PG)) {  in vmx_set_cr0()
    3380  if ((old_cr0_pg ^ cr0) & X86_CR0_PG)  in vmx_set_cr0()
    3387  if (!(old_cr0_pg & X86_CR0_PG) && (cr0 & X86_CR0_PG))  in vmx_set_cr0()

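Note: the vmx_set_cr0() hits at lines 3312 and 3380 show the usual idiom for reacting only to CR0.PG transitions: XOR the old value against the new one and test the PG bit. A self-contained sketch of just that test:

    #include <stdbool.h>
    #include <stdio.h>

    #define X86_CR0_PG (1UL << 31)

    /* Report whether a CR0 write toggles paging, as in vmx_set_cr0(). */
    static bool pg_changed(unsigned long old_cr0, unsigned long new_cr0)
    {
        return (old_cr0 ^ new_cr0) & X86_CR0_PG;
    }

    int main(void)
    {
        printf("%d\n", pg_changed(0, X86_CR0_PG));          /* 1: paging enabled */
        printf("%d\n", pg_changed(X86_CR0_PG, X86_CR0_PG)); /* 0: no transition */
        return 0;
    }
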
  nested.c
    3166  if (CC((vmcs12->guest_cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG))  in nested_vmx_check_guest_state()
    3170  CC(ia32e && !(vmcs12->guest_cr0 & X86_CR0_PG))  in nested_vmx_check_guest_state()
    3186  CC(((vmcs12->guest_cr0 & X86_CR0_PG) &&  in nested_vmx_check_guest_state()
    7198  #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)  in nested_vmx_setup_cr_fixed()

/linux/arch/x86/kvm/
  x86.c
    1080   if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))  in kvm_is_valid_cr0()
    1097   if (!(cr0 & X86_CR0_PG))  in kvm_post_set_cr0()
    1106   if ((cr0 ^ old_cr0) & X86_CR0_PG) {  in kvm_post_set_cr0()
    1114   if (!(cr0 & X86_CR0_PG))  in kvm_post_set_cr0()
    1137   (cr0 & X86_CR0_PG)) {  in kvm_set_cr0()
    1147   if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&  in kvm_set_cr0()
    1152   if (!(cr0 & X86_CR0_PG) &&  in kvm_set_cr0()
    11867  if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {  in kvm_is_valid_sregs()
    11989  bool pae = (sregs2->cr0 & X86_CR0_PG) && (sregs2->cr4 & X86_CR4_PAE) &&  in __set_sregs2()
    12520  if (old_cr0 & X86_CR0_PG) {  in kvm_vcpu_reset()

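Note: the kvm_is_valid_cr0() hit at line 1080 enforces the same architectural invariant as the nested_vmx_check_guest_state() hit at line 3166: CR0.PG may only be set together with CR0.PE. A reduced standalone version of that check (the real kvm_is_valid_cr0() validates more than this; the hypothetical cr0_pg_pe_valid() below keeps only the excerpted condition):

    #include <assert.h>
    #include <stdbool.h>

    #define X86_CR0_PE (1UL << 0)
    #define X86_CR0_PG (1UL << 31)

    /* Paging without protected mode is architecturally invalid. */
    static bool cr0_pg_pe_valid(unsigned long cr0)
    {
        if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))
            return false;
        return true;
    }

    int main(void)
    {
        assert(!cr0_pg_pe_valid(X86_CR0_PG));             /* PG alone: reject */
        assert(cr0_pg_pe_valid(X86_CR0_PG | X86_CR0_PE)); /* PG + PE: fine */
        return 0;
    }
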
/linux/arch/x86/kvm/svm/
  svm.c
    1864  if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {  in svm_set_cr0()
    1870  if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {  in svm_set_cr0()
    1880  hcr0 |= X86_CR0_PG | X86_CR0_WP;  in svm_set_cr0()

/linux/arch/x86/include/asm/
  kvm_host.h
    131  | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))

/linux/arch/x86/kvm/mmu/
  mmu.c
    205  BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG);

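Note: the single mmu.c hit generates a bit-query helper via token pasting; BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG) expands to a ____is_cr0_pg() accessor. A reduced sketch of that macro style (the real struct is kvm_mmu_role_regs with more fields; this standalone version keeps only the name-generation idea):

    #include <stdbool.h>
    #include <stdio.h>

    #define X86_CR0_PG (1UL << 31)

    struct mmu_role_regs {
        unsigned long cr0;
    };

    /* Generate ____is_<reg>_<name>() returning whether a flag is set. */
    #define BUILD_MMU_ROLE_REGS_ACCESSOR(reg, name, flag)                 \
    static inline bool ____is_##reg##_##name(const struct mmu_role_regs *regs) \
    {                                                                     \
        return !!(regs->reg & (flag));                                    \
    }

    BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG)

    int main(void)
    {
        struct mmu_role_regs regs = { .cr0 = X86_CR0_PG };
        printf("%d\n", ____is_cr0_pg(&regs)); /* 1 */
        return 0;
    }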