Searched refs:EFER_LMA (Results 1 – 12 of 12) sorted by relevance
arch/x86/realmode/init.c:
152 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
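setup_real_mode() copies the kernel's live EFER into the trampoline header but strips LMA, since the trampoline starts executing before paging is enabled and LMA is a hardware-maintained status bit. A minimal sketch of that masking, with a hypothetical trampoline_header stand-in:

#include <stdint.h>

#define EFER_LMA (1ULL << 10)	/* Long Mode Active: hardware-owned status bit */

/* Hypothetical stand-in for the real trampoline_header. */
struct trampoline_header {
	uint64_t efer;
};

static void save_trampoline_efer(struct trampoline_header *th, uint64_t host_efer)
{
	/* The trampoline brings CPUs up in real mode, before paging, so
	 * the EFER image it loads must not claim long mode is active. */
	th->efer = host_efer & ~EFER_LMA;
}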
arch/x86/kvm/emulate.c:
742 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
750 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
759 if (efer & EFER_LMA) { in emulator_recalc_and_set_mode()
1453 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1502 if (!(efer & EFER_LMA)) in emulator_is_ssp_invalid()
1658 if (efer & EFER_LMA) in __load_segment_descriptor()
2373 if (efer & EFER_LMA) { in em_syscall()
2381 if (efer & EFER_LMA) { in em_syscall()
2423 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) && in em_sysenter()
2439 if (efer & EFER_LMA) { in em_sysenter()
[all …]
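The emulator derives its execution mode from EFER.LMA: with LMA set, the CS.L descriptor bit selects 64-bit versus compatibility mode; with LMA clear, CR0.PE selects protected versus real mode. A hedged sketch of that decision (the enum names are illustrative, not KVM's):

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_PE (1ULL << 0)
#define EFER_LMA   (1ULL << 10)

enum emul_mode { MODE_REAL, MODE_PROT, MODE_64 };	/* illustrative names */

static enum emul_mode recalc_mode(uint64_t efer, uint64_t cr0, bool cs_l)
{
	if (efer & EFER_LMA)	/* long mode: CS.L picks 64-bit vs. compat */
		return cs_l ? MODE_64 : MODE_PROT;
	return (cr0 & X86_CR0_PE) ? MODE_PROT : MODE_REAL;
}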
arch/x86/kvm/x86.h:
233 return !!(vcpu->arch.efer & EFER_LMA); in is_long_mode()
arch/x86/kvm/smm.c:
536 if (__kvm_emulate_msr_write(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA)) in rsm_load_state_64()
arch/x86/kvm/x86.c:
115 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
1731 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1768 efer &= ~EFER_LMA; in set_efer()
1769 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
12303 if (!(sregs->cr4 & X86_CR4_PAE) || !(sregs->efer & EFER_LMA)) in kvm_is_valid_sregs()
12312 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_is_valid_sregs()
12420 !(sregs2->efer & EFER_LMA); in __set_sregs2()
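The set_efer() hits (lines 1768–1769) show that a guest MSR write can never flip LMA: the bit is masked out of the incoming value and the current LMA state is carried over. A small sketch of that invariant, assuming a simplified reserved-bits check modeled on the line-115 initializer:

#include <stdint.h>

#define EFER_SCE (1ULL << 0)
#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static const uint64_t efer_reserved_bits =
	~(uint64_t)(EFER_SCE | EFER_LME | EFER_LMA);

/* Returns the EFER value to install, or cur_efer unchanged if the
 * write is rejected.  LMA is read-only from the guest's point of
 * view, so the current LMA value always wins. */
static uint64_t apply_guest_efer_write(uint64_t cur_efer, uint64_t new_efer)
{
	if (new_efer & efer_reserved_bits)
		return cur_efer;		/* reserved bit set: reject */
	new_efer &= ~EFER_LMA;			/* ignore the written LMA */
	new_efer |= cur_efer & EFER_LMA;	/* keep the real status */
	return new_efer;
}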
arch/x86/kvm/vmx/vmx.c:
1174 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
1176 if (guest_efer & EFER_LMA) in update_transition_efer()
1187 if (!(guest_efer & EFER_LMA)) in update_transition_efer()
3263 if (efer & EFER_LMA) in vmx_set_efer()
3268 if (KVM_BUG_ON(efer & EFER_LMA, vcpu->kvm)) in vmx_set_efer()
3292 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
3297 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
6477 vcpu->arch.efer | (EFER_LMA | EFER_LME)); in dump_vmcs()
6480 vcpu->arch.efer & ~(EFER_LMA | EFER_LME)); in dump_vmcs()
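update_transition_efer() treats LMA/LME as ignorable when comparing host and guest EFER, because VMX can switch those two bits via VM-entry/VM-exit controls; if nothing else differs, the per-transition EFER MSR load can be skipped. A hedged sketch of just that comparison (SCE handling and the CPU quirks the real code deals with are omitted):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static bool can_skip_efer_switch(uint64_t host_efer, uint64_t guest_efer)
{
	/* LMA/LME are switched by VMCS entry/exit controls anyway. */
	uint64_t ignore_bits = EFER_LMA | EFER_LME;

	return ((host_efer ^ guest_efer) & ~ignore_bits) == 0;
}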
arch/x86/kvm/vmx/nested.c:
2284 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2286 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2497 if (guest_efer & EFER_LMA) in prepare_vmcs02_early()
3117 !!(vcpu->arch.efer & EFER_LMA))) in nested_vmx_check_address_space_size()
3208 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
3333 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
4783 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4785 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
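nested_vmx_calc_efer() (lines 2284/2286) derives L2's EFER when vmcs12 does not load EFER on VM-entry: the "IA-32e mode guest" VM-entry control decides whether LMA and LME are forced on or off relative to the current EFER. A sketch, with ia32e_mode_guest standing in for the VM_ENTRY_IA32E_MODE control bit:

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static uint64_t calc_l2_efer(uint64_t l1_efer, bool ia32e_mode_guest)
{
	if (ia32e_mode_guest)	/* VM-entry switches the CPU to 64-bit */
		return l1_efer | (EFER_LMA | EFER_LME);
	return l1_efer & ~(EFER_LMA | EFER_LME);
}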
arch/x86/kvm/vmx/tdx.c:
696 vcpu->arch.efer = EFER_SCE | EFER_LME | EFER_LMA | EFER_NX; in tdx_vcpu_create()
arch/x86/include/asm/msr-index.h:
33 #define EFER_LMA (1<<_EFER_LMA) macro
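For reference, the EFER bit positions behind all of these tests (the kernel builds the masks as 1<<_EFER_x; 1ULL is used here so the sketch is self-contained), plus an is_long_mode()-style helper matching the arch/x86/kvm/x86.h hit above:

#include <stdint.h>

#define _EFER_SCE  0	/* SYSCALL/SYSRET enable */
#define _EFER_LME  8	/* Long Mode Enable */
#define _EFER_LMA 10	/* Long Mode Active (read-only status) */
#define _EFER_NX  11	/* No-eXecute enable */

#define EFER_SCE (1ULL << _EFER_SCE)
#define EFER_LME (1ULL << _EFER_LME)
#define EFER_LMA (1ULL << _EFER_LMA)
#define EFER_NX  (1ULL << _EFER_NX)

/* Same shape as KVM's is_long_mode(): !! normalizes the bit to 0/1. */
static int is_long_mode(uint64_t efer)
{
	return !!(efer & EFER_LMA);
}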
arch/x86/kvm/svm/svm.c:
211 if (!(efer & EFER_LMA)) in svm_set_efer()
1678 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1680 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1684 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1686 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
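The svm_set_cr0() hits (lines 1678–1686) mirror hardware behavior: when the guest turns paging on with EFER.LME set, LMA becomes 1; when paging goes off, LMA is cleared again. A hedged sketch of that transition logic (the function name is illustrative):

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_PG (1ULL << 31)
#define EFER_LME   (1ULL << 8)
#define EFER_LMA   (1ULL << 10)

static void update_lma_on_cr0_write(uint64_t *efer, uint64_t old_cr0, uint64_t new_cr0)
{
	bool paging_on  = !(old_cr0 & X86_CR0_PG) && (new_cr0 & X86_CR0_PG);
	bool paging_off = (old_cr0 & X86_CR0_PG) && !(new_cr0 & X86_CR0_PG);

	if (!(*efer & EFER_LME))
		return;			/* LMA only moves while LME is set */
	if (paging_on)
		*efer |= EFER_LMA;	/* long mode becomes active */
	else if (paging_off)
		*efer &= ~EFER_LMA;	/* long mode deactivates */
}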
tools/testing/selftests/kvm/lib/x86/processor.c:
543 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_init_sregs()
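vcpu_init_sregs() has to keep EFER, CR0 and CR4 mutually consistent, or KVM_SET_SREGS will fail the kvm_is_valid_sregs() checks seen in the x86.c hits (LMA requires CR4.PAE, and LMA must agree with CS.L). A hedged userspace sketch against the real struct kvm_sregs from <linux/kvm.h>; the helper name and the CR/EFER constants are spelled out locally:

#include <stdint.h>
#include <linux/kvm.h>

#define X86_CR0_PE  (1ULL << 0)
#define X86_CR0_PG  (1ULL << 31)
#define X86_CR4_PAE (1ULL << 5)
#define EFER_LME    (1ULL << 8)
#define EFER_LMA    (1ULL << 10)
#define EFER_NX     (1ULL << 11)

static void init_long_mode_sregs(struct kvm_sregs *sregs)
{
	sregs->cr4  |= X86_CR4_PAE;		/* long mode requires PAE */
	sregs->efer |= EFER_LME | EFER_LMA | EFER_NX;
	sregs->cr0  |= X86_CR0_PE | X86_CR0_PG;	/* paging on => LMA valid */
	sregs->cs.l  = 1;			/* 64-bit code segment */
	sregs->cs.db = 0;
}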
arch/x86/kvm/mmu/mmu.c:
215 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);
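BUILD_MMU_ROLE_REGS_ACCESSOR() stamps out one tiny predicate per (register, bit) pair; the EFER_LMA instance is what the MMU uses to fold long-mode state into its role computation. A simplified sketch of the macro pattern, with the regs struct reduced to EFER only:

#include <stdint.h>

#define EFER_LMA (1ULL << 10)

struct mmu_role_regs {
	uint64_t efer;
};

/* Generates ____is_<reg>_<name>(), a 0/1 test of one bit in one
 * captured register value. */
#define BUILD_MMU_ROLE_REGS_ACCESSOR(reg, name, flag)			\
static inline int ____is_##reg##_##name(const struct mmu_role_regs *regs) \
{									\
	return !!(regs->reg & (flag));					\
}

BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA)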