Searched refs:is_smm (Results 1 – 9 of 9) sorted by relevance
152 static inline bool is_smm(struct kvm_vcpu *vcpu) in is_smm() function
163 static inline bool is_smm(struct kvm_vcpu *vcpu) { return false; } in is_smm() function
2164 if (!is_smm(vcpu) && cpuid_fault_enabled(vcpu) && in kvm_emulate_cpuid()
5626 events->smi.smm = is_smm(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
8866 return is_smm(emul_to_vcpu(ctxt)); in emulator_is_smm()
8964 .is_smm = emulator_is_smm,
10587 if (is_smm(vcpu)) in post_kvm_run_save()
13049 WARN_ON_ONCE(is_guest_mode(vcpu) || is_smm(vcpu)); in kvm_vcpu_reset()
2317 if (!ctxt->ops->is_smm(ctxt)) in em_rsm()
3599 if (!ctxt->ops->is_smm(ctxt) && in em_cpuid()
1092 if (is_smm(vcpu)) { in nested_svm_vmrun()
1923 if (is_smm(vcpu) && (kvm_state->flags & KVM_STATE_NESTED_GUEST_MODE)) in svm_set_nested_state()
245 if (!is_smm(vcpu)) in svm_set_efer()
2148 if (is_smm(vcpu)) in shutdown_interception()
4920 return is_smm(vcpu); in svm_smi_blocked()
4396 if (vcpu->arch.smi_pending && !is_smm(vcpu)) { in vmx_check_nested_events()
6936 if (is_smm(vcpu) ? in vmx_set_nested_state()
3528 if ((cr4 & X86_CR4_VMXE) && is_smm(vcpu)) in vmx_is_valid_cr4()
8421 return !is_smm(vcpu); in vmx_smi_allowed()
5725 role.base.smm = is_smm(vcpu); in kvm_calc_cpu_role()
5930 WARN_ON_ONCE(is_smm(vcpu)); in kvm_calc_shadow_ept_root_page_role()