Searched refs:X86_CR4_SMEP (Results 1 – 9 of 9) sorted by relevance
563 if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP) { in lkdtm_UNSET_SMEP()
567 cr4 &= ~(X86_CR4_SMEP); in lkdtm_UNSET_SMEP()
573 cr4 |= X86_CR4_SMEP; in lkdtm_UNSET_SMEP()
606 if (native_read_cr4() & X86_CR4_SMEP) { in lkdtm_UNSET_SMEP()
610 cr4 |= X86_CR4_SMEP; in lkdtm_UNSET_SMEP()
132 #define X86_CR4_SMEP _BITUL(X86_CR4_SMEP_BIT) macro
364 cr4_set_bits(X86_CR4_SMEP); in setup_smep()
403 static const unsigned long cr4_pinned_mask = X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP |
137 | X86_CR4_OSXSAVE | X86_CR4_SMEP | X86_CR4_FSGSBASE \
1928 cr4 &= ~(X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_PKE); in svm_set_cr4()
4950 smep = kvm_is_cr4_bit_set(vcpu, X86_CR4_SMEP); in svm_check_emulate_instruction()
3515 hw_cr4 &= ~(X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_PKE); in vmx_set_cr4()
7749 cr4_fixed1_update(X86_CR4_SMEP, ebx, feature_bit(SMEP)); in nested_vmx_cr_fixed1_bits_update()
1326 ((cr4 & X86_CR4_SMEP) && !(old_cr4 & X86_CR4_SMEP))) in kvm_post_set_cr4()
209 BUILD_MMU_ROLE_REGS_ACCESSOR(cr4, smep, X86_CR4_SMEP);