
Searched refs:cr4 (Results 1 – 25 of 54) sorted by relevance


/linux/arch/x86/kvm/
smm.c 30 CHECK_SMRAM32_OFFSET(cr4, 0xFF14); in check_smram_offsets()
96 CHECK_SMRAM64_OFFSET(cr4, 0xFF48); in check_smram_offsets()
220 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_32()
245 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_64()
420 u64 cr0, u64 cr3, u64 cr4) in rsm_enter_protected_mode() argument
427 if (cr4 & X86_CR4_PCIDE) { in rsm_enter_protected_mode()
441 bad = kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
449 if (cr4 & X86_CR4_PCIDE) { in rsm_enter_protected_mode()
450 bad = kvm_set_cr4(vcpu, cr4); in rsm_enter_protected_mode()
504 smstate->cr3, smstate->cr4); in rsm_load_state_32()
[all …]
smm.h 31 u32 cr4; member
127 u64 cr4; member
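The smm.c hits around rsm_enter_protected_mode() encode an ordering constraint: on RSM, CR4.PCIDE may only be turned on after CR3 has been loaded with its PCID bits clear, so CR4 is written twice. A minimal standalone sketch of that ordering, with set_cr3()/set_cr4() as logging stand-ins rather than the KVM setters:

#include <stdint.h>
#include <stdio.h>

#define X86_CR4_PCIDE (1ULL << 17)	/* Process-Context Identifiers enable */

/* Logging stand-ins for kvm_set_cr3()/kvm_set_cr4(); they always succeed here. */
static int set_cr3(uint64_t cr3) { printf("CR3 <- %#llx\n", (unsigned long long)cr3); return 0; }
static int set_cr4(uint64_t cr4) { printf("CR4 <- %#llx\n", (unsigned long long)cr4); return 0; }

/*
 * Restore CR3/CR4 in an RSM-compatible order: PCIDE can only be set while
 * CR3[11:0] is zero, so load CR3 without the PCID, write CR4 with PCIDE
 * masked out, then set PCIDE, and finally put the PCID back into CR3.
 */
static int restore_cr3_cr4(uint64_t cr3, uint64_t cr4)
{
	uint64_t pcid = 0;

	if (cr4 & X86_CR4_PCIDE) {
		pcid = cr3 & 0xfff;
		cr3 &= ~0xfffULL;
	}

	if (set_cr3(cr3))
		return -1;
	if (set_cr4(cr4 & ~X86_CR4_PCIDE))
		return -1;
	if (cr4 & X86_CR4_PCIDE) {
		if (set_cr4(cr4))
			return -1;
		if (pcid && set_cr3(cr3 | pcid))
			return -1;
	}
	return 0;
}

int main(void)
{
	return restore_cr3_cr4(0x1000 | 0x5, X86_CR4_PCIDE);
}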
/linux/arch/x86/kernel/
sev_verify_cbit.S 35 movq %cr4, %rsi
40 movq %rdx, %cr4
71 movq %rsi, %cr4
process_32.c 62 unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L; in __show_regs() local
83 cr4 = __read_cr4(); in __show_regs()
85 log_lvl, cr0, cr2, cr3, cr4); in __show_regs()
head_64.S 222 movq %cr4, %rcx
227 movq %rcx, %cr4
233 movq %rcx, %cr4
/linux/arch/x86/kernel/cpu/mtrr/
cyrix.c 135 static u32 cr4, ccr3; variable
143 cr4 = __read_cr4(); in prepare_set()
144 __write_cr4(cr4 & ~X86_CR4_PGE); in prepare_set()
176 __write_cr4(cr4); in post_set()
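The cyrix.c hits save CR4, clear X86_CR4_PGE in prepare_set() and restore the old value in post_set(); dropping PGE flushes global TLB entries before the range registers are rewritten. A ring-0-only sketch of the same pattern, with read_cr4()/write_cr4() as stand-ins for the kernel's __read_cr4()/__write_cr4():

#define X86_CR4_PGE (1UL << 7)	/* Page Global Enable */

/* Raw CR4 accessors; MOV to/from CR4 faults outside CPL 0. */
static unsigned long read_cr4(void)
{
	unsigned long cr4;

	__asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4));
	return cr4;
}

static void write_cr4(unsigned long cr4)
{
	__asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
}

static unsigned long saved_cr4;

/* Clearing PGE flushes global TLB entries; the MTRR/ARR rewrite goes in between. */
static void prepare_set(void)
{
	saved_cr4 = read_cr4();
	write_cr4(saved_cr4 & ~X86_CR4_PGE);
}

static void post_set(void)
{
	write_cr4(saved_cr4);	/* turn global pages back on */
}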
/linux/arch/x86/power/
hibernate_asm_64.S 37 movq %rdx, %cr4; # turn off PGE
40 movq %rax, %cr4; # turn PGE back on
129 movq %rcx, %cr4; # turn off PGE
132 movq %rbx, %cr4; # turn PGE back on
/linux/arch/x86/mm/
mem_encrypt_boot.S 103 mov %cr4, %rdx
105 mov %rdx, %cr4
107 mov %rdx, %cr4
/linux/arch/x86/platform/pvh/
head.S 95 mov %cr4, %eax
97 mov %eax, %cr4
218 mov %cr4, %eax
220 mov %eax, %cr4
/linux/include/xen/interface/hvm/
hvm_vcpu.h 25 uint32_t cr4; member
88 uint64_t cr4; member
/linux/tools/testing/selftests/kvm/lib/x86/
vmx.c 119 unsigned long cr4; in prepare_for_vmx_operation() local
131 __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory"); in prepare_for_vmx_operation()
132 cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1); in prepare_for_vmx_operation()
133 cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0); in prepare_for_vmx_operation()
135 cr4 |= X86_CR4_VMXE; in prepare_for_vmx_operation()
136 __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory"); in prepare_for_vmx_operation()
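prepare_for_vmx_operation() in the selftest applies the architectural VMXON precondition: every bit reported as fixed-1 in IA32_VMX_CR4_FIXED0 must be set in CR4, only bits allowed by IA32_VMX_CR4_FIXED1 may be set, and CR4.VMXE must be on. A standalone CPL-0 sketch of the same adjustment (rdmsr() is a local helper here, not a library call):

#include <stdint.h>

#define MSR_IA32_VMX_CR4_FIXED0	0x00000488
#define MSR_IA32_VMX_CR4_FIXED1	0x00000489
#define X86_CR4_VMXE		(1UL << 13)

static uint64_t rdmsr(uint32_t msr)
{
	uint32_t lo, hi;

	__asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
	return ((uint64_t)hi << 32) | lo;
}

static void prepare_cr4_for_vmxon(void)
{
	unsigned long cr4;

	__asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");
	cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);	/* drop bits that must be 0 */
	cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);	/* force bits that must be 1 */
	cr4 |= X86_CR4_VMXE;			/* enable VMX operation */
	__asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
}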
/linux/arch/x86/platform/olpc/
xo1-wakeup.S 30 movl %eax, %cr4
64 movl %cr4, %edx
/linux/arch/x86/hyperv/
hv_crash.c 61 ulong cr4; member
155 asm volatile("movq %0, %%cr4" : : "r"(ctxt->cr4)); in hv_crash_c_entry()
156 asm volatile("movq %0, %%cr2" : : "r"(ctxt->cr4)); in hv_crash_c_entry()
201 ctxt->cr4 = native_read_cr4(); in hv_hvcrash_ctxt_save()
hv_trampoline.S 49 movl %ecx, %cr4
/linux/arch/x86/kernel/cpu/
common.c 480 unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4); in cr4_update_irqsoff() local
484 newval = (cr4 & ~clear) | set; in cr4_update_irqsoff()
485 if (newval != cr4) { in cr4_update_irqsoff()
486 this_cpu_write(cpu_tlbstate.cr4, newval); in cr4_update_irqsoff()
495 return this_cpu_read(cpu_tlbstate.cr4); in cr4_read_shadow()
501 unsigned long cr4 = __read_cr4(); in cr4_init() local
504 cr4 |= X86_CR4_PCIDE; in cr4_init()
506 cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits; in cr4_init()
508 __write_cr4(cr4); in cr4_init()
511 this_cpu_write(cpu_tlbstate.cr4, cr4); in cr4_init()
[all …]
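common.c keeps a per-CPU shadow of CR4 in cpu_tlbstate: cr4_read_shadow() returns the cached value instead of reading the register, and cr4_update_irqsoff() only touches the real CR4 when a set/clear request actually changes a bit. A simplified single-CPU sketch, with cr4_shadow as a plain global standing in for the per-CPU field:

static unsigned long cr4_shadow;	/* stand-in for this CPU's cpu_tlbstate.cr4 */

static void hw_write_cr4(unsigned long cr4)
{
	__asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
}

static unsigned long cr4_read_shadow_sketch(void)
{
	return cr4_shadow;		/* no CR4 register read needed */
}

static void cr4_update_sketch(unsigned long set, unsigned long clear)
{
	unsigned long cr4 = cr4_shadow;
	unsigned long newval = (cr4 & ~clear) | set;

	if (newval != cr4) {		/* skip the register write when nothing changes */
		cr4_shadow = newval;
		hw_write_cr4(newval);
	}
}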
/linux/arch/powerpc/kernel/
cpu_setup_6xx.S 366 cmplwi cr4,r3,0x8002 /* 7457 */
374 cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
437 cmplwi cr4,r3,0x8002 /* 7457 */
445 cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
/linux/scripts/gdb/linux/
pgtable.py 209 cr4 = gdb.parse_and_eval('$cr4')
210 page_levels = 5 if cr4 & (1 << 12) else 4
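The gdb helper decides the number of paging levels from CR4 bit 12, which is CR4.LA57 (57-bit linear addresses). The same test written out in C as a small illustration:

#include <stdio.h>

#define X86_CR4_LA57 (1UL << 12)	/* 5-level paging enabled */

static int page_table_levels(unsigned long cr4)
{
	return (cr4 & X86_CR4_LA57) ? 5 : 4;
}

int main(void)
{
	printf("%d\n", page_table_levels(X86_CR4_LA57));	/* 5 */
	printf("%d\n", page_table_levels(0));			/* 4 */
	return 0;
}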
/linux/arch/x86/realmode/rm/
trampoline_64.S 157 movl %eax, %cr4 # Enable PAE mode
226 movq %cr4, %rax
wakeup_asm.S 117 movl %eax, %cr4
/linux/arch/x86/include/asm/
realmode.h 53 u32 cr4;
/linux/arch/powerpc/boot/
ppc_asm.h 17 #define cr4 4 macro
/linux/arch/x86/kvm/vmx/
x86_ops.h 69 void vmx_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4);
70 bool vmx_is_valid_cr4(struct kvm_vcpu *vcpu, unsigned long cr4);
main.c 400 static bool vt_is_valid_cr4(struct kvm_vcpu *vcpu, unsigned long cr4) in vt_is_valid_cr4() argument
405 return vmx_is_valid_cr4(vcpu, cr4); in vt_is_valid_cr4()
408 static void vt_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4) in vt_set_cr4() argument
413 vmx_set_cr4(vcpu, cr4); in vt_set_cr4()
vmcs.h 36 unsigned long cr4; /* May not match real cr4 */ member
/linux/arch/x86/realmode/
init.c 155 trampoline_cr4_features = &trampoline_header->cr4; in setup_real_mode()
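init.c points trampoline_cr4_features at the cr4 field of the real-mode trampoline header (the u32 cr4 visible in realmode.h), so the CR4 value the trampoline loads for a starting CPU can track the boot CPU's. A hedged sketch of the idea, using a stand-in struct rather than the kernel's trampoline_header layout:

#include <stdint.h>

/* Stand-in for the real-mode trampoline header; only the cr4 field matters here. */
struct trampoline_header_sketch {
	uint32_t cr4;				/* CR4 the trampoline installs before enabling paging */
};

static struct trampoline_header_sketch th;
static uint32_t *trampoline_cr4_features;

static void setup_real_mode_sketch(uint32_t boot_cr4)
{
	trampoline_cr4_features = &th.cr4;	/* remember where to publish CR4 */
	*trampoline_cr4_features = boot_cr4;	/* assumed: seeded from the boot CPU's value */
}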
