Searched refs:vmx (Results 1 – 25 of 40) sorted by relevance

/linux/tools/testing/selftests/kvm/lib/x86_64/
vmx.c
78 struct vmx_pages *vmx = addr_gva2hva(vm, vmx_gva); in vcpu_alloc_vmx() local
81 vmx->vmxon = (void *)vm_vaddr_alloc_page(vm); in vcpu_alloc_vmx()
82 vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
83 vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
86 vmx->vmcs = (void *)vm_vaddr_alloc_page(vm); in vcpu_alloc_vmx()
87 vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs); in vcpu_alloc_vmx()
88 vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs); in vcpu_alloc_vmx()
91 vmx->msr = (void *)vm_vaddr_alloc_page(vm); in vcpu_alloc_vmx()
92 vmx->msr_hva = addr_gva2hva(vm, (uintptr_t)vmx->msr); in vcpu_alloc_vmx()
93 vmx->msr_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->msr); in vcpu_alloc_vmx()
[all …]
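
The vcpu_alloc_vmx() hits above are the library side of every nested-VMX selftest in these results: the helper allocates guest pages for the VMXON region, the VMCS and an MSR area, and records each page's GVA/HVA/GPA in a struct vmx_pages whose guest address it returns through vmx_gva. A minimal host-side sketch of that usage, assuming the usual selftest helpers (vm_create_with_one_vcpu(), vcpu_args_set(), vcpu_run()) and a deliberately trivial l1_guest_code(); the real tests further down do more work in L1:

#include "test_util.h"
#include "kvm_util.h"
#include "processor.h"
#include "vmx.h"

/* Trivial L1 guest: just check that vcpu_alloc_vmx() filled in the VMCS GPA. */
static void l1_guest_code(struct vmx_pages *vmx)
{
        GUEST_ASSERT(vmx->vmcs_gpa);
        GUEST_DONE();
}

int main(void)
{
        vm_vaddr_t vmx_pages_gva;
        struct kvm_vcpu *vcpu;
        struct kvm_vm *vm;

        TEST_REQUIRE(kvm_cpu_has(X86_FEATURE_VMX));

        vm = vm_create_with_one_vcpu(&vcpu, l1_guest_code);

        /* Allocate the vmxon/vmcs/msr pages seen in vcpu_alloc_vmx() above. */
        vcpu_alloc_vmx(vm, &vmx_pages_gva);

        /* Hand the guest-virtual address of struct vmx_pages to L1. */
        vcpu_args_set(vcpu, 1, vmx_pages_gva);
        vcpu_run(vcpu);

        kvm_vm_free(vm);
        return 0;
}
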
memstress.c
32 static void memstress_l1_guest_code(struct vmx_pages *vmx, uint64_t vcpu_id) in memstress_l1_guest_code() argument
38 GUEST_ASSERT(vmx->vmcs_gpa); in memstress_l1_guest_code()
39 GUEST_ASSERT(prepare_for_vmx_operation(vmx)); in memstress_l1_guest_code()
40 GUEST_ASSERT(load_vmcs(vmx)); in memstress_l1_guest_code()
45 prepare_vmcs(vmx, memstress_l2_guest_entry, rsp); in memstress_l1_guest_code()
62 void memstress_setup_ept(struct vmx_pages *vmx, struct kvm_vm *vm) in memstress_setup_ept() argument
66 prepare_eptp(vmx, vm, 0); in memstress_setup_ept()
73 nested_identity_map_1g(vmx, vm, 0, 0x100000000ULL); in memstress_setup_ept()
77 nested_identity_map_1g(vmx, vm, start, end - start); in memstress_setup_ept()
82 struct vmx_pages *vmx, *vmx0 = NULL; in memstress_setup_nested() local
[all …]
/linux/arch/x86/kvm/vmx/
nested.c
191 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_fail() local
197 if (vmx->nested.current_vmptr == INVALID_GPA && in nested_vmx_fail()
198 !nested_vmx_is_evmptr12_valid(vmx)) in nested_vmx_fail()
221 static void vmx_disable_shadow_vmcs(struct vcpu_vmx *vmx) in vmx_disable_shadow_vmcs() argument
223 secondary_exec_controls_clearbit(vmx, SECONDARY_EXEC_SHADOW_VMCS); in vmx_disable_shadow_vmcs()
225 vmx->nested.need_vmcs12_to_shadow_sync = false; in vmx_disable_shadow_vmcs()
232 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_evmcs() local
234 if (nested_vmx_is_evmptr12_valid(vmx)) { in nested_release_evmcs()
235 kvm_vcpu_unmap(vcpu, &vmx->nested.hv_evmcs_map, true); in nested_release_evmcs()
236 vmx->nested.hv_evmcs = NULL; in nested_release_evmcs()
[all …]
hyperv.h
25 static inline bool nested_vmx_is_evmptr12_valid(struct vcpu_vmx *vmx) in nested_vmx_is_evmptr12_valid() argument
27 return evmptr_is_valid(vmx->nested.hv_evmcs_vmptr); in nested_vmx_is_evmptr12_valid()
35 static inline bool nested_vmx_is_evmptr12_set(struct vcpu_vmx *vmx) in nested_vmx_is_evmptr12_set() argument
37 return evmptr_is_set(vmx->nested.hv_evmcs_vmptr); in nested_vmx_is_evmptr12_set()
40 static inline struct hv_enlightened_vmcs *nested_vmx_evmcs(struct vcpu_vmx *vmx) in nested_vmx_evmcs() argument
42 return vmx->nested.hv_evmcs; in nested_vmx_evmcs()
69 static inline bool nested_vmx_is_evmptr12_valid(struct vcpu_vmx *vmx) in nested_vmx_is_evmptr12_valid() argument
79 static inline bool nested_vmx_is_evmptr12_set(struct vcpu_vmx *vmx) in nested_vmx_is_evmptr12_set() argument
84 static inline struct hv_enlightened_vmcs *nested_vmx_evmcs(struct vcpu_vmx *vmx) in nested_vmx_evmcs() argument
vmx.h
383 void vmx_set_constant_host_state(struct vcpu_vmx *vmx);
396 void set_cr4_guest_host_mask(struct vcpu_vmx *vmx);
410 struct vmx_uret_msr *vmx_find_uret_msr(struct vcpu_vmx *vmx, u32 msr);
412 void vmx_update_host_rsp(struct vcpu_vmx *vmx, unsigned long host_rsp);
413 void vmx_spec_ctrl_restore_host(struct vcpu_vmx *vmx, unsigned int flags);
414 unsigned int __vmx_vcpu_run_flags(struct vcpu_vmx *vmx);
415 bool __vmx_vcpu_run(struct vcpu_vmx *vmx, unsigned long *regs,
588 static inline void lname##_controls_set(struct vcpu_vmx *vmx, u##bits val) \
590 if (vmx->loaded_vmcs->controls_shadow.lname != val) { \
592 vmx->loaded_vmcs->controls_shadow.lname = val; \
[all …]
posted_intr.c
56 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_pi_load() local
93 list_del(&vmx->pi_wakeup_list); in vmx_vcpu_pi_load()
149 struct vcpu_vmx *vmx = to_vmx(vcpu); in pi_enable_wakeup_handler() local
156 list_add_tail(&vmx->pi_wakeup_list, in pi_enable_wakeup_handler()
223 struct vcpu_vmx *vmx; in pi_wakeup_handler() local
226 list_for_each_entry(vmx, wakeup_list, pi_wakeup_list) { in pi_wakeup_handler()
228 if (pi_test_on(&vmx->pi_desc)) in pi_wakeup_handler()
229 kvm_vcpu_wake_up(&vmx->vcpu); in pi_wakeup_handler()
nested.h
63 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_has_valid_vmcs12() local
66 return vmx->nested.current_vmptr != -1ull || in vmx_has_valid_vmcs12()
67 nested_vmx_is_evmptr12_set(vmx); in vmx_has_valid_vmcs12()
72 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_vpid02() local
74 return vmx->nested.vpid02 ? vmx->nested.vpid02 : vmx->vpid; in nested_get_vpid02()
hyperv.c
201 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_enable_evmcs() local
203 vmx->nested.enlightened_vmcs_enabled = true; in nested_enable_evmcs()
214 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_evmcs_l2_tlb_flush_enabled() local
215 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in nested_evmcs_l2_tlb_flush_enabled()
sgx.c
300 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_encls_einit() local
332 vmx->msr_ia32_sgxlepubkeyhash, &trapnr); in handle_encls_einit()
430 struct vcpu_vmx *vmx = to_vmx(vcpu); in vcpu_setup_sgx_lepubkeyhash() local
432 memcpy(vmx->msr_ia32_sgxlepubkeyhash, sgx_pubkey_hash, in vcpu_setup_sgx_lepubkeyhash()
/linux/tools/testing/selftests/kvm/x86_64/
vmx_set_nested_state_test.c
82 state->hdr.vmx.vmxon_pa = 0x1000; in set_default_vmx_state()
83 state->hdr.vmx.vmcs12_pa = 0x2000; in set_default_vmx_state()
84 state->hdr.vmx.smm.flags = 0; in set_default_vmx_state()
113 state->hdr.vmx.vmxon_pa = -1ull; in test_vmx_nested_state()
116 state->hdr.vmx.vmcs12_pa = -1ull; in test_vmx_nested_state()
133 state->hdr.vmx.vmxon_pa = -1ull; in test_vmx_nested_state()
134 state->hdr.vmx.vmcs12_pa = -1ull; in test_vmx_nested_state()
149 state->hdr.vmx.smm.flags = 1; in test_vmx_nested_state()
154 state->hdr.vmx.flags = ~0; in test_vmx_nested_state()
159 state->hdr.vmx.vmxon_pa = -1ull; in test_vmx_nested_state()
[all …]
vmx_dirty_log_test.c
56 void l1_guest_code(struct vmx_pages *vmx) in l1_guest_code() argument
62 GUEST_ASSERT(vmx->vmcs_gpa); in l1_guest_code()
63 GUEST_ASSERT(prepare_for_vmx_operation(vmx)); in l1_guest_code()
64 GUEST_ASSERT(load_vmcs(vmx)); in l1_guest_code()
66 if (vmx->eptp_gpa) in l1_guest_code()
71 prepare_vmcs(vmx, l2_rip, &l2_guest_stack[L2_GUEST_STACK_SIZE]); in l1_guest_code()
83 struct vmx_pages *vmx; in test_vmx_dirty_log() local
96 vmx = vcpu_alloc_vmx(vm, &vmx_pages_gva); in test_vmx_dirty_log()
123 prepare_eptp(vmx, vm, 0); in test_vmx_dirty_log()
124 nested_map_memslot(vmx, vm, 0); in test_vmx_dirty_log()
[all …]
triple_fault_event_test.c
27 void l1_guest_code_vmx(struct vmx_pages *vmx) in l1_guest_code_vmx() argument
30 GUEST_ASSERT(vmx->vmcs_gpa); in l1_guest_code_vmx()
31 GUEST_ASSERT(prepare_for_vmx_operation(vmx)); in l1_guest_code_vmx()
32 GUEST_ASSERT(load_vmcs(vmx)); in l1_guest_code_vmx()
34 prepare_vmcs(vmx, l2_guest_code, in l1_guest_code_vmx()
vmx_apic_access_test.c
77 struct vmx_pages *vmx; in main() local
89 vmx = vcpu_alloc_vmx(vm, &vmx_pages_gva); in main()
90 prepare_virtualize_apic_accesses(vmx, vm); in main()
nested_exceptions_test.c
127 static void l1_vmx_code(struct vmx_pages *vmx) in l1_vmx_code() argument
131 GUEST_ASSERT_EQ(prepare_for_vmx_operation(vmx), true); in l1_vmx_code()
133 GUEST_ASSERT_EQ(load_vmcs(vmx), true); in l1_vmx_code()
135 prepare_vmcs(vmx, NULL, &l2_guest_stack[L2_GUEST_STACK_SIZE]); in l1_vmx_code()
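
Taken together, the L1 guest functions in vmx_dirty_log_test.c, triple_fault_event_test.c and nested_exceptions_test.c above all follow one pattern from the selftest vmx library: assert the VMCS GPA, prepare_for_vmx_operation(), load_vmcs(), prepare_vmcs() with an L2 entry point and stack, then VMLAUNCH. A hedged sketch of that shared shape (the L2 body, stack size and VMCALL exit check are illustrative, not lifted from any single test):

#include "kvm_util.h"
#include "processor.h"
#include "vmx.h"

#define L2_GUEST_STACK_SIZE 64

static void l2_guest_code(void)
{
        /* Exit straight back to L1; VMCALL from L2 causes a VM-exit. */
        __asm__ __volatile__("vmcall");
}

static void l1_guest_code(struct vmx_pages *vmx)
{
        unsigned long l2_guest_stack[L2_GUEST_STACK_SIZE];

        GUEST_ASSERT(vmx->vmcs_gpa);

        /* CR4.VMXE, IA32_FEATURE_CONTROL and VMXON on the vmxon page. */
        GUEST_ASSERT(prepare_for_vmx_operation(vmx));

        /* VMCLEAR + VMPTRLD the vmcs page allocated by vcpu_alloc_vmx(). */
        GUEST_ASSERT(load_vmcs(vmx));

        /* Seed the current VMCS with host state plus L2's RIP and RSP. */
        prepare_vmcs(vmx, l2_guest_code, &l2_guest_stack[L2_GUEST_STACK_SIZE]);

        GUEST_ASSERT(!vmlaunch());
        GUEST_ASSERT(vmreadz(VM_EXIT_REASON) == EXIT_REASON_VMCALL);
        GUEST_DONE();
}
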
/linux/tools/testing/selftests/powerpc/ptrace/
ptrace-vsx.h
31 int validate_vmx(unsigned long vmx[][2], unsigned long *load) in validate_vmx()
37 if ((vmx[i][0] != load[64 + 2 * i]) || in validate_vmx()
38 (vmx[i][1] != load[65 + 2 * i])) { in validate_vmx()
40 i, vmx[i][0], 64 + 2 * i, in validate_vmx()
43 i, vmx[i][1], 65 + 2 * i, in validate_vmx()
51 if ((vmx[i][0] != load[65 + 2 * i]) || in validate_vmx()
52 (vmx[i][1] != load[64 + 2 * i])) { in validate_vmx()
54 i, vmx[i][0], 65 + 2 * i, in validate_vmx()
57 i, vmx[i][1], 64 + 2 * i, in validate_vmx()
109 unsigned long vmx[][2]) in load_vsx_vmx()
[all …]
ptrace-tm-vsx.c
87 unsigned long vmx[VMX_MAX + 2][2]; in trace_tm_vsx() local
92 FAIL_IF(show_vmx(child, vmx)); in trace_tm_vsx()
93 FAIL_IF(validate_vmx(vmx, fp_load)); in trace_tm_vsx()
96 FAIL_IF(show_vmx_ckpt(child, vmx)); in trace_tm_vsx()
97 FAIL_IF(validate_vmx(vmx, fp_load_ckpt)); in trace_tm_vsx()
99 memset(vmx, 0, sizeof(vmx)); in trace_tm_vsx()
101 load_vsx_vmx(fp_load_ckpt_new, vsx, vmx); in trace_tm_vsx()
104 FAIL_IF(write_vmx_ckpt(child, vmx)); in trace_tm_vsx()
ptrace-tm-spd-vsx.c
99 unsigned long vmx[VMX_MAX + 2][2]; in trace_tm_spd_vsx() local
104 FAIL_IF(show_vmx(child, vmx)); in trace_tm_spd_vsx()
105 FAIL_IF(validate_vmx(vmx, fp_load)); in trace_tm_spd_vsx()
108 FAIL_IF(show_vmx_ckpt(child, vmx)); in trace_tm_spd_vsx()
109 FAIL_IF(validate_vmx(vmx, fp_load_ckpt)); in trace_tm_spd_vsx()
112 memset(vmx, 0, sizeof(vmx)); in trace_tm_spd_vsx()
114 load_vsx_vmx(fp_load_ckpt_new, vsx, vmx); in trace_tm_spd_vsx()
117 FAIL_IF(write_vmx_ckpt(child, vmx)); in trace_tm_spd_vsx()
ptrace-vsx.c
40 unsigned long vmx[VMX_MAX + 2][2]; in trace_vsx() local
45 FAIL_IF(show_vmx(child, vmx)); in trace_vsx()
46 FAIL_IF(validate_vmx(vmx, fp_load)); in trace_vsx()
49 memset(vmx, 0, sizeof(vmx)); in trace_vsx()
50 load_vsx_vmx(fp_load_new, vsx, vmx); in trace_vsx()
53 FAIL_IF(write_vmx(child, vmx)); in trace_vsx()
ptrace.h
597 int show_vmx(pid_t child, unsigned long vmx[][2]) in show_vmx()
601 ret = ptrace(PTRACE_GETVRREGS, child, 0, vmx); in show_vmx()
609 int show_vmx_ckpt(pid_t child, unsigned long vmx[][2]) in show_vmx_ckpt()
622 memcpy(vmx, regs, sizeof(regs)); in show_vmx_ckpt()
627 int write_vmx(pid_t child, unsigned long vmx[][2]) in write_vmx()
631 ret = ptrace(PTRACE_SETVRREGS, child, 0, vmx); in write_vmx()
639 int write_vmx_ckpt(pid_t child, unsigned long vmx[][2]) in write_vmx_ckpt()
645 memcpy(regs, vmx, sizeof(regs)); in write_vmx_ckpt()
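
The ptrace.h fragments above read and write the tracee's VMX (AltiVec) state with the powerpc-specific PTRACE_GETVRREGS/PTRACE_SETVRREGS requests, viewing each 128-bit vector register as a pair of unsigned longs. A standalone sketch of those two accessors, assuming a 64-bit powerpc tracer whose <sys/ptrace.h> exposes the VR requests, and assuming VMX_MAX = 32 (the +2 rows in the vmx[VMX_MAX + 2][2] arrays above would then hold VSCR and VRSAVE):

#include <stdio.h>
#include <sys/ptrace.h>
#include <sys/types.h>

#define VMX_MAX 32 /* assumed: 32 VRs; callers add 2 rows for VSCR/VRSAVE */

/* Fetch the stopped child's vector registers into vmx[][]. */
int show_vmx(pid_t child, unsigned long vmx[][2])
{
        if (ptrace(PTRACE_GETVRREGS, child, 0, vmx) == -1) {
                perror("PTRACE_GETVRREGS");
                return -1;
        }
        return 0;
}

/* Write vmx[][] back into the stopped child. */
int write_vmx(pid_t child, unsigned long vmx[][2])
{
        if (ptrace(PTRACE_SETVRREGS, child, 0, vmx) == -1) {
                perror("PTRACE_SETVRREGS");
                return -1;
        }
        return 0;
}
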
/linux/tools/testing/selftests/kvm/include/x86_64/
vmx.h
558 bool prepare_for_vmx_operation(struct vmx_pages *vmx);
559 void prepare_vmcs(struct vmx_pages *vmx, void *guest_rip, void *guest_rsp);
560 bool load_vmcs(struct vmx_pages *vmx);
564 void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm,
566 void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm,
568 void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm,
570 void nested_identity_map_1g(struct vmx_pages *vmx, struct kvm_vm *vm,
573 void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm,
575 void prepare_virtualize_apic_accesses(struct vmx_pages *vmx, struct kvm_vm *vm);
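
These declarations are the selftest library's whole nested-paging surface: prepare_eptp() allocates L1's EPT root and records it in the vmx_pages (the eptp_gpa checked in l1_guest_code() above), while nested_identity_map_1g() and nested_map_memslot() populate that EPT. A hedged host-side sketch of how the dirty-log and memstress tests combine them; the memslot index 0 and the 4 GiB identity-map range come from the hits above, while the helper name setup_nested_ept() is made up here:

#include "kvm_util.h"
#include "vmx.h"

/* Build an EPT hierarchy for L1 so that L2 sees low memory identity-mapped. */
static void setup_nested_ept(struct vmx_pages *vmx, struct kvm_vm *vm)
{
        /* Allocate the EPT root page; its GPA lands in vmx->eptp_gpa. */
        prepare_eptp(vmx, vm, 0);

        /* Identity-map the first 4 GiB with 1G EPT mappings. */
        nested_identity_map_1g(vmx, vm, 0, 0x100000000ULL);

        /* Also mirror memslot 0 into the nested page tables. */
        nested_map_memslot(vmx, vm, 0);
}

In the tests above this kind of setup only runs after checking that nested EPT is actually supported, so a capability guard belongs before it in real code.
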
/linux/tools/testing/selftests/powerpc/tm/
Makefile
3 tm-signal-context-chk-vmx tm-signal-context-chk-vsx
6 tm-vmxcopy tm-fork tm-tar tm-tmspr tm-vmx-unavail tm-unavailable tm-trap \
23 $(OUTPUT)/tm-vmx-unavail: CFLAGS += -pthread -m64
.gitignore
13 tm-signal-context-chk-vmx
18 tm-vmx-unavail
/linux/arch/powerpc/crypto/
Makefile
19 obj-$(CONFIG_CRYPTO_DEV_VMX_ENCRYPT) += vmx-crypto.o
32 vmx-crypto-objs := vmx.o aesp8-ppc.o ghashp8-ppc.o aes.o aes_cbc.o aes_ctr.o aes_xts.o ghash.o
/linux/arch/x86/virt/
Makefile
2 obj-y += svm/ vmx/
/linux/Documentation/virt/kvm/x86/
index.rst
16 nested-vmx
