| /linux/arch/loongarch/kvm/ |
| H A D | main.c |
     19  static struct kvm_context __percpu *vmcs;                variable
    202  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);        in kvm_update_vpid()
    231  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);        in kvm_check_vpid()
    275  kvm->arch.vmcs = vmcs;                                   in kvm_init_vmcs()
    325  this_cpu_ptr(vmcs)->last_vcpu = NULL;                    in kvm_arch_enable_virtualization_cpu()
    347  vmcs = alloc_percpu(struct kvm_context);                 in kvm_loongarch_env_init()
    348  if (!vmcs) {                                             in kvm_loongarch_env_init()
    355  free_percpu(vmcs);                                       in kvm_loongarch_env_init()
    356  vmcs = NULL;                                             in kvm_loongarch_env_init()
    371  free_percpu(vmcs);                                       in kvm_loongarch_env_init()
    [all …]
|
| H A D | vcpu.c |
     45  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);            in kvm_save_host_pmu()
     60  context = this_cpu_ptr(vcpu->kvm->arch.vmcs);            in kvm_restore_host_pmu()
   1582  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);        in kvm_arch_vcpu_destroy()
   1604  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);        in _kvm_vcpu_load()
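
The two LoongArch files above share one per-CPU struct kvm_context (kept, somewhat confusingly, in a field named vmcs): it is allocated once at init, freed on the error/exit paths, and looked up with per_cpu_ptr()/this_cpu_ptr() whenever a vCPU is loaded. Below is a minimal sketch of that allocation/lookup pattern; the struct body and the helper names are illustrative stand-ins, with only last_vcpu and the percpu calls taken from the hits.

    /* Sketch of the per-CPU context pattern above; not the kernel's definitions. */
    #include <linux/errno.h>
    #include <linux/percpu.h>

    struct kvm_vcpu;                                /* opaque here */

    struct kvm_context {
            struct kvm_vcpu *last_vcpu;             /* cf. main.c:325 */
    };

    static struct kvm_context __percpu *vmcs;

    static int env_init_sketch(void)
    {
            vmcs = alloc_percpu(struct kvm_context);        /* cf. main.c:347 */
            if (!vmcs)
                    return -ENOMEM;
            return 0;
    }

    static void env_exit_sketch(void)
    {
            free_percpu(vmcs);                              /* cf. main.c:355/371 */
            vmcs = NULL;
    }

    static void forget_last_vcpu_on_this_cpu(void)
    {
            this_cpu_ptr(vmcs)->last_vcpu = NULL;           /* cf. main.c:325 */
    }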
|
| /linux/arch/x86/kvm/vmx/ |
| H A D | vmcs.h |
     21  struct vmcs {                                            struct
     27  DECLARE_PER_CPU(struct vmcs *, current_vmcs);            argument
     62  struct vmcs *vmcs;                                       member
     63  struct vmcs *shadow_vmcs;
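
The vmcs.h hits outline two layers: the raw struct vmcs (the hardware-defined region) and a loaded_vmcs wrapper pairing it with an optional shadow VMCS, plus a per-CPU current_vmcs pointer recording which region each physical CPU has loaded. The condensed declarations below are a trimmed reading of those hits, not the full kernel definitions.

    /* Trimmed sketch of the relationships suggested by the vmcs.h hits;
     * the real structs carry many more fields. */
    #include <linux/percpu.h>
    #include <linux/types.h>

    struct vmcs_hdr {
            u32 revision_id:31;
            u32 shadow_vmcs:1;
    };

    struct vmcs {
            struct vmcs_hdr hdr;
            u32 abort;
            char data[];                    /* hardware-defined, opaque to software */
    };

    struct loaded_vmcs {
            struct vmcs *vmcs;              /* the region VMPTRLD points at */
            struct vmcs *shadow_vmcs;       /* optional shadow for nested VMX */
            int cpu;                        /* CPU the region is loaded on, or -1 */
            bool launched;
    };

    /* Which VMCS each physical CPU currently has loaded. */
    DECLARE_PER_CPU(struct vmcs *, current_vmcs);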
|
| H A D | vmx.h |
    598  static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs) \
    600  return vmcs->controls_shadow.lname; \
    679  struct vmcs *alloc_vmcs_cpu(bool shadow, int cpu, gfp_t flags);
    680  void free_vmcs(struct vmcs *vmcs);
    684  static inline struct vmcs *alloc_vmcs(bool shadow)       in alloc_vmcs()
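
The hit at vmx.h lines 598-600 is part of a token-pasting macro that generates __<name>_controls_get() helpers reading a software-cached copy of a VMCS control group instead of doing a VMREAD. Below is a self-contained sketch of that generator pattern; struct loaded_vmcs is re-trimmed here to just the shadow field the accessor needs, and the pin field is a stand-in for the real control groups. The real macro also emits setters and bit-set/clear helpers that write through to the VMCS.

    /* Sketch of the accessor-generator pattern behind the hit above. */
    #include <linux/types.h>

    struct vmcs_controls_shadow {
            u32 pin;                        /* cached pin-based VM-execution controls */
    };

    struct loaded_vmcs {
            struct vmcs_controls_shadow controls_shadow;
    };

    #define BUILD_CONTROLS_GET(bits, lname)                                   \
    static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs)  \
    {                                                                         \
            return vmcs->controls_shadow.lname;                               \
    }

    BUILD_CONTROLS_GET(32, pin)             /* expands to __pin_controls_get() */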
|
| H A D | tdx.h |
    182  TDX_BUILD_TDVPS_ACCESSORS(16, VMCS, vmcs);
    183  TDX_BUILD_TDVPS_ACCESSORS(32, VMCS, vmcs);
    184  TDX_BUILD_TDVPS_ACCESSORS(64, VMCS, vmcs);
|
| H A D | vmx.c |
    557  noinline void vmclear_error(struct vmcs *vmcs, u64 phys_addr)             in vmclear_error()  argument
    560  vmcs, phys_addr, vmcs_read32(VM_INSTRUCTION_ERROR));                      in vmclear_error()
    563  noinline void vmptrld_error(struct vmcs *vmcs, u64 phys_addr)             in vmptrld_error()
    566  vmcs, phys_addr, vmcs_read32(VM_INSTRUCTION_ERROR));                      in vmptrld_error()
    580  static DEFINE_PER_CPU(struct vmcs *, vmxarea);
    581  DEFINE_PER_CPU(struct vmcs *, current_vmcs);
    633  evmcs = (struct hv_enlightened_vmcs *)to_vmx(vcpu)->loaded_vmcs->vmcs;    in hv_enable_l2_tlb_flush()
    828  vmcs_clear(v->vmcs);                                                      in vmx_emergency_disable_virtualization_cpu()
    843  if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)                      in __loaded_vmcs_clear()
    846  vmcs_clear(loaded_vmcs->vmcs);                                            in __loaded_vmcs_clear()
    [all …]
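
The __loaded_vmcs_clear() hits (lines 843/846) show the bookkeeping rule behind the per-CPU current_vmcs pointer defined at line 581: before a VMCS region can be migrated or freed, the CPU that has it loaded drops its pointer and VMCLEARs the region. A condensed sketch of that step, assuming the trimmed struct loaded_vmcs and current_vmcs declarations from the vmcs.h sketch above, with vmcs_clear() standing for the VMCLEAR wrapper; the real function also handles the shadow VMCS and a per-CPU list of loaded regions.

    /* Condensed sketch of __loaded_vmcs_clear(); reuses the earlier trimmed
     * declarations, and vmcs_clear() is assumed to wrap the VMCLEAR instruction. */
    void vmcs_clear(struct vmcs *vmcs);

    static void loaded_vmcs_clear_sketch(struct loaded_vmcs *loaded_vmcs, int cpu)
    {
            if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)    /* cf. vmx.c:843 */
                    per_cpu(current_vmcs, cpu) = NULL;

            vmcs_clear(loaded_vmcs->vmcs);                          /* cf. vmx.c:846 */
            loaded_vmcs->cpu = -1;          /* region is no longer loaded anywhere */
            loaded_vmcs->launched = false;
    }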
|
| H A D | nested.c |
    294  static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)  in vmx_switch_vmcs()  argument
    300  if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))                                   in vmx_switch_vmcs()
    305  vmx->loaded_vmcs = vmcs;                                                      in vmx_switch_vmcs()
   1634  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;                           in copy_shadow_to_vmcs12()
   1654  vmcs_load(vmx->loaded_vmcs->vmcs);                                            in copy_shadow_to_vmcs12()
   1669  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;                           in copy_vmcs12_to_shadow()
   1690  vmcs_load(vmx->loaded_vmcs->vmcs);                                            in copy_vmcs12_to_shadow()
   5394  static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)                  in alloc_shadow_vmcs()
   5858  vmcs_load(vmx->loaded_vmcs->vmcs);                                            in handle_vmwrite()
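
The copy_shadow_to_vmcs12()/copy_vmcs12_to_shadow() hits show the nested-VMX sync pattern: the shadow VMCS from vmcs01 is made current only long enough to copy the shadowed fields, then the vCPU's active VMCS is reloaded (lines 1654/1690/5858). The sketch below reuses the trimmed struct loaded_vmcs from the vmcs.h sketch above; vmcs_load()/vmcs_clear() are assumed VMPTRLD/VMCLEAR wrappers, and copy_shadowed_fields() is a hypothetical placeholder for the real per-field table walk.

    /* Sketch of the shadow-VMCS sync bracket implied by the hits above. */
    void vmcs_load(struct vmcs *vmcs);
    void vmcs_clear(struct vmcs *vmcs);
    void copy_shadowed_fields(struct vmcs *shadow_vmcs);    /* hypothetical */

    static void sync_shadow_vmcs_sketch(struct loaded_vmcs *vmcs01,
                                        struct loaded_vmcs *active)
    {
            struct vmcs *shadow_vmcs = vmcs01->shadow_vmcs;  /* cf. nested.c:1634 */

            vmcs_load(shadow_vmcs);                 /* make the shadow current */
            copy_shadowed_fields(shadow_vmcs);      /* copy the shadowed fields */
            vmcs_clear(shadow_vmcs);
            vmcs_load(active->vmcs);                /* restore the vCPU's VMCS, cf. 1654/1690 */
    }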
|
| /linux/tools/perf/util/intel-pt-decoder/ |
| H A D | intel-pt-decoder.c |
    124  struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
    167  uint64_t vmcs;                                                               member
   2320  bool tsc, pip, vmcs, tma, psbend;
   2358  data->vmcs = true;                                                           in intel_pt_vm_psb_lookahead_cb()
   2518  payload, decoder->packet.payload, vmcs_info->vmcs,                           in intel_pt_translate_vm_tsc()
   2526  .vmcs = NO_VMCS,                                                             in intel_pt_translate_vm_tsc_offset()
   2546  vmcs_info->vmcs, vmcs_info->tsc_offset);
   2601  uint64_t vmcs;                                                               in intel_pt_vm_tm_corr_tsc()
   2629  vmcs = data->vmcs                                                            in intel_pt_vm_tm_corr_tsc()
   2318  bool tsc, pip, vmcs, tma, psbend;                                            global() member
   2599  uint64_t vmcs;                                                               intel_pt_vm_tm_corr_tsc() local
   2752  uint64_t vmcs;                                                               intel_pt_vm_tm_corr_pebs_tsc() local
   [all …]
| H A D | intel-pt-decoder.h |
    210  uint64_t vmcs;                                                               member
    280  struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
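
On the perf side, the decoder does not track VMCS state itself: it calls back through findnew_vmcs_info() with the 64-bit VMCS pointer seen in the trace and gets per-VMCS timing data back, notably a guest TSC offset. The trimmed declarations below are one reading of the decoder.h hits; the vmcs member and the callback signature come from them, while tsc_offset is inferred from the vmcs_info->tsc_offset hit in intel-pt-decoder.c above, and the params struct name is a stand-in.

    /* Trimmed sketch of the decoder-side interface; the real structs carry more
     * fields (e.g. the rb_node used for the lookup tree kept by intel-pt.c). */
    #include <stdint.h>

    struct intel_pt_vmcs_info {
            uint64_t vmcs;                  /* VMCS pointer reported in the trace */
            uint64_t tsc_offset;            /* guest TSC offset for that VMCS */
    };

    struct intel_pt_params_sketch {
            /* Caller-supplied lookup: map a VMCS pointer to its timing info. */
            struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
            void *data;
    };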
|
| /linux/tools/testing/selftests/kvm/lib/x86/ |
| H A D | vmx.c |
     84  vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);                 in vcpu_alloc_vmx()
     85  vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);      in vcpu_alloc_vmx()
     86  vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);      in vcpu_alloc_vmx()
    161  *(uint32_t *)(vmx->vmcs) = vmcs_revision();                  in load_vmcs()
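
The selftest hits show the minimum setup before a VMPTRLD can succeed: allocate a page for the VMCS, record its guest-virtual, host-virtual, and guest-physical addresses, and write the VMCS revision identifier into the first 32 bits of the page. A hedged sketch of those steps using the selftest helpers named in the hits; the struct name is a trimmed stand-in for the selftest's struct vmx_pages, and the include paths are assumed.

    /* Sketch of the setup steps from vcpu_alloc_vmx()/load_vmcs() above. */
    #include <stdint.h>
    #include "kvm_util.h"           /* selftest helpers (assumed include path) */
    #include "vmx.h"                /* vmcs_revision() (assumed include path) */

    struct vmx_pages_sketch {
            void *vmcs;             /* guest virtual address of the VMCS page */
            void *vmcs_hva;         /* host mapping of the same page */
            uint64_t vmcs_gpa;      /* guest physical address for VMPTRLD */
    };

    static void vcpu_alloc_vmx_sketch(struct kvm_vm *vm, struct vmx_pages_sketch *vmx)
    {
            vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);                /* cf. line 84 */
            vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);     /* cf. line 85 */
            vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);     /* cf. line 86 */
    }

    /* Runs in guest context in the selftest, so the GVA pointer is used directly. */
    static void stamp_vmcs_revision(struct vmx_pages_sketch *vmx)
    {
            /* The first 32 bits of a VMCS must hold the revision id before VMPTRLD. */
            *(uint32_t *)(vmx->vmcs) = vmcs_revision();                 /* cf. line 161 */
    }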
|
| /linux/tools/perf/util/ |
| H A D | intel-pt.c |
    319  u64 vmcs,                                                                  in intel_pt_findnew_vmcs()  argument
    330  if (v->vmcs == vmcs)                                                       in intel_pt_findnew_vmcs()
    333  if (vmcs < v->vmcs)                                                        in intel_pt_findnew_vmcs()
    341  v->vmcs = vmcs;                                                            in intel_pt_findnew_vmcs()
    352  static struct intel_pt_vmcs_info *intel_pt_findnew_vmcs_info(void *data, uint64_t vmcs)  in intel_pt_findnew_vmcs_info()  argument
    357  if (!vmcs && !pt->dflt_tsc_offset)                                         in intel_pt_findnew_vmcs_info()
    360  return intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, pt->dflt_tsc_offset);   in intel_pt_findnew_vmcs_info()
   4302  u64 tsc_offset, vmcs;                                                      in intel_pt_parse_vm_tm_corr_arg()  local
   4322  vmcs = strtoull(p, &p, 0);                                                 in intel_pt_parse_vm_tm_corr_arg()
   4325  if (!vmcs)                                                                 in intel_pt_parse_vm_tm_corr_arg()
    [all …]
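
intel_pt_findnew_vmcs() is a find-or-insert over an rb-tree keyed on the VMCS pointer: walk left or right while comparing vmcs (lines 330/333), and if no node matches, allocate one, record its vmcs and default TSC offset (line 341), and link it in. A condensed sketch of that pattern using the kernel-style rbtree API shipped with the perf tools; the node struct is trimmed and calloc() replaces the tools' zalloc().

    /* Condensed sketch of the find-or-insert pattern in intel_pt_findnew_vmcs(). */
    #include <linux/rbtree.h>       /* tools/include copy used by perf */
    #include <stdint.h>
    #include <stdlib.h>

    struct vmcs_info_sketch {
            struct rb_node rb_node;
            uint64_t vmcs;
            uint64_t tsc_offset;
    };

    static struct vmcs_info_sketch *findnew_vmcs_sketch(struct rb_root *root,
                                                        uint64_t vmcs,
                                                        uint64_t dflt_tsc_offset)
    {
            struct rb_node **p = &root->rb_node, *parent = NULL;
            struct vmcs_info_sketch *v;

            while (*p) {
                    parent = *p;
                    v = rb_entry(parent, struct vmcs_info_sketch, rb_node);
                    if (v->vmcs == vmcs)            /* cf. intel-pt.c:330 */
                            return v;
                    if (vmcs < v->vmcs)             /* cf. intel-pt.c:333 */
                            p = &(*p)->rb_left;
                    else
                            p = &(*p)->rb_right;
            }

            v = calloc(1, sizeof(*v));
            if (v) {
                    v->vmcs = vmcs;                 /* cf. intel-pt.c:341 */
                    v->tsc_offset = dflt_tsc_offset;
                    rb_link_node(&v->rb_node, parent, p);
                    rb_insert_color(&v->rb_node, root);
            }
            return v;
    }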
|
| /linux/arch/loongarch/include/asm/ |
| H A D | kvm_host.h |
    133  struct kvm_context __percpu *vmcs;                       member
|
| /linux/tools/testing/selftests/kvm/include/x86/ |
| H A D | vmx.h |
    505  void *vmcs;                                              member
|