
Searched refs:has_vhe (Results 1 – 9 of 9) sorted by relevance

/linux/arch/arm64/kvm/
pmu.c
     24  if (has_vhe() && attr->exclude_user)           in kvm_pmu_switch_needed()
    136  if (!kvm_arm_support_pmu_v3() || !has_vhe())   in kvm_vcpu_pmu_restore_guest()
    157  if (!kvm_arm_support_pmu_v3() || !has_vhe())   in kvm_vcpu_pmu_restore_host()
    183  if (!kvm_arm_support_pmu_v3() || !has_vhe())   in kvm_set_pmuserenr()
    203  if (!has_vhe() || !in_interrupt())             in kvm_vcpu_pmu_resync_el0()
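Most of these pmu.c hits are the same early-return guard: the per-CPU PMU view is only switched between host and guest when PMUv3 is supported and the host runs with VHE (kernel at EL2). A hedged sketch of that pattern, built around the fragments above; the parameter type, comments and elided bodies are assumptions, not the verbatim kernel code:

    /* Illustrative sketch of the guard pattern seen in the pmu.c hits above. */
    void kvm_vcpu_pmu_restore_guest(struct kvm_vcpu *vcpu)
    {
            /* Only relevant with PMUv3 support on a VHE host (assumed rationale). */
            if (!kvm_arm_support_pmu_v3() || !has_vhe())
                    return;

            /* ... switch the EL0 PMU trap/enable state to the guest view (elided) ... */
    }

    void kvm_vcpu_pmu_restore_host(struct kvm_vcpu *vcpu)
    {
            if (!kvm_arm_support_pmu_v3() || !has_vhe())
                    return;

            /* ... put the host view back on vcpu_put (elided) ... */
    }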
debug.c
     60  if (has_vhe())                                   in restore_guest_debug_regs()
     77  if (has_vhe())
    231  if (has_vhe() || is_protected_kvm_enabled() ||   in kvm_arm_setup_debug()
    241  if (has_vhe() || is_protected_kvm_enabled() ||   in kvm_arm_setup_debug()
    254  if (has_vhe()) {                                 in kvm_arm_setup_debug()
arm.c
    413  if (!has_vhe())   in kvm_arch_alloc_vm()
    592  if (has_vhe())    in kvm_arch_vcpu_load()
    633  if (has_vhe())    in kvm_arch_vcpu_put()
sys_regs.c
   4933  if (has_vhe() || has_hvhe())   in vcpu_set_hcr()
/linux/arch/arm64/kvm/hyp/
exception.c
     51  } else if (has_vhe()) {   in __vcpu_write_spsr()
     60  if (has_vhe())            in __vcpu_write_spsr_abt()
     68  if (has_vhe())            in __vcpu_write_spsr_und()
/linux/arch/arm64/include/asm/
virt.h
    125  static __always_inline bool has_vhe(void)   in has_vhe() (definition)
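The virt.h hit is the definition site of has_vhe() itself. Only the prototype on line 125 is confirmed by the hit; the body below is a hedged sketch of how such a helper is commonly structured, and the hyp-context helpers and capability name are assumptions:

    /*
     * Sketch, not the verbatim kernel definition. has_vhe() reports whether
     * the kernel runs with the Virtualization Host Extensions (ARMv8.1 VHE),
     * i.e. the host kernel itself executes at EL2.
     */
    static __always_inline bool has_vhe(void)
    {
            /* Hyp code is built for exactly one mode, so it can answer statically. */
            if (is_vhe_hyp_code())
                    return true;
            if (is_nvhe_hyp_code())
                    return false;

            /* Everyone else consults the finalized CPU capability (assumed cap name). */
            return cpus_have_final_cap(ARM64_HAS_VIRT_HOST_EXTN);
    }

The __always_inline in the confirmed prototype is presumably what lets the helper be used from hyp code as well, where calling out-of-line kernel functions is not an option.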
kvm_host.h
   1037  if (!has_vhe())                              in __vcpu_read_sys_reg_from_cpu()
   1087  if (!has_vhe())                              in __vcpu_write_sys_reg_to_cpu()
   1178  if (has_vhe()) { \
   1190  if (has_vhe()) { \
   1378  return (!has_vhe() && attr->exclude_host);   in kvm_pmu_counter_deferred()
/linux/arch/arm64/kvm/vgic/
vgic.c
    859  return !static_branch_unlikely(&kvm_vgic_global_state.gicv3_cpuif) || has_vhe();   in can_access_vgic_from_kernel()
    937  if (has_vhe() && static_branch_unlikely(&kvm_vgic_global_state.gicv3_cpuif))       in kvm_vgic_load()
    951  if (has_vhe() && static_branch_unlikely(&kvm_vgic_global_state.gicv3_cpuif))       in kvm_vgic_put()
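The vgic.c hits capture the VHE fast path for the GIC. The return expression at line 859 is reproduced verbatim below; the surrounding comment is a paraphrased assumption about the rationale:

    /*
     * Sketch around the hit at vgic.c:859. GICv2 is memory-mapped and hence
     * always reachable from the kernel; the GICv3 CPU interface system
     * registers are only directly accessible when the kernel itself runs at
     * EL2, i.e. with VHE (rationale assumed, expression taken from the hit).
     */
    static inline bool can_access_vgic_from_kernel(void)
    {
            return !static_branch_unlikely(&kvm_vgic_global_state.gicv3_cpuif) ||
                   has_vhe();
    }

The kvm_vgic_load()/kvm_vgic_put() hits at lines 937 and 951 gate a similar VHE-plus-GICv3 path; what runs behind those checks is not visible in the fragments.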
/linux/include/kvm/
arm_pmu.h
     89  if (!has_vhe() && kvm_arm_support_pmu_v3()) \