Home
Sort by: last modified time | relevance | path

Searched refs: kvm_has_feat (Results 1 – 9 of 9), sorted by relevance

/linux/arch/arm64/kvm/
H A Dconfig.c231 return !kvm_has_feat(kvm, FEAT_AA64EL3); in not_feat_aa64el3()
236 return ((kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY) && in feat_nv2()
238 kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2)); in feat_nv2()
243 return feat_nv2(kvm) && !kvm_has_feat(kvm, FEAT_E2H0); in feat_nv2_e2h0_ni()
248 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) || in feat_rasv1p1()
250 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1))); in feat_rasv1p1()
255 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) || in feat_csv2_2_csv2_1p2()
256 (kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2) && in feat_csv2_2_csv2_1p2()
272 return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, FEAT_ADERR) && in feat_aderr()
273 kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SDERR, FEAT_ADERR)); in feat_aderr()
[all …]
H A Dnested.c591 if (!ttl || !kvm_has_feat(kvm, ID_AA64MMFR2_EL1, TTL, IMP)) { in compute_tlb_inval_range()
907 if (!kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY)) in kvm_invalidate_vncr_ipa()
1248 if (!kvm_has_feat(vcpu->kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY)) in kvm_vcpu_allocate_vncr_tlb()
1729 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, CnP, IMP)) in kvm_init_nv_sysregs()
1811 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RME, IMP)) in kvm_init_nv_sysregs()
1813 if (!kvm_has_feat(kvm, ID_AA64MMFR0_EL1, ECV, CNTPOFF)) { in kvm_init_nv_sysregs()
1815 if (!kvm_has_feat(kvm, ID_AA64MMFR0_EL1, ECV, IMP)) in kvm_init_nv_sysregs()
1819 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, VH, IMP)) in kvm_init_nv_sysregs()
H A Dat.c48 if (!kvm_has_feat(vcpu->kvm, ID_AA64MMFR0_EL1, TGRAN16, 52_BIT)) in has_52bit_pa()
52 if (!kvm_has_feat(vcpu->kvm, ID_AA64MMFR0_EL1, TGRAN4, 52_BIT)) in has_52bit_pa()
271 wi->hpd = kvm_has_feat(vcpu->kvm, ID_AA64MMFR1_EL1, HPDS, IMP); in setup_s1_walk()
302 lva = kvm_has_feat(vcpu->kvm, ID_AA64MMFR2_EL1, VARange, 52); in setup_s1_walk()
320 if (kvm_has_feat(vcpu->kvm, ID_AA64MMFR2_EL1, E0PD, IMP) && in setup_s1_walk()
350 wi->ha = kvm_has_feat(vcpu->kvm, ID_AA64MMFR1_EL1, HAFDBS, AF); in setup_s1_walk()
805 if (!kvm_has_feat(vcpu->kvm, ID_AA64PFR2_EL1, MTEPERM, IMP)) in compute_par_s12()
933 if (!kvm_has_feat(vcpu->kvm, ID_AA64MMFR1_EL1, PAN, PAN3)) in pan3_enabled()
H A Dsys_regs.c791 if (!kvm_has_feat(vcpu->kvm, ID_AA64MMFR1_EL1, LO, IMP)) in trap_loregion()
2034 if (kvm_has_feat(vcpu->kvm, ID_AA64PFR1_EL1, SME, IMP)) in sme_visibility()
2840 kvm_has_feat(vcpu->kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY)) in vncr_el2_visibility()
2965 kvm_has_feat(vcpu->kvm, ID_AA64MMFR0_EL1, FGT, FGT2)) in fgt2_visibility()
2975 kvm_has_feat(vcpu->kvm, ID_AA64MMFR0_EL1, FGT, IMP)) in fgt_visibility()
3054 if (!(kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) || in access_ras()
3056 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1)))) { in access_ras()
3062 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP)) { in access_ras()
3908 !kvm_has_feat(vcpu->kvm, ID_AA64ISAR2_EL1, ATS1A, IMP)) { in handle_at_s1e2()
3936 !kvm_has_feat(kvm, ID_AA64ISAR1_EL1, XS, IMP)) in kvm_supported_tlbi_s12_op()
[all …]
/linux/arch/arm64/include/asm/
H A Dkvm_nested.h127 kvm_has_feat(kvm, ID_AA64MMFR1_EL1, XNX, IMP); in kvm_has_xnx()
184 kvm_has_feat(kvm, ID_AA64ISAR1_EL1, XS, IMP)))) in kvm_supported_tlbi_s1e1_op()
188 !kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, OS)) in kvm_supported_tlbi_s1e1_op()
193 !kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, RANGE)) in kvm_supported_tlbi_s1e1_op()
210 kvm_has_feat(kvm, ID_AA64ISAR1_EL1, XS, IMP)))) in kvm_supported_tlbi_s1e2_op()
217 !kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, OS)) in kvm_supported_tlbi_s1e2_op()
222 !kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, RANGE)) in kvm_supported_tlbi_s1e2_op()
H A Dkvm_host.h1624 #define kvm_has_feat(kvm, ...) __kvm_has_feat(kvm, __VA_ARGS__) macro
1640 pa = kvm_has_feat((k), ID_AA64ISAR1_EL1, APA, l); \
1641 pa &= kvm_has_feat((k), ID_AA64ISAR1_EL1, GPA, IMP); \
1642 pi = kvm_has_feat((k), ID_AA64ISAR1_EL1, API, l); \
1643 pi &= kvm_has_feat((k), ID_AA64ISAR1_EL1, GPI, IMP); \
1644 pa3 = kvm_has_feat((k), ID_AA64ISAR2_EL1, APA3, l); \
1645 pa3 &= kvm_has_feat((k), ID_AA64ISAR2_EL1, GPA3, IMP); \
1652 kvm_has_feat((k), ID_AA64PFR2_EL1, FPMR, IMP))
1655 (kvm_has_feat((k), ID_AA64MMFR3_EL1, TCRX, IMP))
1658 (kvm_has_feat((k), ID_AA64MMFR3_EL1, S1PIE, IMP))
[all …]
H A Dkvm_emulate.h672 if (kvm_has_feat(kvm, ID_AA64ISAR2_EL1, MOPS, IMP)) in vcpu_set_hcrx()
684 if (kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64)) in vcpu_set_hcrx()
687 if (kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_V)) in vcpu_set_hcrx()
/linux/arch/arm64/kvm/hyp/nvhe/
H A Dpkvm.c79 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP)) { in pvm_init_traps_hcr()
84 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, IMP)) in pvm_init_traps_hcr()
92 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP)) in pvm_init_traps_hcr()
103 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP)) { in pvm_init_traps_mdcr()
108 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, DebugVer, IMP)) in pvm_init_traps_mdcr()
111 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, DoubleLock, IMP)) in pvm_init_traps_mdcr()
114 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, IMP)) { in pvm_init_traps_mdcr()
119 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceFilt, IMP)) in pvm_init_traps_mdcr()
122 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceBuffer, IMP)) in pvm_init_traps_mdcr()
126 if (!kvm_has_feat(kvm, ID_AA64MMFR0_EL1, FGT, IMP)) in pvm_init_traps_mdcr()
[all …]
/linux/arch/arm64/kvm/hyp/include/hyp/
H A Dswitch.h144 if (kvm_has_feat(vcpu->kvm, ID_AA64MMFR3_EL1, S2POE, IMP)) in __activate_cptr_traps_vhe()