xref: /linux/arch/arm64/kvm/config.c (revision 06bc7ff0a1e0f2b0102e1314e3527a7ec0997851)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) 2025 Google LLC
4  * Author: Marc Zyngier <maz@kernel.org>
5  */
6 
7 #include <linux/kvm_host.h>
8 #include <asm/kvm_emulate.h>
9 #include <asm/kvm_nested.h>
10 #include <asm/sysreg.h>
11 
12 /*
13  * Describes the dependencies between a set of bits (or the negation
14  * of a set of RES0 bits) and a feature. The flags indicate how the
15  * data is interpreted.
16  */
struct reg_bits_to_feat_map {
	/* Bits described by this entry, or the register's full RESx masks */
	union {
		u64		 bits;
		struct fgt_masks *masks;	/* valid iff MASKS_POINTER is set */
	};

#define	NEVER_FGU	BIT(0)	/* Can trap, but never UNDEF */
#define	CALL_FUNC	BIT(1)	/* Needs to evaluate tons of crap */
#define	FORCE_RESx	BIT(2)	/* Unconditional RESx */
#define	MASKS_POINTER	BIT(3)	/* Pointer to fgt_masks struct instead of bits */
#define	AS_RES1		BIT(4)	/* RES1 when not supported */
#define	REQUIRES_E2H1	BIT(5)	/* Add HCR_EL2.E2H RES1 as a pre-condition */
#define	RES1_WHEN_E2H0	BIT(6)	/* RES1 when E2H=0 and not supported */
#define	RES1_WHEN_E2H1	BIT(7)	/* RES1 when E2H=1 and not supported */

	unsigned long	flags;	/* combination of the BIT() flags above */

	/*
	 * Feature dependency: either an ID register field description
	 * (filled in by __NEEDS_FEAT_3) or a predicate callback
	 * (CALL_FUNC, filled in by __NEEDS_FEAT_1).
	 */
	union {
		struct {
			u8	regidx;	/* IDREG_IDX() of the ID register */
			u8	shift;	/* field position within the register */
			u8	width;	/* field width in bits */
			bool	sign;	/* whether the field is signed */
			s8	lo_lim;	/* lowest field value implying support */
		};
		bool	(*match)(struct kvm *);	/* valid iff CALL_FUNC is set */
	};
};
45 
46 /*
47  * Describes the dependencies for a given register:
48  *
49  * @feat_map describes the dependency for the whole register. If the
50  * features the register depends on are not present, the whole
51  * register is effectively RES0.
52  *
53  * @bit_feat_map describes the dependencies for a set of bits in that
54  * register. If the features these bits depend on are not present, the
55  * bits are effectively RES0.
56  */
struct reg_feat_map_desc {
	const char			  *name;	/* register name, for diagnostics */
	/* Dependency for the register as a whole (RES0 if absent) */
	const struct reg_bits_to_feat_map feat_map;
	/* Per-bit dependencies, and the number of entries in that array */
	const struct reg_bits_to_feat_map *bit_feat_map;
	const unsigned int		  bit_feat_map_sz;
};
63 
/*
 * Three-argument variant: the feature is identified by an ID register,
 * a field name, and the lowest field value implying support. The
 * sysreg-generated *_SHIFT/_WIDTH/_SIGNED definitions are used to
 * describe the field.
 */
#define __NEEDS_FEAT_3(m, f, w, id, fld, lim)		\
	{						\
		.w	= (m),				\
		.flags = (f),				\
		.regidx	= IDREG_IDX(SYS_ ## id),	\
		.shift	= id ##_## fld ## _SHIFT,	\
		.width	= id ##_## fld ## _WIDTH,	\
		.sign	= id ##_## fld ## _SIGNED,	\
		.lo_lim	= id ##_## fld ##_## lim	\
	}

/* One-argument variant: the feature is evaluated by a callback */
#define __NEEDS_FEAT_1(m, f, w, fun)			\
	{						\
		.w	= (m),				\
		.flags = (f) | CALL_FUNC,		\
		.match = (fun),				\
	}

/* Zero-argument variant: no feature dependency, flags only */
#define __NEEDS_FEAT_0(m, f, w, ...)			\
	{						\
		.w	= (m),				\
		.flags = (f),				\
	}

/*
 * Dispatch on the number of trailing arguments (0, 1 or 3) to one of
 * the initialiser variants above.
 */
#define __NEEDS_FEAT_FLAG(m, f, w, ...)			\
	CONCATENATE(__NEEDS_FEAT_, COUNT_ARGS(__VA_ARGS__))(m, f, w, __VA_ARGS__)

/* Entry described by a plain bit mask */
#define NEEDS_FEAT_FLAG(m, f, ...)			\
	__NEEDS_FEAT_FLAG(m, f, bits, __VA_ARGS__)

/* Entry described by a pointer to a struct fgt_masks */
#define NEEDS_FEAT_MASKS(p, ...)				\
	__NEEDS_FEAT_FLAG(p, MASKS_POINTER, masks, __VA_ARGS__)

/*
 * Declare the dependency between a set of bits and a set of features,
 * generating a struct reg_bit_to_feat_map.
 */
#define NEEDS_FEAT(m, ...)	NEEDS_FEAT_FLAG(m, 0, __VA_ARGS__)

/* Declare fixed RESx bits */
#define FORCE_RES0(m)		NEEDS_FEAT_FLAG(m, FORCE_RESx)
#define FORCE_RES1(m)		NEEDS_FEAT_FLAG(m, FORCE_RESx | AS_RES1)

/*
 * Declare the dependency between a non-FGT register, a set of features,
 * and the set of individual bits it contains. This generates a struct
 * reg_feat_map_desc.
 */
#define DECLARE_FEAT_MAP(n, r, m, f)					\
	struct reg_feat_map_desc n = {					\
		.name			= #r,				\
		.feat_map		= NEEDS_FEAT(~(r##_RES0 |	\
						       r##_RES1), f),	\
		.bit_feat_map		= m,				\
		.bit_feat_map_sz	= ARRAY_SIZE(m),		\
	}

/*
 * Specialised version of the above for FGT registers that have their
 * RESx masks described as struct fgt_masks.
 */
#define DECLARE_FEAT_MAP_FGT(n, msk, m, f)				\
	struct reg_feat_map_desc n = {					\
		.name			= #msk,				\
		.feat_map		= NEEDS_FEAT_MASKS(&msk, f),	\
		.bit_feat_map		= m,				\
		.bit_feat_map_sz	= ARRAY_SIZE(m),		\
	}
132 
/*
 * Shorthand (ID register, field, minimum value) triplets, expanded as
 * arguments to kvm_has_feat()/NEEDS_FEAT() below.
 */
#define FEAT_SPE		ID_AA64DFR0_EL1, PMSVer, IMP
#define FEAT_BRBE		ID_AA64DFR0_EL1, BRBE, IMP
#define FEAT_TRC_SR		ID_AA64DFR0_EL1, TraceVer, IMP
#define FEAT_PMUv3		ID_AA64DFR0_EL1, PMUVer, IMP
#define FEAT_TRBE		ID_AA64DFR0_EL1, TraceBuffer, IMP
#define FEAT_TRBEv1p1		ID_AA64DFR0_EL1, TraceBuffer, TRBE_V1P1
#define FEAT_DoubleLock		ID_AA64DFR0_EL1, DoubleLock, IMP
#define FEAT_TRF		ID_AA64DFR0_EL1, TraceFilt, IMP
#define FEAT_AA32EL0		ID_AA64PFR0_EL1, EL0, AARCH32
#define FEAT_AA32EL1		ID_AA64PFR0_EL1, EL1, AARCH32
#define FEAT_AA64EL1		ID_AA64PFR0_EL1, EL1, IMP
#define FEAT_AA64EL2		ID_AA64PFR0_EL1, EL2, IMP
#define FEAT_AA64EL3		ID_AA64PFR0_EL1, EL3, IMP
#define FEAT_SEL2		ID_AA64PFR0_EL1, SEL2, IMP
#define FEAT_AIE		ID_AA64MMFR3_EL1, AIE, IMP
#define FEAT_S2POE		ID_AA64MMFR3_EL1, S2POE, IMP
#define FEAT_S1POE		ID_AA64MMFR3_EL1, S1POE, IMP
#define FEAT_S1PIE		ID_AA64MMFR3_EL1, S1PIE, IMP
#define FEAT_THE		ID_AA64PFR1_EL1, THE, IMP
#define FEAT_SME		ID_AA64PFR1_EL1, SME, IMP
#define FEAT_GCS		ID_AA64PFR1_EL1, GCS, IMP
#define FEAT_LS64		ID_AA64ISAR1_EL1, LS64, LS64
#define FEAT_LS64_V		ID_AA64ISAR1_EL1, LS64, LS64_V
#define FEAT_LS64_ACCDATA	ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA
#define FEAT_RAS		ID_AA64PFR0_EL1, RAS, IMP
#define FEAT_RASv2		ID_AA64PFR0_EL1, RAS, V2
#define FEAT_GICv3		ID_AA64PFR0_EL1, GIC, IMP
#define FEAT_LOR		ID_AA64MMFR1_EL1, LO, IMP
#define FEAT_SPEv1p2		ID_AA64DFR0_EL1, PMSVer, V1P2
#define FEAT_SPEv1p4		ID_AA64DFR0_EL1, PMSVer, V1P4
#define FEAT_SPEv1p5		ID_AA64DFR0_EL1, PMSVer, V1P5
#define FEAT_ATS1A		ID_AA64ISAR2_EL1, ATS1A, IMP
#define FEAT_SPECRES2		ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX
#define FEAT_SPECRES		ID_AA64ISAR1_EL1, SPECRES, IMP
#define FEAT_TLBIRANGE		ID_AA64ISAR0_EL1, TLB, RANGE
#define FEAT_TLBIOS		ID_AA64ISAR0_EL1, TLB, OS
#define FEAT_PAN2		ID_AA64MMFR1_EL1, PAN, PAN2
#define FEAT_DPB2		ID_AA64ISAR1_EL1, DPB, DPB2
#define FEAT_AMUv1		ID_AA64PFR0_EL1, AMU, IMP
#define FEAT_AMUv1p1		ID_AA64PFR0_EL1, AMU, V1P1
#define FEAT_CMOW		ID_AA64MMFR1_EL1, CMOW, IMP
#define FEAT_D128		ID_AA64MMFR3_EL1, D128, IMP
#define FEAT_DoubleFault2	ID_AA64PFR1_EL1, DF2, IMP
#define FEAT_FPMR		ID_AA64PFR2_EL1, FPMR, IMP
#define FEAT_MOPS		ID_AA64ISAR2_EL1, MOPS, IMP
#define FEAT_NMI		ID_AA64PFR1_EL1, NMI, IMP
#define FEAT_SCTLR2		ID_AA64MMFR3_EL1, SCTLRX, IMP
#define FEAT_SYSREG128		ID_AA64ISAR2_EL1, SYSREG_128, IMP
#define FEAT_TCR2		ID_AA64MMFR3_EL1, TCRX, IMP
#define FEAT_XS			ID_AA64ISAR1_EL1, XS, IMP
#define FEAT_EVT		ID_AA64MMFR2_EL1, EVT, IMP
#define FEAT_EVT_TTLBxS		ID_AA64MMFR2_EL1, EVT, TTLBxS
#define FEAT_MTE2		ID_AA64PFR1_EL1, MTE, MTE2
#define FEAT_RME		ID_AA64PFR0_EL1, RME, IMP
#define FEAT_MPAM		ID_AA64PFR0_EL1, MPAM, 1
#define FEAT_S2FWB		ID_AA64MMFR2_EL1, FWB, IMP
#define FEAT_TWED		ID_AA64MMFR1_EL1, TWED, IMP
#define FEAT_E2H0		ID_AA64MMFR4_EL1, E2H0, IMP
#define FEAT_SRMASK		ID_AA64MMFR4_EL1, SRMASK, IMP
#define FEAT_PoPS		ID_AA64MMFR4_EL1, PoPS, IMP
#define FEAT_PFAR		ID_AA64PFR1_EL1, PFAR, IMP
#define FEAT_Debugv8p9		ID_AA64DFR0_EL1, DebugVer, V8P9
#define FEAT_PMUv3_SS		ID_AA64DFR0_EL1, PMSS, IMP
#define FEAT_SEBEP		ID_AA64DFR0_EL1, SEBEP, IMP
#define FEAT_EBEP		ID_AA64DFR1_EL1, EBEP, IMP
#define FEAT_ITE		ID_AA64DFR1_EL1, ITE, IMP
#define FEAT_PMUv3_ICNTR	ID_AA64DFR1_EL1, PMICNTR, IMP
#define FEAT_SPMU		ID_AA64DFR1_EL1, SPMU, IMP
#define FEAT_SPE_nVM		ID_AA64DFR2_EL1, SPE_nVM, IMP
#define FEAT_STEP2		ID_AA64DFR2_EL1, STEP, IMP
#define FEAT_CPA2		ID_AA64ISAR3_EL1, CPA, CPA2
#define FEAT_ASID2		ID_AA64MMFR4_EL1, ASID2, IMP
#define FEAT_MEC		ID_AA64MMFR3_EL1, MEC, IMP
#define FEAT_HAFT		ID_AA64MMFR1_EL1, HAFDBS, HAFT
#define FEAT_HDBSS		ID_AA64MMFR1_EL1, HAFDBS, HDBSS
#define FEAT_HPDS2		ID_AA64MMFR1_EL1, HPDS, HPDS2
#define FEAT_BTI		ID_AA64PFR1_EL1, BT, IMP
#define FEAT_ExS		ID_AA64MMFR0_EL1, EXS, IMP
#define FEAT_IESB		ID_AA64MMFR2_EL1, IESB, IMP
#define FEAT_LSE2		ID_AA64MMFR2_EL1, AT, IMP
#define FEAT_LSMAOC		ID_AA64MMFR2_EL1, LSM, IMP
#define FEAT_MixedEnd		ID_AA64MMFR0_EL1, BIGEND, IMP
#define FEAT_MixedEndEL0	ID_AA64MMFR0_EL1, BIGENDEL0, IMP
#define FEAT_MTE_ASYNC		ID_AA64PFR1_EL1, MTE_frac, ASYNC
#define FEAT_MTE_STORE_ONLY	ID_AA64PFR2_EL1, MTESTOREONLY, IMP
#define FEAT_PAN		ID_AA64MMFR1_EL1, PAN, IMP
#define FEAT_PAN3		ID_AA64MMFR1_EL1, PAN, PAN3
#define FEAT_SSBS		ID_AA64PFR1_EL1, SSBS, IMP
#define FEAT_TIDCP1		ID_AA64MMFR1_EL1, TIDCP1, IMP
#define FEAT_FGT		ID_AA64MMFR0_EL1, FGT, IMP
#define FEAT_FGT2		ID_AA64MMFR0_EL1, FGT, FGT2
#define FEAT_MTPMU		ID_AA64DFR0_EL1, MTPMU, IMP
#define FEAT_HCX		ID_AA64MMFR1_EL1, HCX, IMP
#define FEAT_S2PIE		ID_AA64MMFR3_EL1, S2PIE, IMP
#define FEAT_GCIE		ID_AA64PFR2_EL1, GCIE, IMP
228 
not_feat_aa64el3(struct kvm * kvm)229 static bool not_feat_aa64el3(struct kvm *kvm)
230 {
231 	return !kvm_has_feat(kvm, FEAT_AA64EL3);
232 }
233 
feat_nv2(struct kvm * kvm)234 static bool feat_nv2(struct kvm *kvm)
235 {
236 	return ((kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY) &&
237 		 kvm_has_feat_enum(kvm, ID_AA64MMFR2_EL1, NV, NI)) ||
238 		kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2));
239 }
240 
feat_nv2_e2h0_ni(struct kvm * kvm)241 static bool feat_nv2_e2h0_ni(struct kvm *kvm)
242 {
243 	return feat_nv2(kvm) && !kvm_has_feat(kvm, FEAT_E2H0);
244 }
245 
feat_rasv1p1(struct kvm * kvm)246 static bool feat_rasv1p1(struct kvm *kvm)
247 {
248 	return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) ||
249 		(kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, RAS, IMP) &&
250 		 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1)));
251 }
252 
feat_csv2_2_csv2_1p2(struct kvm * kvm)253 static bool feat_csv2_2_csv2_1p2(struct kvm *kvm)
254 {
255 	return (kvm_has_feat(kvm,  ID_AA64PFR0_EL1, CSV2, CSV2_2) ||
256 		(kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2) &&
257 		 kvm_has_feat_enum(kvm,  ID_AA64PFR0_EL1, CSV2, IMP)));
258 }
259 
/* Guest has any flavour of pointer authentication (FEAT_PAuth) */
static bool feat_pauth(struct kvm *kvm)
{
	return kvm_has_pauth(kvm, PAuth);
}
264 
/* Guest has FEAT_PAuth_LR */
static bool feat_pauth_lr(struct kvm *kvm)
{
	return kvm_has_pauth(kvm, PAuth_LR);
}
269 
feat_aderr(struct kvm * kvm)270 static bool feat_aderr(struct kvm *kvm)
271 {
272 	return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, FEAT_ADERR) &&
273 		kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SDERR, FEAT_ADERR));
274 }
275 
feat_anerr(struct kvm * kvm)276 static bool feat_anerr(struct kvm *kvm)
277 {
278 	return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ANERR, FEAT_ANERR) &&
279 		kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SNERR, FEAT_ANERR));
280 }
281 
feat_sme_smps(struct kvm * kvm)282 static bool feat_sme_smps(struct kvm *kvm)
283 {
284 	/*
285 	 * Revisit this if KVM ever supports SME -- this really should
286 	 * look at the guest's view of SMIDR_EL1. Funnily enough, this
287 	 * is not captured in the JSON file, but only as a note in the
288 	 * ARM ARM.
289 	 */
290 	return (kvm_has_feat(kvm, FEAT_SME) &&
291 		(read_sysreg_s(SYS_SMIDR_EL1) & SMIDR_EL1_SMPS));
292 }
293 
feat_spe_fds(struct kvm * kvm)294 static bool feat_spe_fds(struct kvm *kvm)
295 {
296 	/*
297 	 * Revisit this if KVM ever supports SPE -- this really should
298 	 * look at the guest's view of PMSIDR_EL1.
299 	 */
300 	return (kvm_has_feat(kvm, FEAT_SPEv1p4) &&
301 		(read_sysreg_s(SYS_PMSIDR_EL1) & PMSIDR_EL1_FDS));
302 }
303 
feat_spe_fne(struct kvm * kvm)304 static bool feat_spe_fne(struct kvm *kvm)
305 {
306 	/*
307 	 * Revisit this if KVM ever supports SPE -- this really should
308 	 * look at the guest's view of PMSIDR_EL1.
309 	 */
310 	return (kvm_has_feat(kvm, FEAT_SPEv1p2) &&
311 		(read_sysreg_s(SYS_PMSIDR_EL1) & PMSIDR_EL1_FnE));
312 }
313 
feat_trbe_mpam(struct kvm * kvm)314 static bool feat_trbe_mpam(struct kvm *kvm)
315 {
316 	/*
317 	 * Revisit this if KVM ever supports both MPAM and TRBE --
318 	 * this really should look at the guest's view of TRBIDR_EL1.
319 	 */
320 	return (kvm_has_feat(kvm, FEAT_TRBE) &&
321 		kvm_has_feat(kvm, FEAT_MPAM) &&
322 		(read_sysreg_s(SYS_TRBIDR_EL1) & TRBIDR_EL1_MPAM));
323 }
324 
feat_ebep_pmuv3_ss(struct kvm * kvm)325 static bool feat_ebep_pmuv3_ss(struct kvm *kvm)
326 {
327 	return kvm_has_feat(kvm, FEAT_EBEP) || kvm_has_feat(kvm, FEAT_PMUv3_SS);
328 }
329 
feat_mixedendel0(struct kvm * kvm)330 static bool feat_mixedendel0(struct kvm *kvm)
331 {
332 	return kvm_has_feat(kvm, FEAT_MixedEnd) || kvm_has_feat(kvm, FEAT_MixedEndEL0);
333 }
334 
feat_mte_async(struct kvm * kvm)335 static bool feat_mte_async(struct kvm *kvm)
336 {
337 	return kvm_has_feat(kvm, FEAT_MTE2) && kvm_has_feat_enum(kvm, FEAT_MTE_ASYNC);
338 }
339 
/*
 * True when the guest PMU is at least revision 'r', excluding
 * IMPLEMENTATION DEFINED PMUs (PMUVer == IMP_DEF).
 */
#define check_pmu_revision(k, r)					\
	({								\
		(kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, r) &&	\
		 !kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, IMP_DEF));	\
	})
345 
/* PMUv3 minor-revision predicates, all excluding IMP_DEF PMUs */
static bool feat_pmuv3p1(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P1);
}

static bool feat_pmuv3p5(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P5);
}

static bool feat_pmuv3p7(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P7);
}

static bool feat_pmuv3p9(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P9);
}
365 
/*
 * Stage-2 support for granule size 's': either TGRANs_2 advertises it
 * directly, or TGRANs_2 says "same as stage-1" and TGRANs is
 * implemented.
 *
 * Note: the macro body now consistently uses its 'k' parameter; the
 * previous version referenced the caller's 'kvm' variable directly,
 * which only worked because every call site happened to pass a local
 * named 'kvm'.
 */
#define has_feat_s2tgran(k, s)						\
  ((kvm_has_feat_enum((k), ID_AA64MMFR0_EL1, TGRAN##s##_2, TGRAN##s) && \
    kvm_has_feat((k), ID_AA64MMFR0_EL1, TGRAN##s, IMP))		     ||	\
   kvm_has_feat((k), ID_AA64MMFR0_EL1, TGRAN##s##_2, IMP))
370 
/*
 * FEAT_LPA2 from the guest's point of view: for each of the 4kB and
 * 16kB granules, at both stage-1 and stage-2, the granule must either
 * support 52bit addresses or not be implemented at all.
 */
static bool feat_lpa2(struct kvm *kvm)
{
	return ((kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, 52_BIT)    ||
		 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, IMP))	&&
		(kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, 52_BIT)   ||
		 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, IMP))	&&
		(kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4_2, 52_BIT)  ||
		 !has_feat_s2tgran(kvm, 4))				&&
		(kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16_2, 52_BIT) ||
		 !has_feat_s2tgran(kvm, 16)));
}
382 
/* Guest advertises 16bit VMIDs (ID_AA64MMFR1_EL1.VMIDBits == 16) */
static bool feat_vmid16(struct kvm *kvm)
{
	return kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16);
}
387 
/* Bit -> feature dependencies for HFGRTR_EL2 (FGT read traps) */
static const struct reg_bits_to_feat_map hfgrtr_feat_map[] = {
	NEEDS_FEAT(HFGRTR_EL2_nAMAIR2_EL1	|
		   HFGRTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGRTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGRTR_EL2_nPOR_EL1		|
		   HFGRTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGRTR_EL2_nPIR_EL1		|
		   HFGRTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGRTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGRTR_EL2_nTPIDR2_EL0	|
		   HFGRTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGRTR_EL2_nGCS_EL1		|
		   HFGRTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGRTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGRTR_EL2_ERXADDR_EL1	|
		   HFGRTR_EL2_ERXMISCn_EL1	|
		   HFGRTR_EL2_ERXSTATUS_EL1	|
		   HFGRTR_EL2_ERXCTLR_EL1	|
		   HFGRTR_EL2_ERXFR_EL1		|
		   HFGRTR_EL2_ERRSELR_EL1	|
		   HFGRTR_EL2_ERRIDR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGRTR_EL2_ERXPFGCDN_EL1	|
		   HFGRTR_EL2_ERXPFGCTL_EL1	|
		   HFGRTR_EL2_ERXPFGF_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGRTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGRTR_EL2_SCXTNUM_EL0	|
		   HFGRTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGRTR_EL2_LORSA_EL1		|
		   HFGRTR_EL2_LORN_EL1		|
		   HFGRTR_EL2_LORID_EL1		|
		   HFGRTR_EL2_LOREA_EL1		|
		   HFGRTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGRTR_EL2_APIBKey		|
		   HFGRTR_EL2_APIAKey		|
		   HFGRTR_EL2_APGAKey		|
		   HFGRTR_EL2_APDBKey		|
		   HFGRTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: trappable, but never UNDEF */
	NEEDS_FEAT_FLAG(HFGRTR_EL2_VBAR_EL1	|
			HFGRTR_EL2_TTBR1_EL1	|
			HFGRTR_EL2_TTBR0_EL1	|
			HFGRTR_EL2_TPIDR_EL0	|
			HFGRTR_EL2_TPIDRRO_EL0	|
			HFGRTR_EL2_TPIDR_EL1	|
			HFGRTR_EL2_TCR_EL1	|
			HFGRTR_EL2_SCTLR_EL1	|
			HFGRTR_EL2_REVIDR_EL1	|
			HFGRTR_EL2_PAR_EL1	|
			HFGRTR_EL2_MPIDR_EL1	|
			HFGRTR_EL2_MIDR_EL1	|
			HFGRTR_EL2_MAIR_EL1	|
			HFGRTR_EL2_ISR_EL1	|
			HFGRTR_EL2_FAR_EL1	|
			HFGRTR_EL2_ESR_EL1	|
			HFGRTR_EL2_DCZID_EL0	|
			HFGRTR_EL2_CTR_EL0	|
			HFGRTR_EL2_CSSELR_EL1	|
			HFGRTR_EL2_CPACR_EL1	|
			HFGRTR_EL2_CONTEXTIDR_EL1|
			HFGRTR_EL2_CLIDR_EL1	|
			HFGRTR_EL2_CCSIDR_EL1	|
			HFGRTR_EL2_AMAIR_EL1	|
			HFGRTR_EL2_AIDR_EL1	|
			HFGRTR_EL2_AFSR1_EL1	|
			HFGRTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};


static const DECLARE_FEAT_MAP_FGT(hfgrtr_desc, hfgrtr_masks,
				  hfgrtr_feat_map, FEAT_FGT);
468 
/* Bit -> feature dependencies for HFGWTR_EL2 (FGT write traps) */
static const struct reg_bits_to_feat_map hfgwtr_feat_map[] = {
	NEEDS_FEAT(HFGWTR_EL2_nAMAIR2_EL1	|
		   HFGWTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGWTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGWTR_EL2_nPOR_EL1		|
		   HFGWTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGWTR_EL2_nPIR_EL1		|
		   HFGWTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGWTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGWTR_EL2_nTPIDR2_EL0	|
		   HFGWTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGWTR_EL2_nGCS_EL1		|
		   HFGWTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGWTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGWTR_EL2_ERXADDR_EL1	|
		   HFGWTR_EL2_ERXMISCn_EL1	|
		   HFGWTR_EL2_ERXSTATUS_EL1	|
		   HFGWTR_EL2_ERXCTLR_EL1	|
		   HFGWTR_EL2_ERRSELR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGWTR_EL2_ERXPFGCDN_EL1	|
		   HFGWTR_EL2_ERXPFGCTL_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGWTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGWTR_EL2_SCXTNUM_EL0	|
		   HFGWTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGWTR_EL2_LORSA_EL1		|
		   HFGWTR_EL2_LORN_EL1		|
		   HFGWTR_EL2_LOREA_EL1		|
		   HFGWTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGWTR_EL2_APIBKey		|
		   HFGWTR_EL2_APIAKey		|
		   HFGWTR_EL2_APGAKey		|
		   HFGWTR_EL2_APDBKey		|
		   HFGWTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: trappable, but never UNDEF */
	NEEDS_FEAT_FLAG(HFGWTR_EL2_VBAR_EL1	|
			HFGWTR_EL2_TTBR1_EL1	|
			HFGWTR_EL2_TTBR0_EL1	|
			HFGWTR_EL2_TPIDR_EL0	|
			HFGWTR_EL2_TPIDRRO_EL0	|
			HFGWTR_EL2_TPIDR_EL1	|
			HFGWTR_EL2_TCR_EL1	|
			HFGWTR_EL2_SCTLR_EL1	|
			HFGWTR_EL2_PAR_EL1	|
			HFGWTR_EL2_MAIR_EL1	|
			HFGWTR_EL2_FAR_EL1	|
			HFGWTR_EL2_ESR_EL1	|
			HFGWTR_EL2_CSSELR_EL1	|
			HFGWTR_EL2_CPACR_EL1	|
			HFGWTR_EL2_CONTEXTIDR_EL1|
			HFGWTR_EL2_AMAIR_EL1	|
			HFGWTR_EL2_AFSR1_EL1	|
			HFGWTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr_desc, hfgwtr_masks,
				  hfgwtr_feat_map, FEAT_FGT);
535 
/* Bit -> feature dependencies for HDFGRTR_EL2 (debug FGT read traps) */
static const struct reg_bits_to_feat_map hdfgrtr_feat_map[] = {
	NEEDS_FEAT(HDFGRTR_EL2_PMBIDR_EL1	|
		   HDFGRTR_EL2_PMSLATFR_EL1	|
		   HDFGRTR_EL2_PMSIRR_EL1	|
		   HDFGRTR_EL2_PMSIDR_EL1	|
		   HDFGRTR_EL2_PMSICR_EL1	|
		   HDFGRTR_EL2_PMSFCR_EL1	|
		   HDFGRTR_EL2_PMSEVFR_EL1	|
		   HDFGRTR_EL2_PMSCR_EL1	|
		   HDFGRTR_EL2_PMBSR_EL1	|
		   HDFGRTR_EL2_PMBPTR_EL1	|
		   HDFGRTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGRTR_EL2_nPMSNEVFR_EL1, feat_spe_fne),
	NEEDS_FEAT(HDFGRTR_EL2_nBRBDATA		|
		   HDFGRTR_EL2_nBRBCTL		|
		   HDFGRTR_EL2_nBRBIDR,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGRTR_EL2_TRCVICTLR	|
		   HDFGRTR_EL2_TRCSTATR		|
		   HDFGRTR_EL2_TRCSSCSRn	|
		   HDFGRTR_EL2_TRCSEQSTR	|
		   HDFGRTR_EL2_TRCPRGCTLR	|
		   HDFGRTR_EL2_TRCOSLSR		|
		   HDFGRTR_EL2_TRCIMSPECn	|
		   HDFGRTR_EL2_TRCID		|
		   HDFGRTR_EL2_TRCCNTVRn	|
		   HDFGRTR_EL2_TRCCLAIM		|
		   HDFGRTR_EL2_TRCAUXCTLR	|
		   HDFGRTR_EL2_TRCAUTHSTATUS	|
		   HDFGRTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGRTR_EL2_PMCEIDn_EL0	|
		   HDFGRTR_EL2_PMUSERENR_EL0	|
		   HDFGRTR_EL2_PMMIR_EL1	|
		   HDFGRTR_EL2_PMSELR_EL0	|
		   HDFGRTR_EL2_PMOVS		|
		   HDFGRTR_EL2_PMINTEN		|
		   HDFGRTR_EL2_PMCNTEN		|
		   HDFGRTR_EL2_PMCCNTR_EL0	|
		   HDFGRTR_EL2_PMCCFILTR_EL0	|
		   HDFGRTR_EL2_PMEVTYPERn_EL0	|
		   HDFGRTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGRTR_EL2_TRBTRG_EL1	|
		   HDFGRTR_EL2_TRBSR_EL1	|
		   HDFGRTR_EL2_TRBPTR_EL1	|
		   HDFGRTR_EL2_TRBMAR_EL1	|
		   HDFGRTR_EL2_TRBLIMITR_EL1	|
		   HDFGRTR_EL2_TRBIDR_EL1	|
		   HDFGRTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSDLR_EL1, NEVER_FGU,
			FEAT_DoubleLock),
	/* Baseline AArch64 EL1 debug registers: never UNDEF */
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSECCR_EL1	|
			HDFGRTR_EL2_OSLSR_EL1	|
			HDFGRTR_EL2_DBGPRCR_EL1	|
			HDFGRTR_EL2_DBGAUTHSTATUS_EL1|
			HDFGRTR_EL2_DBGCLAIM	|
			HDFGRTR_EL2_MDSCR_EL1	|
			HDFGRTR_EL2_DBGWVRn_EL1	|
			HDFGRTR_EL2_DBGWCRn_EL1	|
			HDFGRTR_EL2_DBGBVRn_EL1	|
			HDFGRTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1)
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr_desc, hdfgrtr_masks,
				  hdfgrtr_feat_map, FEAT_FGT);
605 
/* Bit -> feature dependencies for HDFGWTR_EL2 (debug FGT write traps) */
static const struct reg_bits_to_feat_map hdfgwtr_feat_map[] = {
	NEEDS_FEAT(HDFGWTR_EL2_PMSLATFR_EL1	|
		   HDFGWTR_EL2_PMSIRR_EL1	|
		   HDFGWTR_EL2_PMSICR_EL1	|
		   HDFGWTR_EL2_PMSFCR_EL1	|
		   HDFGWTR_EL2_PMSEVFR_EL1	|
		   HDFGWTR_EL2_PMSCR_EL1	|
		   HDFGWTR_EL2_PMBSR_EL1	|
		   HDFGWTR_EL2_PMBPTR_EL1	|
		   HDFGWTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGWTR_EL2_nPMSNEVFR_EL1, feat_spe_fne),
	NEEDS_FEAT(HDFGWTR_EL2_nBRBDATA		|
		   HDFGWTR_EL2_nBRBCTL,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGWTR_EL2_TRCVICTLR	|
		   HDFGWTR_EL2_TRCSSCSRn	|
		   HDFGWTR_EL2_TRCSEQSTR	|
		   HDFGWTR_EL2_TRCPRGCTLR	|
		   HDFGWTR_EL2_TRCOSLAR		|
		   HDFGWTR_EL2_TRCIMSPECn	|
		   HDFGWTR_EL2_TRCCNTVRn	|
		   HDFGWTR_EL2_TRCCLAIM		|
		   HDFGWTR_EL2_TRCAUXCTLR	|
		   HDFGWTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGWTR_EL2_PMUSERENR_EL0	|
		   HDFGWTR_EL2_PMCR_EL0		|
		   HDFGWTR_EL2_PMSWINC_EL0	|
		   HDFGWTR_EL2_PMSELR_EL0	|
		   HDFGWTR_EL2_PMOVS		|
		   HDFGWTR_EL2_PMINTEN		|
		   HDFGWTR_EL2_PMCNTEN		|
		   HDFGWTR_EL2_PMCCNTR_EL0	|
		   HDFGWTR_EL2_PMCCFILTR_EL0	|
		   HDFGWTR_EL2_PMEVTYPERn_EL0	|
		   HDFGWTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGWTR_EL2_TRBTRG_EL1	|
		   HDFGWTR_EL2_TRBSR_EL1	|
		   HDFGWTR_EL2_TRBPTR_EL1	|
		   HDFGWTR_EL2_TRBMAR_EL1	|
		   HDFGWTR_EL2_TRBLIMITR_EL1	|
		   HDFGWTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSDLR_EL1,
			NEVER_FGU, FEAT_DoubleLock),
	/* Baseline AArch64 EL1 debug registers: never UNDEF */
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSECCR_EL1	|
			HDFGWTR_EL2_OSLAR_EL1	|
			HDFGWTR_EL2_DBGPRCR_EL1	|
			HDFGWTR_EL2_DBGCLAIM	|
			HDFGWTR_EL2_MDSCR_EL1	|
			HDFGWTR_EL2_DBGWVRn_EL1	|
			HDFGWTR_EL2_DBGWCRn_EL1	|
			HDFGWTR_EL2_DBGBVRn_EL1	|
			HDFGWTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1),
	NEEDS_FEAT(HDFGWTR_EL2_TRFCR_EL1, FEAT_TRF),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr_desc, hdfgwtr_masks,
				  hdfgwtr_feat_map, FEAT_FGT);
668 
/* Bit -> feature dependencies for HFGITR_EL2 (instruction FGT traps) */
static const struct reg_bits_to_feat_map hfgitr_feat_map[] = {
	NEEDS_FEAT(HFGITR_EL2_PSBCSYNC, FEAT_SPEv1p5),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1A, FEAT_ATS1A),
	NEEDS_FEAT(HFGITR_EL2_COSPRCTX, FEAT_SPECRES2),
	NEEDS_FEAT(HFGITR_EL2_nGCSEPP		|
		   HFGITR_EL2_nGCSSTR_EL1	|
		   HFGITR_EL2_nGCSPUSHM_EL1,
		   FEAT_GCS),
	NEEDS_FEAT(HFGITR_EL2_nBRBIALL		|
		   HFGITR_EL2_nBRBINJ,
		   FEAT_BRBE),
	NEEDS_FEAT(HFGITR_EL2_CPPRCTX		|
		   HFGITR_EL2_DVPRCTX		|
		   HFGITR_EL2_CFPRCTX,
		   FEAT_SPECRES),
	NEEDS_FEAT(HFGITR_EL2_TLBIRVAALE1	|
		   HFGITR_EL2_TLBIRVALE1	|
		   HFGITR_EL2_TLBIRVAAE1	|
		   HFGITR_EL2_TLBIRVAE1		|
		   HFGITR_EL2_TLBIRVAALE1IS	|
		   HFGITR_EL2_TLBIRVALE1IS	|
		   HFGITR_EL2_TLBIRVAAE1IS	|
		   HFGITR_EL2_TLBIRVAE1IS	|
		   HFGITR_EL2_TLBIRVAALE1OS	|
		   HFGITR_EL2_TLBIRVALE1OS	|
		   HFGITR_EL2_TLBIRVAAE1OS	|
		   HFGITR_EL2_TLBIRVAE1OS,
		   FEAT_TLBIRANGE),
	NEEDS_FEAT(HFGITR_EL2_TLBIVAALE1OS	|
		   HFGITR_EL2_TLBIVALE1OS	|
		   HFGITR_EL2_TLBIVAAE1OS	|
		   HFGITR_EL2_TLBIASIDE1OS	|
		   HFGITR_EL2_TLBIVAE1OS	|
		   HFGITR_EL2_TLBIVMALLE1OS,
		   FEAT_TLBIOS),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1WP		|
		   HFGITR_EL2_ATS1E1RP,
		   FEAT_PAN2),
	NEEDS_FEAT(HFGITR_EL2_DCCVADP, FEAT_DPB2),
	/* Baseline AArch64 EL1 operations: trappable, but never UNDEF */
	NEEDS_FEAT_FLAG(HFGITR_EL2_DCCVAC	|
			HFGITR_EL2_SVC_EL1	|
			HFGITR_EL2_SVC_EL0	|
			HFGITR_EL2_ERET		|
			HFGITR_EL2_TLBIVAALE1	|
			HFGITR_EL2_TLBIVALE1	|
			HFGITR_EL2_TLBIVAAE1	|
			HFGITR_EL2_TLBIASIDE1	|
			HFGITR_EL2_TLBIVAE1	|
			HFGITR_EL2_TLBIVMALLE1	|
			HFGITR_EL2_TLBIVAALE1IS	|
			HFGITR_EL2_TLBIVALE1IS	|
			HFGITR_EL2_TLBIVAAE1IS	|
			HFGITR_EL2_TLBIASIDE1IS	|
			HFGITR_EL2_TLBIVAE1IS	|
			HFGITR_EL2_TLBIVMALLE1IS|
			HFGITR_EL2_ATS1E0W	|
			HFGITR_EL2_ATS1E0R	|
			HFGITR_EL2_ATS1E1W	|
			HFGITR_EL2_ATS1E1R	|
			HFGITR_EL2_DCZVA	|
			HFGITR_EL2_DCCIVAC	|
			HFGITR_EL2_DCCVAP	|
			HFGITR_EL2_DCCVAU	|
			HFGITR_EL2_DCCISW	|
			HFGITR_EL2_DCCSW	|
			HFGITR_EL2_DCISW	|
			HFGITR_EL2_DCIVAC	|
			HFGITR_EL2_ICIVAU	|
			HFGITR_EL2_ICIALLU	|
			HFGITR_EL2_ICIALLUIS,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgitr_desc, hfgitr_masks,
				  hfgitr_feat_map, FEAT_FGT);
744 
/* Bit -> feature dependencies for HAFGRTR_EL2: all AMU, gated on FEAT_AMUv1 */
static const struct reg_bits_to_feat_map hafgrtr_feat_map[] = {
	NEEDS_FEAT(HAFGRTR_EL2_AMEVTYPER115_EL0	|
		   HAFGRTR_EL2_AMEVTYPER114_EL0	|
		   HAFGRTR_EL2_AMEVTYPER113_EL0	|
		   HAFGRTR_EL2_AMEVTYPER112_EL0	|
		   HAFGRTR_EL2_AMEVTYPER111_EL0	|
		   HAFGRTR_EL2_AMEVTYPER110_EL0	|
		   HAFGRTR_EL2_AMEVTYPER19_EL0	|
		   HAFGRTR_EL2_AMEVTYPER18_EL0	|
		   HAFGRTR_EL2_AMEVTYPER17_EL0	|
		   HAFGRTR_EL2_AMEVTYPER16_EL0	|
		   HAFGRTR_EL2_AMEVTYPER15_EL0	|
		   HAFGRTR_EL2_AMEVTYPER14_EL0	|
		   HAFGRTR_EL2_AMEVTYPER13_EL0	|
		   HAFGRTR_EL2_AMEVTYPER12_EL0	|
		   HAFGRTR_EL2_AMEVTYPER11_EL0	|
		   HAFGRTR_EL2_AMEVTYPER10_EL0	|
		   HAFGRTR_EL2_AMEVCNTR115_EL0	|
		   HAFGRTR_EL2_AMEVCNTR114_EL0	|
		   HAFGRTR_EL2_AMEVCNTR113_EL0	|
		   HAFGRTR_EL2_AMEVCNTR112_EL0	|
		   HAFGRTR_EL2_AMEVCNTR111_EL0	|
		   HAFGRTR_EL2_AMEVCNTR110_EL0	|
		   HAFGRTR_EL2_AMEVCNTR19_EL0	|
		   HAFGRTR_EL2_AMEVCNTR18_EL0	|
		   HAFGRTR_EL2_AMEVCNTR17_EL0	|
		   HAFGRTR_EL2_AMEVCNTR16_EL0	|
		   HAFGRTR_EL2_AMEVCNTR15_EL0	|
		   HAFGRTR_EL2_AMEVCNTR14_EL0	|
		   HAFGRTR_EL2_AMEVCNTR13_EL0	|
		   HAFGRTR_EL2_AMEVCNTR12_EL0	|
		   HAFGRTR_EL2_AMEVCNTR11_EL0	|
		   HAFGRTR_EL2_AMEVCNTR10_EL0	|
		   HAFGRTR_EL2_AMCNTEN1		|
		   HAFGRTR_EL2_AMCNTEN0		|
		   HAFGRTR_EL2_AMEVCNTR03_EL0	|
		   HAFGRTR_EL2_AMEVCNTR02_EL0	|
		   HAFGRTR_EL2_AMEVCNTR01_EL0	|
		   HAFGRTR_EL2_AMEVCNTR00_EL0,
		   FEAT_AMUv1),
};

static const DECLARE_FEAT_MAP_FGT(hafgrtr_desc, hafgrtr_masks,
				  hafgrtr_feat_map, FEAT_FGT);
789 
/* Bit -> feature dependencies for HFGITR2_EL2 */
static const struct reg_bits_to_feat_map hfgitr2_feat_map[] = {
	NEEDS_FEAT(HFGITR2_EL2_nDCCIVAPS, FEAT_PoPS),
	NEEDS_FEAT(HFGITR2_EL2_TSBCSYNC, FEAT_TRBEv1p1)
};

static const DECLARE_FEAT_MAP_FGT(hfgitr2_desc, hfgitr2_masks,
				  hfgitr2_feat_map, FEAT_FGT2);
797 
/* Bit -> feature dependencies for HFGRTR2_EL2 */
static const struct reg_bits_to_feat_map hfgrtr2_feat_map[] = {
	NEEDS_FEAT(HFGRTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGRTR2_EL2_nERXGSR_EL1, FEAT_RASv2),
	NEEDS_FEAT(HFGRTR2_EL2_nACTLRALIAS_EL1	|
		   HFGRTR2_EL2_nACTLRMASK_EL1	|
		   HFGRTR2_EL2_nCPACRALIAS_EL1	|
		   HFGRTR2_EL2_nCPACRMASK_EL1	|
		   HFGRTR2_EL2_nSCTLR2MASK_EL1	|
		   HFGRTR2_EL2_nSCTLRALIAS2_EL1	|
		   HFGRTR2_EL2_nSCTLRALIAS_EL1	|
		   HFGRTR2_EL2_nSCTLRMASK_EL1	|
		   HFGRTR2_EL2_nTCR2ALIAS_EL1	|
		   HFGRTR2_EL2_nTCR2MASK_EL1	|
		   HFGRTR2_EL2_nTCRALIAS_EL1	|
		   HFGRTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGRTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgrtr2_desc, hfgrtr2_masks,
				  hfgrtr2_feat_map, FEAT_FGT2);
819 
/* Bit -> feature dependencies for HFGWTR2_EL2 */
static const struct reg_bits_to_feat_map hfgwtr2_feat_map[] = {
	NEEDS_FEAT(HFGWTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGWTR2_EL2_nACTLRALIAS_EL1	|
		   HFGWTR2_EL2_nACTLRMASK_EL1	|
		   HFGWTR2_EL2_nCPACRALIAS_EL1	|
		   HFGWTR2_EL2_nCPACRMASK_EL1	|
		   HFGWTR2_EL2_nSCTLR2MASK_EL1	|
		   HFGWTR2_EL2_nSCTLRALIAS2_EL1	|
		   HFGWTR2_EL2_nSCTLRALIAS_EL1	|
		   HFGWTR2_EL2_nSCTLRMASK_EL1	|
		   HFGWTR2_EL2_nTCR2ALIAS_EL1	|
		   HFGWTR2_EL2_nTCR2MASK_EL1	|
		   HFGWTR2_EL2_nTCRALIAS_EL1	|
		   HFGWTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGWTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr2_desc, hfgwtr2_masks,
				  hfgwtr2_feat_map, FEAT_FGT2);
840 
/* Bit -> feature dependencies for HDFGRTR2_EL2 */
static const struct reg_bits_to_feat_map hdfgrtr2_feat_map[] = {
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMICFILTR_EL0	|
		   HDFGRTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMUACR_EL1, feat_pmuv3p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSSCR_EL1	|
		   HDFGRTR2_EL2_nPMSSDATA,
		   FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGRTR2_EL2_nSPMACCESSR_EL1	|
		   HDFGRTR2_EL2_nSPMCNTEN	|
		   HDFGRTR2_EL2_nSPMCR_EL0	|
		   HDFGRTR2_EL2_nSPMDEVAFF_EL1	|
		   HDFGRTR2_EL2_nSPMEVCNTRn_EL0	|
		   HDFGRTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGRTR2_EL2_nSPMID		|
		   HDFGRTR2_EL2_nSPMINTEN	|
		   HDFGRTR2_EL2_nSPMOVS		|
		   HDFGRTR2_EL2_nSPMSCR_EL1	|
		   HDFGRTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr2_desc, hdfgrtr2_masks,
				  hdfgrtr2_feat_map, FEAT_FGT2);
873 
/* Bit -> feature dependencies for HDFGWTR2_EL2 */
static const struct reg_bits_to_feat_map hdfgwtr2_feat_map[] = {
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMICFILTR_EL0	|
		   HDFGWTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMUACR_EL1	|
		   HDFGWTR2_EL2_nPMZR_EL0,
		   feat_pmuv3p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSSCR_EL1, FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGWTR2_EL2_nSPMACCESSR_EL1	|
		   HDFGWTR2_EL2_nSPMCNTEN	|
		   HDFGWTR2_EL2_nSPMCR_EL0	|
		   HDFGWTR2_EL2_nSPMEVCNTRn_EL0	|
		   HDFGWTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGWTR2_EL2_nSPMINTEN	|
		   HDFGWTR2_EL2_nSPMOVS		|
		   HDFGWTR2_EL2_nSPMSCR_EL1	|
		   HDFGWTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr2_desc, hdfgwtr2_masks,
				  hdfgwtr2_feat_map, FEAT_FGT2);
904 
905 
/*
 * HCRX_EL2: per-bit feature dependencies for the extended hypervisor
 * configuration register.
 */
static const struct reg_bits_to_feat_map hcrx_feat_map[] = {
	NEEDS_FEAT(HCRX_EL2_PACMEn, feat_pauth_lr),
	NEEDS_FEAT(HCRX_EL2_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(HCRX_EL2_GCSEn, FEAT_GCS),
	NEEDS_FEAT(HCRX_EL2_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(HCRX_EL2_EnSDERR, feat_aderr),
	NEEDS_FEAT(HCRX_EL2_TMEA, FEAT_DoubleFault2),
	NEEDS_FEAT(HCRX_EL2_EnSNERR, feat_anerr),
	NEEDS_FEAT(HCRX_EL2_D128En, FEAT_D128),
	NEEDS_FEAT(HCRX_EL2_PTTWI, FEAT_THE),
	NEEDS_FEAT(HCRX_EL2_SCTLR2En, FEAT_SCTLR2),
	NEEDS_FEAT(HCRX_EL2_TCR2En, FEAT_TCR2),
	NEEDS_FEAT(HCRX_EL2_MSCEn		|
		   HCRX_EL2_MCE2,
		   FEAT_MOPS),
	NEEDS_FEAT(HCRX_EL2_CMOW, FEAT_CMOW),
	NEEDS_FEAT(HCRX_EL2_VFNMI		|
		   HCRX_EL2_VINMI		|
		   HCRX_EL2_TALLINT,
		   FEAT_NMI),
	NEEDS_FEAT(HCRX_EL2_SMPME, feat_sme_smps),
	NEEDS_FEAT(HCRX_EL2_FGTnXS		|
		   HCRX_EL2_FnXS,
		   FEAT_XS),
	NEEDS_FEAT(HCRX_EL2_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(HCRX_EL2_EnALS, FEAT_LS64),
	NEEDS_FEAT(HCRX_EL2_EnAS0, FEAT_LS64_ACCDATA),
};


/* Whole-register dependency: HCRX_EL2 only exists with FEAT_HCX */
static const DECLARE_FEAT_MAP(hcrx_desc, __HCRX_EL2,
			      hcrx_feat_map, FEAT_HCX);
938 
/*
 * HCR_EL2: per-bit feature dependencies for the hypervisor
 * configuration register. Note the special handling of E2H, which is
 * RES1 (rather than RES0) when FEAT_E2H0 restricts the implementation
 * to E2H=1.
 */
static const struct reg_bits_to_feat_map hcr_feat_map[] = {
	NEEDS_FEAT(HCR_EL2_TID0, FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(HCR_EL2_RW, AS_RES1, FEAT_AA32EL1),
	NEEDS_FEAT(HCR_EL2_HCD, not_feat_aa64el3),
	NEEDS_FEAT(HCR_EL2_AMO		|
		   HCR_EL2_BSU		|
		   HCR_EL2_CD		|
		   HCR_EL2_DC		|
		   HCR_EL2_FB		|
		   HCR_EL2_FMO		|
		   HCR_EL2_ID		|
		   HCR_EL2_IMO		|
		   HCR_EL2_PTW		|
		   HCR_EL2_SWIO		|
		   HCR_EL2_TACR		|
		   HCR_EL2_TDZ		|
		   HCR_EL2_TGE		|
		   HCR_EL2_TID1		|
		   HCR_EL2_TID2		|
		   HCR_EL2_TID3		|
		   HCR_EL2_TIDCP	|
		   HCR_EL2_TPCP		|
		   HCR_EL2_TPU		|
		   HCR_EL2_TRVM		|
		   HCR_EL2_TSC		|
		   HCR_EL2_TSW		|
		   HCR_EL2_TTLB		|
		   HCR_EL2_TVM		|
		   HCR_EL2_TWE		|
		   HCR_EL2_TWI		|
		   HCR_EL2_VF		|
		   HCR_EL2_VI		|
		   HCR_EL2_VM		|
		   HCR_EL2_VSE,
		   FEAT_AA64EL1),
	NEEDS_FEAT(HCR_EL2_AMVOFFEN, FEAT_AMUv1p1),
	NEEDS_FEAT(HCR_EL2_EnSCXT, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HCR_EL2_TICAB	|
		   HCR_EL2_TID4		|
		   HCR_EL2_TOCU,
		   FEAT_EVT),
	NEEDS_FEAT(HCR_EL2_TTLBIS	|
		   HCR_EL2_TTLBOS,
		   FEAT_EVT_TTLBxS),
	NEEDS_FEAT(HCR_EL2_TLOR, FEAT_LOR),
	NEEDS_FEAT(HCR_EL2_ATA		|
		   HCR_EL2_DCT		|
		   HCR_EL2_TID5,
		   FEAT_MTE2),
	NEEDS_FEAT(HCR_EL2_AT		| /* Ignore the original FEAT_NV */
		   HCR_EL2_NV2		|
		   HCR_EL2_NV,
		   feat_nv2),
	NEEDS_FEAT(HCR_EL2_NV1, feat_nv2_e2h0_ni), /* Missing from JSON */
	NEEDS_FEAT(HCR_EL2_API		|
		   HCR_EL2_APK,
		   feat_pauth),
	NEEDS_FEAT(HCR_EL2_TEA		|
		   HCR_EL2_TERR,
		   FEAT_RAS),
	NEEDS_FEAT(HCR_EL2_FIEN, feat_rasv1p1),
	NEEDS_FEAT(HCR_EL2_GPF, FEAT_RME),
	NEEDS_FEAT(HCR_EL2_FWB, FEAT_S2FWB),
	NEEDS_FEAT(HCR_EL2_TWEDEL	|
		   HCR_EL2_TWEDEn,
		   FEAT_TWED),
	NEEDS_FEAT_FLAG(HCR_EL2_E2H, RES1_WHEN_E2H1 | FORCE_RESx),
	FORCE_RES0(HCR_EL2_RES0),
	FORCE_RES1(HCR_EL2_RES1),
};

/* Whole-register dependency: HCR_EL2 requires AArch64 EL2 */
static const DECLARE_FEAT_MAP(hcr_desc, HCR_EL2,
			      hcr_feat_map, FEAT_AA64EL2);
1012 
/* SCTLR2_EL1: per-bit feature dependencies */
static const struct reg_bits_to_feat_map sctlr2_feat_map[] = {
	NEEDS_FEAT(SCTLR2_EL1_NMEA	|
		   SCTLR2_EL1_EASE,
		   FEAT_DoubleFault2),
	NEEDS_FEAT(SCTLR2_EL1_EnADERR, feat_aderr),
	NEEDS_FEAT(SCTLR2_EL1_EnANERR, feat_anerr),
	NEEDS_FEAT(SCTLR2_EL1_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(SCTLR2_EL1_EnPACM	|
		   SCTLR2_EL1_EnPACM0,
		   feat_pauth_lr),
	NEEDS_FEAT(SCTLR2_EL1_CPTA	|
		   SCTLR2_EL1_CPTA0	|
		   SCTLR2_EL1_CPTM	|
		   SCTLR2_EL1_CPTM0,
		   FEAT_CPA2),
	FORCE_RES0(SCTLR2_EL1_RES0),
	FORCE_RES1(SCTLR2_EL1_RES1),
};

/* Whole-register dependency: SCTLR2_EL1 only exists with FEAT_SCTLR2 */
static const DECLARE_FEAT_MAP(sctlr2_desc, SCTLR2_EL1,
			      sctlr2_feat_map, FEAT_SCTLR2);
1034 
/*
 * TCR2_EL2: per-bit feature dependencies. Several bits additionally
 * require HCR_EL2.E2H==1 (REQUIRES_E2H1) to be meaningful.
 */
static const struct reg_bits_to_feat_map tcr2_el2_feat_map[] = {
	NEEDS_FEAT_FLAG(TCR2_EL2_FNG1	|
			TCR2_EL2_FNG0	|
			TCR2_EL2_A2,
			REQUIRES_E2H1, FEAT_ASID2),
	NEEDS_FEAT_FLAG(TCR2_EL2_DisCH1	|
			TCR2_EL2_DisCH0	|
			TCR2_EL2_D128,
			REQUIRES_E2H1, FEAT_D128),
	NEEDS_FEAT_FLAG(TCR2_EL2_AMEC1, REQUIRES_E2H1, FEAT_MEC),
	NEEDS_FEAT(TCR2_EL2_AMEC0, FEAT_MEC),
	NEEDS_FEAT(TCR2_EL2_HAFT, FEAT_HAFT),
	NEEDS_FEAT(TCR2_EL2_PTTWI	|
		   TCR2_EL2_PnCH,
		   FEAT_THE),
	NEEDS_FEAT(TCR2_EL2_AIE, FEAT_AIE),
	NEEDS_FEAT(TCR2_EL2_POE		|
		   TCR2_EL2_E0POE,
		   FEAT_S1POE),
	NEEDS_FEAT(TCR2_EL2_PIE, FEAT_S1PIE),
	FORCE_RES0(TCR2_EL2_RES0),
	FORCE_RES1(TCR2_EL2_RES1),
};

/* Whole-register dependency: TCR2_EL2 only exists with FEAT_TCR2 */
static const DECLARE_FEAT_MAP(tcr2_el2_desc, TCR2_EL2,
			      tcr2_el2_feat_map, FEAT_TCR2);
1061 
/*
 * SCTLR_EL1: per-bit feature dependencies. Bits marked AS_RES1 read
 * as 1 (rather than 0) when the corresponding feature is absent.
 */
static const struct reg_bits_to_feat_map sctlr_el1_feat_map[] = {
	NEEDS_FEAT(SCTLR_EL1_CP15BEN, FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL1_ITD	|
			SCTLR_EL1_SED,
			AS_RES1, FEAT_AA32EL0),
	NEEDS_FEAT(SCTLR_EL1_BT0	|
		   SCTLR_EL1_BT1,
		   FEAT_BTI),
	NEEDS_FEAT(SCTLR_EL1_CMOW, FEAT_CMOW),
	NEEDS_FEAT_FLAG(SCTLR_EL1_TSCXT,
			AS_RES1, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT_FLAG(SCTLR_EL1_EIS	|
			SCTLR_EL1_EOS,
			AS_RES1, FEAT_ExS),
	NEEDS_FEAT(SCTLR_EL1_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(SCTLR_EL1_IESB, FEAT_IESB),
	NEEDS_FEAT(SCTLR_EL1_EnALS, FEAT_LS64),
	NEEDS_FEAT(SCTLR_EL1_EnAS0, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(SCTLR_EL1_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(SCTLR_EL1_nAA, FEAT_LSE2),
	NEEDS_FEAT_FLAG(SCTLR_EL1_LSMAOE	|
			SCTLR_EL1_nTLSMD,
			AS_RES1, FEAT_LSMAOC),
	NEEDS_FEAT(SCTLR_EL1_EE, FEAT_MixedEnd),
	NEEDS_FEAT(SCTLR_EL1_E0E, feat_mixedendel0),
	NEEDS_FEAT(SCTLR_EL1_MSCEn, FEAT_MOPS),
	NEEDS_FEAT(SCTLR_EL1_ATA0	|
		   SCTLR_EL1_ATA	|
		   SCTLR_EL1_TCF0	|
		   SCTLR_EL1_TCF,
		   FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL1_ITFSB, feat_mte_async),
	NEEDS_FEAT(SCTLR_EL1_TCSO0	|
		   SCTLR_EL1_TCSO,
		   FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL1_NMI	|
		   SCTLR_EL1_SPINTMASK,
		   FEAT_NMI),
	NEEDS_FEAT_FLAG(SCTLR_EL1_SPAN,
			AS_RES1, FEAT_PAN),
	NEEDS_FEAT(SCTLR_EL1_EPAN, FEAT_PAN3),
	NEEDS_FEAT(SCTLR_EL1_EnDA	|
		   SCTLR_EL1_EnDB	|
		   SCTLR_EL1_EnIA	|
		   SCTLR_EL1_EnIB,
		   feat_pauth),
	NEEDS_FEAT(SCTLR_EL1_EnTP2, FEAT_SME),
	NEEDS_FEAT(SCTLR_EL1_EnRCTX, FEAT_SPECRES),
	NEEDS_FEAT(SCTLR_EL1_DSSBS, FEAT_SSBS),
	NEEDS_FEAT(SCTLR_EL1_TIDCP, FEAT_TIDCP1),
	NEEDS_FEAT(SCTLR_EL1_TWEDEL	|
		   SCTLR_EL1_TWEDEn,
		   FEAT_TWED),
	NEEDS_FEAT(SCTLR_EL1_UCI	|
		   SCTLR_EL1_WXN	|
		   SCTLR_EL1_nTWE	|
		   SCTLR_EL1_nTWI	|
		   SCTLR_EL1_UCT	|
		   SCTLR_EL1_DZE	|
		   SCTLR_EL1_I		|
		   SCTLR_EL1_UMA	|
		   SCTLR_EL1_SA0	|
		   SCTLR_EL1_SA		|
		   SCTLR_EL1_C		|
		   SCTLR_EL1_A		|
		   SCTLR_EL1_M,
		   FEAT_AA64EL1),
	FORCE_RES0(SCTLR_EL1_RES0),
	FORCE_RES1(SCTLR_EL1_RES1),
};

/* Whole-register dependency: SCTLR_EL1 requires AArch64 EL1 */
static const DECLARE_FEAT_MAP(sctlr_el1_desc, SCTLR_EL1,
			      sctlr_el1_feat_map, FEAT_AA64EL1);
1135 
/*
 * SCTLR_EL2: per-bit feature dependencies. The EL0-related controls
 * additionally depend on HCR_EL2.E2H==1 (REQUIRES_E2H1), and some
 * bits change their RESx polarity with the E2H value
 * (RES1_WHEN_E2H0/RES1_WHEN_E2H1).
 */
static const struct reg_bits_to_feat_map sctlr_el2_feat_map[] = {
	NEEDS_FEAT_FLAG(SCTLR_EL2_CP15BEN,
			RES1_WHEN_E2H0 | REQUIRES_E2H1,
			FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_ITD	|
			SCTLR_EL2_SED,
			RES1_WHEN_E2H1 | REQUIRES_E2H1,
			FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_BT0, REQUIRES_E2H1, FEAT_BTI),
	NEEDS_FEAT(SCTLR_EL2_BT, FEAT_BTI),
	NEEDS_FEAT_FLAG(SCTLR_EL2_CMOW, REQUIRES_E2H1, FEAT_CMOW),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TSCXT,
			RES1_WHEN_E2H1 | REQUIRES_E2H1,
			feat_csv2_2_csv2_1p2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EIS	|
			SCTLR_EL2_EOS,
			AS_RES1, FEAT_ExS),
	NEEDS_FEAT(SCTLR_EL2_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(SCTLR_EL2_IESB, FEAT_IESB),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnALS, REQUIRES_E2H1, FEAT_LS64),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnAS0, REQUIRES_E2H1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnASR, REQUIRES_E2H1, FEAT_LS64_V),
	NEEDS_FEAT(SCTLR_EL2_nAA, FEAT_LSE2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_LSMAOE	|
			SCTLR_EL2_nTLSMD,
			AS_RES1 | REQUIRES_E2H1, FEAT_LSMAOC),
	NEEDS_FEAT(SCTLR_EL2_EE, FEAT_MixedEnd),
	NEEDS_FEAT_FLAG(SCTLR_EL2_E0E, REQUIRES_E2H1, feat_mixedendel0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_MSCEn, REQUIRES_E2H1, FEAT_MOPS),
	NEEDS_FEAT_FLAG(SCTLR_EL2_ATA0	|
			SCTLR_EL2_TCF0,
			REQUIRES_E2H1, FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL2_ATA	|
		   SCTLR_EL2_TCF,
		   FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL2_ITFSB, feat_mte_async),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TCSO0, REQUIRES_E2H1, FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL2_TCSO,
		   FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL2_NMI	|
		   SCTLR_EL2_SPINTMASK,
		   FEAT_NMI),
	NEEDS_FEAT_FLAG(SCTLR_EL2_SPAN,	AS_RES1 | REQUIRES_E2H1, FEAT_PAN),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EPAN, REQUIRES_E2H1, FEAT_PAN3),
	NEEDS_FEAT(SCTLR_EL2_EnDA	|
		   SCTLR_EL2_EnDB	|
		   SCTLR_EL2_EnIA	|
		   SCTLR_EL2_EnIB,
		   feat_pauth),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnTP2, REQUIRES_E2H1, FEAT_SME),
	NEEDS_FEAT(SCTLR_EL2_EnRCTX, FEAT_SPECRES),
	NEEDS_FEAT(SCTLR_EL2_DSSBS, FEAT_SSBS),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TIDCP, REQUIRES_E2H1, FEAT_TIDCP1),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TWEDEL	|
			SCTLR_EL2_TWEDEn,
			REQUIRES_E2H1, FEAT_TWED),
	NEEDS_FEAT_FLAG(SCTLR_EL2_nTWE	|
			SCTLR_EL2_nTWI,
			AS_RES1 | REQUIRES_E2H1, FEAT_AA64EL2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_UCI	|
			SCTLR_EL2_UCT	|
			SCTLR_EL2_DZE	|
			SCTLR_EL2_SA0,
			REQUIRES_E2H1, FEAT_AA64EL2),
	NEEDS_FEAT(SCTLR_EL2_WXN	|
		   SCTLR_EL2_I		|
		   SCTLR_EL2_SA		|
		   SCTLR_EL2_C		|
		   SCTLR_EL2_A		|
		   SCTLR_EL2_M,
		   FEAT_AA64EL2),
	FORCE_RES0(SCTLR_EL2_RES0),
	FORCE_RES1(SCTLR_EL2_RES1),
};

/* Whole-register dependency: SCTLR_EL2 requires AArch64 EL2 */
static const DECLARE_FEAT_MAP(sctlr_el2_desc, SCTLR_EL2,
			      sctlr_el2_feat_map, FEAT_AA64EL2);
1213 
/* MDCR_EL2: per-bit feature dependencies for the debug controls */
static const struct reg_bits_to_feat_map mdcr_el2_feat_map[] = {
	NEEDS_FEAT(MDCR_EL2_EBWE, FEAT_Debugv8p9),
	NEEDS_FEAT(MDCR_EL2_TDOSA, FEAT_DoubleLock),
	NEEDS_FEAT(MDCR_EL2_PMEE, FEAT_EBEP),
	NEEDS_FEAT(MDCR_EL2_TDCC, FEAT_FGT),
	NEEDS_FEAT(MDCR_EL2_MTPME, FEAT_MTPMU),
	NEEDS_FEAT(MDCR_EL2_HPME	|
		   MDCR_EL2_HPMN	|
		   MDCR_EL2_TPMCR	|
		   MDCR_EL2_TPM,
		   FEAT_PMUv3),
	NEEDS_FEAT(MDCR_EL2_HPMD, feat_pmuv3p1),
	NEEDS_FEAT(MDCR_EL2_HCCD	|
		   MDCR_EL2_HLP,
		   feat_pmuv3p5),
	NEEDS_FEAT(MDCR_EL2_HPMFZO, feat_pmuv3p7),
	NEEDS_FEAT(MDCR_EL2_PMSSE, FEAT_PMUv3_SS),
	NEEDS_FEAT(MDCR_EL2_E2PB	|
		   MDCR_EL2_TPMS,
		   FEAT_SPE),
	NEEDS_FEAT(MDCR_EL2_HPMFZS, FEAT_SPEv1p2),
	NEEDS_FEAT(MDCR_EL2_EnSPM, FEAT_SPMU),
	NEEDS_FEAT(MDCR_EL2_EnSTEPOP, FEAT_STEP2),
	NEEDS_FEAT(MDCR_EL2_E2TB, FEAT_TRBE),
	NEEDS_FEAT(MDCR_EL2_TTRF, FEAT_TRF),
	NEEDS_FEAT(MDCR_EL2_TDA		|
		   MDCR_EL2_TDE		|
		   MDCR_EL2_TDRA,
		   FEAT_AA64EL1),
	FORCE_RES0(MDCR_EL2_RES0),
	FORCE_RES1(MDCR_EL2_RES1),
};

/* Whole-register dependency: MDCR_EL2 requires AArch64 EL2 */
static const DECLARE_FEAT_MAP(mdcr_el2_desc, MDCR_EL2,
			      mdcr_el2_feat_map, FEAT_AA64EL2);
1249 
/* VTCR_EL2: per-bit feature dependencies for the stage-2 controls */
static const struct reg_bits_to_feat_map vtcr_el2_feat_map[] = {
	NEEDS_FEAT(VTCR_EL2_HDBSS, FEAT_HDBSS),
	NEEDS_FEAT(VTCR_EL2_HAFT, FEAT_HAFT),
	NEEDS_FEAT(VTCR_EL2_TL0		|
		   VTCR_EL2_TL1		|
		   VTCR_EL2_AssuredOnly	|
		   VTCR_EL2_GCSH,
		   FEAT_THE),
	NEEDS_FEAT(VTCR_EL2_D128, FEAT_D128),
	NEEDS_FEAT(VTCR_EL2_S2POE, FEAT_S2POE),
	NEEDS_FEAT(VTCR_EL2_S2PIE, FEAT_S2PIE),
	NEEDS_FEAT(VTCR_EL2_SL2		|
		   VTCR_EL2_DS,
		   feat_lpa2),
	NEEDS_FEAT(VTCR_EL2_NSA		|
		   VTCR_EL2_NSW,
		   FEAT_SEL2),
	NEEDS_FEAT(VTCR_EL2_HWU62	|
		   VTCR_EL2_HWU61	|
		   VTCR_EL2_HWU60	|
		   VTCR_EL2_HWU59,
		   FEAT_HPDS2),
	/* HD/HA are gated directly on the ID_AA64MMFR1_EL1.HAFDBS field */
	NEEDS_FEAT(VTCR_EL2_HD, ID_AA64MMFR1_EL1, HAFDBS, DBM),
	NEEDS_FEAT(VTCR_EL2_HA, ID_AA64MMFR1_EL1, HAFDBS, AF),
	NEEDS_FEAT(VTCR_EL2_VS, feat_vmid16),
	NEEDS_FEAT(VTCR_EL2_PS		|
		   VTCR_EL2_TG0		|
		   VTCR_EL2_SH0		|
		   VTCR_EL2_ORGN0	|
		   VTCR_EL2_IRGN0	|
		   VTCR_EL2_SL0		|
		   VTCR_EL2_T0SZ,
		   FEAT_AA64EL1),
	FORCE_RES0(VTCR_EL2_RES0),
	FORCE_RES1(VTCR_EL2_RES1),
};

/* Whole-register dependency: VTCR_EL2 requires AArch64 EL2 */
static const DECLARE_FEAT_MAP(vtcr_el2_desc, VTCR_EL2,
			      vtcr_el2_feat_map, FEAT_AA64EL2);
1289 
/* ICH_HFGRTR_EL2 (GICv5 read traps): everything depends on FEAT_GCIE */
static const struct reg_bits_to_feat_map ich_hfgrtr_feat_map[] = {
	NEEDS_FEAT(ICH_HFGRTR_EL2_ICC_APR_EL1 |
		   ICH_HFGRTR_EL2_ICC_IDRn_EL1 |
		   ICH_HFGRTR_EL2_ICC_CR0_EL1 |
		   ICH_HFGRTR_EL2_ICC_HPPIR_EL1 |
		   ICH_HFGRTR_EL2_ICC_PCR_EL1 |
		   ICH_HFGRTR_EL2_ICC_ICSR_EL1 |
		   ICH_HFGRTR_EL2_ICC_IAFFIDR_EL1 |
		   ICH_HFGRTR_EL2_ICC_PPI_HMRn_EL1 |
		   ICH_HFGRTR_EL2_ICC_PPI_ENABLERn_EL1 |
		   ICH_HFGRTR_EL2_ICC_PPI_PENDRn_EL1 |
		   ICH_HFGRTR_EL2_ICC_PPI_PRIORITYRn_EL1 |
		   ICH_HFGRTR_EL2_ICC_PPI_ACTIVERn_EL1,
		   FEAT_GCIE),
};

/* Whole-register dependency: ICH_HFGRTR_EL2 only exists with FEAT_GCIE */
static const DECLARE_FEAT_MAP_FGT(ich_hfgrtr_desc, ich_hfgrtr_masks,
				  ich_hfgrtr_feat_map, FEAT_GCIE);
1308 
/* ICH_HFGWTR_EL2 (GICv5 write traps): everything depends on FEAT_GCIE */
static const struct reg_bits_to_feat_map ich_hfgwtr_feat_map[] = {
	NEEDS_FEAT(ICH_HFGWTR_EL2_ICC_APR_EL1 |
		   ICH_HFGWTR_EL2_ICC_CR0_EL1 |
		   ICH_HFGWTR_EL2_ICC_PCR_EL1 |
		   ICH_HFGWTR_EL2_ICC_ICSR_EL1 |
		   ICH_HFGWTR_EL2_ICC_PPI_ENABLERn_EL1 |
		   ICH_HFGWTR_EL2_ICC_PPI_PENDRn_EL1 |
		   ICH_HFGWTR_EL2_ICC_PPI_PRIORITYRn_EL1 |
		   ICH_HFGWTR_EL2_ICC_PPI_ACTIVERn_EL1,
		   FEAT_GCIE),
};

/* Whole-register dependency: ICH_HFGWTR_EL2 only exists with FEAT_GCIE */
static const DECLARE_FEAT_MAP_FGT(ich_hfgwtr_desc, ich_hfgwtr_masks,
				  ich_hfgwtr_feat_map, FEAT_GCIE);
1323 
/* ICH_HFGITR_EL2 (GICv5 instruction traps): everything depends on FEAT_GCIE */
static const struct reg_bits_to_feat_map ich_hfgitr_feat_map[] = {
	NEEDS_FEAT(ICH_HFGITR_EL2_GICCDEN |
		   ICH_HFGITR_EL2_GICCDDIS |
		   ICH_HFGITR_EL2_GICCDPRI |
		   ICH_HFGITR_EL2_GICCDAFF |
		   ICH_HFGITR_EL2_GICCDPEND |
		   ICH_HFGITR_EL2_GICCDRCFG |
		   ICH_HFGITR_EL2_GICCDHM |
		   ICH_HFGITR_EL2_GICCDEOI |
		   ICH_HFGITR_EL2_GICCDDI |
		   ICH_HFGITR_EL2_GICRCDIA |
		   ICH_HFGITR_EL2_GICRCDNMIA,
		   FEAT_GCIE),
};

/* Whole-register dependency: ICH_HFGITR_EL2 only exists with FEAT_GCIE */
static const DECLARE_FEAT_MAP_FGT(ich_hfgitr_desc, ich_hfgitr_masks,
				  ich_hfgitr_feat_map, FEAT_GCIE);
1341 
/*
 * Sanity-check a feature map: the union of all mapped bits plus the
 * architectural RESx bits (@resx) must cover the entire 64bit
 * register. Anything left over has no defined behaviour, which gets
 * reported at boot.
 */
static void __init check_feat_map(const struct reg_bits_to_feat_map *map,
				  int map_size, u64 resx, const char *str)
{
	u64 mask = 0;

	/*
	 * Don't account for FORCE_RESx that are architectural, and
	 * therefore part of the resx parameter. Other FORCE_RESx bits
	 * are implementation choices, and therefore accounted for.
	 */
	for (int i = 0; i < map_size; i++)
		if (!((map[i].flags & FORCE_RESx) && (map[i].bits & resx)))
			mask |= map[i].bits;

	if (mask != ~resx)
		kvm_err("Undefined %s behaviour, bits %016llx\n",
			str, mask ^ ~resx); /* XOR isolates the uncovered bits */
}
1360 
reg_feat_map_bits(const struct reg_bits_to_feat_map * map)1361 static u64 reg_feat_map_bits(const struct reg_bits_to_feat_map *map)
1362 {
1363 	return map->flags & MASKS_POINTER ? (map->masks->mask | map->masks->nmask) : map->bits;
1364 }
1365 
check_reg_desc(const struct reg_feat_map_desc * r)1366 static void __init check_reg_desc(const struct reg_feat_map_desc *r)
1367 {
1368 	check_feat_map(r->bit_feat_map, r->bit_feat_map_sz,
1369 		       ~reg_feat_map_bits(&r->feat_map), r->name);
1370 }
1371 
/* Boot-time sanity check of every register feature-map description */
void __init check_feature_map(void)
{
	check_reg_desc(&hfgrtr_desc);
	check_reg_desc(&hfgwtr_desc);
	check_reg_desc(&hfgitr_desc);
	check_reg_desc(&hdfgrtr_desc);
	check_reg_desc(&hdfgwtr_desc);
	check_reg_desc(&hafgrtr_desc);
	check_reg_desc(&hfgrtr2_desc);
	check_reg_desc(&hfgwtr2_desc);
	check_reg_desc(&hfgitr2_desc);
	check_reg_desc(&hdfgrtr2_desc);
	check_reg_desc(&hdfgwtr2_desc);
	check_reg_desc(&hcrx_desc);
	check_reg_desc(&hcr_desc);
	check_reg_desc(&sctlr2_desc);
	check_reg_desc(&tcr2_el2_desc);
	check_reg_desc(&sctlr_el1_desc);
	check_reg_desc(&sctlr_el2_desc);
	check_reg_desc(&mdcr_el2_desc);
	check_reg_desc(&vtcr_el2_desc);
	check_reg_desc(&ich_hfgrtr_desc);
	check_reg_desc(&ich_hfgwtr_desc);
	check_reg_desc(&ich_hfgitr_desc);
}
1397 
idreg_feat_match(struct kvm * kvm,const struct reg_bits_to_feat_map * map)1398 static bool idreg_feat_match(struct kvm *kvm, const struct reg_bits_to_feat_map *map)
1399 {
1400 	u64 regval = kvm->arch.id_regs[map->regidx];
1401 	u64 regfld = (regval >> map->shift) & GENMASK(map->width - 1, 0);
1402 
1403 	if (map->sign) {
1404 		s64 sfld = sign_extend64(regfld, map->width - 1);
1405 		s64 slim = sign_extend64(map->lo_lim, map->width - 1);
1406 		return sfld >= slim;
1407 	} else {
1408 		return regfld >= map->lo_lim;
1409 	}
1410 }
1411 
/*
 * Walk @map and accumulate the RES0/RES1 bits that result from
 * unsupported features. Only entries whose flags contain all of
 * @require and none of @exclude are considered. Non-matching entries
 * normally contribute to res0, unless AS_RES1 or the relevant
 * RES1_WHEN_E2H* flag promotes them to res1.
 */
static struct resx compute_resx_bits(struct kvm *kvm,
				     const struct reg_bits_to_feat_map *map,
				     int map_size,
				     unsigned long require,
				     unsigned long exclude)
{
	bool e2h0 = kvm_has_feat(kvm, FEAT_E2H0);
	struct resx resx = {};

	for (int i = 0; i < map_size; i++) {
		bool match;

		/* Flag filter: all of @require, none of @exclude */
		if ((map[i].flags & require) != require)
			continue;

		if (map[i].flags & exclude)
			continue;

		/* FORCE_RESx entries are unconditionally RESx */
		if (map[i].flags & FORCE_RESx)
			match = false;
		else if (map[i].flags & CALL_FUNC)
			match = map[i].match(kvm);
		else
			match = idreg_feat_match(kvm, &map[i]);

		/* REQUIRES_E2H1 entries additionally need E2H=1 support */
		if (map[i].flags & REQUIRES_E2H1)
			match &= !e2h0;

		if (!match) {
			u64 bits = reg_feat_map_bits(&map[i]);

			if ((map[i].flags & AS_RES1)			||
			    (e2h0 && (map[i].flags & RES1_WHEN_E2H0))	||
			    (!e2h0 && (map[i].flags & RES1_WHEN_E2H1)))
				resx.res1 |= bits;
			else
				resx.res0 |= bits;
		}
	}

	return resx;
}
1454 
/*
 * Compute the full RES0/RES1 set for a register described by @r: the
 * per-bit feature map, the architectural RESx bits carried by an
 * fgt_masks pointer (if any), and the register-level dependency.
 */
static struct resx compute_reg_resx_bits(struct kvm *kvm,
					 const struct reg_feat_map_desc *r,
					 unsigned long require,
					 unsigned long exclude)
{
	struct resx resx;

	resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
				 require, exclude);

	/* Architectural RESx bits come straight from the masks */
	if (r->feat_map.flags & MASKS_POINTER) {
		resx.res0 |= r->feat_map.masks->res0;
		resx.res1 |= r->feat_map.masks->res1;
	}

	/*
	 * If the register itself was not valid, all the non-RESx bits are
	 * now considered RES0 (this matches the behaviour of registers such
	 * as SCTLR2 and TCR2). Weed out any potential (though unlikely)
	 * overlap with RES1 bits coming from the previous computation.
	 */
	resx.res0 |= compute_resx_bits(kvm, &r->feat_map, 1, require, exclude).res0;
	resx.res1 &= ~resx.res0;

	return resx;
}
1481 
compute_fgu_bits(struct kvm * kvm,const struct reg_feat_map_desc * r)1482 static u64 compute_fgu_bits(struct kvm *kvm, const struct reg_feat_map_desc *r)
1483 {
1484 	struct resx resx;
1485 
1486 	/*
1487 	 * If computing FGUs, we collect the unsupported feature bits as
1488 	 * RESx bits, but don't take the actual RESx bits or register
1489 	 * existence into account -- we're not computing bits for the
1490 	 * register itself.
1491 	 */
1492 	resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
1493 				 0, NEVER_FGU);
1494 
1495 	return resx.res0 | resx.res1;
1496 }
1497 
compute_fgu(struct kvm * kvm,enum fgt_group_id fgt)1498 void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
1499 {
1500 	u64 val = 0;
1501 
1502 	switch (fgt) {
1503 	case HFGRTR_GROUP:
1504 		val |= compute_fgu_bits(kvm, &hfgrtr_desc);
1505 		val |= compute_fgu_bits(kvm, &hfgwtr_desc);
1506 		break;
1507 	case HFGITR_GROUP:
1508 		val |= compute_fgu_bits(kvm, &hfgitr_desc);
1509 		break;
1510 	case HDFGRTR_GROUP:
1511 		val |= compute_fgu_bits(kvm, &hdfgrtr_desc);
1512 		val |= compute_fgu_bits(kvm, &hdfgwtr_desc);
1513 		break;
1514 	case HAFGRTR_GROUP:
1515 		val |= compute_fgu_bits(kvm, &hafgrtr_desc);
1516 		break;
1517 	case HFGRTR2_GROUP:
1518 		val |= compute_fgu_bits(kvm, &hfgrtr2_desc);
1519 		val |= compute_fgu_bits(kvm, &hfgwtr2_desc);
1520 		break;
1521 	case HFGITR2_GROUP:
1522 		val |= compute_fgu_bits(kvm, &hfgitr2_desc);
1523 		break;
1524 	case HDFGRTR2_GROUP:
1525 		val |= compute_fgu_bits(kvm, &hdfgrtr2_desc);
1526 		val |= compute_fgu_bits(kvm, &hdfgwtr2_desc);
1527 		break;
1528 	case ICH_HFGRTR_GROUP:
1529 		val |= compute_fgu_bits(kvm, &ich_hfgrtr_desc);
1530 		val |= compute_fgu_bits(kvm, &ich_hfgwtr_desc);
1531 		break;
1532 	case ICH_HFGITR_GROUP:
1533 		val |= compute_fgu_bits(kvm, &ich_hfgitr_desc);
1534 		break;
1535 	default:
1536 		BUG();
1537 	}
1538 
1539 	kvm->arch.fgu[fgt] = val;
1540 }
1541 
get_reg_fixed_bits(struct kvm * kvm,enum vcpu_sysreg reg)1542 struct resx get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg)
1543 {
1544 	struct resx resx;
1545 
1546 	switch (reg) {
1547 	case HFGRTR_EL2:
1548 		resx = compute_reg_resx_bits(kvm, &hfgrtr_desc, 0, 0);
1549 		break;
1550 	case HFGWTR_EL2:
1551 		resx = compute_reg_resx_bits(kvm, &hfgwtr_desc, 0, 0);
1552 		break;
1553 	case HFGITR_EL2:
1554 		resx = compute_reg_resx_bits(kvm, &hfgitr_desc, 0, 0);
1555 		break;
1556 	case HDFGRTR_EL2:
1557 		resx = compute_reg_resx_bits(kvm, &hdfgrtr_desc, 0, 0);
1558 		break;
1559 	case HDFGWTR_EL2:
1560 		resx = compute_reg_resx_bits(kvm, &hdfgwtr_desc, 0, 0);
1561 		break;
1562 	case HAFGRTR_EL2:
1563 		resx = compute_reg_resx_bits(kvm, &hafgrtr_desc, 0, 0);
1564 		break;
1565 	case HFGRTR2_EL2:
1566 		resx = compute_reg_resx_bits(kvm, &hfgrtr2_desc, 0, 0);
1567 		break;
1568 	case HFGWTR2_EL2:
1569 		resx = compute_reg_resx_bits(kvm, &hfgwtr2_desc, 0, 0);
1570 		break;
1571 	case HFGITR2_EL2:
1572 		resx = compute_reg_resx_bits(kvm, &hfgitr2_desc, 0, 0);
1573 		break;
1574 	case HDFGRTR2_EL2:
1575 		resx = compute_reg_resx_bits(kvm, &hdfgrtr2_desc, 0, 0);
1576 		break;
1577 	case HDFGWTR2_EL2:
1578 		resx = compute_reg_resx_bits(kvm, &hdfgwtr2_desc, 0, 0);
1579 		break;
1580 	case HCRX_EL2:
1581 		resx = compute_reg_resx_bits(kvm, &hcrx_desc, 0, 0);
1582 		resx.res1 |= __HCRX_EL2_RES1;
1583 		break;
1584 	case HCR_EL2:
1585 		resx = compute_reg_resx_bits(kvm, &hcr_desc, 0, 0);
1586 		break;
1587 	case SCTLR2_EL1:
1588 	case SCTLR2_EL2:
1589 		resx = compute_reg_resx_bits(kvm, &sctlr2_desc, 0, 0);
1590 		break;
1591 	case TCR2_EL2:
1592 		resx = compute_reg_resx_bits(kvm, &tcr2_el2_desc, 0, 0);
1593 		break;
1594 	case SCTLR_EL1:
1595 		resx = compute_reg_resx_bits(kvm, &sctlr_el1_desc, 0, 0);
1596 		break;
1597 	case SCTLR_EL2:
1598 		resx = compute_reg_resx_bits(kvm, &sctlr_el2_desc, 0, 0);
1599 		break;
1600 	case MDCR_EL2:
1601 		resx = compute_reg_resx_bits(kvm, &mdcr_el2_desc, 0, 0);
1602 		break;
1603 	case VTCR_EL2:
1604 		resx = compute_reg_resx_bits(kvm, &vtcr_el2_desc, 0, 0);
1605 		break;
1606 	case ICH_HFGRTR_EL2:
1607 		resx = compute_reg_resx_bits(kvm, &ich_hfgrtr_desc, 0, 0);
1608 		break;
1609 	case ICH_HFGWTR_EL2:
1610 		resx = compute_reg_resx_bits(kvm, &ich_hfgwtr_desc, 0, 0);
1611 		break;
1612 	case ICH_HFGITR_EL2:
1613 		resx = compute_reg_resx_bits(kvm, &ich_hfgitr_desc, 0, 0);
1614 		break;
1615 	default:
1616 		WARN_ON_ONCE(1);
1617 		resx = (typeof(resx)){};
1618 		break;
1619 	}
1620 
1621 	return resx;
1622 }
1623 
/*
 * Map an FGT vcpu sysreg to its fgt_masks descriptor. Being
 * __always_inline with constant callers, the switch constant-folds;
 * the BUILD_BUG_ON(1) in the default case only fires if an unhandled
 * register ever makes it here.
 */
static __always_inline struct fgt_masks *__fgt_reg_to_masks(enum vcpu_sysreg reg)
{
	switch (reg) {
	case HFGRTR_EL2:
		return &hfgrtr_masks;
	case HFGWTR_EL2:
		return &hfgwtr_masks;
	case HFGITR_EL2:
		return &hfgitr_masks;
	case HDFGRTR_EL2:
		return &hdfgrtr_masks;
	case HDFGWTR_EL2:
		return &hdfgwtr_masks;
	case HAFGRTR_EL2:
		return &hafgrtr_masks;
	case HFGRTR2_EL2:
		return &hfgrtr2_masks;
	case HFGWTR2_EL2:
		return &hfgwtr2_masks;
	case HFGITR2_EL2:
		return &hfgitr2_masks;
	case HDFGRTR2_EL2:
		return &hdfgrtr2_masks;
	case HDFGWTR2_EL2:
		return &hdfgwtr2_masks;
	case ICH_HFGRTR_EL2:
		return &ich_hfgrtr_masks;
	case ICH_HFGWTR_EL2:
		return &ich_hfgwtr_masks;
	case ICH_HFGITR_EL2:
		return &ich_hfgitr_masks;
	default:
		BUILD_BUG_ON(1);
	}
}
1659 
/*
 * Compute the effective value of FGT register @reg for @vcpu:
 * starting from the all-nmask value, enable the traps required by the
 * FGU (fine-grained UNDEF) state and, for a nested guest, those
 * requested by the L1-written shadow register, then apply the RES0/
 * RES1 masks (RES0 applied last, so it wins on overlap).
 */
static __always_inline void __compute_fgt(struct kvm_vcpu *vcpu, enum vcpu_sysreg reg)
{
	u64 fgu = vcpu->kvm->arch.fgu[__fgt_reg_to_group_id(reg)];
	struct fgt_masks *m = __fgt_reg_to_masks(reg);
	u64 clear = 0, set = 0, val = m->nmask;

	/* FGU bits: set positive-polarity bits, clear negative ones */
	set |= fgu & m->mask;
	clear |= fgu & m->nmask;

	if (is_nested_ctxt(vcpu)) {
		/* Honour the traps configured by the L1 hypervisor */
		u64 nested = __vcpu_sys_reg(vcpu, reg);
		set |= nested & m->mask;
		clear |= ~nested & m->nmask;
	}

	val |= set | m->res1;
	val &= ~(clear | m->res0);
	*vcpu_fgt(vcpu, reg) = val;
}
1679 
/*
 * HFGWTR_EL2, plus the Ampere AC03 erratum workaround, which requires
 * trapping guest writes to TCR_EL1 on affected cores.
 */
static void __compute_hfgwtr(struct kvm_vcpu *vcpu)
{
	__compute_fgt(vcpu, HFGWTR_EL2);

	if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
		*vcpu_fgt(vcpu, HFGWTR_EL2) |= HFGWTR_EL2_TCR_EL1;
}
1687 
/*
 * HDFGWTR_EL2, additionally forcing the MDSCR_EL1 write trap when the
 * vcpu is running in a hyp context.
 */
static void __compute_hdfgwtr(struct kvm_vcpu *vcpu)
{
	__compute_fgt(vcpu, HDFGWTR_EL2);

	if (is_hyp_ctxt(vcpu))
		*vcpu_fgt(vcpu, HDFGWTR_EL2) |= HDFGWTR_EL2_MDSCR_EL1;
}
1695 
/* ICH_HFGRTR_EL2, with a couple of unconditionally enforced traps */
static void __compute_ich_hfgrtr(struct kvm_vcpu *vcpu)
{
	__compute_fgt(vcpu, ICH_HFGRTR_EL2);

	/*
	 * ICC_IAFFIDR_EL1 *always* needs to be trapped when running a guest.
	 *
	 * We also trap accesses to ICC_IDR0_EL1 to allow us to completely hide
	 * FEAT_GCIE_LEGACY from the guest, and to (potentially) present fewer
	 * ID bits than the host supports.
	 */
	*vcpu_fgt(vcpu, ICH_HFGRTR_EL2) &= ~(ICH_HFGRTR_EL2_ICC_IAFFIDR_EL1 |
					     ICH_HFGRTR_EL2_ICC_IDRn_EL1);
}
1710 
/* ICH_HFGWTR_EL2, with an unconditionally enforced PPI enabler trap */
static void __compute_ich_hfgwtr(struct kvm_vcpu *vcpu)
{
	__compute_fgt(vcpu, ICH_HFGWTR_EL2);

	/*
	 * We present a different subset of PPIs to the guest from what
	 * exists in real hardware. We only trap writes, not reads.
	 */
	*vcpu_fgt(vcpu, ICH_HFGWTR_EL2) &= ~(ICH_HFGWTR_EL2_ICC_PPI_ENABLERn_EL1);
}
1721 
/*
 * Compute the effective fine-grained trap configuration for @vcpu at
 * load time. Nothing to do without host FGT support; the FGT2 and
 * GICv5 register sets are only computed when the corresponding host
 * capabilities are present.
 */
void kvm_vcpu_load_fgt(struct kvm_vcpu *vcpu)
{
	if (!cpus_have_final_cap(ARM64_HAS_FGT))
		return;

	__compute_fgt(vcpu, HFGRTR_EL2);
	__compute_hfgwtr(vcpu);
	__compute_fgt(vcpu, HFGITR_EL2);
	__compute_fgt(vcpu, HDFGRTR_EL2);
	__compute_hdfgwtr(vcpu);
	__compute_fgt(vcpu, HAFGRTR_EL2);

	if (cpus_have_final_cap(ARM64_HAS_FGT2)) {
		__compute_fgt(vcpu, HFGRTR2_EL2);
		__compute_fgt(vcpu, HFGWTR2_EL2);
		__compute_fgt(vcpu, HFGITR2_EL2);
		__compute_fgt(vcpu, HDFGRTR2_EL2);
		__compute_fgt(vcpu, HDFGWTR2_EL2);
	}

	if (cpus_have_final_cap(ARM64_HAS_GICV5_CPUIF)) {
		__compute_ich_hfgrtr(vcpu);
		__compute_ich_hfgwtr(vcpu);
		__compute_fgt(vcpu, ICH_HFGITR_EL2);
	}
}
1748