1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (C) 2025 Google LLC
4 * Author: Marc Zyngier <maz@kernel.org>
5 */
6
7 #include <linux/kvm_host.h>
8 #include <asm/kvm_emulate.h>
9 #include <asm/kvm_nested.h>
10 #include <asm/sysreg.h>
11
12 /*
13 * Describes the dependencies between a set of bits (or the negation
14 * of a set of RES0 bits) and a feature. The flags indicate how the
15 * data is interpreted.
16 */
struct reg_bits_to_feat_map {
	union {
		u64 bits;	/* bits this entry covers */
		u64 *res0p;	/* or pointer to a RES0 mask (RES0_POINTER) */
	};

#define NEVER_FGU	BIT(0)	/* Can trap, but never UNDEF */
#define CALL_FUNC	BIT(1)	/* Needs to evaluate tons of crap */
#define FIXED_VALUE	BIT(2)	/* RAZ/WI or RAO/WI in KVM */
#define RES0_POINTER	BIT(3)	/* Pointer to RES0 value instead of bits */

	unsigned long flags;

	union {
		/* ID register field comparison (default interpretation) */
		struct {
			u8 regidx;	/* index of the ID register */
			u8 shift;	/* field position in that register */
			u8 width;	/* field width, in bits */
			bool sign;	/* signed vs unsigned field comparison */
			s8 lo_lim;	/* minimum field value for the feature */
		};
		/* CALL_FUNC: simple predicate on the VM */
		bool (*match)(struct kvm *);
		/* CALL_FUNC + FIXED_VALUE: predicate that also emits the fixed bits */
		bool (*fval)(struct kvm *, u64 *);
	};
};
42
43 /*
44 * Describes the dependencies for a given register:
45 *
46 * @feat_map describes the dependency for the whole register. If the
47 * features the register depends on are not present, the whole
48 * register is effectively RES0.
49 *
50 * @bit_feat_map describes the dependencies for a set of bits in that
51 * register. If the features these bits depend on are not present, the
52 * bits are effectively RES0.
53 */
struct reg_feat_map_desc {
	const char *name;	/* stringified register (or mask) name */
	/* Dependency covering the whole register */
	const struct reg_bits_to_feat_map feat_map;
	/* Per-bit-group dependencies within the register */
	const struct reg_bits_to_feat_map *bit_feat_map;
	/* Number of entries in @bit_feat_map */
	const unsigned int bit_feat_map_sz;
};
60
/*
 * The __NEEDS_FEAT_n variants are selected by the number of trailing
 * arguments (see __NEEDS_FEAT_FLAG below). @w names the union member
 * receiving @m (either 'bits' or 'res0p').
 *
 * Three trailing arguments (id, fld, lim): the bits depend on ID
 * register @id having field @fld at value @lim or above.
 */
#define __NEEDS_FEAT_3(m, f, w, id, fld, lim)		\
	{						\
		.w = (m),				\
		.flags = (f),				\
		.regidx = IDREG_IDX(SYS_ ## id),	\
		.shift = id ##_## fld ## _SHIFT,	\
		.width = id ##_## fld ## _WIDTH,	\
		.sign = id ##_## fld ## _SIGNED,	\
		.lo_lim = id ##_## fld ##_## lim	\
	}

/*
 * Two trailing arguments (fun, dummy): @fun both matches the feature
 * and can emit a fixed value (the dummy argument only steers the
 * argument-count dispatch).
 */
#define __NEEDS_FEAT_2(m, f, w, fun, dummy)		\
	{						\
		.w = (m),				\
		.flags = (f) | CALL_FUNC,		\
		.fval = (fun),				\
	}

/* One trailing argument (fun): @fun is a plain boolean predicate. */
#define __NEEDS_FEAT_1(m, f, w, fun)			\
	{						\
		.w = (m),				\
		.flags = (f) | CALL_FUNC,		\
		.match = (fun),				\
	}

/* Dispatch to __NEEDS_FEAT_{1,2,3} based on trailing argument count. */
#define __NEEDS_FEAT_FLAG(m, f, w, ...)			\
	CONCATENATE(__NEEDS_FEAT_, COUNT_ARGS(__VA_ARGS__))(m, f, w, __VA_ARGS__)

/* Bits with explicit flags (NEVER_FGU, ...) */
#define NEEDS_FEAT_FLAG(m, f, ...)			\
	__NEEDS_FEAT_FLAG(m, f, bits, __VA_ARGS__)

/* Bits with a fixed value computed by an 'fval' callback */
#define NEEDS_FEAT_FIXED(m, ...)			\
	__NEEDS_FEAT_FLAG(m, FIXED_VALUE, bits, __VA_ARGS__, 0)

/* Whole-register dependency expressed via a pointer to its RES0 mask */
#define NEEDS_FEAT_RES0(p, ...)				\
	__NEEDS_FEAT_FLAG(p, RES0_POINTER, res0p, __VA_ARGS__)
97
/*
 * Declare the dependency between a set of bits and a set of features,
 * generating a struct reg_bits_to_feat_map.
 */
#define NEEDS_FEAT(m, ...)	NEEDS_FEAT_FLAG(m, 0, __VA_ARGS__)

/*
 * Declare the dependency between a non-FGT register, a set of
 * features, and the set of individual bits it contains. This generates
 * a struct reg_feat_map_desc.
 */
#define DECLARE_FEAT_MAP(n, r, m, f)			\
	struct reg_feat_map_desc n = {			\
		.name = #r,				\
		.feat_map = NEEDS_FEAT(~r##_RES0, f),	\
		.bit_feat_map = m,			\
		.bit_feat_map_sz = ARRAY_SIZE(m),	\
	}

/*
 * Specialised version of the above for FGT registers that have their
 * RES0 masks described as struct fgt_masks.
 */
#define DECLARE_FEAT_MAP_FGT(n, msk, m, f)			\
	struct reg_feat_map_desc n = {				\
		.name = #msk,					\
		.feat_map = NEEDS_FEAT_RES0(&msk.res0, f),	\
		.bit_feat_map = m,				\
		.bit_feat_map_sz = ARRAY_SIZE(m),		\
	}
128
/*
 * Shorthands for (ID register, field, minimum value) triplets, consumed
 * by NEEDS_FEAT() & friends through __NEEDS_FEAT_3().
 */
#define FEAT_SPE		ID_AA64DFR0_EL1, PMSVer, IMP
#define FEAT_SPE_FnE		ID_AA64DFR0_EL1, PMSVer, V1P2
#define FEAT_BRBE		ID_AA64DFR0_EL1, BRBE, IMP
#define FEAT_TRC_SR		ID_AA64DFR0_EL1, TraceVer, IMP
#define FEAT_PMUv3		ID_AA64DFR0_EL1, PMUVer, IMP
#define FEAT_TRBE		ID_AA64DFR0_EL1, TraceBuffer, IMP
#define FEAT_TRBEv1p1		ID_AA64DFR0_EL1, TraceBuffer, TRBE_V1P1
#define FEAT_DoubleLock		ID_AA64DFR0_EL1, DoubleLock, IMP
#define FEAT_TRF		ID_AA64DFR0_EL1, TraceFilt, IMP
#define FEAT_AA32EL0		ID_AA64PFR0_EL1, EL0, AARCH32
#define FEAT_AA32EL1		ID_AA64PFR0_EL1, EL1, AARCH32
#define FEAT_AA64EL1		ID_AA64PFR0_EL1, EL1, IMP
#define FEAT_AA64EL2		ID_AA64PFR0_EL1, EL2, IMP
#define FEAT_AA64EL3		ID_AA64PFR0_EL1, EL3, IMP
#define FEAT_AIE		ID_AA64MMFR3_EL1, AIE, IMP
#define FEAT_S2POE		ID_AA64MMFR3_EL1, S2POE, IMP
#define FEAT_S1POE		ID_AA64MMFR3_EL1, S1POE, IMP
#define FEAT_S1PIE		ID_AA64MMFR3_EL1, S1PIE, IMP
#define FEAT_THE		ID_AA64PFR1_EL1, THE, IMP
#define FEAT_SME		ID_AA64PFR1_EL1, SME, IMP
#define FEAT_GCS		ID_AA64PFR1_EL1, GCS, IMP
#define FEAT_LS64		ID_AA64ISAR1_EL1, LS64, LS64
#define FEAT_LS64_V		ID_AA64ISAR1_EL1, LS64, LS64_V
#define FEAT_LS64_ACCDATA	ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA
#define FEAT_RAS		ID_AA64PFR0_EL1, RAS, IMP
#define FEAT_RASv2		ID_AA64PFR0_EL1, RAS, V2
#define FEAT_GICv3		ID_AA64PFR0_EL1, GIC, IMP
#define FEAT_LOR		ID_AA64MMFR1_EL1, LO, IMP
#define FEAT_SPEv1p2		ID_AA64DFR0_EL1, PMSVer, V1P2
#define FEAT_SPEv1p4		ID_AA64DFR0_EL1, PMSVer, V1P4
#define FEAT_SPEv1p5		ID_AA64DFR0_EL1, PMSVer, V1P5
#define FEAT_ATS1A		ID_AA64ISAR2_EL1, ATS1A, IMP
#define FEAT_SPECRES2		ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX
#define FEAT_SPECRES		ID_AA64ISAR1_EL1, SPECRES, IMP
#define FEAT_TLBIRANGE		ID_AA64ISAR0_EL1, TLB, RANGE
#define FEAT_TLBIOS		ID_AA64ISAR0_EL1, TLB, OS
#define FEAT_PAN2		ID_AA64MMFR1_EL1, PAN, PAN2
#define FEAT_DPB2		ID_AA64ISAR1_EL1, DPB, DPB2
#define FEAT_AMUv1		ID_AA64PFR0_EL1, AMU, IMP
#define FEAT_AMUv1p1		ID_AA64PFR0_EL1, AMU, V1P1
#define FEAT_CMOW		ID_AA64MMFR1_EL1, CMOW, IMP
#define FEAT_D128		ID_AA64MMFR3_EL1, D128, IMP
#define FEAT_DoubleFault2	ID_AA64PFR1_EL1, DF2, IMP
#define FEAT_FPMR		ID_AA64PFR2_EL1, FPMR, IMP
#define FEAT_MOPS		ID_AA64ISAR2_EL1, MOPS, IMP
#define FEAT_NMI		ID_AA64PFR1_EL1, NMI, IMP
#define FEAT_SCTLR2		ID_AA64MMFR3_EL1, SCTLRX, IMP
#define FEAT_SYSREG128		ID_AA64ISAR2_EL1, SYSREG_128, IMP
#define FEAT_TCR2		ID_AA64MMFR3_EL1, TCRX, IMP
#define FEAT_XS			ID_AA64ISAR1_EL1, XS, IMP
#define FEAT_EVT		ID_AA64MMFR2_EL1, EVT, IMP
#define FEAT_EVT_TTLBxS		ID_AA64MMFR2_EL1, EVT, TTLBxS
#define FEAT_MTE2		ID_AA64PFR1_EL1, MTE, MTE2
#define FEAT_RME		ID_AA64PFR0_EL1, RME, IMP
#define FEAT_MPAM		ID_AA64PFR0_EL1, MPAM, 1
#define FEAT_S2FWB		ID_AA64MMFR2_EL1, FWB, IMP
#define FEAT_TME		ID_AA64ISAR0_EL1, TME, IMP
#define FEAT_TWED		ID_AA64MMFR1_EL1, TWED, IMP
#define FEAT_E2H0		ID_AA64MMFR4_EL1, E2H0, IMP
#define FEAT_SRMASK		ID_AA64MMFR4_EL1, SRMASK, IMP
#define FEAT_PoPS		ID_AA64MMFR4_EL1, PoPS, IMP
#define FEAT_PFAR		ID_AA64PFR1_EL1, PFAR, IMP
#define FEAT_Debugv8p9		ID_AA64DFR0_EL1, PMUVer, V3P9
#define FEAT_PMUv3_SS		ID_AA64DFR0_EL1, PMSS, IMP
#define FEAT_SEBEP		ID_AA64DFR0_EL1, SEBEP, IMP
#define FEAT_EBEP		ID_AA64DFR1_EL1, EBEP, IMP
#define FEAT_ITE		ID_AA64DFR1_EL1, ITE, IMP
#define FEAT_PMUv3_ICNTR	ID_AA64DFR1_EL1, PMICNTR, IMP
#define FEAT_SPMU		ID_AA64DFR1_EL1, SPMU, IMP
#define FEAT_SPE_nVM		ID_AA64DFR2_EL1, SPE_nVM, IMP
#define FEAT_STEP2		ID_AA64DFR2_EL1, STEP, IMP
#define FEAT_CPA2		ID_AA64ISAR3_EL1, CPA, CPA2
#define FEAT_ASID2		ID_AA64MMFR4_EL1, ASID2, IMP
#define FEAT_MEC		ID_AA64MMFR3_EL1, MEC, IMP
#define FEAT_HAFT		ID_AA64MMFR1_EL1, HAFDBS, HAFT
#define FEAT_BTI		ID_AA64PFR1_EL1, BT, IMP
#define FEAT_ExS		ID_AA64MMFR0_EL1, EXS, IMP
#define FEAT_IESB		ID_AA64MMFR2_EL1, IESB, IMP
#define FEAT_LSE2		ID_AA64MMFR2_EL1, AT, IMP
#define FEAT_LSMAOC		ID_AA64MMFR2_EL1, LSM, IMP
#define FEAT_MixedEnd		ID_AA64MMFR0_EL1, BIGEND, IMP
#define FEAT_MixedEndEL0	ID_AA64MMFR0_EL1, BIGENDEL0, IMP
#define FEAT_MTE_ASYNC		ID_AA64PFR1_EL1, MTE_frac, ASYNC
#define FEAT_MTE_STORE_ONLY	ID_AA64PFR2_EL1, MTESTOREONLY, IMP
#define FEAT_PAN		ID_AA64MMFR1_EL1, PAN, IMP
#define FEAT_PAN3		ID_AA64MMFR1_EL1, PAN, PAN3
#define FEAT_SSBS		ID_AA64PFR1_EL1, SSBS, IMP
#define FEAT_TIDCP1		ID_AA64MMFR1_EL1, TIDCP1, IMP
#define FEAT_FGT		ID_AA64MMFR0_EL1, FGT, IMP
#define FEAT_FGT2		ID_AA64MMFR0_EL1, FGT, FGT2
#define FEAT_MTPMU		ID_AA64DFR0_EL1, MTPMU, IMP
#define FEAT_HCX		ID_AA64MMFR1_EL1, HCX, IMP
221
not_feat_aa64el3(struct kvm * kvm)222 static bool not_feat_aa64el3(struct kvm *kvm)
223 {
224 return !kvm_has_feat(kvm, FEAT_AA64EL3);
225 }
226
/*
 * FEAT_NV2 is present either as a full NV2 implementation, or as the
 * NV2-only subset (NV_frac == NV2_ONLY while NV itself reads as NI).
 */
static bool feat_nv2(struct kvm *kvm)
{
	return ((kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY) &&
		 kvm_has_feat_enum(kvm, ID_AA64MMFR2_EL1, NV, NI)) ||
		kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2));
}
233
feat_nv2_e2h0_ni(struct kvm * kvm)234 static bool feat_nv2_e2h0_ni(struct kvm *kvm)
235 {
236 return feat_nv2(kvm) && !kvm_has_feat(kvm, FEAT_E2H0);
237 }
238
/*
 * RASv1p1 is either RAS >= V1P1, or baseline RAS with the v1.1
 * additions advertised through RAS_frac.
 */
static bool feat_rasv1p1(struct kvm *kvm)
{
	return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) ||
		(kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, RAS, IMP) &&
		 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1)));
}
245
/*
 * CSV2_2, or baseline CSV2 with the 1.2 additions advertised through
 * CSV2_frac.
 */
static bool feat_csv2_2_csv2_1p2(struct kvm *kvm)
{
	return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) ||
		(kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2) &&
		 kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, CSV2, IMP)));
}
252
/* Wrap the kvm_has_pauth() macro for use as a 'match' callback. */
static bool feat_pauth(struct kvm *kvm)
{
	return kvm_has_pauth(kvm, PAuth);
}

/* Same, for the PAuth_LR flavour. */
static bool feat_pauth_lr(struct kvm *kvm)
{
	return kvm_has_pauth(kvm, PAuth_LR);
}
262
/* FEAT_ADERR requires both the ADERR and SDERR fields at FEAT_ADERR. */
static bool feat_aderr(struct kvm *kvm)
{
	return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, FEAT_ADERR) &&
		kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SDERR, FEAT_ADERR));
}

/* FEAT_ANERR requires both the ANERR and SNERR fields at FEAT_ANERR. */
static bool feat_anerr(struct kvm *kvm)
{
	return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ANERR, FEAT_ANERR) &&
		kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SNERR, FEAT_ANERR));
}
274
static bool feat_sme_smps(struct kvm *kvm)
{
	/*
	 * Revisit this if KVM ever supports SME -- this really should
	 * look at the guest's view of SMIDR_EL1. Funnily enough, this
	 * is not captured in the JSON file, but only as a note in the
	 * ARM ARM.
	 */
	return (kvm_has_feat(kvm, FEAT_SME) &&
		(read_sysreg_s(SYS_SMIDR_EL1) & SMIDR_EL1_SMPS));
}
286
static bool feat_spe_fds(struct kvm *kvm)
{
	/*
	 * Revisit this if KVM ever supports SPE -- this really should
	 * look at the guest's view of PMSIDR_EL1.
	 */
	return (kvm_has_feat(kvm, FEAT_SPEv1p4) &&
		(read_sysreg_s(SYS_PMSIDR_EL1) & PMSIDR_EL1_FDS));
}
296
static bool feat_trbe_mpam(struct kvm *kvm)
{
	/*
	 * Revisit this if KVM ever supports both MPAM and TRBE --
	 * this really should look at the guest's view of TRBIDR_EL1.
	 */
	return (kvm_has_feat(kvm, FEAT_TRBE) &&
		kvm_has_feat(kvm, FEAT_MPAM) &&
		(read_sysreg_s(SYS_TRBIDR_EL1) & TRBIDR_EL1_MPAM));
}
307
feat_asid2_e2h1(struct kvm * kvm)308 static bool feat_asid2_e2h1(struct kvm *kvm)
309 {
310 return kvm_has_feat(kvm, FEAT_ASID2) && !kvm_has_feat(kvm, FEAT_E2H0);
311 }
312
feat_d128_e2h1(struct kvm * kvm)313 static bool feat_d128_e2h1(struct kvm *kvm)
314 {
315 return kvm_has_feat(kvm, FEAT_D128) && !kvm_has_feat(kvm, FEAT_E2H0);
316 }
317
feat_mec_e2h1(struct kvm * kvm)318 static bool feat_mec_e2h1(struct kvm *kvm)
319 {
320 return kvm_has_feat(kvm, FEAT_MEC) && !kvm_has_feat(kvm, FEAT_E2H0);
321 }
322
/* Bits gated by either FEAT_EBEP or FEAT_PMUv3_SS. */
static bool feat_ebep_pmuv3_ss(struct kvm *kvm)
{
	return kvm_has_feat(kvm, FEAT_EBEP) || kvm_has_feat(kvm, FEAT_PMUv3_SS);
}

/* Mixed-endianness at EL0, whether global or EL0-only. */
static bool feat_mixedendel0(struct kvm *kvm)
{
	return kvm_has_feat(kvm, FEAT_MixedEnd) || kvm_has_feat(kvm, FEAT_MixedEndEL0);
}

/* MTE2 with asynchronous tag-check fault reporting (MTE_frac). */
static bool feat_mte_async(struct kvm *kvm)
{
	return kvm_has_feat(kvm, FEAT_MTE2) && kvm_has_feat_enum(kvm, FEAT_MTE_ASYNC);
}
337
/*
 * True when the PMU revision is at least @r, explicitly excluding
 * IMP_DEF PMUs.
 */
#define check_pmu_revision(k, r)					\
	({								\
		(kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, r) &&	\
		 !kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, IMP_DEF));	\
	})
343
/* PMUv3 revision checks, for use as 'match' callbacks. */
static bool feat_pmuv3p1(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P1);
}

static bool feat_pmuv3p5(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P5);
}

static bool feat_pmuv3p7(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P7);
}

static bool feat_pmuv3p9(struct kvm *kvm)
{
	return check_pmu_revision(kvm, V3P9);
}
363
compute_hcr_rw(struct kvm * kvm,u64 * bits)364 static bool compute_hcr_rw(struct kvm *kvm, u64 *bits)
365 {
366 /* This is purely academic: AArch32 and NV are mutually exclusive */
367 if (bits) {
368 if (kvm_has_feat(kvm, FEAT_AA32EL1))
369 *bits &= ~HCR_EL2_RW;
370 else
371 *bits |= HCR_EL2_RW;
372 }
373
374 return true;
375 }
376
compute_hcr_e2h(struct kvm * kvm,u64 * bits)377 static bool compute_hcr_e2h(struct kvm *kvm, u64 *bits)
378 {
379 if (bits) {
380 if (kvm_has_feat(kvm, FEAT_E2H0))
381 *bits &= ~HCR_EL2_E2H;
382 else
383 *bits |= HCR_EL2_E2H;
384 }
385
386 return true;
387 }
388
/* HFGRTR_EL2: fine-grained read traps, grouped by gating feature. */
static const struct reg_bits_to_feat_map hfgrtr_feat_map[] = {
	NEEDS_FEAT(HFGRTR_EL2_nAMAIR2_EL1 |
		   HFGRTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGRTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGRTR_EL2_nPOR_EL1 |
		   HFGRTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGRTR_EL2_nPIR_EL1 |
		   HFGRTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGRTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGRTR_EL2_nTPIDR2_EL0 |
		   HFGRTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGRTR_EL2_nGCS_EL1 |
		   HFGRTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGRTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGRTR_EL2_ERXADDR_EL1 |
		   HFGRTR_EL2_ERXMISCn_EL1 |
		   HFGRTR_EL2_ERXSTATUS_EL1 |
		   HFGRTR_EL2_ERXCTLR_EL1 |
		   HFGRTR_EL2_ERXFR_EL1 |
		   HFGRTR_EL2_ERRSELR_EL1 |
		   HFGRTR_EL2_ERRIDR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGRTR_EL2_ERXPFGCDN_EL1 |
		   HFGRTR_EL2_ERXPFGCTL_EL1 |
		   HFGRTR_EL2_ERXPFGF_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGRTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGRTR_EL2_SCXTNUM_EL0 |
		   HFGRTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGRTR_EL2_LORSA_EL1 |
		   HFGRTR_EL2_LORN_EL1 |
		   HFGRTR_EL2_LORID_EL1 |
		   HFGRTR_EL2_LOREA_EL1 |
		   HFGRTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGRTR_EL2_APIBKey |
		   HFGRTR_EL2_APIAKey |
		   HFGRTR_EL2_APGAKey |
		   HFGRTR_EL2_APDBKey |
		   HFGRTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HFGRTR_EL2_VBAR_EL1 |
			HFGRTR_EL2_TTBR1_EL1 |
			HFGRTR_EL2_TTBR0_EL1 |
			HFGRTR_EL2_TPIDR_EL0 |
			HFGRTR_EL2_TPIDRRO_EL0 |
			HFGRTR_EL2_TPIDR_EL1 |
			HFGRTR_EL2_TCR_EL1 |
			HFGRTR_EL2_SCTLR_EL1 |
			HFGRTR_EL2_REVIDR_EL1 |
			HFGRTR_EL2_PAR_EL1 |
			HFGRTR_EL2_MPIDR_EL1 |
			HFGRTR_EL2_MIDR_EL1 |
			HFGRTR_EL2_MAIR_EL1 |
			HFGRTR_EL2_ISR_EL1 |
			HFGRTR_EL2_FAR_EL1 |
			HFGRTR_EL2_ESR_EL1 |
			HFGRTR_EL2_DCZID_EL0 |
			HFGRTR_EL2_CTR_EL0 |
			HFGRTR_EL2_CSSELR_EL1 |
			HFGRTR_EL2_CPACR_EL1 |
			HFGRTR_EL2_CONTEXTIDR_EL1|
			HFGRTR_EL2_CLIDR_EL1 |
			HFGRTR_EL2_CCSIDR_EL1 |
			HFGRTR_EL2_AMAIR_EL1 |
			HFGRTR_EL2_AIDR_EL1 |
			HFGRTR_EL2_AFSR1_EL1 |
			HFGRTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};


static const DECLARE_FEAT_MAP_FGT(hfgrtr_desc, hfgrtr_masks,
				  hfgrtr_feat_map, FEAT_FGT);
469
/* HFGWTR_EL2: fine-grained write traps, grouped by gating feature. */
static const struct reg_bits_to_feat_map hfgwtr_feat_map[] = {
	NEEDS_FEAT(HFGWTR_EL2_nAMAIR2_EL1 |
		   HFGWTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGWTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGWTR_EL2_nPOR_EL1 |
		   HFGWTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGWTR_EL2_nPIR_EL1 |
		   HFGWTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGWTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGWTR_EL2_nTPIDR2_EL0 |
		   HFGWTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGWTR_EL2_nGCS_EL1 |
		   HFGWTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGWTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGWTR_EL2_ERXADDR_EL1 |
		   HFGWTR_EL2_ERXMISCn_EL1 |
		   HFGWTR_EL2_ERXSTATUS_EL1 |
		   HFGWTR_EL2_ERXCTLR_EL1 |
		   HFGWTR_EL2_ERRSELR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGWTR_EL2_ERXPFGCDN_EL1 |
		   HFGWTR_EL2_ERXPFGCTL_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGWTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGWTR_EL2_SCXTNUM_EL0 |
		   HFGWTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGWTR_EL2_LORSA_EL1 |
		   HFGWTR_EL2_LORN_EL1 |
		   HFGWTR_EL2_LOREA_EL1 |
		   HFGWTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGWTR_EL2_APIBKey |
		   HFGWTR_EL2_APIAKey |
		   HFGWTR_EL2_APGAKey |
		   HFGWTR_EL2_APDBKey |
		   HFGWTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HFGWTR_EL2_VBAR_EL1 |
			HFGWTR_EL2_TTBR1_EL1 |
			HFGWTR_EL2_TTBR0_EL1 |
			HFGWTR_EL2_TPIDR_EL0 |
			HFGWTR_EL2_TPIDRRO_EL0 |
			HFGWTR_EL2_TPIDR_EL1 |
			HFGWTR_EL2_TCR_EL1 |
			HFGWTR_EL2_SCTLR_EL1 |
			HFGWTR_EL2_PAR_EL1 |
			HFGWTR_EL2_MAIR_EL1 |
			HFGWTR_EL2_FAR_EL1 |
			HFGWTR_EL2_ESR_EL1 |
			HFGWTR_EL2_CSSELR_EL1 |
			HFGWTR_EL2_CPACR_EL1 |
			HFGWTR_EL2_CONTEXTIDR_EL1|
			HFGWTR_EL2_AMAIR_EL1 |
			HFGWTR_EL2_AFSR1_EL1 |
			HFGWTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr_desc, hfgwtr_masks,
				  hfgwtr_feat_map, FEAT_FGT);
536
/* HDFGRTR_EL2: fine-grained debug read traps, grouped by gating feature. */
static const struct reg_bits_to_feat_map hdfgrtr_feat_map[] = {
	NEEDS_FEAT(HDFGRTR_EL2_PMBIDR_EL1 |
		   HDFGRTR_EL2_PMSLATFR_EL1 |
		   HDFGRTR_EL2_PMSIRR_EL1 |
		   HDFGRTR_EL2_PMSIDR_EL1 |
		   HDFGRTR_EL2_PMSICR_EL1 |
		   HDFGRTR_EL2_PMSFCR_EL1 |
		   HDFGRTR_EL2_PMSEVFR_EL1 |
		   HDFGRTR_EL2_PMSCR_EL1 |
		   HDFGRTR_EL2_PMBSR_EL1 |
		   HDFGRTR_EL2_PMBPTR_EL1 |
		   HDFGRTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGRTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE),
	NEEDS_FEAT(HDFGRTR_EL2_nBRBDATA |
		   HDFGRTR_EL2_nBRBCTL |
		   HDFGRTR_EL2_nBRBIDR,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGRTR_EL2_TRCVICTLR |
		   HDFGRTR_EL2_TRCSTATR |
		   HDFGRTR_EL2_TRCSSCSRn |
		   HDFGRTR_EL2_TRCSEQSTR |
		   HDFGRTR_EL2_TRCPRGCTLR |
		   HDFGRTR_EL2_TRCOSLSR |
		   HDFGRTR_EL2_TRCIMSPECn |
		   HDFGRTR_EL2_TRCID |
		   HDFGRTR_EL2_TRCCNTVRn |
		   HDFGRTR_EL2_TRCCLAIM |
		   HDFGRTR_EL2_TRCAUXCTLR |
		   HDFGRTR_EL2_TRCAUTHSTATUS |
		   HDFGRTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGRTR_EL2_PMCEIDn_EL0 |
		   HDFGRTR_EL2_PMUSERENR_EL0 |
		   HDFGRTR_EL2_PMMIR_EL1 |
		   HDFGRTR_EL2_PMSELR_EL0 |
		   HDFGRTR_EL2_PMOVS |
		   HDFGRTR_EL2_PMINTEN |
		   HDFGRTR_EL2_PMCNTEN |
		   HDFGRTR_EL2_PMCCNTR_EL0 |
		   HDFGRTR_EL2_PMCCFILTR_EL0 |
		   HDFGRTR_EL2_PMEVTYPERn_EL0 |
		   HDFGRTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGRTR_EL2_TRBTRG_EL1 |
		   HDFGRTR_EL2_TRBSR_EL1 |
		   HDFGRTR_EL2_TRBPTR_EL1 |
		   HDFGRTR_EL2_TRBMAR_EL1 |
		   HDFGRTR_EL2_TRBLIMITR_EL1 |
		   HDFGRTR_EL2_TRBIDR_EL1 |
		   HDFGRTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSDLR_EL1, NEVER_FGU,
			FEAT_DoubleLock),
	/* Baseline debug registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSECCR_EL1 |
			HDFGRTR_EL2_OSLSR_EL1 |
			HDFGRTR_EL2_DBGPRCR_EL1 |
			HDFGRTR_EL2_DBGAUTHSTATUS_EL1|
			HDFGRTR_EL2_DBGCLAIM |
			HDFGRTR_EL2_MDSCR_EL1 |
			HDFGRTR_EL2_DBGWVRn_EL1 |
			HDFGRTR_EL2_DBGWCRn_EL1 |
			HDFGRTR_EL2_DBGBVRn_EL1 |
			HDFGRTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1)
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr_desc, hdfgrtr_masks,
				  hdfgrtr_feat_map, FEAT_FGT);
606
/* HDFGWTR_EL2: fine-grained debug write traps, grouped by gating feature. */
static const struct reg_bits_to_feat_map hdfgwtr_feat_map[] = {
	NEEDS_FEAT(HDFGWTR_EL2_PMSLATFR_EL1 |
		   HDFGWTR_EL2_PMSIRR_EL1 |
		   HDFGWTR_EL2_PMSICR_EL1 |
		   HDFGWTR_EL2_PMSFCR_EL1 |
		   HDFGWTR_EL2_PMSEVFR_EL1 |
		   HDFGWTR_EL2_PMSCR_EL1 |
		   HDFGWTR_EL2_PMBSR_EL1 |
		   HDFGWTR_EL2_PMBPTR_EL1 |
		   HDFGWTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGWTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE),
	NEEDS_FEAT(HDFGWTR_EL2_nBRBDATA |
		   HDFGWTR_EL2_nBRBCTL,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGWTR_EL2_TRCVICTLR |
		   HDFGWTR_EL2_TRCSSCSRn |
		   HDFGWTR_EL2_TRCSEQSTR |
		   HDFGWTR_EL2_TRCPRGCTLR |
		   HDFGWTR_EL2_TRCOSLAR |
		   HDFGWTR_EL2_TRCIMSPECn |
		   HDFGWTR_EL2_TRCCNTVRn |
		   HDFGWTR_EL2_TRCCLAIM |
		   HDFGWTR_EL2_TRCAUXCTLR |
		   HDFGWTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGWTR_EL2_PMUSERENR_EL0 |
		   HDFGWTR_EL2_PMCR_EL0 |
		   HDFGWTR_EL2_PMSWINC_EL0 |
		   HDFGWTR_EL2_PMSELR_EL0 |
		   HDFGWTR_EL2_PMOVS |
		   HDFGWTR_EL2_PMINTEN |
		   HDFGWTR_EL2_PMCNTEN |
		   HDFGWTR_EL2_PMCCNTR_EL0 |
		   HDFGWTR_EL2_PMCCFILTR_EL0 |
		   HDFGWTR_EL2_PMEVTYPERn_EL0 |
		   HDFGWTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGWTR_EL2_TRBTRG_EL1 |
		   HDFGWTR_EL2_TRBSR_EL1 |
		   HDFGWTR_EL2_TRBPTR_EL1 |
		   HDFGWTR_EL2_TRBMAR_EL1 |
		   HDFGWTR_EL2_TRBLIMITR_EL1 |
		   HDFGWTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSDLR_EL1,
			NEVER_FGU, FEAT_DoubleLock),
	/* Baseline debug registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSECCR_EL1 |
			HDFGWTR_EL2_OSLAR_EL1 |
			HDFGWTR_EL2_DBGPRCR_EL1 |
			HDFGWTR_EL2_DBGCLAIM |
			HDFGWTR_EL2_MDSCR_EL1 |
			HDFGWTR_EL2_DBGWVRn_EL1 |
			HDFGWTR_EL2_DBGWCRn_EL1 |
			HDFGWTR_EL2_DBGBVRn_EL1 |
			HDFGWTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1),
	NEEDS_FEAT(HDFGWTR_EL2_TRFCR_EL1, FEAT_TRF),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr_desc, hdfgwtr_masks,
				  hdfgwtr_feat_map, FEAT_FGT);
669
/* HFGITR_EL2: fine-grained instruction traps, grouped by gating feature. */
static const struct reg_bits_to_feat_map hfgitr_feat_map[] = {
	NEEDS_FEAT(HFGITR_EL2_PSBCSYNC, FEAT_SPEv1p5),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1A, FEAT_ATS1A),
	NEEDS_FEAT(HFGITR_EL2_COSPRCTX, FEAT_SPECRES2),
	NEEDS_FEAT(HFGITR_EL2_nGCSEPP |
		   HFGITR_EL2_nGCSSTR_EL1 |
		   HFGITR_EL2_nGCSPUSHM_EL1,
		   FEAT_GCS),
	NEEDS_FEAT(HFGITR_EL2_nBRBIALL |
		   HFGITR_EL2_nBRBINJ,
		   FEAT_BRBE),
	NEEDS_FEAT(HFGITR_EL2_CPPRCTX |
		   HFGITR_EL2_DVPRCTX |
		   HFGITR_EL2_CFPRCTX,
		   FEAT_SPECRES),
	NEEDS_FEAT(HFGITR_EL2_TLBIRVAALE1 |
		   HFGITR_EL2_TLBIRVALE1 |
		   HFGITR_EL2_TLBIRVAAE1 |
		   HFGITR_EL2_TLBIRVAE1 |
		   HFGITR_EL2_TLBIRVAALE1IS |
		   HFGITR_EL2_TLBIRVALE1IS |
		   HFGITR_EL2_TLBIRVAAE1IS |
		   HFGITR_EL2_TLBIRVAE1IS |
		   HFGITR_EL2_TLBIRVAALE1OS |
		   HFGITR_EL2_TLBIRVALE1OS |
		   HFGITR_EL2_TLBIRVAAE1OS |
		   HFGITR_EL2_TLBIRVAE1OS,
		   FEAT_TLBIRANGE),
	NEEDS_FEAT(HFGITR_EL2_TLBIVAALE1OS |
		   HFGITR_EL2_TLBIVALE1OS |
		   HFGITR_EL2_TLBIVAAE1OS |
		   HFGITR_EL2_TLBIASIDE1OS |
		   HFGITR_EL2_TLBIVAE1OS |
		   HFGITR_EL2_TLBIVMALLE1OS,
		   FEAT_TLBIOS),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1WP |
		   HFGITR_EL2_ATS1E1RP,
		   FEAT_PAN2),
	NEEDS_FEAT(HFGITR_EL2_DCCVADP, FEAT_DPB2),
	/* Baseline AArch64 instructions: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HFGITR_EL2_DCCVAC |
			HFGITR_EL2_SVC_EL1 |
			HFGITR_EL2_SVC_EL0 |
			HFGITR_EL2_ERET |
			HFGITR_EL2_TLBIVAALE1 |
			HFGITR_EL2_TLBIVALE1 |
			HFGITR_EL2_TLBIVAAE1 |
			HFGITR_EL2_TLBIASIDE1 |
			HFGITR_EL2_TLBIVAE1 |
			HFGITR_EL2_TLBIVMALLE1 |
			HFGITR_EL2_TLBIVAALE1IS |
			HFGITR_EL2_TLBIVALE1IS |
			HFGITR_EL2_TLBIVAAE1IS |
			HFGITR_EL2_TLBIASIDE1IS |
			HFGITR_EL2_TLBIVAE1IS |
			HFGITR_EL2_TLBIVMALLE1IS|
			HFGITR_EL2_ATS1E0W |
			HFGITR_EL2_ATS1E0R |
			HFGITR_EL2_ATS1E1W |
			HFGITR_EL2_ATS1E1R |
			HFGITR_EL2_DCZVA |
			HFGITR_EL2_DCCIVAC |
			HFGITR_EL2_DCCVAP |
			HFGITR_EL2_DCCVAU |
			HFGITR_EL2_DCCISW |
			HFGITR_EL2_DCCSW |
			HFGITR_EL2_DCISW |
			HFGITR_EL2_DCIVAC |
			HFGITR_EL2_ICIVAU |
			HFGITR_EL2_ICIALLU |
			HFGITR_EL2_ICIALLUIS,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgitr_desc, hfgitr_masks,
				  hfgitr_feat_map, FEAT_FGT);
745
/* HAFGRTR_EL2: AMU fine-grained read traps; everything hangs off FEAT_AMUv1. */
static const struct reg_bits_to_feat_map hafgrtr_feat_map[] = {
	NEEDS_FEAT(HAFGRTR_EL2_AMEVTYPER115_EL0 |
		   HAFGRTR_EL2_AMEVTYPER114_EL0 |
		   HAFGRTR_EL2_AMEVTYPER113_EL0 |
		   HAFGRTR_EL2_AMEVTYPER112_EL0 |
		   HAFGRTR_EL2_AMEVTYPER111_EL0 |
		   HAFGRTR_EL2_AMEVTYPER110_EL0 |
		   HAFGRTR_EL2_AMEVTYPER19_EL0 |
		   HAFGRTR_EL2_AMEVTYPER18_EL0 |
		   HAFGRTR_EL2_AMEVTYPER17_EL0 |
		   HAFGRTR_EL2_AMEVTYPER16_EL0 |
		   HAFGRTR_EL2_AMEVTYPER15_EL0 |
		   HAFGRTR_EL2_AMEVTYPER14_EL0 |
		   HAFGRTR_EL2_AMEVTYPER13_EL0 |
		   HAFGRTR_EL2_AMEVTYPER12_EL0 |
		   HAFGRTR_EL2_AMEVTYPER11_EL0 |
		   HAFGRTR_EL2_AMEVTYPER10_EL0 |
		   HAFGRTR_EL2_AMEVCNTR115_EL0 |
		   HAFGRTR_EL2_AMEVCNTR114_EL0 |
		   HAFGRTR_EL2_AMEVCNTR113_EL0 |
		   HAFGRTR_EL2_AMEVCNTR112_EL0 |
		   HAFGRTR_EL2_AMEVCNTR111_EL0 |
		   HAFGRTR_EL2_AMEVCNTR110_EL0 |
		   HAFGRTR_EL2_AMEVCNTR19_EL0 |
		   HAFGRTR_EL2_AMEVCNTR18_EL0 |
		   HAFGRTR_EL2_AMEVCNTR17_EL0 |
		   HAFGRTR_EL2_AMEVCNTR16_EL0 |
		   HAFGRTR_EL2_AMEVCNTR15_EL0 |
		   HAFGRTR_EL2_AMEVCNTR14_EL0 |
		   HAFGRTR_EL2_AMEVCNTR13_EL0 |
		   HAFGRTR_EL2_AMEVCNTR12_EL0 |
		   HAFGRTR_EL2_AMEVCNTR11_EL0 |
		   HAFGRTR_EL2_AMEVCNTR10_EL0 |
		   HAFGRTR_EL2_AMCNTEN1 |
		   HAFGRTR_EL2_AMCNTEN0 |
		   HAFGRTR_EL2_AMEVCNTR03_EL0 |
		   HAFGRTR_EL2_AMEVCNTR02_EL0 |
		   HAFGRTR_EL2_AMEVCNTR01_EL0 |
		   HAFGRTR_EL2_AMEVCNTR00_EL0,
		   FEAT_AMUv1),
};

static const DECLARE_FEAT_MAP_FGT(hafgrtr_desc, hafgrtr_masks,
				  hafgrtr_feat_map, FEAT_FGT);
790
/* HFGITR2_EL2: second set of fine-grained instruction traps (FEAT_FGT2). */
static const struct reg_bits_to_feat_map hfgitr2_feat_map[] = {
	NEEDS_FEAT(HFGITR2_EL2_nDCCIVAPS, FEAT_PoPS),
	NEEDS_FEAT(HFGITR2_EL2_TSBCSYNC, FEAT_TRBEv1p1)
};

static const DECLARE_FEAT_MAP_FGT(hfgitr2_desc, hfgitr2_masks,
				  hfgitr2_feat_map, FEAT_FGT2);
798
/* HFGRTR2_EL2: second set of fine-grained read traps (FEAT_FGT2). */
static const struct reg_bits_to_feat_map hfgrtr2_feat_map[] = {
	NEEDS_FEAT(HFGRTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGRTR2_EL2_nERXGSR_EL1, FEAT_RASv2),
	NEEDS_FEAT(HFGRTR2_EL2_nACTLRALIAS_EL1 |
		   HFGRTR2_EL2_nACTLRMASK_EL1 |
		   HFGRTR2_EL2_nCPACRALIAS_EL1 |
		   HFGRTR2_EL2_nCPACRMASK_EL1 |
		   HFGRTR2_EL2_nSCTLR2MASK_EL1 |
		   HFGRTR2_EL2_nSCTLRALIAS2_EL1 |
		   HFGRTR2_EL2_nSCTLRALIAS_EL1 |
		   HFGRTR2_EL2_nSCTLRMASK_EL1 |
		   HFGRTR2_EL2_nTCR2ALIAS_EL1 |
		   HFGRTR2_EL2_nTCR2MASK_EL1 |
		   HFGRTR2_EL2_nTCRALIAS_EL1 |
		   HFGRTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGRTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgrtr2_desc, hfgrtr2_masks,
				  hfgrtr2_feat_map, FEAT_FGT2);
820
/* HFGWTR2_EL2: second set of fine-grained write traps (FEAT_FGT2). */
static const struct reg_bits_to_feat_map hfgwtr2_feat_map[] = {
	NEEDS_FEAT(HFGWTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGWTR2_EL2_nACTLRALIAS_EL1 |
		   HFGWTR2_EL2_nACTLRMASK_EL1 |
		   HFGWTR2_EL2_nCPACRALIAS_EL1 |
		   HFGWTR2_EL2_nCPACRMASK_EL1 |
		   HFGWTR2_EL2_nSCTLR2MASK_EL1 |
		   HFGWTR2_EL2_nSCTLRALIAS2_EL1 |
		   HFGWTR2_EL2_nSCTLRALIAS_EL1 |
		   HFGWTR2_EL2_nSCTLRMASK_EL1 |
		   HFGWTR2_EL2_nTCR2ALIAS_EL1 |
		   HFGWTR2_EL2_nTCR2MASK_EL1 |
		   HFGWTR2_EL2_nTCRALIAS_EL1 |
		   HFGWTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGWTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr2_desc, hfgwtr2_masks,
				  hfgwtr2_feat_map, FEAT_FGT2);
841
/* HDFGRTR2_EL2: second set of fine-grained debug read traps (FEAT_FGT2). */
static const struct reg_bits_to_feat_map hdfgrtr2_feat_map[] = {
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMICFILTR_EL0 |
		   HDFGRTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMUACR_EL1, feat_pmuv3p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSSCR_EL1 |
		   HDFGRTR2_EL2_nPMSSDATA,
		   FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGRTR2_EL2_nSPMACCESSR_EL1 |
		   HDFGRTR2_EL2_nSPMCNTEN |
		   HDFGRTR2_EL2_nSPMCR_EL0 |
		   HDFGRTR2_EL2_nSPMDEVAFF_EL1 |
		   HDFGRTR2_EL2_nSPMEVCNTRn_EL0 |
		   HDFGRTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGRTR2_EL2_nSPMID |
		   HDFGRTR2_EL2_nSPMINTEN |
		   HDFGRTR2_EL2_nSPMOVS |
		   HDFGRTR2_EL2_nSPMSCR_EL1 |
		   HDFGRTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr2_desc, hdfgrtr2_masks,
				  hdfgrtr2_feat_map, FEAT_FGT2);
874
/* HDFGWTR2_EL2: second set of fine-grained debug write traps (FEAT_FGT2). */
static const struct reg_bits_to_feat_map hdfgwtr2_feat_map[] = {
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMICFILTR_EL0 |
		   HDFGWTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMUACR_EL1 |
		   HDFGWTR2_EL2_nPMZR_EL0,
		   feat_pmuv3p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSSCR_EL1, FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGWTR2_EL2_nSPMACCESSR_EL1 |
		   HDFGWTR2_EL2_nSPMCNTEN |
		   HDFGWTR2_EL2_nSPMCR_EL0 |
		   HDFGWTR2_EL2_nSPMEVCNTRn_EL0 |
		   HDFGWTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGWTR2_EL2_nSPMINTEN |
		   HDFGWTR2_EL2_nSPMOVS |
		   HDFGWTR2_EL2_nSPMSCR_EL1 |
		   HDFGWTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr2_desc, hdfgwtr2_masks,
				  hdfgwtr2_feat_map, FEAT_FGT2);
905
906
/* HCRX_EL2: extended hypervisor configuration bits, by gating feature. */
static const struct reg_bits_to_feat_map hcrx_feat_map[] = {
	NEEDS_FEAT(HCRX_EL2_PACMEn, feat_pauth_lr),
	NEEDS_FEAT(HCRX_EL2_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(HCRX_EL2_GCSEn, FEAT_GCS),
	NEEDS_FEAT(HCRX_EL2_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(HCRX_EL2_EnSDERR, feat_aderr),
	NEEDS_FEAT(HCRX_EL2_TMEA, FEAT_DoubleFault2),
	NEEDS_FEAT(HCRX_EL2_EnSNERR, feat_anerr),
	NEEDS_FEAT(HCRX_EL2_D128En, FEAT_D128),
	NEEDS_FEAT(HCRX_EL2_PTTWI, FEAT_THE),
	NEEDS_FEAT(HCRX_EL2_SCTLR2En, FEAT_SCTLR2),
	NEEDS_FEAT(HCRX_EL2_TCR2En, FEAT_TCR2),
	NEEDS_FEAT(HCRX_EL2_MSCEn |
		   HCRX_EL2_MCE2,
		   FEAT_MOPS),
	NEEDS_FEAT(HCRX_EL2_CMOW, FEAT_CMOW),
	NEEDS_FEAT(HCRX_EL2_VFNMI |
		   HCRX_EL2_VINMI |
		   HCRX_EL2_TALLINT,
		   FEAT_NMI),
	NEEDS_FEAT(HCRX_EL2_SMPME, feat_sme_smps),
	NEEDS_FEAT(HCRX_EL2_FGTnXS |
		   HCRX_EL2_FnXS,
		   FEAT_XS),
	NEEDS_FEAT(HCRX_EL2_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(HCRX_EL2_EnALS, FEAT_LS64),
	NEEDS_FEAT(HCRX_EL2_EnAS0, FEAT_LS64_ACCDATA),
};


static const DECLARE_FEAT_MAP(hcrx_desc, __HCRX_EL2,
			      hcrx_feat_map, FEAT_HCX);
939
/* HCR_EL2: hypervisor configuration bits, by gating feature. */
static const struct reg_bits_to_feat_map hcr_feat_map[] = {
	NEEDS_FEAT(HCR_EL2_TID0, FEAT_AA32EL0),
	NEEDS_FEAT_FIXED(HCR_EL2_RW, compute_hcr_rw),
	NEEDS_FEAT(HCR_EL2_HCD, not_feat_aa64el3),
	/* Baseline AArch64 EL1 controls */
	NEEDS_FEAT(HCR_EL2_AMO |
		   HCR_EL2_BSU |
		   HCR_EL2_CD |
		   HCR_EL2_DC |
		   HCR_EL2_FB |
		   HCR_EL2_FMO |
		   HCR_EL2_ID |
		   HCR_EL2_IMO |
		   HCR_EL2_MIOCNCE |
		   HCR_EL2_PTW |
		   HCR_EL2_SWIO |
		   HCR_EL2_TACR |
		   HCR_EL2_TDZ |
		   HCR_EL2_TGE |
		   HCR_EL2_TID1 |
		   HCR_EL2_TID2 |
		   HCR_EL2_TID3 |
		   HCR_EL2_TIDCP |
		   HCR_EL2_TPCP |
		   HCR_EL2_TPU |
		   HCR_EL2_TRVM |
		   HCR_EL2_TSC |
		   HCR_EL2_TSW |
		   HCR_EL2_TTLB |
		   HCR_EL2_TVM |
		   HCR_EL2_TWE |
		   HCR_EL2_TWI |
		   HCR_EL2_VF |
		   HCR_EL2_VI |
		   HCR_EL2_VM |
		   HCR_EL2_VSE,
		   FEAT_AA64EL1),
	NEEDS_FEAT(HCR_EL2_AMVOFFEN, FEAT_AMUv1p1),
	NEEDS_FEAT(HCR_EL2_EnSCXT, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HCR_EL2_TICAB |
		   HCR_EL2_TID4 |
		   HCR_EL2_TOCU,
		   FEAT_EVT),
	NEEDS_FEAT(HCR_EL2_TTLBIS |
		   HCR_EL2_TTLBOS,
		   FEAT_EVT_TTLBxS),
	NEEDS_FEAT(HCR_EL2_TLOR, FEAT_LOR),
	NEEDS_FEAT(HCR_EL2_ATA |
		   HCR_EL2_DCT |
		   HCR_EL2_TID5,
		   FEAT_MTE2),
	NEEDS_FEAT(HCR_EL2_AT | /* Ignore the original FEAT_NV */
		   HCR_EL2_NV2 |
		   HCR_EL2_NV,
		   feat_nv2),
	NEEDS_FEAT(HCR_EL2_NV1, feat_nv2_e2h0_ni), /* Missing from JSON */
	NEEDS_FEAT(HCR_EL2_API |
		   HCR_EL2_APK,
		   feat_pauth),
	NEEDS_FEAT(HCR_EL2_TEA |
		   HCR_EL2_TERR,
		   FEAT_RAS),
	NEEDS_FEAT(HCR_EL2_FIEN, feat_rasv1p1),
	NEEDS_FEAT(HCR_EL2_GPF, FEAT_RME),
	NEEDS_FEAT(HCR_EL2_FWB, FEAT_S2FWB),
	NEEDS_FEAT(HCR_EL2_TME, FEAT_TME),
	NEEDS_FEAT(HCR_EL2_TWEDEL |
		   HCR_EL2_TWEDEn,
		   FEAT_TWED),
	NEEDS_FEAT_FIXED(HCR_EL2_E2H, compute_hcr_e2h),
};

static const DECLARE_FEAT_MAP(hcr_desc, HCR_EL2,
			      hcr_feat_map, FEAT_AA64EL2);
1013
/*
 * SCTLR2_ELx: bit -> feature dependencies. Bits whose feature is not
 * present for the guest are handled as RES0.
 */
static const struct reg_bits_to_feat_map sctlr2_feat_map[] = {
	NEEDS_FEAT(SCTLR2_EL1_NMEA |
		   SCTLR2_EL1_EASE,
		   FEAT_DoubleFault2),
	NEEDS_FEAT(SCTLR2_EL1_EnADERR, feat_aderr),
	NEEDS_FEAT(SCTLR2_EL1_EnANERR, feat_anerr),
	NEEDS_FEAT(SCTLR2_EL1_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(SCTLR2_EL1_EnPACM |
		   SCTLR2_EL1_EnPACM0,
		   feat_pauth_lr),
	NEEDS_FEAT(SCTLR2_EL1_CPTA |
		   SCTLR2_EL1_CPTA0 |
		   SCTLR2_EL1_CPTM |
		   SCTLR2_EL1_CPTM0,
		   FEAT_CPA2),
};

/* The register itself only exists with FEAT_SCTLR2 */
static const DECLARE_FEAT_MAP(sctlr2_desc, SCTLR2_EL1,
			      sctlr2_feat_map, FEAT_SCTLR2);
1033
/*
 * TCR2_EL2: bit -> feature dependencies. Bits whose feature is not
 * present for the guest are handled as RES0.
 */
static const struct reg_bits_to_feat_map tcr2_el2_feat_map[] = {
	NEEDS_FEAT(TCR2_EL2_FNG1 |
		   TCR2_EL2_FNG0 |
		   TCR2_EL2_A2,
		   feat_asid2_e2h1),
	NEEDS_FEAT(TCR2_EL2_DisCH1 |
		   TCR2_EL2_DisCH0 |
		   TCR2_EL2_D128,
		   feat_d128_e2h1),
	NEEDS_FEAT(TCR2_EL2_AMEC1, feat_mec_e2h1),
	NEEDS_FEAT(TCR2_EL2_AMEC0, FEAT_MEC),
	NEEDS_FEAT(TCR2_EL2_HAFT, FEAT_HAFT),
	NEEDS_FEAT(TCR2_EL2_PTTWI |
		   TCR2_EL2_PnCH,
		   FEAT_THE),
	NEEDS_FEAT(TCR2_EL2_AIE, FEAT_AIE),
	NEEDS_FEAT(TCR2_EL2_POE |
		   TCR2_EL2_E0POE,
		   FEAT_S1POE),
	NEEDS_FEAT(TCR2_EL2_PIE, FEAT_S1PIE),
};

/* The register itself only exists with FEAT_TCR2 */
static const DECLARE_FEAT_MAP(tcr2_el2_desc, TCR2_EL2,
			      tcr2_el2_feat_map, FEAT_TCR2);
1058
/*
 * SCTLR_EL1: bit -> feature dependencies. Bits whose feature is not
 * present for the guest are handled as RES0.
 */
static const struct reg_bits_to_feat_map sctlr_el1_feat_map[] = {
	NEEDS_FEAT(SCTLR_EL1_CP15BEN |
		   SCTLR_EL1_ITD |
		   SCTLR_EL1_SED,
		   FEAT_AA32EL0),
	NEEDS_FEAT(SCTLR_EL1_BT0 |
		   SCTLR_EL1_BT1,
		   FEAT_BTI),
	NEEDS_FEAT(SCTLR_EL1_CMOW, FEAT_CMOW),
	NEEDS_FEAT(SCTLR_EL1_TSCXT, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(SCTLR_EL1_EIS |
		   SCTLR_EL1_EOS,
		   FEAT_ExS),
	NEEDS_FEAT(SCTLR_EL1_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(SCTLR_EL1_IESB, FEAT_IESB),
	NEEDS_FEAT(SCTLR_EL1_EnALS, FEAT_LS64),
	NEEDS_FEAT(SCTLR_EL1_EnAS0, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(SCTLR_EL1_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(SCTLR_EL1_nAA, FEAT_LSE2),
	NEEDS_FEAT(SCTLR_EL1_LSMAOE |
		   SCTLR_EL1_nTLSMD,
		   FEAT_LSMAOC),
	NEEDS_FEAT(SCTLR_EL1_EE, FEAT_MixedEnd),
	NEEDS_FEAT(SCTLR_EL1_E0E, feat_mixedendel0),
	NEEDS_FEAT(SCTLR_EL1_MSCEn, FEAT_MOPS),
	NEEDS_FEAT(SCTLR_EL1_ATA0 |
		   SCTLR_EL1_ATA |
		   SCTLR_EL1_TCF0 |
		   SCTLR_EL1_TCF,
		   FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL1_ITFSB, feat_mte_async),
	NEEDS_FEAT(SCTLR_EL1_TCSO0 |
		   SCTLR_EL1_TCSO,
		   FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL1_NMI |
		   SCTLR_EL1_SPINTMASK,
		   FEAT_NMI),
	NEEDS_FEAT(SCTLR_EL1_SPAN, FEAT_PAN),
	NEEDS_FEAT(SCTLR_EL1_EPAN, FEAT_PAN3),
	NEEDS_FEAT(SCTLR_EL1_EnDA |
		   SCTLR_EL1_EnDB |
		   SCTLR_EL1_EnIA |
		   SCTLR_EL1_EnIB,
		   feat_pauth),
	NEEDS_FEAT(SCTLR_EL1_EnTP2, FEAT_SME),
	NEEDS_FEAT(SCTLR_EL1_EnRCTX, FEAT_SPECRES),
	NEEDS_FEAT(SCTLR_EL1_DSSBS, FEAT_SSBS),
	NEEDS_FEAT(SCTLR_EL1_TIDCP, FEAT_TIDCP1),
	NEEDS_FEAT(SCTLR_EL1_TME0 |
		   SCTLR_EL1_TME |
		   SCTLR_EL1_TMT0 |
		   SCTLR_EL1_TMT,
		   FEAT_TME),
	NEEDS_FEAT(SCTLR_EL1_TWEDEL |
		   SCTLR_EL1_TWEDEn,
		   FEAT_TWED),
	NEEDS_FEAT(SCTLR_EL1_UCI |
		   SCTLR_EL1_EE |
		   SCTLR_EL1_E0E |
		   SCTLR_EL1_WXN |
		   SCTLR_EL1_nTWE |
		   SCTLR_EL1_nTWI |
		   SCTLR_EL1_UCT |
		   SCTLR_EL1_DZE |
		   SCTLR_EL1_I |
		   SCTLR_EL1_UMA |
		   SCTLR_EL1_SA0 |
		   SCTLR_EL1_SA |
		   SCTLR_EL1_C |
		   SCTLR_EL1_A |
		   SCTLR_EL1_M,
		   FEAT_AA64EL1),
};

/* The register itself requires AArch64 EL1 */
static const DECLARE_FEAT_MAP(sctlr_el1_desc, SCTLR_EL1,
			      sctlr_el1_feat_map, FEAT_AA64EL1);
1135
/*
 * MDCR_EL2: bit -> feature dependencies. Bits whose feature is not
 * present for the guest are handled as RES0.
 */
static const struct reg_bits_to_feat_map mdcr_el2_feat_map[] = {
	NEEDS_FEAT(MDCR_EL2_EBWE, FEAT_Debugv8p9),
	NEEDS_FEAT(MDCR_EL2_TDOSA, FEAT_DoubleLock),
	NEEDS_FEAT(MDCR_EL2_PMEE, FEAT_EBEP),
	NEEDS_FEAT(MDCR_EL2_TDCC, FEAT_FGT),
	NEEDS_FEAT(MDCR_EL2_MTPME, FEAT_MTPMU),
	NEEDS_FEAT(MDCR_EL2_HPME |
		   MDCR_EL2_HPMN |
		   MDCR_EL2_TPMCR |
		   MDCR_EL2_TPM,
		   FEAT_PMUv3),
	NEEDS_FEAT(MDCR_EL2_HPMD, feat_pmuv3p1),
	NEEDS_FEAT(MDCR_EL2_HCCD |
		   MDCR_EL2_HLP,
		   feat_pmuv3p5),
	NEEDS_FEAT(MDCR_EL2_HPMFZO, feat_pmuv3p7),
	NEEDS_FEAT(MDCR_EL2_PMSSE, FEAT_PMUv3_SS),
	NEEDS_FEAT(MDCR_EL2_E2PB |
		   MDCR_EL2_TPMS,
		   FEAT_SPE),
	NEEDS_FEAT(MDCR_EL2_HPMFZS, FEAT_SPEv1p2),
	NEEDS_FEAT(MDCR_EL2_EnSPM, FEAT_SPMU),
	NEEDS_FEAT(MDCR_EL2_EnSTEPOP, FEAT_STEP2),
	NEEDS_FEAT(MDCR_EL2_E2TB, FEAT_TRBE),
	NEEDS_FEAT(MDCR_EL2_TTRF, FEAT_TRF),
	NEEDS_FEAT(MDCR_EL2_TDA |
		   MDCR_EL2_TDE |
		   MDCR_EL2_TDRA,
		   FEAT_AA64EL1),
};

/* The register itself requires AArch64 EL2 */
static const DECLARE_FEAT_MAP(mdcr_el2_desc, MDCR_EL2,
			      mdcr_el2_feat_map, FEAT_AA64EL2);
1169
check_feat_map(const struct reg_bits_to_feat_map * map,int map_size,u64 res0,const char * str)1170 static void __init check_feat_map(const struct reg_bits_to_feat_map *map,
1171 int map_size, u64 res0, const char *str)
1172 {
1173 u64 mask = 0;
1174
1175 for (int i = 0; i < map_size; i++)
1176 mask |= map[i].bits;
1177
1178 if (mask != ~res0)
1179 kvm_err("Undefined %s behaviour, bits %016llx\n",
1180 str, mask ^ ~res0);
1181 }
1182
reg_feat_map_bits(const struct reg_bits_to_feat_map * map)1183 static u64 reg_feat_map_bits(const struct reg_bits_to_feat_map *map)
1184 {
1185 return map->flags & RES0_POINTER ? ~(*map->res0p) : map->bits;
1186 }
1187
check_reg_desc(const struct reg_feat_map_desc * r)1188 static void __init check_reg_desc(const struct reg_feat_map_desc *r)
1189 {
1190 check_feat_map(r->bit_feat_map, r->bit_feat_map_sz,
1191 ~reg_feat_map_bits(&r->feat_map), r->name);
1192 }
1193
check_feature_map(void)1194 void __init check_feature_map(void)
1195 {
1196 check_reg_desc(&hfgrtr_desc);
1197 check_reg_desc(&hfgwtr_desc);
1198 check_reg_desc(&hfgitr_desc);
1199 check_reg_desc(&hdfgrtr_desc);
1200 check_reg_desc(&hdfgwtr_desc);
1201 check_reg_desc(&hafgrtr_desc);
1202 check_reg_desc(&hfgrtr2_desc);
1203 check_reg_desc(&hfgwtr2_desc);
1204 check_reg_desc(&hfgitr2_desc);
1205 check_reg_desc(&hdfgrtr2_desc);
1206 check_reg_desc(&hdfgwtr2_desc);
1207 check_reg_desc(&hcrx_desc);
1208 check_reg_desc(&hcr_desc);
1209 check_reg_desc(&sctlr2_desc);
1210 check_reg_desc(&tcr2_el2_desc);
1211 check_reg_desc(&sctlr_el1_desc);
1212 check_reg_desc(&mdcr_el2_desc);
1213 }
1214
idreg_feat_match(struct kvm * kvm,const struct reg_bits_to_feat_map * map)1215 static bool idreg_feat_match(struct kvm *kvm, const struct reg_bits_to_feat_map *map)
1216 {
1217 u64 regval = kvm->arch.id_regs[map->regidx];
1218 u64 regfld = (regval >> map->shift) & GENMASK(map->width - 1, 0);
1219
1220 if (map->sign) {
1221 s64 sfld = sign_extend64(regfld, map->width - 1);
1222 s64 slim = sign_extend64(map->lo_lim, map->width - 1);
1223 return sfld >= slim;
1224 } else {
1225 return regfld >= map->lo_lim;
1226 }
1227 }
1228
/*
 * Walk @map and accumulate the bits of every entry whose flags satisfy
 * @require/@exclude and whose feature dependency is NOT met (or which
 * carries FIXED_VALUE, in which case its value lands in @fixed_bits via
 * the fval() callback and its bits are always accumulated).
 */
static u64 __compute_fixed_bits(struct kvm *kvm,
				const struct reg_bits_to_feat_map *map,
				int map_size,
				u64 *fixed_bits,
				unsigned long require,
				unsigned long exclude)
{
	u64 res = 0;

	for (int i = 0; i < map_size; i++) {
		const struct reg_bits_to_feat_map *m = &map[i];
		bool active;

		/* Filter on the required/excluded flag sets */
		if ((m->flags & require) != require)
			continue;

		if (m->flags & exclude)
			continue;

		if (!(m->flags & CALL_FUNC))
			active = idreg_feat_match(kvm, m);
		else if (m->flags & FIXED_VALUE)
			active = m->fval(kvm, fixed_bits);
		else
			active = m->match(kvm);

		if (!active || (m->flags & FIXED_VALUE))
			res |= reg_feat_map_bits(m);
	}

	return res;
}
1260
/*
 * Compute the RES0 bits of @map for this VM. FIXED_VALUE entries are
 * always excluded here: they are handled by compute_reg_fixed_bits()
 * instead, so they never contribute to the RES0 mask directly.
 */
static u64 compute_res0_bits(struct kvm *kvm,
			     const struct reg_bits_to_feat_map *map,
			     int map_size,
			     unsigned long require,
			     unsigned long exclude)
{
	return __compute_fixed_bits(kvm, map, map_size, NULL,
				    require, exclude | FIXED_VALUE);
}
1270
compute_reg_res0_bits(struct kvm * kvm,const struct reg_feat_map_desc * r,unsigned long require,unsigned long exclude)1271 static u64 compute_reg_res0_bits(struct kvm *kvm,
1272 const struct reg_feat_map_desc *r,
1273 unsigned long require, unsigned long exclude)
1274
1275 {
1276 u64 res0;
1277
1278 res0 = compute_res0_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
1279 require, exclude);
1280
1281 /*
1282 * If computing FGUs, don't take RES0 or register existence
1283 * into account -- we're not computing bits for the register
1284 * itself.
1285 */
1286 if (!(exclude & NEVER_FGU)) {
1287 res0 |= compute_res0_bits(kvm, &r->feat_map, 1, require, exclude);
1288 res0 |= ~reg_feat_map_bits(&r->feat_map);
1289 }
1290
1291 return res0;
1292 }
1293
/*
 * Compute the fixed-value bits of the register described by @r. The
 * mask of fixed bits is returned; their values are written to
 * @fixed_bits by the FIXED_VALUE entries' callbacks.
 */
static u64 compute_reg_fixed_bits(struct kvm *kvm,
				  const struct reg_feat_map_desc *r,
				  u64 *fixed_bits, unsigned long require,
				  unsigned long exclude)
{
	unsigned long req = require | FIXED_VALUE;

	return __compute_fixed_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
				    fixed_bits, req, exclude);
}
1302
compute_fgu(struct kvm * kvm,enum fgt_group_id fgt)1303 void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
1304 {
1305 u64 val = 0;
1306
1307 switch (fgt) {
1308 case HFGRTR_GROUP:
1309 val |= compute_reg_res0_bits(kvm, &hfgrtr_desc,
1310 0, NEVER_FGU);
1311 val |= compute_reg_res0_bits(kvm, &hfgwtr_desc,
1312 0, NEVER_FGU);
1313 break;
1314 case HFGITR_GROUP:
1315 val |= compute_reg_res0_bits(kvm, &hfgitr_desc,
1316 0, NEVER_FGU);
1317 break;
1318 case HDFGRTR_GROUP:
1319 val |= compute_reg_res0_bits(kvm, &hdfgrtr_desc,
1320 0, NEVER_FGU);
1321 val |= compute_reg_res0_bits(kvm, &hdfgwtr_desc,
1322 0, NEVER_FGU);
1323 break;
1324 case HAFGRTR_GROUP:
1325 val |= compute_reg_res0_bits(kvm, &hafgrtr_desc,
1326 0, NEVER_FGU);
1327 break;
1328 case HFGRTR2_GROUP:
1329 val |= compute_reg_res0_bits(kvm, &hfgrtr2_desc,
1330 0, NEVER_FGU);
1331 val |= compute_reg_res0_bits(kvm, &hfgwtr2_desc,
1332 0, NEVER_FGU);
1333 break;
1334 case HFGITR2_GROUP:
1335 val |= compute_reg_res0_bits(kvm, &hfgitr2_desc,
1336 0, NEVER_FGU);
1337 break;
1338 case HDFGRTR2_GROUP:
1339 val |= compute_reg_res0_bits(kvm, &hdfgrtr2_desc,
1340 0, NEVER_FGU);
1341 val |= compute_reg_res0_bits(kvm, &hdfgwtr2_desc,
1342 0, NEVER_FGU);
1343 break;
1344 default:
1345 BUG();
1346 }
1347
1348 kvm->arch.fgu[fgt] = val;
1349 }
1350
/*
 * Compute the RES0/RES1 masks for @reg as restricted by the feature set
 * exposed to @kvm's guest. Unhandled registers warn once and return
 * all-zero masks.
 */
void get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg, u64 *res0, u64 *res1)
{
	u64 fixed = 0, mask;

	switch (reg) {
	case HFGRTR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgrtr_desc, 0, 0);
		*res1 = HFGRTR_EL2_RES1;
		break;
	case HFGWTR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgwtr_desc, 0, 0);
		*res1 = HFGWTR_EL2_RES1;
		break;
	case HFGITR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgitr_desc, 0, 0);
		*res1 = HFGITR_EL2_RES1;
		break;
	case HDFGRTR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hdfgrtr_desc, 0, 0);
		*res1 = HDFGRTR_EL2_RES1;
		break;
	case HDFGWTR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hdfgwtr_desc, 0, 0);
		*res1 = HDFGWTR_EL2_RES1;
		break;
	case HAFGRTR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hafgrtr_desc, 0, 0);
		*res1 = HAFGRTR_EL2_RES1;
		break;
	case HFGRTR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgrtr2_desc, 0, 0);
		*res1 = HFGRTR2_EL2_RES1;
		break;
	case HFGWTR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgwtr2_desc, 0, 0);
		*res1 = HFGWTR2_EL2_RES1;
		break;
	case HFGITR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hfgitr2_desc, 0, 0);
		*res1 = HFGITR2_EL2_RES1;
		break;
	case HDFGRTR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hdfgrtr2_desc, 0, 0);
		*res1 = HDFGRTR2_EL2_RES1;
		break;
	case HDFGWTR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hdfgwtr2_desc, 0, 0);
		*res1 = HDFGWTR2_EL2_RES1;
		break;
	case HCRX_EL2:
		*res0 = compute_reg_res0_bits(kvm, &hcrx_desc, 0, 0);
		*res1 = __HCRX_EL2_RES1;
		break;
	case HCR_EL2:
		/*
		 * HCR_EL2 has FIXED_VALUE entries (RW, E2H): bits in
		 * @mask are pinned -- to 1 (RES1) where @fixed has them
		 * set, to 0 (RES0) otherwise.
		 */
		mask = compute_reg_fixed_bits(kvm, &hcr_desc, &fixed, 0, 0);
		*res0 = compute_reg_res0_bits(kvm, &hcr_desc, 0, 0);
		*res0 |= (mask & ~fixed);
		*res1 = HCR_EL2_RES1 | (mask & fixed);
		break;
	case SCTLR2_EL1:
	case SCTLR2_EL2:
		/* Both ELs share the same feature map */
		*res0 = compute_reg_res0_bits(kvm, &sctlr2_desc, 0, 0);
		*res1 = SCTLR2_EL1_RES1;
		break;
	case TCR2_EL2:
		*res0 = compute_reg_res0_bits(kvm, &tcr2_el2_desc, 0, 0);
		*res1 = TCR2_EL2_RES1;
		break;
	case SCTLR_EL1:
		*res0 = compute_reg_res0_bits(kvm, &sctlr_el1_desc, 0, 0);
		*res1 = SCTLR_EL1_RES1;
		break;
	case MDCR_EL2:
		*res0 = compute_reg_res0_bits(kvm, &mdcr_el2_desc, 0, 0);
		*res1 = MDCR_EL2_RES1;
		break;
	default:
		WARN_ON_ONCE(1);
		*res0 = *res1 = 0;
		break;
	}
}
1433
/*
 * Map an FGT register to its global fgt_masks descriptor. Calling this
 * with anything else is rejected at compile time (BUILD_BUG_ON), which
 * is why the function must be __always_inline.
 */
static __always_inline struct fgt_masks *__fgt_reg_to_masks(enum vcpu_sysreg reg)
{
	switch (reg) {
	case HFGRTR_EL2:
		return &hfgrtr_masks;
	case HFGWTR_EL2:
		return &hfgwtr_masks;
	case HFGITR_EL2:
		return &hfgitr_masks;
	case HDFGRTR_EL2:
		return &hdfgrtr_masks;
	case HDFGWTR_EL2:
		return &hdfgwtr_masks;
	case HAFGRTR_EL2:
		return &hafgrtr_masks;
	case HFGRTR2_EL2:
		return &hfgrtr2_masks;
	case HFGWTR2_EL2:
		return &hfgwtr2_masks;
	case HFGITR2_EL2:
		return &hfgitr2_masks;
	case HDFGRTR2_EL2:
		return &hdfgrtr2_masks;
	case HDFGWTR2_EL2:
		return &hdfgwtr2_masks;
	default:
		BUILD_BUG_ON(1);
	}
}
1463
/*
 * Compute the effective value of FGT register @reg for this vcpu:
 * combine the VM-wide FGU bits with, for a nested context, the guest
 * hypervisor's own FGT configuration.
 *
 * NOTE(review): this assumes m->mask covers the positive-polarity
 * (1 == trap) bits and m->nmask the negative-polarity (0 == trap)
 * bits of the register -- confirm against the fgt_masks definition.
 */
static __always_inline void __compute_fgt(struct kvm_vcpu *vcpu, enum vcpu_sysreg reg)
{
	u64 fgu = vcpu->kvm->arch.fgu[__fgt_reg_to_group_id(reg)];
	struct fgt_masks *m = __fgt_reg_to_masks(reg);
	/* Default: all nmask bits set, i.e. nothing trapped */
	u64 clear = 0, set = 0, val = m->nmask;

	/* FGU bits are forced to their trapping state */
	set |= fgu & m->mask;
	clear |= fgu & m->nmask;

	if (is_nested_ctxt(vcpu)) {
		/* Also honour the guest hypervisor's trap configuration */
		u64 nested = __vcpu_sys_reg(vcpu, reg);
		set |= nested & m->mask;
		clear |= ~nested & m->nmask;
	}

	val |= set;
	val &= ~clear;
	*vcpu_fgt(vcpu, reg) = val;
}
1483
__compute_hfgwtr(struct kvm_vcpu * vcpu)1484 static void __compute_hfgwtr(struct kvm_vcpu *vcpu)
1485 {
1486 __compute_fgt(vcpu, HFGWTR_EL2);
1487
1488 if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
1489 *vcpu_fgt(vcpu, HFGWTR_EL2) |= HFGWTR_EL2_TCR_EL1;
1490 }
1491
__compute_hdfgwtr(struct kvm_vcpu * vcpu)1492 static void __compute_hdfgwtr(struct kvm_vcpu *vcpu)
1493 {
1494 __compute_fgt(vcpu, HDFGWTR_EL2);
1495
1496 if (is_hyp_ctxt(vcpu))
1497 *vcpu_fgt(vcpu, HDFGWTR_EL2) |= HDFGWTR_EL2_MDSCR_EL1;
1498 }
1499
/*
 * Compute the shadow FGT register values for @vcpu. Nothing to do if
 * the host has no FGT support; the FGT2 set is only computed when
 * ARM64_HAS_FGT2 is present as well.
 */
void kvm_vcpu_load_fgt(struct kvm_vcpu *vcpu)
{
	if (!cpus_have_final_cap(ARM64_HAS_FGT))
		return;

	__compute_fgt(vcpu, HFGRTR_EL2);
	/* The two write-trap registers need extra, dedicated handling */
	__compute_hfgwtr(vcpu);
	__compute_fgt(vcpu, HFGITR_EL2);
	__compute_fgt(vcpu, HDFGRTR_EL2);
	__compute_hdfgwtr(vcpu);
	__compute_fgt(vcpu, HAFGRTR_EL2);

	if (!cpus_have_final_cap(ARM64_HAS_FGT2))
		return;

	__compute_fgt(vcpu, HFGRTR2_EL2);
	__compute_fgt(vcpu, HFGWTR2_EL2);
	__compute_fgt(vcpu, HFGITR2_EL2);
	__compute_fgt(vcpu, HDFGRTR2_EL2);
	__compute_fgt(vcpu, HDFGWTR2_EL2);
}
1521