1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (C) 2025 Google LLC 4 * Author: Marc Zyngier <maz@kernel.org> 5 */ 6 7 #include <linux/kvm_host.h> 8 #include <asm/kvm_emulate.h> 9 #include <asm/kvm_nested.h> 10 #include <asm/sysreg.h> 11 12 /* 13 * Describes the dependencies between a set of bits (or the negation 14 * of a set of RES0 bits) and a feature. The flags indicate how the 15 * data is interpreted. 16 */ 17 struct reg_bits_to_feat_map { 18 union { 19 u64 bits; 20 struct fgt_masks *masks; 21 }; 22 23 #define NEVER_FGU BIT(0) /* Can trap, but never UNDEF */ 24 #define CALL_FUNC BIT(1) /* Needs to evaluate tons of crap */ 25 #define FORCE_RESx BIT(2) /* Unconditional RESx */ 26 #define MASKS_POINTER BIT(3) /* Pointer to fgt_masks struct instead of bits */ 27 #define AS_RES1 BIT(4) /* RES1 when not supported */ 28 #define REQUIRES_E2H1 BIT(5) /* Add HCR_EL2.E2H RES1 as a pre-condition */ 29 #define RES1_WHEN_E2H0 BIT(6) /* RES1 when E2H=0 and not supported */ 30 #define RES1_WHEN_E2H1 BIT(7) /* RES1 when E2H=1 and not supported */ 31 32 unsigned long flags; 33 34 union { 35 struct { 36 u8 regidx; 37 u8 shift; 38 u8 width; 39 bool sign; 40 s8 lo_lim; 41 }; 42 bool (*match)(struct kvm *); 43 }; 44 }; 45 46 /* 47 * Describes the dependencies for a given register: 48 * 49 * @feat_map describes the dependency for the whole register. If the 50 * features the register depends on are not present, the whole 51 * register is effectively RES0. 52 * 53 * @bit_feat_map describes the dependencies for a set of bits in that 54 * register. If the features these bits depend on are not present, the 55 * bits are effectively RES0. 56 */ 57 struct reg_feat_map_desc { 58 const char *name; 59 const struct reg_bits_to_feat_map feat_map; 60 const struct reg_bits_to_feat_map *bit_feat_map; 61 const unsigned int bit_feat_map_sz; 62 }; 63 64 #define __NEEDS_FEAT_3(m, f, w, id, fld, lim) \ 65 { \ 66 .w = (m), \ 67 .flags = (f), \ 68 .regidx = IDREG_IDX(SYS_ ## id), \ 69 .shift = id ##_## fld ## _SHIFT, \ 70 .width = id ##_## fld ## _WIDTH, \ 71 .sign = id ##_## fld ## _SIGNED, \ 72 .lo_lim = id ##_## fld ##_## lim \ 73 } 74 75 #define __NEEDS_FEAT_1(m, f, w, fun) \ 76 { \ 77 .w = (m), \ 78 .flags = (f) | CALL_FUNC, \ 79 .match = (fun), \ 80 } 81 82 #define __NEEDS_FEAT_0(m, f, w, ...) \ 83 { \ 84 .w = (m), \ 85 .flags = (f), \ 86 } 87 88 #define __NEEDS_FEAT_FLAG(m, f, w, ...) \ 89 CONCATENATE(__NEEDS_FEAT_, COUNT_ARGS(__VA_ARGS__))(m, f, w, __VA_ARGS__) 90 91 #define NEEDS_FEAT_FLAG(m, f, ...) \ 92 __NEEDS_FEAT_FLAG(m, f, bits, __VA_ARGS__) 93 94 #define NEEDS_FEAT_MASKS(p, ...) \ 95 __NEEDS_FEAT_FLAG(p, MASKS_POINTER, masks, __VA_ARGS__) 96 97 /* 98 * Declare the dependency between a set of bits and a set of features, 99 * generating a struct reg_bit_to_feat_map. 100 */ 101 #define NEEDS_FEAT(m, ...) NEEDS_FEAT_FLAG(m, 0, __VA_ARGS__) 102 103 /* Declare fixed RESx bits */ 104 #define FORCE_RES0(m) NEEDS_FEAT_FLAG(m, FORCE_RESx) 105 #define FORCE_RES1(m) NEEDS_FEAT_FLAG(m, FORCE_RESx | AS_RES1) 106 107 /* 108 * Declare the dependency between a non-FGT register, a set of features, 109 * and the set of individual bits it contains. This generates a struct 110 * reg_feat_map_desc. 111 */ 112 #define DECLARE_FEAT_MAP(n, r, m, f) \ 113 struct reg_feat_map_desc n = { \ 114 .name = #r, \ 115 .feat_map = NEEDS_FEAT(~(r##_RES0 | \ 116 r##_RES1), f), \ 117 .bit_feat_map = m, \ 118 .bit_feat_map_sz = ARRAY_SIZE(m), \ 119 } 120 121 /* 122 * Specialised version of the above for FGT registers that have their 123 * RESx masks described as struct fgt_masks. 124 */ 125 #define DECLARE_FEAT_MAP_FGT(n, msk, m, f) \ 126 struct reg_feat_map_desc n = { \ 127 .name = #msk, \ 128 .feat_map = NEEDS_FEAT_MASKS(&msk, f), \ 129 .bit_feat_map = m, \ 130 .bit_feat_map_sz = ARRAY_SIZE(m), \ 131 } 132 133 #define FEAT_SPE ID_AA64DFR0_EL1, PMSVer, IMP 134 #define FEAT_SPE_FnE ID_AA64DFR0_EL1, PMSVer, V1P2 135 #define FEAT_BRBE ID_AA64DFR0_EL1, BRBE, IMP 136 #define FEAT_TRC_SR ID_AA64DFR0_EL1, TraceVer, IMP 137 #define FEAT_PMUv3 ID_AA64DFR0_EL1, PMUVer, IMP 138 #define FEAT_TRBE ID_AA64DFR0_EL1, TraceBuffer, IMP 139 #define FEAT_TRBEv1p1 ID_AA64DFR0_EL1, TraceBuffer, TRBE_V1P1 140 #define FEAT_DoubleLock ID_AA64DFR0_EL1, DoubleLock, IMP 141 #define FEAT_TRF ID_AA64DFR0_EL1, TraceFilt, IMP 142 #define FEAT_AA32EL0 ID_AA64PFR0_EL1, EL0, AARCH32 143 #define FEAT_AA32EL1 ID_AA64PFR0_EL1, EL1, AARCH32 144 #define FEAT_AA64EL1 ID_AA64PFR0_EL1, EL1, IMP 145 #define FEAT_AA64EL2 ID_AA64PFR0_EL1, EL2, IMP 146 #define FEAT_AA64EL3 ID_AA64PFR0_EL1, EL3, IMP 147 #define FEAT_SEL2 ID_AA64PFR0_EL1, SEL2, IMP 148 #define FEAT_AIE ID_AA64MMFR3_EL1, AIE, IMP 149 #define FEAT_S2POE ID_AA64MMFR3_EL1, S2POE, IMP 150 #define FEAT_S1POE ID_AA64MMFR3_EL1, S1POE, IMP 151 #define FEAT_S1PIE ID_AA64MMFR3_EL1, S1PIE, IMP 152 #define FEAT_THE ID_AA64PFR1_EL1, THE, IMP 153 #define FEAT_SME ID_AA64PFR1_EL1, SME, IMP 154 #define FEAT_GCS ID_AA64PFR1_EL1, GCS, IMP 155 #define FEAT_LS64 ID_AA64ISAR1_EL1, LS64, LS64 156 #define FEAT_LS64_V ID_AA64ISAR1_EL1, LS64, LS64_V 157 #define FEAT_LS64_ACCDATA ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA 158 #define FEAT_RAS ID_AA64PFR0_EL1, RAS, IMP 159 #define FEAT_RASv2 ID_AA64PFR0_EL1, RAS, V2 160 #define FEAT_GICv3 ID_AA64PFR0_EL1, GIC, IMP 161 #define FEAT_LOR ID_AA64MMFR1_EL1, LO, IMP 162 #define FEAT_SPEv1p2 ID_AA64DFR0_EL1, PMSVer, V1P2 163 #define FEAT_SPEv1p4 ID_AA64DFR0_EL1, PMSVer, V1P4 164 #define FEAT_SPEv1p5 ID_AA64DFR0_EL1, PMSVer, V1P5 165 #define FEAT_ATS1A ID_AA64ISAR2_EL1, ATS1A, IMP 166 #define FEAT_SPECRES2 ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX 167 #define FEAT_SPECRES ID_AA64ISAR1_EL1, SPECRES, IMP 168 #define FEAT_TLBIRANGE ID_AA64ISAR0_EL1, TLB, RANGE 169 #define FEAT_TLBIOS ID_AA64ISAR0_EL1, TLB, OS 170 #define FEAT_PAN2 ID_AA64MMFR1_EL1, PAN, PAN2 171 #define FEAT_DPB2 ID_AA64ISAR1_EL1, DPB, DPB2 172 #define FEAT_AMUv1 ID_AA64PFR0_EL1, AMU, IMP 173 #define FEAT_AMUv1p1 ID_AA64PFR0_EL1, AMU, V1P1 174 #define FEAT_CMOW ID_AA64MMFR1_EL1, CMOW, IMP 175 #define FEAT_D128 ID_AA64MMFR3_EL1, D128, IMP 176 #define FEAT_DoubleFault2 ID_AA64PFR1_EL1, DF2, IMP 177 #define FEAT_FPMR ID_AA64PFR2_EL1, FPMR, IMP 178 #define FEAT_MOPS ID_AA64ISAR2_EL1, MOPS, IMP 179 #define FEAT_NMI ID_AA64PFR1_EL1, NMI, IMP 180 #define FEAT_SCTLR2 ID_AA64MMFR3_EL1, SCTLRX, IMP 181 #define FEAT_SYSREG128 ID_AA64ISAR2_EL1, SYSREG_128, IMP 182 #define FEAT_TCR2 ID_AA64MMFR3_EL1, TCRX, IMP 183 #define FEAT_XS ID_AA64ISAR1_EL1, XS, IMP 184 #define FEAT_EVT ID_AA64MMFR2_EL1, EVT, IMP 185 #define FEAT_EVT_TTLBxS ID_AA64MMFR2_EL1, EVT, TTLBxS 186 #define FEAT_MTE2 ID_AA64PFR1_EL1, MTE, MTE2 187 #define FEAT_RME ID_AA64PFR0_EL1, RME, IMP 188 #define FEAT_MPAM ID_AA64PFR0_EL1, MPAM, 1 189 #define FEAT_S2FWB ID_AA64MMFR2_EL1, FWB, IMP 190 #define FEAT_TWED ID_AA64MMFR1_EL1, TWED, IMP 191 #define FEAT_E2H0 ID_AA64MMFR4_EL1, E2H0, IMP 192 #define FEAT_SRMASK ID_AA64MMFR4_EL1, SRMASK, IMP 193 #define FEAT_PoPS ID_AA64MMFR4_EL1, PoPS, IMP 194 #define FEAT_PFAR ID_AA64PFR1_EL1, PFAR, IMP 195 #define FEAT_Debugv8p9 ID_AA64DFR0_EL1, PMUVer, V3P9 196 #define FEAT_PMUv3_SS ID_AA64DFR0_EL1, PMSS, IMP 197 #define FEAT_SEBEP ID_AA64DFR0_EL1, SEBEP, IMP 198 #define FEAT_EBEP ID_AA64DFR1_EL1, EBEP, IMP 199 #define FEAT_ITE ID_AA64DFR1_EL1, ITE, IMP 200 #define FEAT_PMUv3_ICNTR ID_AA64DFR1_EL1, PMICNTR, IMP 201 #define FEAT_SPMU ID_AA64DFR1_EL1, SPMU, IMP 202 #define FEAT_SPE_nVM ID_AA64DFR2_EL1, SPE_nVM, IMP 203 #define FEAT_STEP2 ID_AA64DFR2_EL1, STEP, IMP 204 #define FEAT_CPA2 ID_AA64ISAR3_EL1, CPA, CPA2 205 #define FEAT_ASID2 ID_AA64MMFR4_EL1, ASID2, IMP 206 #define FEAT_MEC ID_AA64MMFR3_EL1, MEC, IMP 207 #define FEAT_HAFT ID_AA64MMFR1_EL1, HAFDBS, HAFT 208 #define FEAT_HDBSS ID_AA64MMFR1_EL1, HAFDBS, HDBSS 209 #define FEAT_HPDS2 ID_AA64MMFR1_EL1, HPDS, HPDS2 210 #define FEAT_BTI ID_AA64PFR1_EL1, BT, IMP 211 #define FEAT_ExS ID_AA64MMFR0_EL1, EXS, IMP 212 #define FEAT_IESB ID_AA64MMFR2_EL1, IESB, IMP 213 #define FEAT_LSE2 ID_AA64MMFR2_EL1, AT, IMP 214 #define FEAT_LSMAOC ID_AA64MMFR2_EL1, LSM, IMP 215 #define FEAT_MixedEnd ID_AA64MMFR0_EL1, BIGEND, IMP 216 #define FEAT_MixedEndEL0 ID_AA64MMFR0_EL1, BIGENDEL0, IMP 217 #define FEAT_MTE_ASYNC ID_AA64PFR1_EL1, MTE_frac, ASYNC 218 #define FEAT_MTE_STORE_ONLY ID_AA64PFR2_EL1, MTESTOREONLY, IMP 219 #define FEAT_PAN ID_AA64MMFR1_EL1, PAN, IMP 220 #define FEAT_PAN3 ID_AA64MMFR1_EL1, PAN, PAN3 221 #define FEAT_SSBS ID_AA64PFR1_EL1, SSBS, IMP 222 #define FEAT_TIDCP1 ID_AA64MMFR1_EL1, TIDCP1, IMP 223 #define FEAT_FGT ID_AA64MMFR0_EL1, FGT, IMP 224 #define FEAT_FGT2 ID_AA64MMFR0_EL1, FGT, FGT2 225 #define FEAT_MTPMU ID_AA64DFR0_EL1, MTPMU, IMP 226 #define FEAT_HCX ID_AA64MMFR1_EL1, HCX, IMP 227 #define FEAT_S2PIE ID_AA64MMFR3_EL1, S2PIE, IMP 228 #define FEAT_GCIE ID_AA64PFR2_EL1, GCIE, IMP 229 230 static bool not_feat_aa64el3(struct kvm *kvm) 231 { 232 return !kvm_has_feat(kvm, FEAT_AA64EL3); 233 } 234 235 static bool feat_nv2(struct kvm *kvm) 236 { 237 return ((kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY) && 238 kvm_has_feat_enum(kvm, ID_AA64MMFR2_EL1, NV, NI)) || 239 kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2)); 240 } 241 242 static bool feat_nv2_e2h0_ni(struct kvm *kvm) 243 { 244 return feat_nv2(kvm) && !kvm_has_feat(kvm, FEAT_E2H0); 245 } 246 247 static bool feat_rasv1p1(struct kvm *kvm) 248 { 249 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) || 250 (kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, RAS, IMP) && 251 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1))); 252 } 253 254 static bool feat_csv2_2_csv2_1p2(struct kvm *kvm) 255 { 256 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) || 257 (kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2) && 258 kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, CSV2, IMP))); 259 } 260 261 static bool feat_pauth(struct kvm *kvm) 262 { 263 return kvm_has_pauth(kvm, PAuth); 264 } 265 266 static bool feat_pauth_lr(struct kvm *kvm) 267 { 268 return kvm_has_pauth(kvm, PAuth_LR); 269 } 270 271 static bool feat_aderr(struct kvm *kvm) 272 { 273 return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, FEAT_ADERR) && 274 kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SDERR, FEAT_ADERR)); 275 } 276 277 static bool feat_anerr(struct kvm *kvm) 278 { 279 return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ANERR, FEAT_ANERR) && 280 kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SNERR, FEAT_ANERR)); 281 } 282 283 static bool feat_sme_smps(struct kvm *kvm) 284 { 285 /* 286 * Revists this if KVM ever supports SME -- this really should 287 * look at the guest's view of SMIDR_EL1. Funnily enough, this 288 * is not captured in the JSON file, but only as a note in the 289 * ARM ARM. 290 */ 291 return (kvm_has_feat(kvm, FEAT_SME) && 292 (read_sysreg_s(SYS_SMIDR_EL1) & SMIDR_EL1_SMPS)); 293 } 294 295 static bool feat_spe_fds(struct kvm *kvm) 296 { 297 /* 298 * Revists this if KVM ever supports SPE -- this really should 299 * look at the guest's view of PMSIDR_EL1. 300 */ 301 return (kvm_has_feat(kvm, FEAT_SPEv1p4) && 302 (read_sysreg_s(SYS_PMSIDR_EL1) & PMSIDR_EL1_FDS)); 303 } 304 305 static bool feat_trbe_mpam(struct kvm *kvm) 306 { 307 /* 308 * Revists this if KVM ever supports both MPAM and TRBE -- 309 * this really should look at the guest's view of TRBIDR_EL1. 310 */ 311 return (kvm_has_feat(kvm, FEAT_TRBE) && 312 kvm_has_feat(kvm, FEAT_MPAM) && 313 (read_sysreg_s(SYS_TRBIDR_EL1) & TRBIDR_EL1_MPAM)); 314 } 315 316 static bool feat_ebep_pmuv3_ss(struct kvm *kvm) 317 { 318 return kvm_has_feat(kvm, FEAT_EBEP) || kvm_has_feat(kvm, FEAT_PMUv3_SS); 319 } 320 321 static bool feat_mixedendel0(struct kvm *kvm) 322 { 323 return kvm_has_feat(kvm, FEAT_MixedEnd) || kvm_has_feat(kvm, FEAT_MixedEndEL0); 324 } 325 326 static bool feat_mte_async(struct kvm *kvm) 327 { 328 return kvm_has_feat(kvm, FEAT_MTE2) && kvm_has_feat_enum(kvm, FEAT_MTE_ASYNC); 329 } 330 331 #define check_pmu_revision(k, r) \ 332 ({ \ 333 (kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, r) && \ 334 !kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, IMP_DEF)); \ 335 }) 336 337 static bool feat_pmuv3p1(struct kvm *kvm) 338 { 339 return check_pmu_revision(kvm, V3P1); 340 } 341 342 static bool feat_pmuv3p5(struct kvm *kvm) 343 { 344 return check_pmu_revision(kvm, V3P5); 345 } 346 347 static bool feat_pmuv3p7(struct kvm *kvm) 348 { 349 return check_pmu_revision(kvm, V3P7); 350 } 351 352 static bool feat_pmuv3p9(struct kvm *kvm) 353 { 354 return check_pmu_revision(kvm, V3P9); 355 } 356 357 #define has_feat_s2tgran(k, s) \ 358 ((kvm_has_feat_enum(kvm, ID_AA64MMFR0_EL1, TGRAN##s##_2, TGRAN##s) && \ 359 kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN##s, IMP)) || \ 360 kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN##s##_2, IMP)) 361 362 static bool feat_lpa2(struct kvm *kvm) 363 { 364 return ((kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, 52_BIT) || 365 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, IMP)) && 366 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, 52_BIT) || 367 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, IMP)) && 368 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4_2, 52_BIT) || 369 !has_feat_s2tgran(kvm, 4)) && 370 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16_2, 52_BIT) || 371 !has_feat_s2tgran(kvm, 16))); 372 } 373 374 static bool feat_vmid16(struct kvm *kvm) 375 { 376 return kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16); 377 } 378 379 static const struct reg_bits_to_feat_map hfgrtr_feat_map[] = { 380 NEEDS_FEAT(HFGRTR_EL2_nAMAIR2_EL1 | 381 HFGRTR_EL2_nMAIR2_EL1, 382 FEAT_AIE), 383 NEEDS_FEAT(HFGRTR_EL2_nS2POR_EL1, FEAT_S2POE), 384 NEEDS_FEAT(HFGRTR_EL2_nPOR_EL1 | 385 HFGRTR_EL2_nPOR_EL0, 386 FEAT_S1POE), 387 NEEDS_FEAT(HFGRTR_EL2_nPIR_EL1 | 388 HFGRTR_EL2_nPIRE0_EL1, 389 FEAT_S1PIE), 390 NEEDS_FEAT(HFGRTR_EL2_nRCWMASK_EL1, FEAT_THE), 391 NEEDS_FEAT(HFGRTR_EL2_nTPIDR2_EL0 | 392 HFGRTR_EL2_nSMPRI_EL1, 393 FEAT_SME), 394 NEEDS_FEAT(HFGRTR_EL2_nGCS_EL1 | 395 HFGRTR_EL2_nGCS_EL0, 396 FEAT_GCS), 397 NEEDS_FEAT(HFGRTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA), 398 NEEDS_FEAT(HFGRTR_EL2_ERXADDR_EL1 | 399 HFGRTR_EL2_ERXMISCn_EL1 | 400 HFGRTR_EL2_ERXSTATUS_EL1 | 401 HFGRTR_EL2_ERXCTLR_EL1 | 402 HFGRTR_EL2_ERXFR_EL1 | 403 HFGRTR_EL2_ERRSELR_EL1 | 404 HFGRTR_EL2_ERRIDR_EL1, 405 FEAT_RAS), 406 NEEDS_FEAT(HFGRTR_EL2_ERXPFGCDN_EL1 | 407 HFGRTR_EL2_ERXPFGCTL_EL1 | 408 HFGRTR_EL2_ERXPFGF_EL1, 409 feat_rasv1p1), 410 NEEDS_FEAT(HFGRTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3), 411 NEEDS_FEAT(HFGRTR_EL2_SCXTNUM_EL0 | 412 HFGRTR_EL2_SCXTNUM_EL1, 413 feat_csv2_2_csv2_1p2), 414 NEEDS_FEAT(HFGRTR_EL2_LORSA_EL1 | 415 HFGRTR_EL2_LORN_EL1 | 416 HFGRTR_EL2_LORID_EL1 | 417 HFGRTR_EL2_LOREA_EL1 | 418 HFGRTR_EL2_LORC_EL1, 419 FEAT_LOR), 420 NEEDS_FEAT(HFGRTR_EL2_APIBKey | 421 HFGRTR_EL2_APIAKey | 422 HFGRTR_EL2_APGAKey | 423 HFGRTR_EL2_APDBKey | 424 HFGRTR_EL2_APDAKey, 425 feat_pauth), 426 NEEDS_FEAT_FLAG(HFGRTR_EL2_VBAR_EL1 | 427 HFGRTR_EL2_TTBR1_EL1 | 428 HFGRTR_EL2_TTBR0_EL1 | 429 HFGRTR_EL2_TPIDR_EL0 | 430 HFGRTR_EL2_TPIDRRO_EL0 | 431 HFGRTR_EL2_TPIDR_EL1 | 432 HFGRTR_EL2_TCR_EL1 | 433 HFGRTR_EL2_SCTLR_EL1 | 434 HFGRTR_EL2_REVIDR_EL1 | 435 HFGRTR_EL2_PAR_EL1 | 436 HFGRTR_EL2_MPIDR_EL1 | 437 HFGRTR_EL2_MIDR_EL1 | 438 HFGRTR_EL2_MAIR_EL1 | 439 HFGRTR_EL2_ISR_EL1 | 440 HFGRTR_EL2_FAR_EL1 | 441 HFGRTR_EL2_ESR_EL1 | 442 HFGRTR_EL2_DCZID_EL0 | 443 HFGRTR_EL2_CTR_EL0 | 444 HFGRTR_EL2_CSSELR_EL1 | 445 HFGRTR_EL2_CPACR_EL1 | 446 HFGRTR_EL2_CONTEXTIDR_EL1| 447 HFGRTR_EL2_CLIDR_EL1 | 448 HFGRTR_EL2_CCSIDR_EL1 | 449 HFGRTR_EL2_AMAIR_EL1 | 450 HFGRTR_EL2_AIDR_EL1 | 451 HFGRTR_EL2_AFSR1_EL1 | 452 HFGRTR_EL2_AFSR0_EL1, 453 NEVER_FGU, FEAT_AA64EL1), 454 }; 455 456 457 static const DECLARE_FEAT_MAP_FGT(hfgrtr_desc, hfgrtr_masks, 458 hfgrtr_feat_map, FEAT_FGT); 459 460 static const struct reg_bits_to_feat_map hfgwtr_feat_map[] = { 461 NEEDS_FEAT(HFGWTR_EL2_nAMAIR2_EL1 | 462 HFGWTR_EL2_nMAIR2_EL1, 463 FEAT_AIE), 464 NEEDS_FEAT(HFGWTR_EL2_nS2POR_EL1, FEAT_S2POE), 465 NEEDS_FEAT(HFGWTR_EL2_nPOR_EL1 | 466 HFGWTR_EL2_nPOR_EL0, 467 FEAT_S1POE), 468 NEEDS_FEAT(HFGWTR_EL2_nPIR_EL1 | 469 HFGWTR_EL2_nPIRE0_EL1, 470 FEAT_S1PIE), 471 NEEDS_FEAT(HFGWTR_EL2_nRCWMASK_EL1, FEAT_THE), 472 NEEDS_FEAT(HFGWTR_EL2_nTPIDR2_EL0 | 473 HFGWTR_EL2_nSMPRI_EL1, 474 FEAT_SME), 475 NEEDS_FEAT(HFGWTR_EL2_nGCS_EL1 | 476 HFGWTR_EL2_nGCS_EL0, 477 FEAT_GCS), 478 NEEDS_FEAT(HFGWTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA), 479 NEEDS_FEAT(HFGWTR_EL2_ERXADDR_EL1 | 480 HFGWTR_EL2_ERXMISCn_EL1 | 481 HFGWTR_EL2_ERXSTATUS_EL1 | 482 HFGWTR_EL2_ERXCTLR_EL1 | 483 HFGWTR_EL2_ERRSELR_EL1, 484 FEAT_RAS), 485 NEEDS_FEAT(HFGWTR_EL2_ERXPFGCDN_EL1 | 486 HFGWTR_EL2_ERXPFGCTL_EL1, 487 feat_rasv1p1), 488 NEEDS_FEAT(HFGWTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3), 489 NEEDS_FEAT(HFGWTR_EL2_SCXTNUM_EL0 | 490 HFGWTR_EL2_SCXTNUM_EL1, 491 feat_csv2_2_csv2_1p2), 492 NEEDS_FEAT(HFGWTR_EL2_LORSA_EL1 | 493 HFGWTR_EL2_LORN_EL1 | 494 HFGWTR_EL2_LOREA_EL1 | 495 HFGWTR_EL2_LORC_EL1, 496 FEAT_LOR), 497 NEEDS_FEAT(HFGWTR_EL2_APIBKey | 498 HFGWTR_EL2_APIAKey | 499 HFGWTR_EL2_APGAKey | 500 HFGWTR_EL2_APDBKey | 501 HFGWTR_EL2_APDAKey, 502 feat_pauth), 503 NEEDS_FEAT_FLAG(HFGWTR_EL2_VBAR_EL1 | 504 HFGWTR_EL2_TTBR1_EL1 | 505 HFGWTR_EL2_TTBR0_EL1 | 506 HFGWTR_EL2_TPIDR_EL0 | 507 HFGWTR_EL2_TPIDRRO_EL0 | 508 HFGWTR_EL2_TPIDR_EL1 | 509 HFGWTR_EL2_TCR_EL1 | 510 HFGWTR_EL2_SCTLR_EL1 | 511 HFGWTR_EL2_PAR_EL1 | 512 HFGWTR_EL2_MAIR_EL1 | 513 HFGWTR_EL2_FAR_EL1 | 514 HFGWTR_EL2_ESR_EL1 | 515 HFGWTR_EL2_CSSELR_EL1 | 516 HFGWTR_EL2_CPACR_EL1 | 517 HFGWTR_EL2_CONTEXTIDR_EL1| 518 HFGWTR_EL2_AMAIR_EL1 | 519 HFGWTR_EL2_AFSR1_EL1 | 520 HFGWTR_EL2_AFSR0_EL1, 521 NEVER_FGU, FEAT_AA64EL1), 522 }; 523 524 static const DECLARE_FEAT_MAP_FGT(hfgwtr_desc, hfgwtr_masks, 525 hfgwtr_feat_map, FEAT_FGT); 526 527 static const struct reg_bits_to_feat_map hdfgrtr_feat_map[] = { 528 NEEDS_FEAT(HDFGRTR_EL2_PMBIDR_EL1 | 529 HDFGRTR_EL2_PMSLATFR_EL1 | 530 HDFGRTR_EL2_PMSIRR_EL1 | 531 HDFGRTR_EL2_PMSIDR_EL1 | 532 HDFGRTR_EL2_PMSICR_EL1 | 533 HDFGRTR_EL2_PMSFCR_EL1 | 534 HDFGRTR_EL2_PMSEVFR_EL1 | 535 HDFGRTR_EL2_PMSCR_EL1 | 536 HDFGRTR_EL2_PMBSR_EL1 | 537 HDFGRTR_EL2_PMBPTR_EL1 | 538 HDFGRTR_EL2_PMBLIMITR_EL1, 539 FEAT_SPE), 540 NEEDS_FEAT(HDFGRTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE), 541 NEEDS_FEAT(HDFGRTR_EL2_nBRBDATA | 542 HDFGRTR_EL2_nBRBCTL | 543 HDFGRTR_EL2_nBRBIDR, 544 FEAT_BRBE), 545 NEEDS_FEAT(HDFGRTR_EL2_TRCVICTLR | 546 HDFGRTR_EL2_TRCSTATR | 547 HDFGRTR_EL2_TRCSSCSRn | 548 HDFGRTR_EL2_TRCSEQSTR | 549 HDFGRTR_EL2_TRCPRGCTLR | 550 HDFGRTR_EL2_TRCOSLSR | 551 HDFGRTR_EL2_TRCIMSPECn | 552 HDFGRTR_EL2_TRCID | 553 HDFGRTR_EL2_TRCCNTVRn | 554 HDFGRTR_EL2_TRCCLAIM | 555 HDFGRTR_EL2_TRCAUXCTLR | 556 HDFGRTR_EL2_TRCAUTHSTATUS | 557 HDFGRTR_EL2_TRC, 558 FEAT_TRC_SR), 559 NEEDS_FEAT(HDFGRTR_EL2_PMCEIDn_EL0 | 560 HDFGRTR_EL2_PMUSERENR_EL0 | 561 HDFGRTR_EL2_PMMIR_EL1 | 562 HDFGRTR_EL2_PMSELR_EL0 | 563 HDFGRTR_EL2_PMOVS | 564 HDFGRTR_EL2_PMINTEN | 565 HDFGRTR_EL2_PMCNTEN | 566 HDFGRTR_EL2_PMCCNTR_EL0 | 567 HDFGRTR_EL2_PMCCFILTR_EL0 | 568 HDFGRTR_EL2_PMEVTYPERn_EL0 | 569 HDFGRTR_EL2_PMEVCNTRn_EL0, 570 FEAT_PMUv3), 571 NEEDS_FEAT(HDFGRTR_EL2_TRBTRG_EL1 | 572 HDFGRTR_EL2_TRBSR_EL1 | 573 HDFGRTR_EL2_TRBPTR_EL1 | 574 HDFGRTR_EL2_TRBMAR_EL1 | 575 HDFGRTR_EL2_TRBLIMITR_EL1 | 576 HDFGRTR_EL2_TRBIDR_EL1 | 577 HDFGRTR_EL2_TRBBASER_EL1, 578 FEAT_TRBE), 579 NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSDLR_EL1, NEVER_FGU, 580 FEAT_DoubleLock), 581 NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSECCR_EL1 | 582 HDFGRTR_EL2_OSLSR_EL1 | 583 HDFGRTR_EL2_DBGPRCR_EL1 | 584 HDFGRTR_EL2_DBGAUTHSTATUS_EL1| 585 HDFGRTR_EL2_DBGCLAIM | 586 HDFGRTR_EL2_MDSCR_EL1 | 587 HDFGRTR_EL2_DBGWVRn_EL1 | 588 HDFGRTR_EL2_DBGWCRn_EL1 | 589 HDFGRTR_EL2_DBGBVRn_EL1 | 590 HDFGRTR_EL2_DBGBCRn_EL1, 591 NEVER_FGU, FEAT_AA64EL1) 592 }; 593 594 static const DECLARE_FEAT_MAP_FGT(hdfgrtr_desc, hdfgrtr_masks, 595 hdfgrtr_feat_map, FEAT_FGT); 596 597 static const struct reg_bits_to_feat_map hdfgwtr_feat_map[] = { 598 NEEDS_FEAT(HDFGWTR_EL2_PMSLATFR_EL1 | 599 HDFGWTR_EL2_PMSIRR_EL1 | 600 HDFGWTR_EL2_PMSICR_EL1 | 601 HDFGWTR_EL2_PMSFCR_EL1 | 602 HDFGWTR_EL2_PMSEVFR_EL1 | 603 HDFGWTR_EL2_PMSCR_EL1 | 604 HDFGWTR_EL2_PMBSR_EL1 | 605 HDFGWTR_EL2_PMBPTR_EL1 | 606 HDFGWTR_EL2_PMBLIMITR_EL1, 607 FEAT_SPE), 608 NEEDS_FEAT(HDFGWTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE), 609 NEEDS_FEAT(HDFGWTR_EL2_nBRBDATA | 610 HDFGWTR_EL2_nBRBCTL, 611 FEAT_BRBE), 612 NEEDS_FEAT(HDFGWTR_EL2_TRCVICTLR | 613 HDFGWTR_EL2_TRCSSCSRn | 614 HDFGWTR_EL2_TRCSEQSTR | 615 HDFGWTR_EL2_TRCPRGCTLR | 616 HDFGWTR_EL2_TRCOSLAR | 617 HDFGWTR_EL2_TRCIMSPECn | 618 HDFGWTR_EL2_TRCCNTVRn | 619 HDFGWTR_EL2_TRCCLAIM | 620 HDFGWTR_EL2_TRCAUXCTLR | 621 HDFGWTR_EL2_TRC, 622 FEAT_TRC_SR), 623 NEEDS_FEAT(HDFGWTR_EL2_PMUSERENR_EL0 | 624 HDFGWTR_EL2_PMCR_EL0 | 625 HDFGWTR_EL2_PMSWINC_EL0 | 626 HDFGWTR_EL2_PMSELR_EL0 | 627 HDFGWTR_EL2_PMOVS | 628 HDFGWTR_EL2_PMINTEN | 629 HDFGWTR_EL2_PMCNTEN | 630 HDFGWTR_EL2_PMCCNTR_EL0 | 631 HDFGWTR_EL2_PMCCFILTR_EL0 | 632 HDFGWTR_EL2_PMEVTYPERn_EL0 | 633 HDFGWTR_EL2_PMEVCNTRn_EL0, 634 FEAT_PMUv3), 635 NEEDS_FEAT(HDFGWTR_EL2_TRBTRG_EL1 | 636 HDFGWTR_EL2_TRBSR_EL1 | 637 HDFGWTR_EL2_TRBPTR_EL1 | 638 HDFGWTR_EL2_TRBMAR_EL1 | 639 HDFGWTR_EL2_TRBLIMITR_EL1 | 640 HDFGWTR_EL2_TRBBASER_EL1, 641 FEAT_TRBE), 642 NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSDLR_EL1, 643 NEVER_FGU, FEAT_DoubleLock), 644 NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSECCR_EL1 | 645 HDFGWTR_EL2_OSLAR_EL1 | 646 HDFGWTR_EL2_DBGPRCR_EL1 | 647 HDFGWTR_EL2_DBGCLAIM | 648 HDFGWTR_EL2_MDSCR_EL1 | 649 HDFGWTR_EL2_DBGWVRn_EL1 | 650 HDFGWTR_EL2_DBGWCRn_EL1 | 651 HDFGWTR_EL2_DBGBVRn_EL1 | 652 HDFGWTR_EL2_DBGBCRn_EL1, 653 NEVER_FGU, FEAT_AA64EL1), 654 NEEDS_FEAT(HDFGWTR_EL2_TRFCR_EL1, FEAT_TRF), 655 }; 656 657 static const DECLARE_FEAT_MAP_FGT(hdfgwtr_desc, hdfgwtr_masks, 658 hdfgwtr_feat_map, FEAT_FGT); 659 660 static const struct reg_bits_to_feat_map hfgitr_feat_map[] = { 661 NEEDS_FEAT(HFGITR_EL2_PSBCSYNC, FEAT_SPEv1p5), 662 NEEDS_FEAT(HFGITR_EL2_ATS1E1A, FEAT_ATS1A), 663 NEEDS_FEAT(HFGITR_EL2_COSPRCTX, FEAT_SPECRES2), 664 NEEDS_FEAT(HFGITR_EL2_nGCSEPP | 665 HFGITR_EL2_nGCSSTR_EL1 | 666 HFGITR_EL2_nGCSPUSHM_EL1, 667 FEAT_GCS), 668 NEEDS_FEAT(HFGITR_EL2_nBRBIALL | 669 HFGITR_EL2_nBRBINJ, 670 FEAT_BRBE), 671 NEEDS_FEAT(HFGITR_EL2_CPPRCTX | 672 HFGITR_EL2_DVPRCTX | 673 HFGITR_EL2_CFPRCTX, 674 FEAT_SPECRES), 675 NEEDS_FEAT(HFGITR_EL2_TLBIRVAALE1 | 676 HFGITR_EL2_TLBIRVALE1 | 677 HFGITR_EL2_TLBIRVAAE1 | 678 HFGITR_EL2_TLBIRVAE1 | 679 HFGITR_EL2_TLBIRVAALE1IS | 680 HFGITR_EL2_TLBIRVALE1IS | 681 HFGITR_EL2_TLBIRVAAE1IS | 682 HFGITR_EL2_TLBIRVAE1IS | 683 HFGITR_EL2_TLBIRVAALE1OS | 684 HFGITR_EL2_TLBIRVALE1OS | 685 HFGITR_EL2_TLBIRVAAE1OS | 686 HFGITR_EL2_TLBIRVAE1OS, 687 FEAT_TLBIRANGE), 688 NEEDS_FEAT(HFGITR_EL2_TLBIVAALE1OS | 689 HFGITR_EL2_TLBIVALE1OS | 690 HFGITR_EL2_TLBIVAAE1OS | 691 HFGITR_EL2_TLBIASIDE1OS | 692 HFGITR_EL2_TLBIVAE1OS | 693 HFGITR_EL2_TLBIVMALLE1OS, 694 FEAT_TLBIOS), 695 NEEDS_FEAT(HFGITR_EL2_ATS1E1WP | 696 HFGITR_EL2_ATS1E1RP, 697 FEAT_PAN2), 698 NEEDS_FEAT(HFGITR_EL2_DCCVADP, FEAT_DPB2), 699 NEEDS_FEAT_FLAG(HFGITR_EL2_DCCVAC | 700 HFGITR_EL2_SVC_EL1 | 701 HFGITR_EL2_SVC_EL0 | 702 HFGITR_EL2_ERET | 703 HFGITR_EL2_TLBIVAALE1 | 704 HFGITR_EL2_TLBIVALE1 | 705 HFGITR_EL2_TLBIVAAE1 | 706 HFGITR_EL2_TLBIASIDE1 | 707 HFGITR_EL2_TLBIVAE1 | 708 HFGITR_EL2_TLBIVMALLE1 | 709 HFGITR_EL2_TLBIVAALE1IS | 710 HFGITR_EL2_TLBIVALE1IS | 711 HFGITR_EL2_TLBIVAAE1IS | 712 HFGITR_EL2_TLBIASIDE1IS | 713 HFGITR_EL2_TLBIVAE1IS | 714 HFGITR_EL2_TLBIVMALLE1IS| 715 HFGITR_EL2_ATS1E0W | 716 HFGITR_EL2_ATS1E0R | 717 HFGITR_EL2_ATS1E1W | 718 HFGITR_EL2_ATS1E1R | 719 HFGITR_EL2_DCZVA | 720 HFGITR_EL2_DCCIVAC | 721 HFGITR_EL2_DCCVAP | 722 HFGITR_EL2_DCCVAU | 723 HFGITR_EL2_DCCISW | 724 HFGITR_EL2_DCCSW | 725 HFGITR_EL2_DCISW | 726 HFGITR_EL2_DCIVAC | 727 HFGITR_EL2_ICIVAU | 728 HFGITR_EL2_ICIALLU | 729 HFGITR_EL2_ICIALLUIS, 730 NEVER_FGU, FEAT_AA64EL1), 731 }; 732 733 static const DECLARE_FEAT_MAP_FGT(hfgitr_desc, hfgitr_masks, 734 hfgitr_feat_map, FEAT_FGT); 735 736 static const struct reg_bits_to_feat_map hafgrtr_feat_map[] = { 737 NEEDS_FEAT(HAFGRTR_EL2_AMEVTYPER115_EL0 | 738 HAFGRTR_EL2_AMEVTYPER114_EL0 | 739 HAFGRTR_EL2_AMEVTYPER113_EL0 | 740 HAFGRTR_EL2_AMEVTYPER112_EL0 | 741 HAFGRTR_EL2_AMEVTYPER111_EL0 | 742 HAFGRTR_EL2_AMEVTYPER110_EL0 | 743 HAFGRTR_EL2_AMEVTYPER19_EL0 | 744 HAFGRTR_EL2_AMEVTYPER18_EL0 | 745 HAFGRTR_EL2_AMEVTYPER17_EL0 | 746 HAFGRTR_EL2_AMEVTYPER16_EL0 | 747 HAFGRTR_EL2_AMEVTYPER15_EL0 | 748 HAFGRTR_EL2_AMEVTYPER14_EL0 | 749 HAFGRTR_EL2_AMEVTYPER13_EL0 | 750 HAFGRTR_EL2_AMEVTYPER12_EL0 | 751 HAFGRTR_EL2_AMEVTYPER11_EL0 | 752 HAFGRTR_EL2_AMEVTYPER10_EL0 | 753 HAFGRTR_EL2_AMEVCNTR115_EL0 | 754 HAFGRTR_EL2_AMEVCNTR114_EL0 | 755 HAFGRTR_EL2_AMEVCNTR113_EL0 | 756 HAFGRTR_EL2_AMEVCNTR112_EL0 | 757 HAFGRTR_EL2_AMEVCNTR111_EL0 | 758 HAFGRTR_EL2_AMEVCNTR110_EL0 | 759 HAFGRTR_EL2_AMEVCNTR19_EL0 | 760 HAFGRTR_EL2_AMEVCNTR18_EL0 | 761 HAFGRTR_EL2_AMEVCNTR17_EL0 | 762 HAFGRTR_EL2_AMEVCNTR16_EL0 | 763 HAFGRTR_EL2_AMEVCNTR15_EL0 | 764 HAFGRTR_EL2_AMEVCNTR14_EL0 | 765 HAFGRTR_EL2_AMEVCNTR13_EL0 | 766 HAFGRTR_EL2_AMEVCNTR12_EL0 | 767 HAFGRTR_EL2_AMEVCNTR11_EL0 | 768 HAFGRTR_EL2_AMEVCNTR10_EL0 | 769 HAFGRTR_EL2_AMCNTEN1 | 770 HAFGRTR_EL2_AMCNTEN0 | 771 HAFGRTR_EL2_AMEVCNTR03_EL0 | 772 HAFGRTR_EL2_AMEVCNTR02_EL0 | 773 HAFGRTR_EL2_AMEVCNTR01_EL0 | 774 HAFGRTR_EL2_AMEVCNTR00_EL0, 775 FEAT_AMUv1), 776 }; 777 778 static const DECLARE_FEAT_MAP_FGT(hafgrtr_desc, hafgrtr_masks, 779 hafgrtr_feat_map, FEAT_FGT); 780 781 static const struct reg_bits_to_feat_map hfgitr2_feat_map[] = { 782 NEEDS_FEAT(HFGITR2_EL2_nDCCIVAPS, FEAT_PoPS), 783 NEEDS_FEAT(HFGITR2_EL2_TSBCSYNC, FEAT_TRBEv1p1) 784 }; 785 786 static const DECLARE_FEAT_MAP_FGT(hfgitr2_desc, hfgitr2_masks, 787 hfgitr2_feat_map, FEAT_FGT2); 788 789 static const struct reg_bits_to_feat_map hfgrtr2_feat_map[] = { 790 NEEDS_FEAT(HFGRTR2_EL2_nPFAR_EL1, FEAT_PFAR), 791 NEEDS_FEAT(HFGRTR2_EL2_nERXGSR_EL1, FEAT_RASv2), 792 NEEDS_FEAT(HFGRTR2_EL2_nACTLRALIAS_EL1 | 793 HFGRTR2_EL2_nACTLRMASK_EL1 | 794 HFGRTR2_EL2_nCPACRALIAS_EL1 | 795 HFGRTR2_EL2_nCPACRMASK_EL1 | 796 HFGRTR2_EL2_nSCTLR2MASK_EL1 | 797 HFGRTR2_EL2_nSCTLRALIAS2_EL1 | 798 HFGRTR2_EL2_nSCTLRALIAS_EL1 | 799 HFGRTR2_EL2_nSCTLRMASK_EL1 | 800 HFGRTR2_EL2_nTCR2ALIAS_EL1 | 801 HFGRTR2_EL2_nTCR2MASK_EL1 | 802 HFGRTR2_EL2_nTCRALIAS_EL1 | 803 HFGRTR2_EL2_nTCRMASK_EL1, 804 FEAT_SRMASK), 805 NEEDS_FEAT(HFGRTR2_EL2_nRCWSMASK_EL1, FEAT_THE), 806 }; 807 808 static const DECLARE_FEAT_MAP_FGT(hfgrtr2_desc, hfgrtr2_masks, 809 hfgrtr2_feat_map, FEAT_FGT2); 810 811 static const struct reg_bits_to_feat_map hfgwtr2_feat_map[] = { 812 NEEDS_FEAT(HFGWTR2_EL2_nPFAR_EL1, FEAT_PFAR), 813 NEEDS_FEAT(HFGWTR2_EL2_nACTLRALIAS_EL1 | 814 HFGWTR2_EL2_nACTLRMASK_EL1 | 815 HFGWTR2_EL2_nCPACRALIAS_EL1 | 816 HFGWTR2_EL2_nCPACRMASK_EL1 | 817 HFGWTR2_EL2_nSCTLR2MASK_EL1 | 818 HFGWTR2_EL2_nSCTLRALIAS2_EL1 | 819 HFGWTR2_EL2_nSCTLRALIAS_EL1 | 820 HFGWTR2_EL2_nSCTLRMASK_EL1 | 821 HFGWTR2_EL2_nTCR2ALIAS_EL1 | 822 HFGWTR2_EL2_nTCR2MASK_EL1 | 823 HFGWTR2_EL2_nTCRALIAS_EL1 | 824 HFGWTR2_EL2_nTCRMASK_EL1, 825 FEAT_SRMASK), 826 NEEDS_FEAT(HFGWTR2_EL2_nRCWSMASK_EL1, FEAT_THE), 827 }; 828 829 static const DECLARE_FEAT_MAP_FGT(hfgwtr2_desc, hfgwtr2_masks, 830 hfgwtr2_feat_map, FEAT_FGT2); 831 832 static const struct reg_bits_to_feat_map hdfgrtr2_feat_map[] = { 833 NEEDS_FEAT(HDFGRTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9), 834 NEEDS_FEAT(HDFGRTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss), 835 NEEDS_FEAT(HDFGRTR2_EL2_nTRCITECR_EL1, FEAT_ITE), 836 NEEDS_FEAT(HDFGRTR2_EL2_nPMICFILTR_EL0 | 837 HDFGRTR2_EL2_nPMICNTR_EL0, 838 FEAT_PMUv3_ICNTR), 839 NEEDS_FEAT(HDFGRTR2_EL2_nPMUACR_EL1, feat_pmuv3p9), 840 NEEDS_FEAT(HDFGRTR2_EL2_nPMSSCR_EL1 | 841 HDFGRTR2_EL2_nPMSSDATA, 842 FEAT_PMUv3_SS), 843 NEEDS_FEAT(HDFGRTR2_EL2_nPMIAR_EL1, FEAT_SEBEP), 844 NEEDS_FEAT(HDFGRTR2_EL2_nPMSDSFR_EL1, feat_spe_fds), 845 NEEDS_FEAT(HDFGRTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM), 846 NEEDS_FEAT(HDFGRTR2_EL2_nSPMACCESSR_EL1 | 847 HDFGRTR2_EL2_nSPMCNTEN | 848 HDFGRTR2_EL2_nSPMCR_EL0 | 849 HDFGRTR2_EL2_nSPMDEVAFF_EL1 | 850 HDFGRTR2_EL2_nSPMEVCNTRn_EL0 | 851 HDFGRTR2_EL2_nSPMEVTYPERn_EL0| 852 HDFGRTR2_EL2_nSPMID | 853 HDFGRTR2_EL2_nSPMINTEN | 854 HDFGRTR2_EL2_nSPMOVS | 855 HDFGRTR2_EL2_nSPMSCR_EL1 | 856 HDFGRTR2_EL2_nSPMSELR_EL0, 857 FEAT_SPMU), 858 NEEDS_FEAT(HDFGRTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2), 859 NEEDS_FEAT(HDFGRTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam), 860 }; 861 862 static const DECLARE_FEAT_MAP_FGT(hdfgrtr2_desc, hdfgrtr2_masks, 863 hdfgrtr2_feat_map, FEAT_FGT2); 864 865 static const struct reg_bits_to_feat_map hdfgwtr2_feat_map[] = { 866 NEEDS_FEAT(HDFGWTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9), 867 NEEDS_FEAT(HDFGWTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss), 868 NEEDS_FEAT(HDFGWTR2_EL2_nTRCITECR_EL1, FEAT_ITE), 869 NEEDS_FEAT(HDFGWTR2_EL2_nPMICFILTR_EL0 | 870 HDFGWTR2_EL2_nPMICNTR_EL0, 871 FEAT_PMUv3_ICNTR), 872 NEEDS_FEAT(HDFGWTR2_EL2_nPMUACR_EL1 | 873 HDFGWTR2_EL2_nPMZR_EL0, 874 feat_pmuv3p9), 875 NEEDS_FEAT(HDFGWTR2_EL2_nPMSSCR_EL1, FEAT_PMUv3_SS), 876 NEEDS_FEAT(HDFGWTR2_EL2_nPMIAR_EL1, FEAT_SEBEP), 877 NEEDS_FEAT(HDFGWTR2_EL2_nPMSDSFR_EL1, feat_spe_fds), 878 NEEDS_FEAT(HDFGWTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM), 879 NEEDS_FEAT(HDFGWTR2_EL2_nSPMACCESSR_EL1 | 880 HDFGWTR2_EL2_nSPMCNTEN | 881 HDFGWTR2_EL2_nSPMCR_EL0 | 882 HDFGWTR2_EL2_nSPMEVCNTRn_EL0 | 883 HDFGWTR2_EL2_nSPMEVTYPERn_EL0| 884 HDFGWTR2_EL2_nSPMINTEN | 885 HDFGWTR2_EL2_nSPMOVS | 886 HDFGWTR2_EL2_nSPMSCR_EL1 | 887 HDFGWTR2_EL2_nSPMSELR_EL0, 888 FEAT_SPMU), 889 NEEDS_FEAT(HDFGWTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2), 890 NEEDS_FEAT(HDFGWTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam), 891 }; 892 893 static const DECLARE_FEAT_MAP_FGT(hdfgwtr2_desc, hdfgwtr2_masks, 894 hdfgwtr2_feat_map, FEAT_FGT2); 895 896 897 static const struct reg_bits_to_feat_map hcrx_feat_map[] = { 898 NEEDS_FEAT(HCRX_EL2_PACMEn, feat_pauth_lr), 899 NEEDS_FEAT(HCRX_EL2_EnFPM, FEAT_FPMR), 900 NEEDS_FEAT(HCRX_EL2_GCSEn, FEAT_GCS), 901 NEEDS_FEAT(HCRX_EL2_EnIDCP128, FEAT_SYSREG128), 902 NEEDS_FEAT(HCRX_EL2_EnSDERR, feat_aderr), 903 NEEDS_FEAT(HCRX_EL2_TMEA, FEAT_DoubleFault2), 904 NEEDS_FEAT(HCRX_EL2_EnSNERR, feat_anerr), 905 NEEDS_FEAT(HCRX_EL2_D128En, FEAT_D128), 906 NEEDS_FEAT(HCRX_EL2_PTTWI, FEAT_THE), 907 NEEDS_FEAT(HCRX_EL2_SCTLR2En, FEAT_SCTLR2), 908 NEEDS_FEAT(HCRX_EL2_TCR2En, FEAT_TCR2), 909 NEEDS_FEAT(HCRX_EL2_MSCEn | 910 HCRX_EL2_MCE2, 911 FEAT_MOPS), 912 NEEDS_FEAT(HCRX_EL2_CMOW, FEAT_CMOW), 913 NEEDS_FEAT(HCRX_EL2_VFNMI | 914 HCRX_EL2_VINMI | 915 HCRX_EL2_TALLINT, 916 FEAT_NMI), 917 NEEDS_FEAT(HCRX_EL2_SMPME, feat_sme_smps), 918 NEEDS_FEAT(HCRX_EL2_FGTnXS | 919 HCRX_EL2_FnXS, 920 FEAT_XS), 921 NEEDS_FEAT(HCRX_EL2_EnASR, FEAT_LS64_V), 922 NEEDS_FEAT(HCRX_EL2_EnALS, FEAT_LS64), 923 NEEDS_FEAT(HCRX_EL2_EnAS0, FEAT_LS64_ACCDATA), 924 }; 925 926 927 static const DECLARE_FEAT_MAP(hcrx_desc, __HCRX_EL2, 928 hcrx_feat_map, FEAT_HCX); 929 930 static const struct reg_bits_to_feat_map hcr_feat_map[] = { 931 NEEDS_FEAT(HCR_EL2_TID0, FEAT_AA32EL0), 932 NEEDS_FEAT_FLAG(HCR_EL2_RW, AS_RES1, FEAT_AA32EL1), 933 NEEDS_FEAT(HCR_EL2_HCD, not_feat_aa64el3), 934 NEEDS_FEAT(HCR_EL2_AMO | 935 HCR_EL2_BSU | 936 HCR_EL2_CD | 937 HCR_EL2_DC | 938 HCR_EL2_FB | 939 HCR_EL2_FMO | 940 HCR_EL2_ID | 941 HCR_EL2_IMO | 942 HCR_EL2_PTW | 943 HCR_EL2_SWIO | 944 HCR_EL2_TACR | 945 HCR_EL2_TDZ | 946 HCR_EL2_TGE | 947 HCR_EL2_TID1 | 948 HCR_EL2_TID2 | 949 HCR_EL2_TID3 | 950 HCR_EL2_TIDCP | 951 HCR_EL2_TPCP | 952 HCR_EL2_TPU | 953 HCR_EL2_TRVM | 954 HCR_EL2_TSC | 955 HCR_EL2_TSW | 956 HCR_EL2_TTLB | 957 HCR_EL2_TVM | 958 HCR_EL2_TWE | 959 HCR_EL2_TWI | 960 HCR_EL2_VF | 961 HCR_EL2_VI | 962 HCR_EL2_VM | 963 HCR_EL2_VSE, 964 FEAT_AA64EL1), 965 NEEDS_FEAT(HCR_EL2_AMVOFFEN, FEAT_AMUv1p1), 966 NEEDS_FEAT(HCR_EL2_EnSCXT, feat_csv2_2_csv2_1p2), 967 NEEDS_FEAT(HCR_EL2_TICAB | 968 HCR_EL2_TID4 | 969 HCR_EL2_TOCU, 970 FEAT_EVT), 971 NEEDS_FEAT(HCR_EL2_TTLBIS | 972 HCR_EL2_TTLBOS, 973 FEAT_EVT_TTLBxS), 974 NEEDS_FEAT(HCR_EL2_TLOR, FEAT_LOR), 975 NEEDS_FEAT(HCR_EL2_ATA | 976 HCR_EL2_DCT | 977 HCR_EL2_TID5, 978 FEAT_MTE2), 979 NEEDS_FEAT(HCR_EL2_AT | /* Ignore the original FEAT_NV */ 980 HCR_EL2_NV2 | 981 HCR_EL2_NV, 982 feat_nv2), 983 NEEDS_FEAT(HCR_EL2_NV1, feat_nv2_e2h0_ni), /* Missing from JSON */ 984 NEEDS_FEAT(HCR_EL2_API | 985 HCR_EL2_APK, 986 feat_pauth), 987 NEEDS_FEAT(HCR_EL2_TEA | 988 HCR_EL2_TERR, 989 FEAT_RAS), 990 NEEDS_FEAT(HCR_EL2_FIEN, feat_rasv1p1), 991 NEEDS_FEAT(HCR_EL2_GPF, FEAT_RME), 992 NEEDS_FEAT(HCR_EL2_FWB, FEAT_S2FWB), 993 NEEDS_FEAT(HCR_EL2_TWEDEL | 994 HCR_EL2_TWEDEn, 995 FEAT_TWED), 996 NEEDS_FEAT_FLAG(HCR_EL2_E2H, RES1_WHEN_E2H1 | FORCE_RESx), 997 FORCE_RES0(HCR_EL2_RES0), 998 FORCE_RES1(HCR_EL2_RES1), 999 }; 1000 1001 static const DECLARE_FEAT_MAP(hcr_desc, HCR_EL2, 1002 hcr_feat_map, FEAT_AA64EL2); 1003 1004 static const struct reg_bits_to_feat_map sctlr2_feat_map[] = { 1005 NEEDS_FEAT(SCTLR2_EL1_NMEA | 1006 SCTLR2_EL1_EASE, 1007 FEAT_DoubleFault2), 1008 NEEDS_FEAT(SCTLR2_EL1_EnADERR, feat_aderr), 1009 NEEDS_FEAT(SCTLR2_EL1_EnANERR, feat_anerr), 1010 NEEDS_FEAT(SCTLR2_EL1_EnIDCP128, FEAT_SYSREG128), 1011 NEEDS_FEAT(SCTLR2_EL1_EnPACM | 1012 SCTLR2_EL1_EnPACM0, 1013 feat_pauth_lr), 1014 NEEDS_FEAT(SCTLR2_EL1_CPTA | 1015 SCTLR2_EL1_CPTA0 | 1016 SCTLR2_EL1_CPTM | 1017 SCTLR2_EL1_CPTM0, 1018 FEAT_CPA2), 1019 FORCE_RES0(SCTLR2_EL1_RES0), 1020 FORCE_RES1(SCTLR2_EL1_RES1), 1021 }; 1022 1023 static const DECLARE_FEAT_MAP(sctlr2_desc, SCTLR2_EL1, 1024 sctlr2_feat_map, FEAT_SCTLR2); 1025 1026 static const struct reg_bits_to_feat_map tcr2_el2_feat_map[] = { 1027 NEEDS_FEAT_FLAG(TCR2_EL2_FNG1 | 1028 TCR2_EL2_FNG0 | 1029 TCR2_EL2_A2, 1030 REQUIRES_E2H1, FEAT_ASID2), 1031 NEEDS_FEAT_FLAG(TCR2_EL2_DisCH1 | 1032 TCR2_EL2_DisCH0 | 1033 TCR2_EL2_D128, 1034 REQUIRES_E2H1, FEAT_D128), 1035 NEEDS_FEAT_FLAG(TCR2_EL2_AMEC1, REQUIRES_E2H1, FEAT_MEC), 1036 NEEDS_FEAT(TCR2_EL2_AMEC0, FEAT_MEC), 1037 NEEDS_FEAT(TCR2_EL2_HAFT, FEAT_HAFT), 1038 NEEDS_FEAT(TCR2_EL2_PTTWI | 1039 TCR2_EL2_PnCH, 1040 FEAT_THE), 1041 NEEDS_FEAT(TCR2_EL2_AIE, FEAT_AIE), 1042 NEEDS_FEAT(TCR2_EL2_POE | 1043 TCR2_EL2_E0POE, 1044 FEAT_S1POE), 1045 NEEDS_FEAT(TCR2_EL2_PIE, FEAT_S1PIE), 1046 FORCE_RES0(TCR2_EL2_RES0), 1047 FORCE_RES1(TCR2_EL2_RES1), 1048 }; 1049 1050 static const DECLARE_FEAT_MAP(tcr2_el2_desc, TCR2_EL2, 1051 tcr2_el2_feat_map, FEAT_TCR2); 1052 1053 static const struct reg_bits_to_feat_map sctlr_el1_feat_map[] = { 1054 NEEDS_FEAT(SCTLR_EL1_CP15BEN, FEAT_AA32EL0), 1055 NEEDS_FEAT_FLAG(SCTLR_EL1_ITD | 1056 SCTLR_EL1_SED, 1057 AS_RES1, FEAT_AA32EL0), 1058 NEEDS_FEAT(SCTLR_EL1_BT0 | 1059 SCTLR_EL1_BT1, 1060 FEAT_BTI), 1061 NEEDS_FEAT(SCTLR_EL1_CMOW, FEAT_CMOW), 1062 NEEDS_FEAT_FLAG(SCTLR_EL1_TSCXT, 1063 AS_RES1, feat_csv2_2_csv2_1p2), 1064 NEEDS_FEAT_FLAG(SCTLR_EL1_EIS | 1065 SCTLR_EL1_EOS, 1066 AS_RES1, FEAT_ExS), 1067 NEEDS_FEAT(SCTLR_EL1_EnFPM, FEAT_FPMR), 1068 NEEDS_FEAT(SCTLR_EL1_IESB, FEAT_IESB), 1069 NEEDS_FEAT(SCTLR_EL1_EnALS, FEAT_LS64), 1070 NEEDS_FEAT(SCTLR_EL1_EnAS0, FEAT_LS64_ACCDATA), 1071 NEEDS_FEAT(SCTLR_EL1_EnASR, FEAT_LS64_V), 1072 NEEDS_FEAT(SCTLR_EL1_nAA, FEAT_LSE2), 1073 NEEDS_FEAT_FLAG(SCTLR_EL1_LSMAOE | 1074 SCTLR_EL1_nTLSMD, 1075 AS_RES1, FEAT_LSMAOC), 1076 NEEDS_FEAT(SCTLR_EL1_EE, FEAT_MixedEnd), 1077 NEEDS_FEAT(SCTLR_EL1_E0E, feat_mixedendel0), 1078 NEEDS_FEAT(SCTLR_EL1_MSCEn, FEAT_MOPS), 1079 NEEDS_FEAT(SCTLR_EL1_ATA0 | 1080 SCTLR_EL1_ATA | 1081 SCTLR_EL1_TCF0 | 1082 SCTLR_EL1_TCF, 1083 FEAT_MTE2), 1084 NEEDS_FEAT(SCTLR_EL1_ITFSB, feat_mte_async), 1085 NEEDS_FEAT(SCTLR_EL1_TCSO0 | 1086 SCTLR_EL1_TCSO, 1087 FEAT_MTE_STORE_ONLY), 1088 NEEDS_FEAT(SCTLR_EL1_NMI | 1089 SCTLR_EL1_SPINTMASK, 1090 FEAT_NMI), 1091 NEEDS_FEAT_FLAG(SCTLR_EL1_SPAN, 1092 AS_RES1, FEAT_PAN), 1093 NEEDS_FEAT(SCTLR_EL1_EPAN, FEAT_PAN3), 1094 NEEDS_FEAT(SCTLR_EL1_EnDA | 1095 SCTLR_EL1_EnDB | 1096 SCTLR_EL1_EnIA | 1097 SCTLR_EL1_EnIB, 1098 feat_pauth), 1099 NEEDS_FEAT(SCTLR_EL1_EnTP2, FEAT_SME), 1100 NEEDS_FEAT(SCTLR_EL1_EnRCTX, FEAT_SPECRES), 1101 NEEDS_FEAT(SCTLR_EL1_DSSBS, FEAT_SSBS), 1102 NEEDS_FEAT(SCTLR_EL1_TIDCP, FEAT_TIDCP1), 1103 NEEDS_FEAT(SCTLR_EL1_TWEDEL | 1104 SCTLR_EL1_TWEDEn, 1105 FEAT_TWED), 1106 NEEDS_FEAT(SCTLR_EL1_UCI | 1107 SCTLR_EL1_WXN | 1108 SCTLR_EL1_nTWE | 1109 SCTLR_EL1_nTWI | 1110 SCTLR_EL1_UCT | 1111 SCTLR_EL1_DZE | 1112 SCTLR_EL1_I | 1113 SCTLR_EL1_UMA | 1114 SCTLR_EL1_SA0 | 1115 SCTLR_EL1_SA | 1116 SCTLR_EL1_C | 1117 SCTLR_EL1_A | 1118 SCTLR_EL1_M, 1119 FEAT_AA64EL1), 1120 FORCE_RES0(SCTLR_EL1_RES0), 1121 FORCE_RES1(SCTLR_EL1_RES1), 1122 }; 1123 1124 static const DECLARE_FEAT_MAP(sctlr_el1_desc, SCTLR_EL1, 1125 sctlr_el1_feat_map, FEAT_AA64EL1); 1126 1127 static const struct reg_bits_to_feat_map sctlr_el2_feat_map[] = { 1128 NEEDS_FEAT_FLAG(SCTLR_EL2_CP15BEN, 1129 RES1_WHEN_E2H0 | REQUIRES_E2H1, 1130 FEAT_AA32EL0), 1131 NEEDS_FEAT_FLAG(SCTLR_EL2_ITD | 1132 SCTLR_EL2_SED, 1133 RES1_WHEN_E2H1 | REQUIRES_E2H1, 1134 FEAT_AA32EL0), 1135 NEEDS_FEAT_FLAG(SCTLR_EL2_BT0, REQUIRES_E2H1, FEAT_BTI), 1136 NEEDS_FEAT(SCTLR_EL2_BT, FEAT_BTI), 1137 NEEDS_FEAT_FLAG(SCTLR_EL2_CMOW, REQUIRES_E2H1, FEAT_CMOW), 1138 NEEDS_FEAT_FLAG(SCTLR_EL2_TSCXT, 1139 RES1_WHEN_E2H1 | REQUIRES_E2H1, 1140 feat_csv2_2_csv2_1p2), 1141 NEEDS_FEAT_FLAG(SCTLR_EL2_EIS | 1142 SCTLR_EL2_EOS, 1143 AS_RES1, FEAT_ExS), 1144 NEEDS_FEAT(SCTLR_EL2_EnFPM, FEAT_FPMR), 1145 NEEDS_FEAT(SCTLR_EL2_IESB, FEAT_IESB), 1146 NEEDS_FEAT_FLAG(SCTLR_EL2_EnALS, REQUIRES_E2H1, FEAT_LS64), 1147 NEEDS_FEAT_FLAG(SCTLR_EL2_EnAS0, REQUIRES_E2H1, FEAT_LS64_ACCDATA), 1148 NEEDS_FEAT_FLAG(SCTLR_EL2_EnASR, REQUIRES_E2H1, FEAT_LS64_V), 1149 NEEDS_FEAT(SCTLR_EL2_nAA, FEAT_LSE2), 1150 NEEDS_FEAT_FLAG(SCTLR_EL2_LSMAOE | 1151 SCTLR_EL2_nTLSMD, 1152 AS_RES1 | REQUIRES_E2H1, FEAT_LSMAOC), 1153 NEEDS_FEAT(SCTLR_EL2_EE, FEAT_MixedEnd), 1154 NEEDS_FEAT_FLAG(SCTLR_EL2_E0E, REQUIRES_E2H1, feat_mixedendel0), 1155 NEEDS_FEAT_FLAG(SCTLR_EL2_MSCEn, REQUIRES_E2H1, FEAT_MOPS), 1156 NEEDS_FEAT_FLAG(SCTLR_EL2_ATA0 | 1157 SCTLR_EL2_TCF0, 1158 REQUIRES_E2H1, FEAT_MTE2), 1159 NEEDS_FEAT(SCTLR_EL2_ATA | 1160 SCTLR_EL2_TCF, 1161 FEAT_MTE2), 1162 NEEDS_FEAT(SCTLR_EL2_ITFSB, feat_mte_async), 1163 NEEDS_FEAT_FLAG(SCTLR_EL2_TCSO0, REQUIRES_E2H1, FEAT_MTE_STORE_ONLY), 1164 NEEDS_FEAT(SCTLR_EL2_TCSO, 1165 FEAT_MTE_STORE_ONLY), 1166 NEEDS_FEAT(SCTLR_EL2_NMI | 1167 SCTLR_EL2_SPINTMASK, 1168 FEAT_NMI), 1169 NEEDS_FEAT_FLAG(SCTLR_EL2_SPAN, AS_RES1 | REQUIRES_E2H1, FEAT_PAN), 1170 NEEDS_FEAT_FLAG(SCTLR_EL2_EPAN, REQUIRES_E2H1, FEAT_PAN3), 1171 NEEDS_FEAT(SCTLR_EL2_EnDA | 1172 SCTLR_EL2_EnDB | 1173 SCTLR_EL2_EnIA | 1174 SCTLR_EL2_EnIB, 1175 feat_pauth), 1176 NEEDS_FEAT_FLAG(SCTLR_EL2_EnTP2, REQUIRES_E2H1, FEAT_SME), 1177 NEEDS_FEAT(SCTLR_EL2_EnRCTX, FEAT_SPECRES), 1178 NEEDS_FEAT(SCTLR_EL2_DSSBS, FEAT_SSBS), 1179 NEEDS_FEAT_FLAG(SCTLR_EL2_TIDCP, REQUIRES_E2H1, FEAT_TIDCP1), 1180 NEEDS_FEAT_FLAG(SCTLR_EL2_TWEDEL | 1181 SCTLR_EL2_TWEDEn, 1182 REQUIRES_E2H1, FEAT_TWED), 1183 NEEDS_FEAT_FLAG(SCTLR_EL2_nTWE | 1184 SCTLR_EL2_nTWI, 1185 AS_RES1 | REQUIRES_E2H1, FEAT_AA64EL2), 1186 NEEDS_FEAT_FLAG(SCTLR_EL2_UCI | 1187 SCTLR_EL2_UCT | 1188 SCTLR_EL2_DZE | 1189 SCTLR_EL2_SA0, 1190 REQUIRES_E2H1, FEAT_AA64EL2), 1191 NEEDS_FEAT(SCTLR_EL2_WXN | 1192 SCTLR_EL2_I | 1193 SCTLR_EL2_SA | 1194 SCTLR_EL2_C | 1195 SCTLR_EL2_A | 1196 SCTLR_EL2_M, 1197 FEAT_AA64EL2), 1198 FORCE_RES0(SCTLR_EL2_RES0), 1199 FORCE_RES1(SCTLR_EL2_RES1), 1200 }; 1201 1202 static const DECLARE_FEAT_MAP(sctlr_el2_desc, SCTLR_EL2, 1203 sctlr_el2_feat_map, FEAT_AA64EL2); 1204 1205 static const struct reg_bits_to_feat_map mdcr_el2_feat_map[] = { 1206 NEEDS_FEAT(MDCR_EL2_EBWE, FEAT_Debugv8p9), 1207 NEEDS_FEAT(MDCR_EL2_TDOSA, FEAT_DoubleLock), 1208 NEEDS_FEAT(MDCR_EL2_PMEE, FEAT_EBEP), 1209 NEEDS_FEAT(MDCR_EL2_TDCC, FEAT_FGT), 1210 NEEDS_FEAT(MDCR_EL2_MTPME, FEAT_MTPMU), 1211 NEEDS_FEAT(MDCR_EL2_HPME | 1212 MDCR_EL2_HPMN | 1213 MDCR_EL2_TPMCR | 1214 MDCR_EL2_TPM, 1215 FEAT_PMUv3), 1216 NEEDS_FEAT(MDCR_EL2_HPMD, feat_pmuv3p1), 1217 NEEDS_FEAT(MDCR_EL2_HCCD | 1218 MDCR_EL2_HLP, 1219 feat_pmuv3p5), 1220 NEEDS_FEAT(MDCR_EL2_HPMFZO, feat_pmuv3p7), 1221 NEEDS_FEAT(MDCR_EL2_PMSSE, FEAT_PMUv3_SS), 1222 NEEDS_FEAT(MDCR_EL2_E2PB | 1223 MDCR_EL2_TPMS, 1224 FEAT_SPE), 1225 NEEDS_FEAT(MDCR_EL2_HPMFZS, FEAT_SPEv1p2), 1226 NEEDS_FEAT(MDCR_EL2_EnSPM, FEAT_SPMU), 1227 NEEDS_FEAT(MDCR_EL2_EnSTEPOP, FEAT_STEP2), 1228 NEEDS_FEAT(MDCR_EL2_E2TB, FEAT_TRBE), 1229 NEEDS_FEAT(MDCR_EL2_TTRF, FEAT_TRF), 1230 NEEDS_FEAT(MDCR_EL2_TDA | 1231 MDCR_EL2_TDE | 1232 MDCR_EL2_TDRA, 1233 FEAT_AA64EL1), 1234 FORCE_RES0(MDCR_EL2_RES0), 1235 FORCE_RES1(MDCR_EL2_RES1), 1236 }; 1237 1238 static const DECLARE_FEAT_MAP(mdcr_el2_desc, MDCR_EL2, 1239 mdcr_el2_feat_map, FEAT_AA64EL2); 1240 1241 static const struct reg_bits_to_feat_map vtcr_el2_feat_map[] = { 1242 NEEDS_FEAT(VTCR_EL2_HDBSS, FEAT_HDBSS), 1243 NEEDS_FEAT(VTCR_EL2_HAFT, FEAT_HAFT), 1244 NEEDS_FEAT(VTCR_EL2_TL0 | 1245 VTCR_EL2_TL1 | 1246 VTCR_EL2_AssuredOnly | 1247 VTCR_EL2_GCSH, 1248 FEAT_THE), 1249 NEEDS_FEAT(VTCR_EL2_D128, FEAT_D128), 1250 NEEDS_FEAT(VTCR_EL2_S2POE, FEAT_S2POE), 1251 NEEDS_FEAT(VTCR_EL2_S2PIE, FEAT_S2PIE), 1252 NEEDS_FEAT(VTCR_EL2_SL2 | 1253 VTCR_EL2_DS, 1254 feat_lpa2), 1255 NEEDS_FEAT(VTCR_EL2_NSA | 1256 VTCR_EL2_NSW, 1257 FEAT_SEL2), 1258 NEEDS_FEAT(VTCR_EL2_HWU62 | 1259 VTCR_EL2_HWU61 | 1260 VTCR_EL2_HWU60 | 1261 VTCR_EL2_HWU59, 1262 FEAT_HPDS2), 1263 NEEDS_FEAT(VTCR_EL2_HD, ID_AA64MMFR1_EL1, HAFDBS, DBM), 1264 NEEDS_FEAT(VTCR_EL2_HA, ID_AA64MMFR1_EL1, HAFDBS, AF), 1265 NEEDS_FEAT(VTCR_EL2_VS, feat_vmid16), 1266 NEEDS_FEAT(VTCR_EL2_PS | 1267 VTCR_EL2_TG0 | 1268 VTCR_EL2_SH0 | 1269 VTCR_EL2_ORGN0 | 1270 VTCR_EL2_IRGN0 | 1271 VTCR_EL2_SL0 | 1272 VTCR_EL2_T0SZ, 1273 FEAT_AA64EL1), 1274 FORCE_RES0(VTCR_EL2_RES0), 1275 FORCE_RES1(VTCR_EL2_RES1), 1276 }; 1277 1278 static const DECLARE_FEAT_MAP(vtcr_el2_desc, VTCR_EL2, 1279 vtcr_el2_feat_map, FEAT_AA64EL2); 1280 1281 static const struct reg_bits_to_feat_map ich_hfgrtr_feat_map[] = { 1282 NEEDS_FEAT(ICH_HFGRTR_EL2_ICC_APR_EL1 | 1283 ICH_HFGRTR_EL2_ICC_IDRn_EL1 | 1284 ICH_HFGRTR_EL2_ICC_CR0_EL1 | 1285 ICH_HFGRTR_EL2_ICC_HPPIR_EL1 | 1286 ICH_HFGRTR_EL2_ICC_PCR_EL1 | 1287 ICH_HFGRTR_EL2_ICC_ICSR_EL1 | 1288 ICH_HFGRTR_EL2_ICC_IAFFIDR_EL1 | 1289 ICH_HFGRTR_EL2_ICC_PPI_HMRn_EL1 | 1290 ICH_HFGRTR_EL2_ICC_PPI_ENABLERn_EL1 | 1291 ICH_HFGRTR_EL2_ICC_PPI_PENDRn_EL1 | 1292 ICH_HFGRTR_EL2_ICC_PPI_PRIORITYRn_EL1 | 1293 ICH_HFGRTR_EL2_ICC_PPI_ACTIVERn_EL1, 1294 FEAT_GCIE), 1295 }; 1296 1297 static const DECLARE_FEAT_MAP_FGT(ich_hfgrtr_desc, ich_hfgrtr_masks, 1298 ich_hfgrtr_feat_map, FEAT_GCIE); 1299 1300 static const struct reg_bits_to_feat_map ich_hfgwtr_feat_map[] = { 1301 NEEDS_FEAT(ICH_HFGWTR_EL2_ICC_APR_EL1 | 1302 ICH_HFGWTR_EL2_ICC_CR0_EL1 | 1303 ICH_HFGWTR_EL2_ICC_PCR_EL1 | 1304 ICH_HFGWTR_EL2_ICC_ICSR_EL1 | 1305 ICH_HFGWTR_EL2_ICC_PPI_ENABLERn_EL1 | 1306 ICH_HFGWTR_EL2_ICC_PPI_PENDRn_EL1 | 1307 ICH_HFGWTR_EL2_ICC_PPI_PRIORITYRn_EL1 | 1308 ICH_HFGWTR_EL2_ICC_PPI_ACTIVERn_EL1, 1309 FEAT_GCIE), 1310 }; 1311 1312 static const DECLARE_FEAT_MAP_FGT(ich_hfgwtr_desc, ich_hfgwtr_masks, 1313 ich_hfgwtr_feat_map, FEAT_GCIE); 1314 1315 static const struct reg_bits_to_feat_map ich_hfgitr_feat_map[] = { 1316 NEEDS_FEAT(ICH_HFGITR_EL2_GICCDEN | 1317 ICH_HFGITR_EL2_GICCDDIS | 1318 ICH_HFGITR_EL2_GICCDPRI | 1319 ICH_HFGITR_EL2_GICCDAFF | 1320 ICH_HFGITR_EL2_GICCDPEND | 1321 ICH_HFGITR_EL2_GICCDRCFG | 1322 ICH_HFGITR_EL2_GICCDHM | 1323 ICH_HFGITR_EL2_GICCDEOI | 1324 ICH_HFGITR_EL2_GICCDDI | 1325 ICH_HFGITR_EL2_GICRCDIA | 1326 ICH_HFGITR_EL2_GICRCDNMIA, 1327 FEAT_GCIE), 1328 }; 1329 1330 static const DECLARE_FEAT_MAP_FGT(ich_hfgitr_desc, ich_hfgitr_masks, 1331 ich_hfgitr_feat_map, FEAT_GCIE); 1332 1333 static void __init check_feat_map(const struct reg_bits_to_feat_map *map, 1334 int map_size, u64 resx, const char *str) 1335 { 1336 u64 mask = 0; 1337 1338 /* 1339 * Don't account for FORCE_RESx that are architectural, and 1340 * therefore part of the resx parameter. Other FORCE_RESx bits 1341 * are implementation choices, and therefore accounted for. 1342 */ 1343 for (int i = 0; i < map_size; i++) 1344 if (!((map[i].flags & FORCE_RESx) && (map[i].bits & resx))) 1345 mask |= map[i].bits; 1346 1347 if (mask != ~resx) 1348 kvm_err("Undefined %s behaviour, bits %016llx\n", 1349 str, mask ^ ~resx); 1350 } 1351 1352 static u64 reg_feat_map_bits(const struct reg_bits_to_feat_map *map) 1353 { 1354 return map->flags & MASKS_POINTER ? (map->masks->mask | map->masks->nmask) : map->bits; 1355 } 1356 1357 static void __init check_reg_desc(const struct reg_feat_map_desc *r) 1358 { 1359 check_feat_map(r->bit_feat_map, r->bit_feat_map_sz, 1360 ~reg_feat_map_bits(&r->feat_map), r->name); 1361 } 1362 1363 void __init check_feature_map(void) 1364 { 1365 check_reg_desc(&hfgrtr_desc); 1366 check_reg_desc(&hfgwtr_desc); 1367 check_reg_desc(&hfgitr_desc); 1368 check_reg_desc(&hdfgrtr_desc); 1369 check_reg_desc(&hdfgwtr_desc); 1370 check_reg_desc(&hafgrtr_desc); 1371 check_reg_desc(&hfgrtr2_desc); 1372 check_reg_desc(&hfgwtr2_desc); 1373 check_reg_desc(&hfgitr2_desc); 1374 check_reg_desc(&hdfgrtr2_desc); 1375 check_reg_desc(&hdfgwtr2_desc); 1376 check_reg_desc(&hcrx_desc); 1377 check_reg_desc(&hcr_desc); 1378 check_reg_desc(&sctlr2_desc); 1379 check_reg_desc(&tcr2_el2_desc); 1380 check_reg_desc(&sctlr_el1_desc); 1381 check_reg_desc(&sctlr_el2_desc); 1382 check_reg_desc(&mdcr_el2_desc); 1383 check_reg_desc(&vtcr_el2_desc); 1384 check_reg_desc(&ich_hfgrtr_desc); 1385 check_reg_desc(&ich_hfgwtr_desc); 1386 check_reg_desc(&ich_hfgitr_desc); 1387 } 1388 1389 static bool idreg_feat_match(struct kvm *kvm, const struct reg_bits_to_feat_map *map) 1390 { 1391 u64 regval = kvm->arch.id_regs[map->regidx]; 1392 u64 regfld = (regval >> map->shift) & GENMASK(map->width - 1, 0); 1393 1394 if (map->sign) { 1395 s64 sfld = sign_extend64(regfld, map->width - 1); 1396 s64 slim = sign_extend64(map->lo_lim, map->width - 1); 1397 return sfld >= slim; 1398 } else { 1399 return regfld >= map->lo_lim; 1400 } 1401 } 1402 1403 static struct resx compute_resx_bits(struct kvm *kvm, 1404 const struct reg_bits_to_feat_map *map, 1405 int map_size, 1406 unsigned long require, 1407 unsigned long exclude) 1408 { 1409 bool e2h0 = kvm_has_feat(kvm, FEAT_E2H0); 1410 struct resx resx = {}; 1411 1412 for (int i = 0; i < map_size; i++) { 1413 bool match; 1414 1415 if ((map[i].flags & require) != require) 1416 continue; 1417 1418 if (map[i].flags & exclude) 1419 continue; 1420 1421 if (map[i].flags & FORCE_RESx) 1422 match = false; 1423 else if (map[i].flags & CALL_FUNC) 1424 match = map[i].match(kvm); 1425 else 1426 match = idreg_feat_match(kvm, &map[i]); 1427 1428 if (map[i].flags & REQUIRES_E2H1) 1429 match &= !e2h0; 1430 1431 if (!match) { 1432 u64 bits = reg_feat_map_bits(&map[i]); 1433 1434 if ((map[i].flags & AS_RES1) || 1435 (e2h0 && (map[i].flags & RES1_WHEN_E2H0)) || 1436 (!e2h0 && (map[i].flags & RES1_WHEN_E2H1))) 1437 resx.res1 |= bits; 1438 else 1439 resx.res0 |= bits; 1440 } 1441 } 1442 1443 return resx; 1444 } 1445 1446 static struct resx compute_reg_resx_bits(struct kvm *kvm, 1447 const struct reg_feat_map_desc *r, 1448 unsigned long require, 1449 unsigned long exclude) 1450 { 1451 struct resx resx; 1452 1453 resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz, 1454 require, exclude); 1455 1456 if (r->feat_map.flags & MASKS_POINTER) { 1457 resx.res0 |= r->feat_map.masks->res0; 1458 resx.res1 |= r->feat_map.masks->res1; 1459 } 1460 1461 /* 1462 * If the register itself was not valid, all the non-RESx bits are 1463 * now considered RES0 (this matches the behaviour of registers such 1464 * as SCTLR2 and TCR2). Weed out any potential (though unlikely) 1465 * overlap with RES1 bits coming from the previous computation. 1466 */ 1467 resx.res0 |= compute_resx_bits(kvm, &r->feat_map, 1, require, exclude).res0; 1468 resx.res1 &= ~resx.res0; 1469 1470 return resx; 1471 } 1472 1473 static u64 compute_fgu_bits(struct kvm *kvm, const struct reg_feat_map_desc *r) 1474 { 1475 struct resx resx; 1476 1477 /* 1478 * If computing FGUs, we collect the unsupported feature bits as 1479 * RESx bits, but don't take the actual RESx bits or register 1480 * existence into account -- we're not computing bits for the 1481 * register itself. 1482 */ 1483 resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz, 1484 0, NEVER_FGU); 1485 1486 return resx.res0 | resx.res1; 1487 } 1488 1489 void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt) 1490 { 1491 u64 val = 0; 1492 1493 switch (fgt) { 1494 case HFGRTR_GROUP: 1495 val |= compute_fgu_bits(kvm, &hfgrtr_desc); 1496 val |= compute_fgu_bits(kvm, &hfgwtr_desc); 1497 break; 1498 case HFGITR_GROUP: 1499 val |= compute_fgu_bits(kvm, &hfgitr_desc); 1500 break; 1501 case HDFGRTR_GROUP: 1502 val |= compute_fgu_bits(kvm, &hdfgrtr_desc); 1503 val |= compute_fgu_bits(kvm, &hdfgwtr_desc); 1504 break; 1505 case HAFGRTR_GROUP: 1506 val |= compute_fgu_bits(kvm, &hafgrtr_desc); 1507 break; 1508 case HFGRTR2_GROUP: 1509 val |= compute_fgu_bits(kvm, &hfgrtr2_desc); 1510 val |= compute_fgu_bits(kvm, &hfgwtr2_desc); 1511 break; 1512 case HFGITR2_GROUP: 1513 val |= compute_fgu_bits(kvm, &hfgitr2_desc); 1514 break; 1515 case HDFGRTR2_GROUP: 1516 val |= compute_fgu_bits(kvm, &hdfgrtr2_desc); 1517 val |= compute_fgu_bits(kvm, &hdfgwtr2_desc); 1518 break; 1519 case ICH_HFGRTR_GROUP: 1520 val |= compute_fgu_bits(kvm, &ich_hfgrtr_desc); 1521 val |= compute_fgu_bits(kvm, &ich_hfgwtr_desc); 1522 break; 1523 case ICH_HFGITR_GROUP: 1524 val |= compute_fgu_bits(kvm, &ich_hfgitr_desc); 1525 break; 1526 default: 1527 BUG(); 1528 } 1529 1530 kvm->arch.fgu[fgt] = val; 1531 } 1532 1533 struct resx get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg) 1534 { 1535 struct resx resx; 1536 1537 switch (reg) { 1538 case HFGRTR_EL2: 1539 resx = compute_reg_resx_bits(kvm, &hfgrtr_desc, 0, 0); 1540 break; 1541 case HFGWTR_EL2: 1542 resx = compute_reg_resx_bits(kvm, &hfgwtr_desc, 0, 0); 1543 break; 1544 case HFGITR_EL2: 1545 resx = compute_reg_resx_bits(kvm, &hfgitr_desc, 0, 0); 1546 break; 1547 case HDFGRTR_EL2: 1548 resx = compute_reg_resx_bits(kvm, &hdfgrtr_desc, 0, 0); 1549 break; 1550 case HDFGWTR_EL2: 1551 resx = compute_reg_resx_bits(kvm, &hdfgwtr_desc, 0, 0); 1552 break; 1553 case HAFGRTR_EL2: 1554 resx = compute_reg_resx_bits(kvm, &hafgrtr_desc, 0, 0); 1555 break; 1556 case HFGRTR2_EL2: 1557 resx = compute_reg_resx_bits(kvm, &hfgrtr2_desc, 0, 0); 1558 break; 1559 case HFGWTR2_EL2: 1560 resx = compute_reg_resx_bits(kvm, &hfgwtr2_desc, 0, 0); 1561 break; 1562 case HFGITR2_EL2: 1563 resx = compute_reg_resx_bits(kvm, &hfgitr2_desc, 0, 0); 1564 break; 1565 case HDFGRTR2_EL2: 1566 resx = compute_reg_resx_bits(kvm, &hdfgrtr2_desc, 0, 0); 1567 break; 1568 case HDFGWTR2_EL2: 1569 resx = compute_reg_resx_bits(kvm, &hdfgwtr2_desc, 0, 0); 1570 break; 1571 case HCRX_EL2: 1572 resx = compute_reg_resx_bits(kvm, &hcrx_desc, 0, 0); 1573 resx.res1 |= __HCRX_EL2_RES1; 1574 break; 1575 case HCR_EL2: 1576 resx = compute_reg_resx_bits(kvm, &hcr_desc, 0, 0); 1577 break; 1578 case SCTLR2_EL1: 1579 case SCTLR2_EL2: 1580 resx = compute_reg_resx_bits(kvm, &sctlr2_desc, 0, 0); 1581 break; 1582 case TCR2_EL2: 1583 resx = compute_reg_resx_bits(kvm, &tcr2_el2_desc, 0, 0); 1584 break; 1585 case SCTLR_EL1: 1586 resx = compute_reg_resx_bits(kvm, &sctlr_el1_desc, 0, 0); 1587 break; 1588 case SCTLR_EL2: 1589 resx = compute_reg_resx_bits(kvm, &sctlr_el2_desc, 0, 0); 1590 break; 1591 case MDCR_EL2: 1592 resx = compute_reg_resx_bits(kvm, &mdcr_el2_desc, 0, 0); 1593 break; 1594 case VTCR_EL2: 1595 resx = compute_reg_resx_bits(kvm, &vtcr_el2_desc, 0, 0); 1596 break; 1597 case ICH_HFGRTR_EL2: 1598 resx = compute_reg_resx_bits(kvm, &ich_hfgrtr_desc, 0, 0); 1599 break; 1600 case ICH_HFGWTR_EL2: 1601 resx = compute_reg_resx_bits(kvm, &ich_hfgwtr_desc, 0, 0); 1602 break; 1603 case ICH_HFGITR_EL2: 1604 resx = compute_reg_resx_bits(kvm, &ich_hfgitr_desc, 0, 0); 1605 break; 1606 default: 1607 WARN_ON_ONCE(1); 1608 resx = (typeof(resx)){}; 1609 break; 1610 } 1611 1612 return resx; 1613 } 1614 1615 static __always_inline struct fgt_masks *__fgt_reg_to_masks(enum vcpu_sysreg reg) 1616 { 1617 switch (reg) { 1618 case HFGRTR_EL2: 1619 return &hfgrtr_masks; 1620 case HFGWTR_EL2: 1621 return &hfgwtr_masks; 1622 case HFGITR_EL2: 1623 return &hfgitr_masks; 1624 case HDFGRTR_EL2: 1625 return &hdfgrtr_masks; 1626 case HDFGWTR_EL2: 1627 return &hdfgwtr_masks; 1628 case HAFGRTR_EL2: 1629 return &hafgrtr_masks; 1630 case HFGRTR2_EL2: 1631 return &hfgrtr2_masks; 1632 case HFGWTR2_EL2: 1633 return &hfgwtr2_masks; 1634 case HFGITR2_EL2: 1635 return &hfgitr2_masks; 1636 case HDFGRTR2_EL2: 1637 return &hdfgrtr2_masks; 1638 case HDFGWTR2_EL2: 1639 return &hdfgwtr2_masks; 1640 case ICH_HFGRTR_EL2: 1641 return &ich_hfgrtr_masks; 1642 case ICH_HFGWTR_EL2: 1643 return &ich_hfgwtr_masks; 1644 case ICH_HFGITR_EL2: 1645 return &ich_hfgitr_masks; 1646 default: 1647 BUILD_BUG_ON(1); 1648 } 1649 } 1650 1651 static __always_inline void __compute_fgt(struct kvm_vcpu *vcpu, enum vcpu_sysreg reg) 1652 { 1653 u64 fgu = vcpu->kvm->arch.fgu[__fgt_reg_to_group_id(reg)]; 1654 struct fgt_masks *m = __fgt_reg_to_masks(reg); 1655 u64 clear = 0, set = 0, val = m->nmask; 1656 1657 set |= fgu & m->mask; 1658 clear |= fgu & m->nmask; 1659 1660 if (is_nested_ctxt(vcpu)) { 1661 u64 nested = __vcpu_sys_reg(vcpu, reg); 1662 set |= nested & m->mask; 1663 clear |= ~nested & m->nmask; 1664 } 1665 1666 val |= set | m->res1; 1667 val &= ~(clear | m->res0); 1668 *vcpu_fgt(vcpu, reg) = val; 1669 } 1670 1671 static void __compute_hfgwtr(struct kvm_vcpu *vcpu) 1672 { 1673 __compute_fgt(vcpu, HFGWTR_EL2); 1674 1675 if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38)) 1676 *vcpu_fgt(vcpu, HFGWTR_EL2) |= HFGWTR_EL2_TCR_EL1; 1677 } 1678 1679 static void __compute_hdfgwtr(struct kvm_vcpu *vcpu) 1680 { 1681 __compute_fgt(vcpu, HDFGWTR_EL2); 1682 1683 if (is_hyp_ctxt(vcpu)) 1684 *vcpu_fgt(vcpu, HDFGWTR_EL2) |= HDFGWTR_EL2_MDSCR_EL1; 1685 } 1686 1687 static void __compute_ich_hfgrtr(struct kvm_vcpu *vcpu) 1688 { 1689 __compute_fgt(vcpu, ICH_HFGRTR_EL2); 1690 1691 /* 1692 * ICC_IAFFIDR_EL1 *always* needs to be trapped when running a guest. 1693 * 1694 * We also trap accesses to ICC_IDR0_EL1 to allow us to completely hide 1695 * FEAT_GCIE_LEGACY from the guest, and to (potentially) present fewer 1696 * ID bits than the host supports. 1697 */ 1698 *vcpu_fgt(vcpu, ICH_HFGRTR_EL2) &= ~(ICH_HFGRTR_EL2_ICC_IAFFIDR_EL1 | 1699 ICH_HFGRTR_EL2_ICC_IDRn_EL1); 1700 } 1701 1702 static void __compute_ich_hfgwtr(struct kvm_vcpu *vcpu) 1703 { 1704 __compute_fgt(vcpu, ICH_HFGWTR_EL2); 1705 1706 /* 1707 * We present a different subset of PPIs the guest from what 1708 * exist in real hardware. We only trap writes, not reads. 1709 */ 1710 *vcpu_fgt(vcpu, ICH_HFGWTR_EL2) &= ~(ICH_HFGWTR_EL2_ICC_PPI_ENABLERn_EL1); 1711 } 1712 1713 void kvm_vcpu_load_fgt(struct kvm_vcpu *vcpu) 1714 { 1715 if (!cpus_have_final_cap(ARM64_HAS_FGT)) 1716 return; 1717 1718 __compute_fgt(vcpu, HFGRTR_EL2); 1719 __compute_hfgwtr(vcpu); 1720 __compute_fgt(vcpu, HFGITR_EL2); 1721 __compute_fgt(vcpu, HDFGRTR_EL2); 1722 __compute_hdfgwtr(vcpu); 1723 __compute_fgt(vcpu, HAFGRTR_EL2); 1724 1725 if (cpus_have_final_cap(ARM64_HAS_FGT2)) { 1726 __compute_fgt(vcpu, HFGRTR2_EL2); 1727 __compute_fgt(vcpu, HFGWTR2_EL2); 1728 __compute_fgt(vcpu, HFGITR2_EL2); 1729 __compute_fgt(vcpu, HDFGRTR2_EL2); 1730 __compute_fgt(vcpu, HDFGWTR2_EL2); 1731 } 1732 1733 if (cpus_have_final_cap(ARM64_HAS_GICV5_CPUIF)) { 1734 __compute_ich_hfgrtr(vcpu); 1735 __compute_ich_hfgwtr(vcpu); 1736 __compute_fgt(vcpu, ICH_HFGITR_EL2); 1737 } 1738 } 1739