1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (C) 2025 Google LLC
4 * Author: Marc Zyngier <maz@kernel.org>
5 */
6
7 #include <linux/kvm_host.h>
8 #include <asm/kvm_emulate.h>
9 #include <asm/kvm_nested.h>
10 #include <asm/sysreg.h>
11
/*
 * Describes the dependencies between a set of bits (or the negation
 * of a set of RES0 bits) and a feature. The flags indicate how the
 * data is interpreted.
 */
struct reg_bits_to_feat_map {
	union {
		u64		bits;	/* Raw bit set this entry covers */
		struct fgt_masks *masks; /* Used when MASKS_POINTER is set */
	};

#define NEVER_FGU	BIT(0)	/* Can trap, but never UNDEF */
#define CALL_FUNC	BIT(1)	/* Needs to evaluate tons of crap */
#define FORCE_RESx	BIT(2)	/* Unconditional RESx */
#define MASKS_POINTER	BIT(3)	/* Pointer to fgt_masks struct instead of bits */
#define AS_RES1		BIT(4)	/* RES1 when not supported */
#define REQUIRES_E2H1	BIT(5)	/* Add HCR_EL2.E2H RES1 as a pre-condition */
#define RES1_WHEN_E2H0	BIT(6)	/* RES1 when E2H=0 and not supported */
#define RES1_WHEN_E2H1	BIT(7)	/* RES1 when E2H=1 and not supported */

	unsigned long	flags;

	union {
		/* ID register field the bits depend on (default case) */
		struct {
			u8	regidx;	/* IDREG_IDX() of the ID register */
			u8	shift;	/* Field shift within the register */
			u8	width;	/* Field width in bits */
			bool	sign;	/* Field is signed */
			s8	lo_lim;	/* Minimum field value for support */
		};
		/* Arbitrary predicate, used when CALL_FUNC is set */
		bool	(*match)(struct kvm *);
	};
};
45
/*
 * Describes the dependencies for a given register:
 *
 * @feat_map describes the dependency for the whole register. If the
 * features the register depends on are not present, the whole
 * register is effectively RES0.
 *
 * @bit_feat_map describes the dependencies for a set of bits in that
 * register. If the features these bits depend on are not present, the
 * bits are effectively RES0.
 */
struct reg_feat_map_desc {
	const char			*name;		/* Register name, for debug */
	const struct reg_bits_to_feat_map feat_map;	/* Whole-register dependency */
	const struct reg_bits_to_feat_map *bit_feat_map; /* Per-bit dependencies */
	const unsigned int		bit_feat_map_sz; /* Number of entries above */
};
63
/*
 * Three trailing arguments: an ID register, a field and a limit,
 * e.g. NEEDS_FEAT(bits, ID_AA64PFR0_EL1, EL1, IMP). Relies on the
 * generated <id>_<field>_{SHIFT,WIDTH,SIGNED,<lim>} constants.
 */
#define __NEEDS_FEAT_3(m, f, w, id, fld, lim)		\
	{						\
		.w	= (m),				\
		.flags = (f),				\
		.regidx	= IDREG_IDX(SYS_ ## id),	\
		.shift	= id ##_## fld ## _SHIFT,	\
		.width	= id ##_## fld ## _WIDTH,	\
		.sign	= id ##_## fld ## _SIGNED,	\
		.lo_lim	= id ##_## fld ##_## lim	\
	}

/* One trailing argument: a predicate function evaluated at runtime. */
#define __NEEDS_FEAT_1(m, f, w, fun)			\
	{						\
		.w	= (m),				\
		.flags = (f) | CALL_FUNC,		\
		.match	= (fun),			\
	}

/* No trailing argument: flags only (e.g. FORCE_RESx entries). */
#define __NEEDS_FEAT_0(m, f, w, ...)			\
	{						\
		.w	= (m),				\
		.flags = (f),				\
	}

/* Dispatch on the number of variadic arguments (0, 1 or 3). */
#define __NEEDS_FEAT_FLAG(m, f, w, ...)			\
	CONCATENATE(__NEEDS_FEAT_, COUNT_ARGS(__VA_ARGS__))(m, f, w, __VA_ARGS__)

/* Entry described by a raw bit set. */
#define NEEDS_FEAT_FLAG(m, f, ...)			\
	__NEEDS_FEAT_FLAG(m, f, bits, __VA_ARGS__)

/* Entry described by a pointer to a struct fgt_masks. */
#define NEEDS_FEAT_MASKS(p, ...)			\
	__NEEDS_FEAT_FLAG(p, MASKS_POINTER, masks, __VA_ARGS__)

/*
 * Declare the dependency between a set of bits and a set of features,
 * generating a struct reg_bit_to_feat_map.
 */
#define NEEDS_FEAT(m, ...)	NEEDS_FEAT_FLAG(m, 0, __VA_ARGS__)

/* Declare fixed RESx bits */
#define FORCE_RES0(m)		NEEDS_FEAT_FLAG(m, FORCE_RESx)
#define FORCE_RES1(m)		NEEDS_FEAT_FLAG(m, FORCE_RESx | AS_RES1)
106
/*
 * Declare the dependency between a non-FGT register, a set of features,
 * and the set of individual bits it contains. This generates a struct
 * reg_feat_map_desc. The whole-register map covers everything that is
 * not already fixed RES0/RES1.
 */
#define DECLARE_FEAT_MAP(n, r, m, f)				\
	struct reg_feat_map_desc n = {				\
		.name			= #r,			\
		.feat_map	= NEEDS_FEAT(~(r##_RES0 |	\
					       r##_RES1), f),	\
		.bit_feat_map	= m,				\
		.bit_feat_map_sz = ARRAY_SIZE(m),		\
	}

/*
 * Specialised version of the above for FGT registers that have their
 * RESx masks described as struct fgt_masks.
 */
#define DECLARE_FEAT_MAP_FGT(n, msk, m, f)			\
	struct reg_feat_map_desc n = {				\
		.name		= #msk,				\
		.feat_map	= NEEDS_FEAT_MASKS(&msk, f),	\
		.bit_feat_map	= m,				\
		.bit_feat_map_sz = ARRAY_SIZE(m),		\
	}
132
/*
 * Shorthands expanding to (ID register, field, minimum value) triples,
 * consumed by the 3-argument form of NEEDS_FEAT().
 */
#define FEAT_SPE		ID_AA64DFR0_EL1, PMSVer, IMP
#define FEAT_SPE_FnE		ID_AA64DFR0_EL1, PMSVer, V1P2
#define FEAT_BRBE		ID_AA64DFR0_EL1, BRBE, IMP
#define FEAT_TRC_SR		ID_AA64DFR0_EL1, TraceVer, IMP
#define FEAT_PMUv3		ID_AA64DFR0_EL1, PMUVer, IMP
#define FEAT_TRBE		ID_AA64DFR0_EL1, TraceBuffer, IMP
#define FEAT_TRBEv1p1		ID_AA64DFR0_EL1, TraceBuffer, TRBE_V1P1
#define FEAT_DoubleLock		ID_AA64DFR0_EL1, DoubleLock, IMP
#define FEAT_TRF		ID_AA64DFR0_EL1, TraceFilt, IMP
#define FEAT_AA32EL0		ID_AA64PFR0_EL1, EL0, AARCH32
#define FEAT_AA32EL1		ID_AA64PFR0_EL1, EL1, AARCH32
#define FEAT_AA64EL1		ID_AA64PFR0_EL1, EL1, IMP
#define FEAT_AA64EL2		ID_AA64PFR0_EL1, EL2, IMP
#define FEAT_AA64EL3		ID_AA64PFR0_EL1, EL3, IMP
#define FEAT_SEL2		ID_AA64PFR0_EL1, SEL2, IMP
#define FEAT_AIE		ID_AA64MMFR3_EL1, AIE, IMP
#define FEAT_S2POE		ID_AA64MMFR3_EL1, S2POE, IMP
#define FEAT_S1POE		ID_AA64MMFR3_EL1, S1POE, IMP
#define FEAT_S1PIE		ID_AA64MMFR3_EL1, S1PIE, IMP
#define FEAT_THE		ID_AA64PFR1_EL1, THE, IMP
#define FEAT_SME		ID_AA64PFR1_EL1, SME, IMP
#define FEAT_GCS		ID_AA64PFR1_EL1, GCS, IMP
#define FEAT_LS64		ID_AA64ISAR1_EL1, LS64, LS64
#define FEAT_LS64_V		ID_AA64ISAR1_EL1, LS64, LS64_V
#define FEAT_LS64_ACCDATA	ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA
#define FEAT_RAS		ID_AA64PFR0_EL1, RAS, IMP
#define FEAT_RASv2		ID_AA64PFR0_EL1, RAS, V2
#define FEAT_GICv3		ID_AA64PFR0_EL1, GIC, IMP
#define FEAT_LOR		ID_AA64MMFR1_EL1, LO, IMP
#define FEAT_SPEv1p2		ID_AA64DFR0_EL1, PMSVer, V1P2
#define FEAT_SPEv1p4		ID_AA64DFR0_EL1, PMSVer, V1P4
#define FEAT_SPEv1p5		ID_AA64DFR0_EL1, PMSVer, V1P5
#define FEAT_ATS1A		ID_AA64ISAR2_EL1, ATS1A, IMP
#define FEAT_SPECRES2		ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX
#define FEAT_SPECRES		ID_AA64ISAR1_EL1, SPECRES, IMP
#define FEAT_TLBIRANGE		ID_AA64ISAR0_EL1, TLB, RANGE
#define FEAT_TLBIOS		ID_AA64ISAR0_EL1, TLB, OS
#define FEAT_PAN2		ID_AA64MMFR1_EL1, PAN, PAN2
#define FEAT_DPB2		ID_AA64ISAR1_EL1, DPB, DPB2
#define FEAT_AMUv1		ID_AA64PFR0_EL1, AMU, IMP
#define FEAT_AMUv1p1		ID_AA64PFR0_EL1, AMU, V1P1
#define FEAT_CMOW		ID_AA64MMFR1_EL1, CMOW, IMP
#define FEAT_D128		ID_AA64MMFR3_EL1, D128, IMP
#define FEAT_DoubleFault2	ID_AA64PFR1_EL1, DF2, IMP
#define FEAT_FPMR		ID_AA64PFR2_EL1, FPMR, IMP
#define FEAT_MOPS		ID_AA64ISAR2_EL1, MOPS, IMP
#define FEAT_NMI		ID_AA64PFR1_EL1, NMI, IMP
#define FEAT_SCTLR2		ID_AA64MMFR3_EL1, SCTLRX, IMP
#define FEAT_SYSREG128		ID_AA64ISAR2_EL1, SYSREG_128, IMP
#define FEAT_TCR2		ID_AA64MMFR3_EL1, TCRX, IMP
#define FEAT_XS			ID_AA64ISAR1_EL1, XS, IMP
#define FEAT_EVT		ID_AA64MMFR2_EL1, EVT, IMP
#define FEAT_EVT_TTLBxS		ID_AA64MMFR2_EL1, EVT, TTLBxS
#define FEAT_MTE2		ID_AA64PFR1_EL1, MTE, MTE2
#define FEAT_RME		ID_AA64PFR0_EL1, RME, IMP
#define FEAT_MPAM		ID_AA64PFR0_EL1, MPAM, 1
#define FEAT_S2FWB		ID_AA64MMFR2_EL1, FWB, IMP
#define FEAT_TWED		ID_AA64MMFR1_EL1, TWED, IMP
#define FEAT_E2H0		ID_AA64MMFR4_EL1, E2H0, IMP
#define FEAT_SRMASK		ID_AA64MMFR4_EL1, SRMASK, IMP
#define FEAT_PoPS		ID_AA64MMFR4_EL1, PoPS, IMP
#define FEAT_PFAR		ID_AA64PFR1_EL1, PFAR, IMP
#define FEAT_Debugv8p9		ID_AA64DFR0_EL1, PMUVer, V3P9
#define FEAT_PMUv3_SS		ID_AA64DFR0_EL1, PMSS, IMP
#define FEAT_SEBEP		ID_AA64DFR0_EL1, SEBEP, IMP
#define FEAT_EBEP		ID_AA64DFR1_EL1, EBEP, IMP
#define FEAT_ITE		ID_AA64DFR1_EL1, ITE, IMP
#define FEAT_PMUv3_ICNTR	ID_AA64DFR1_EL1, PMICNTR, IMP
#define FEAT_SPMU		ID_AA64DFR1_EL1, SPMU, IMP
#define FEAT_SPE_nVM		ID_AA64DFR2_EL1, SPE_nVM, IMP
#define FEAT_STEP2		ID_AA64DFR2_EL1, STEP, IMP
#define FEAT_CPA2		ID_AA64ISAR3_EL1, CPA, CPA2
#define FEAT_ASID2		ID_AA64MMFR4_EL1, ASID2, IMP
#define FEAT_MEC		ID_AA64MMFR3_EL1, MEC, IMP
#define FEAT_HAFT		ID_AA64MMFR1_EL1, HAFDBS, HAFT
#define FEAT_HDBSS		ID_AA64MMFR1_EL1, HAFDBS, HDBSS
#define FEAT_HPDS2		ID_AA64MMFR1_EL1, HPDS, HPDS2
#define FEAT_BTI		ID_AA64PFR1_EL1, BT, IMP
#define FEAT_ExS		ID_AA64MMFR0_EL1, EXS, IMP
#define FEAT_IESB		ID_AA64MMFR2_EL1, IESB, IMP
#define FEAT_LSE2		ID_AA64MMFR2_EL1, AT, IMP
#define FEAT_LSMAOC		ID_AA64MMFR2_EL1, LSM, IMP
#define FEAT_MixedEnd		ID_AA64MMFR0_EL1, BIGEND, IMP
#define FEAT_MixedEndEL0	ID_AA64MMFR0_EL1, BIGENDEL0, IMP
#define FEAT_MTE_ASYNC		ID_AA64PFR1_EL1, MTE_frac, ASYNC
#define FEAT_MTE_STORE_ONLY	ID_AA64PFR2_EL1, MTESTOREONLY, IMP
#define FEAT_PAN		ID_AA64MMFR1_EL1, PAN, IMP
#define FEAT_PAN3		ID_AA64MMFR1_EL1, PAN, PAN3
#define FEAT_SSBS		ID_AA64PFR1_EL1, SSBS, IMP
#define FEAT_TIDCP1		ID_AA64MMFR1_EL1, TIDCP1, IMP
#define FEAT_FGT		ID_AA64MMFR0_EL1, FGT, IMP
#define FEAT_FGT2		ID_AA64MMFR0_EL1, FGT, FGT2
#define FEAT_MTPMU		ID_AA64DFR0_EL1, MTPMU, IMP
#define FEAT_HCX		ID_AA64MMFR1_EL1, HCX, IMP
#define FEAT_S2PIE		ID_AA64MMFR3_EL1, S2PIE, IMP
228
not_feat_aa64el3(struct kvm * kvm)229 static bool not_feat_aa64el3(struct kvm *kvm)
230 {
231 return !kvm_has_feat(kvm, FEAT_AA64EL3);
232 }
233
feat_nv2(struct kvm * kvm)234 static bool feat_nv2(struct kvm *kvm)
235 {
236 return ((kvm_has_feat(kvm, ID_AA64MMFR4_EL1, NV_frac, NV2_ONLY) &&
237 kvm_has_feat_enum(kvm, ID_AA64MMFR2_EL1, NV, NI)) ||
238 kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2));
239 }
240
feat_nv2_e2h0_ni(struct kvm * kvm)241 static bool feat_nv2_e2h0_ni(struct kvm *kvm)
242 {
243 return feat_nv2(kvm) && !kvm_has_feat(kvm, FEAT_E2H0);
244 }
245
feat_rasv1p1(struct kvm * kvm)246 static bool feat_rasv1p1(struct kvm *kvm)
247 {
248 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1) ||
249 (kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, RAS, IMP) &&
250 kvm_has_feat(kvm, ID_AA64PFR1_EL1, RAS_frac, RASv1p1)));
251 }
252
feat_csv2_2_csv2_1p2(struct kvm * kvm)253 static bool feat_csv2_2_csv2_1p2(struct kvm *kvm)
254 {
255 return (kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) ||
256 (kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2) &&
257 kvm_has_feat_enum(kvm, ID_AA64PFR0_EL1, CSV2, IMP)));
258 }
259
feat_pauth(struct kvm * kvm)260 static bool feat_pauth(struct kvm *kvm)
261 {
262 return kvm_has_pauth(kvm, PAuth);
263 }
264
feat_pauth_lr(struct kvm * kvm)265 static bool feat_pauth_lr(struct kvm *kvm)
266 {
267 return kvm_has_pauth(kvm, PAuth_LR);
268 }
269
feat_aderr(struct kvm * kvm)270 static bool feat_aderr(struct kvm *kvm)
271 {
272 return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, FEAT_ADERR) &&
273 kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SDERR, FEAT_ADERR));
274 }
275
feat_anerr(struct kvm * kvm)276 static bool feat_anerr(struct kvm *kvm)
277 {
278 return (kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ANERR, FEAT_ANERR) &&
279 kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SNERR, FEAT_ANERR));
280 }
281
feat_sme_smps(struct kvm * kvm)282 static bool feat_sme_smps(struct kvm *kvm)
283 {
284 /*
285 * Revists this if KVM ever supports SME -- this really should
286 * look at the guest's view of SMIDR_EL1. Funnily enough, this
287 * is not captured in the JSON file, but only as a note in the
288 * ARM ARM.
289 */
290 return (kvm_has_feat(kvm, FEAT_SME) &&
291 (read_sysreg_s(SYS_SMIDR_EL1) & SMIDR_EL1_SMPS));
292 }
293
feat_spe_fds(struct kvm * kvm)294 static bool feat_spe_fds(struct kvm *kvm)
295 {
296 /*
297 * Revists this if KVM ever supports SPE -- this really should
298 * look at the guest's view of PMSIDR_EL1.
299 */
300 return (kvm_has_feat(kvm, FEAT_SPEv1p4) &&
301 (read_sysreg_s(SYS_PMSIDR_EL1) & PMSIDR_EL1_FDS));
302 }
303
feat_trbe_mpam(struct kvm * kvm)304 static bool feat_trbe_mpam(struct kvm *kvm)
305 {
306 /*
307 * Revists this if KVM ever supports both MPAM and TRBE --
308 * this really should look at the guest's view of TRBIDR_EL1.
309 */
310 return (kvm_has_feat(kvm, FEAT_TRBE) &&
311 kvm_has_feat(kvm, FEAT_MPAM) &&
312 (read_sysreg_s(SYS_TRBIDR_EL1) & TRBIDR_EL1_MPAM));
313 }
314
feat_ebep_pmuv3_ss(struct kvm * kvm)315 static bool feat_ebep_pmuv3_ss(struct kvm *kvm)
316 {
317 return kvm_has_feat(kvm, FEAT_EBEP) || kvm_has_feat(kvm, FEAT_PMUv3_SS);
318 }
319
feat_mixedendel0(struct kvm * kvm)320 static bool feat_mixedendel0(struct kvm *kvm)
321 {
322 return kvm_has_feat(kvm, FEAT_MixedEnd) || kvm_has_feat(kvm, FEAT_MixedEndEL0);
323 }
324
feat_mte_async(struct kvm * kvm)325 static bool feat_mte_async(struct kvm *kvm)
326 {
327 return kvm_has_feat(kvm, FEAT_MTE2) && kvm_has_feat_enum(kvm, FEAT_MTE_ASYNC);
328 }
329
/*
 * PMU revision at least @r, excluding IMP_DEF implementations which
 * encode as a negative (signed) field value.
 */
#define check_pmu_revision(k, r)					\
	({								\
		(kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, r) &&	\
		 !kvm_has_feat((k), ID_AA64DFR0_EL1, PMUVer, IMP_DEF));	\
	})
335
feat_pmuv3p1(struct kvm * kvm)336 static bool feat_pmuv3p1(struct kvm *kvm)
337 {
338 return check_pmu_revision(kvm, V3P1);
339 }
340
feat_pmuv3p5(struct kvm * kvm)341 static bool feat_pmuv3p5(struct kvm *kvm)
342 {
343 return check_pmu_revision(kvm, V3P5);
344 }
345
feat_pmuv3p7(struct kvm * kvm)346 static bool feat_pmuv3p7(struct kvm *kvm)
347 {
348 return check_pmu_revision(kvm, V3P7);
349 }
350
feat_pmuv3p9(struct kvm * kvm)351 static bool feat_pmuv3p9(struct kvm *kvm)
352 {
353 return check_pmu_revision(kvm, V3P9);
354 }
355
/*
 * Stage-2 support for the TGRAN##s granule: either the _2 field defers
 * to the stage-1 field (which must then be implemented), or the _2
 * field advertises support directly.
 *
 * Note: use the (k) parameter rather than hard-coding "kvm", so the
 * macro doesn't silently depend on the caller's variable name.
 */
#define has_feat_s2tgran(k, s)						\
	((kvm_has_feat_enum((k), ID_AA64MMFR0_EL1, TGRAN##s##_2, TGRAN##s) && \
	  kvm_has_feat((k), ID_AA64MMFR0_EL1, TGRAN##s, IMP)) ||	\
	 kvm_has_feat((k), ID_AA64MMFR0_EL1, TGRAN##s##_2, IMP))
360
feat_lpa2(struct kvm * kvm)361 static bool feat_lpa2(struct kvm *kvm)
362 {
363 return ((kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, 52_BIT) ||
364 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4, IMP)) &&
365 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, 52_BIT) ||
366 !kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16, IMP)) &&
367 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN4_2, 52_BIT) ||
368 !has_feat_s2tgran(kvm, 4)) &&
369 (kvm_has_feat(kvm, ID_AA64MMFR0_EL1, TGRAN16_2, 52_BIT) ||
370 !has_feat_s2tgran(kvm, 16)));
371 }
372
feat_vmid16(struct kvm * kvm)373 static bool feat_vmid16(struct kvm *kvm)
374 {
375 return kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16);
376 }
377
/* Per-bit feature dependencies for the HFGRTR_EL2 read-trap register. */
static const struct reg_bits_to_feat_map hfgrtr_feat_map[] = {
	NEEDS_FEAT(HFGRTR_EL2_nAMAIR2_EL1	|
		   HFGRTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGRTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGRTR_EL2_nPOR_EL1		|
		   HFGRTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGRTR_EL2_nPIR_EL1		|
		   HFGRTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGRTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGRTR_EL2_nTPIDR2_EL0	|
		   HFGRTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGRTR_EL2_nGCS_EL1		|
		   HFGRTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGRTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGRTR_EL2_ERXADDR_EL1	|
		   HFGRTR_EL2_ERXMISCn_EL1	|
		   HFGRTR_EL2_ERXSTATUS_EL1	|
		   HFGRTR_EL2_ERXCTLR_EL1	|
		   HFGRTR_EL2_ERXFR_EL1		|
		   HFGRTR_EL2_ERRSELR_EL1	|
		   HFGRTR_EL2_ERRIDR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGRTR_EL2_ERXPFGCDN_EL1	|
		   HFGRTR_EL2_ERXPFGCTL_EL1	|
		   HFGRTR_EL2_ERXPFGF_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGRTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGRTR_EL2_SCXTNUM_EL0	|
		   HFGRTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGRTR_EL2_LORSA_EL1		|
		   HFGRTR_EL2_LORN_EL1		|
		   HFGRTR_EL2_LORID_EL1		|
		   HFGRTR_EL2_LOREA_EL1		|
		   HFGRTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGRTR_EL2_APIBKey		|
		   HFGRTR_EL2_APIAKey		|
		   HFGRTR_EL2_APGAKey		|
		   HFGRTR_EL2_APDBKey		|
		   HFGRTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HFGRTR_EL2_VBAR_EL1	|
			HFGRTR_EL2_TTBR1_EL1	|
			HFGRTR_EL2_TTBR0_EL1	|
			HFGRTR_EL2_TPIDR_EL0	|
			HFGRTR_EL2_TPIDRRO_EL0	|
			HFGRTR_EL2_TPIDR_EL1	|
			HFGRTR_EL2_TCR_EL1	|
			HFGRTR_EL2_SCTLR_EL1	|
			HFGRTR_EL2_REVIDR_EL1	|
			HFGRTR_EL2_PAR_EL1	|
			HFGRTR_EL2_MPIDR_EL1	|
			HFGRTR_EL2_MIDR_EL1	|
			HFGRTR_EL2_MAIR_EL1	|
			HFGRTR_EL2_ISR_EL1	|
			HFGRTR_EL2_FAR_EL1	|
			HFGRTR_EL2_ESR_EL1	|
			HFGRTR_EL2_DCZID_EL0	|
			HFGRTR_EL2_CTR_EL0	|
			HFGRTR_EL2_CSSELR_EL1	|
			HFGRTR_EL2_CPACR_EL1	|
			HFGRTR_EL2_CONTEXTIDR_EL1|
			HFGRTR_EL2_CLIDR_EL1	|
			HFGRTR_EL2_CCSIDR_EL1	|
			HFGRTR_EL2_AMAIR_EL1	|
			HFGRTR_EL2_AIDR_EL1	|
			HFGRTR_EL2_AFSR1_EL1	|
			HFGRTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};


static const DECLARE_FEAT_MAP_FGT(hfgrtr_desc, hfgrtr_masks,
				  hfgrtr_feat_map, FEAT_FGT);
458
/* Per-bit feature dependencies for the HFGWTR_EL2 write-trap register. */
static const struct reg_bits_to_feat_map hfgwtr_feat_map[] = {
	NEEDS_FEAT(HFGWTR_EL2_nAMAIR2_EL1	|
		   HFGWTR_EL2_nMAIR2_EL1,
		   FEAT_AIE),
	NEEDS_FEAT(HFGWTR_EL2_nS2POR_EL1, FEAT_S2POE),
	NEEDS_FEAT(HFGWTR_EL2_nPOR_EL1		|
		   HFGWTR_EL2_nPOR_EL0,
		   FEAT_S1POE),
	NEEDS_FEAT(HFGWTR_EL2_nPIR_EL1		|
		   HFGWTR_EL2_nPIRE0_EL1,
		   FEAT_S1PIE),
	NEEDS_FEAT(HFGWTR_EL2_nRCWMASK_EL1, FEAT_THE),
	NEEDS_FEAT(HFGWTR_EL2_nTPIDR2_EL0	|
		   HFGWTR_EL2_nSMPRI_EL1,
		   FEAT_SME),
	NEEDS_FEAT(HFGWTR_EL2_nGCS_EL1		|
		   HFGWTR_EL2_nGCS_EL0,
		   FEAT_GCS),
	NEEDS_FEAT(HFGWTR_EL2_nACCDATA_EL1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(HFGWTR_EL2_ERXADDR_EL1	|
		   HFGWTR_EL2_ERXMISCn_EL1	|
		   HFGWTR_EL2_ERXSTATUS_EL1	|
		   HFGWTR_EL2_ERXCTLR_EL1	|
		   HFGWTR_EL2_ERRSELR_EL1,
		   FEAT_RAS),
	NEEDS_FEAT(HFGWTR_EL2_ERXPFGCDN_EL1	|
		   HFGWTR_EL2_ERXPFGCTL_EL1,
		   feat_rasv1p1),
	NEEDS_FEAT(HFGWTR_EL2_ICC_IGRPENn_EL1, FEAT_GICv3),
	NEEDS_FEAT(HFGWTR_EL2_SCXTNUM_EL0	|
		   HFGWTR_EL2_SCXTNUM_EL1,
		   feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HFGWTR_EL2_LORSA_EL1		|
		   HFGWTR_EL2_LORN_EL1		|
		   HFGWTR_EL2_LOREA_EL1		|
		   HFGWTR_EL2_LORC_EL1,
		   FEAT_LOR),
	NEEDS_FEAT(HFGWTR_EL2_APIBKey		|
		   HFGWTR_EL2_APIAKey		|
		   HFGWTR_EL2_APGAKey		|
		   HFGWTR_EL2_APDBKey		|
		   HFGWTR_EL2_APDAKey,
		   feat_pauth),
	/* Baseline AArch64 EL1 registers: can trap, but must never UNDEF */
	NEEDS_FEAT_FLAG(HFGWTR_EL2_VBAR_EL1	|
			HFGWTR_EL2_TTBR1_EL1	|
			HFGWTR_EL2_TTBR0_EL1	|
			HFGWTR_EL2_TPIDR_EL0	|
			HFGWTR_EL2_TPIDRRO_EL0	|
			HFGWTR_EL2_TPIDR_EL1	|
			HFGWTR_EL2_TCR_EL1	|
			HFGWTR_EL2_SCTLR_EL1	|
			HFGWTR_EL2_PAR_EL1	|
			HFGWTR_EL2_MAIR_EL1	|
			HFGWTR_EL2_FAR_EL1	|
			HFGWTR_EL2_ESR_EL1	|
			HFGWTR_EL2_CSSELR_EL1	|
			HFGWTR_EL2_CPACR_EL1	|
			HFGWTR_EL2_CONTEXTIDR_EL1|
			HFGWTR_EL2_AMAIR_EL1	|
			HFGWTR_EL2_AFSR1_EL1	|
			HFGWTR_EL2_AFSR0_EL1,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr_desc, hfgwtr_masks,
				  hfgwtr_feat_map, FEAT_FGT);
525
/* Per-bit feature dependencies for the HDFGRTR_EL2 debug read-trap register. */
static const struct reg_bits_to_feat_map hdfgrtr_feat_map[] = {
	NEEDS_FEAT(HDFGRTR_EL2_PMBIDR_EL1	|
		   HDFGRTR_EL2_PMSLATFR_EL1	|
		   HDFGRTR_EL2_PMSIRR_EL1	|
		   HDFGRTR_EL2_PMSIDR_EL1	|
		   HDFGRTR_EL2_PMSICR_EL1	|
		   HDFGRTR_EL2_PMSFCR_EL1	|
		   HDFGRTR_EL2_PMSEVFR_EL1	|
		   HDFGRTR_EL2_PMSCR_EL1	|
		   HDFGRTR_EL2_PMBSR_EL1	|
		   HDFGRTR_EL2_PMBPTR_EL1	|
		   HDFGRTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGRTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE),
	NEEDS_FEAT(HDFGRTR_EL2_nBRBDATA		|
		   HDFGRTR_EL2_nBRBCTL		|
		   HDFGRTR_EL2_nBRBIDR,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGRTR_EL2_TRCVICTLR	|
		   HDFGRTR_EL2_TRCSTATR		|
		   HDFGRTR_EL2_TRCSSCSRn	|
		   HDFGRTR_EL2_TRCSEQSTR	|
		   HDFGRTR_EL2_TRCPRGCTLR	|
		   HDFGRTR_EL2_TRCOSLSR		|
		   HDFGRTR_EL2_TRCIMSPECn	|
		   HDFGRTR_EL2_TRCID		|
		   HDFGRTR_EL2_TRCCNTVRn	|
		   HDFGRTR_EL2_TRCCLAIM		|
		   HDFGRTR_EL2_TRCAUXCTLR	|
		   HDFGRTR_EL2_TRCAUTHSTATUS	|
		   HDFGRTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGRTR_EL2_PMCEIDn_EL0	|
		   HDFGRTR_EL2_PMUSERENR_EL0	|
		   HDFGRTR_EL2_PMMIR_EL1	|
		   HDFGRTR_EL2_PMSELR_EL0	|
		   HDFGRTR_EL2_PMOVS		|
		   HDFGRTR_EL2_PMINTEN		|
		   HDFGRTR_EL2_PMCNTEN		|
		   HDFGRTR_EL2_PMCCNTR_EL0	|
		   HDFGRTR_EL2_PMCCFILTR_EL0	|
		   HDFGRTR_EL2_PMEVTYPERn_EL0	|
		   HDFGRTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGRTR_EL2_TRBTRG_EL1	|
		   HDFGRTR_EL2_TRBSR_EL1	|
		   HDFGRTR_EL2_TRBPTR_EL1	|
		   HDFGRTR_EL2_TRBMAR_EL1	|
		   HDFGRTR_EL2_TRBLIMITR_EL1	|
		   HDFGRTR_EL2_TRBIDR_EL1	|
		   HDFGRTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSDLR_EL1, NEVER_FGU,
			FEAT_DoubleLock),
	/* Baseline AArch64 EL1 debug registers: never UNDEF */
	NEEDS_FEAT_FLAG(HDFGRTR_EL2_OSECCR_EL1	|
			HDFGRTR_EL2_OSLSR_EL1	|
			HDFGRTR_EL2_DBGPRCR_EL1	|
			HDFGRTR_EL2_DBGAUTHSTATUS_EL1|
			HDFGRTR_EL2_DBGCLAIM	|
			HDFGRTR_EL2_MDSCR_EL1	|
			HDFGRTR_EL2_DBGWVRn_EL1	|
			HDFGRTR_EL2_DBGWCRn_EL1	|
			HDFGRTR_EL2_DBGBVRn_EL1	|
			HDFGRTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1)
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr_desc, hdfgrtr_masks,
				  hdfgrtr_feat_map, FEAT_FGT);
595
/* Per-bit feature dependencies for the HDFGWTR_EL2 debug write-trap register. */
static const struct reg_bits_to_feat_map hdfgwtr_feat_map[] = {
	NEEDS_FEAT(HDFGWTR_EL2_PMSLATFR_EL1	|
		   HDFGWTR_EL2_PMSIRR_EL1	|
		   HDFGWTR_EL2_PMSICR_EL1	|
		   HDFGWTR_EL2_PMSFCR_EL1	|
		   HDFGWTR_EL2_PMSEVFR_EL1	|
		   HDFGWTR_EL2_PMSCR_EL1	|
		   HDFGWTR_EL2_PMBSR_EL1	|
		   HDFGWTR_EL2_PMBPTR_EL1	|
		   HDFGWTR_EL2_PMBLIMITR_EL1,
		   FEAT_SPE),
	NEEDS_FEAT(HDFGWTR_EL2_nPMSNEVFR_EL1, FEAT_SPE_FnE),
	NEEDS_FEAT(HDFGWTR_EL2_nBRBDATA		|
		   HDFGWTR_EL2_nBRBCTL,
		   FEAT_BRBE),
	NEEDS_FEAT(HDFGWTR_EL2_TRCVICTLR	|
		   HDFGWTR_EL2_TRCSSCSRn	|
		   HDFGWTR_EL2_TRCSEQSTR	|
		   HDFGWTR_EL2_TRCPRGCTLR	|
		   HDFGWTR_EL2_TRCOSLAR		|
		   HDFGWTR_EL2_TRCIMSPECn	|
		   HDFGWTR_EL2_TRCCNTVRn	|
		   HDFGWTR_EL2_TRCCLAIM		|
		   HDFGWTR_EL2_TRCAUXCTLR	|
		   HDFGWTR_EL2_TRC,
		   FEAT_TRC_SR),
	NEEDS_FEAT(HDFGWTR_EL2_PMUSERENR_EL0	|
		   HDFGWTR_EL2_PMCR_EL0		|
		   HDFGWTR_EL2_PMSWINC_EL0	|
		   HDFGWTR_EL2_PMSELR_EL0	|
		   HDFGWTR_EL2_PMOVS		|
		   HDFGWTR_EL2_PMINTEN		|
		   HDFGWTR_EL2_PMCNTEN		|
		   HDFGWTR_EL2_PMCCNTR_EL0	|
		   HDFGWTR_EL2_PMCCFILTR_EL0	|
		   HDFGWTR_EL2_PMEVTYPERn_EL0	|
		   HDFGWTR_EL2_PMEVCNTRn_EL0,
		   FEAT_PMUv3),
	NEEDS_FEAT(HDFGWTR_EL2_TRBTRG_EL1	|
		   HDFGWTR_EL2_TRBSR_EL1	|
		   HDFGWTR_EL2_TRBPTR_EL1	|
		   HDFGWTR_EL2_TRBMAR_EL1	|
		   HDFGWTR_EL2_TRBLIMITR_EL1	|
		   HDFGWTR_EL2_TRBBASER_EL1,
		   FEAT_TRBE),
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSDLR_EL1,
			NEVER_FGU, FEAT_DoubleLock),
	/* Baseline AArch64 EL1 debug registers: never UNDEF */
	NEEDS_FEAT_FLAG(HDFGWTR_EL2_OSECCR_EL1	|
			HDFGWTR_EL2_OSLAR_EL1	|
			HDFGWTR_EL2_DBGPRCR_EL1	|
			HDFGWTR_EL2_DBGCLAIM	|
			HDFGWTR_EL2_MDSCR_EL1	|
			HDFGWTR_EL2_DBGWVRn_EL1	|
			HDFGWTR_EL2_DBGWCRn_EL1	|
			HDFGWTR_EL2_DBGBVRn_EL1	|
			HDFGWTR_EL2_DBGBCRn_EL1,
			NEVER_FGU, FEAT_AA64EL1),
	NEEDS_FEAT(HDFGWTR_EL2_TRFCR_EL1, FEAT_TRF),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr_desc, hdfgwtr_masks,
				  hdfgwtr_feat_map, FEAT_FGT);
658
/* Per-bit feature dependencies for the HFGITR_EL2 instruction-trap register. */
static const struct reg_bits_to_feat_map hfgitr_feat_map[] = {
	NEEDS_FEAT(HFGITR_EL2_PSBCSYNC, FEAT_SPEv1p5),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1A, FEAT_ATS1A),
	NEEDS_FEAT(HFGITR_EL2_COSPRCTX, FEAT_SPECRES2),
	NEEDS_FEAT(HFGITR_EL2_nGCSEPP		|
		   HFGITR_EL2_nGCSSTR_EL1	|
		   HFGITR_EL2_nGCSPUSHM_EL1,
		   FEAT_GCS),
	NEEDS_FEAT(HFGITR_EL2_nBRBIALL		|
		   HFGITR_EL2_nBRBINJ,
		   FEAT_BRBE),
	NEEDS_FEAT(HFGITR_EL2_CPPRCTX		|
		   HFGITR_EL2_DVPRCTX		|
		   HFGITR_EL2_CFPRCTX,
		   FEAT_SPECRES),
	NEEDS_FEAT(HFGITR_EL2_TLBIRVAALE1	|
		   HFGITR_EL2_TLBIRVALE1	|
		   HFGITR_EL2_TLBIRVAAE1	|
		   HFGITR_EL2_TLBIRVAE1		|
		   HFGITR_EL2_TLBIRVAALE1IS	|
		   HFGITR_EL2_TLBIRVALE1IS	|
		   HFGITR_EL2_TLBIRVAAE1IS	|
		   HFGITR_EL2_TLBIRVAE1IS	|
		   HFGITR_EL2_TLBIRVAALE1OS	|
		   HFGITR_EL2_TLBIRVALE1OS	|
		   HFGITR_EL2_TLBIRVAAE1OS	|
		   HFGITR_EL2_TLBIRVAE1OS,
		   FEAT_TLBIRANGE),
	NEEDS_FEAT(HFGITR_EL2_TLBIVAALE1OS	|
		   HFGITR_EL2_TLBIVALE1OS	|
		   HFGITR_EL2_TLBIVAAE1OS	|
		   HFGITR_EL2_TLBIASIDE1OS	|
		   HFGITR_EL2_TLBIVAE1OS	|
		   HFGITR_EL2_TLBIVMALLE1OS,
		   FEAT_TLBIOS),
	NEEDS_FEAT(HFGITR_EL2_ATS1E1WP		|
		   HFGITR_EL2_ATS1E1RP,
		   FEAT_PAN2),
	NEEDS_FEAT(HFGITR_EL2_DCCVADP, FEAT_DPB2),
	/* Baseline AArch64 EL1 instructions: can trap, but never UNDEF */
	NEEDS_FEAT_FLAG(HFGITR_EL2_DCCVAC	|
			HFGITR_EL2_SVC_EL1	|
			HFGITR_EL2_SVC_EL0	|
			HFGITR_EL2_ERET		|
			HFGITR_EL2_TLBIVAALE1	|
			HFGITR_EL2_TLBIVALE1	|
			HFGITR_EL2_TLBIVAAE1	|
			HFGITR_EL2_TLBIASIDE1	|
			HFGITR_EL2_TLBIVAE1	|
			HFGITR_EL2_TLBIVMALLE1	|
			HFGITR_EL2_TLBIVAALE1IS	|
			HFGITR_EL2_TLBIVALE1IS	|
			HFGITR_EL2_TLBIVAAE1IS	|
			HFGITR_EL2_TLBIASIDE1IS	|
			HFGITR_EL2_TLBIVAE1IS	|
			HFGITR_EL2_TLBIVMALLE1IS|
			HFGITR_EL2_ATS1E0W	|
			HFGITR_EL2_ATS1E0R	|
			HFGITR_EL2_ATS1E1W	|
			HFGITR_EL2_ATS1E1R	|
			HFGITR_EL2_DCZVA	|
			HFGITR_EL2_DCCIVAC	|
			HFGITR_EL2_DCCVAP	|
			HFGITR_EL2_DCCVAU	|
			HFGITR_EL2_DCCISW	|
			HFGITR_EL2_DCCSW	|
			HFGITR_EL2_DCISW	|
			HFGITR_EL2_DCIVAC	|
			HFGITR_EL2_ICIVAU	|
			HFGITR_EL2_ICIALLU	|
			HFGITR_EL2_ICIALLUIS,
			NEVER_FGU, FEAT_AA64EL1),
};

static const DECLARE_FEAT_MAP_FGT(hfgitr_desc, hfgitr_masks,
				  hfgitr_feat_map, FEAT_FGT);
734
/* HAFGRTR_EL2: all AMU counter/type register traps depend on FEAT_AMUv1. */
static const struct reg_bits_to_feat_map hafgrtr_feat_map[] = {
	NEEDS_FEAT(HAFGRTR_EL2_AMEVTYPER115_EL0	|
		   HAFGRTR_EL2_AMEVTYPER114_EL0	|
		   HAFGRTR_EL2_AMEVTYPER113_EL0	|
		   HAFGRTR_EL2_AMEVTYPER112_EL0	|
		   HAFGRTR_EL2_AMEVTYPER111_EL0	|
		   HAFGRTR_EL2_AMEVTYPER110_EL0	|
		   HAFGRTR_EL2_AMEVTYPER19_EL0	|
		   HAFGRTR_EL2_AMEVTYPER18_EL0	|
		   HAFGRTR_EL2_AMEVTYPER17_EL0	|
		   HAFGRTR_EL2_AMEVTYPER16_EL0	|
		   HAFGRTR_EL2_AMEVTYPER15_EL0	|
		   HAFGRTR_EL2_AMEVTYPER14_EL0	|
		   HAFGRTR_EL2_AMEVTYPER13_EL0	|
		   HAFGRTR_EL2_AMEVTYPER12_EL0	|
		   HAFGRTR_EL2_AMEVTYPER11_EL0	|
		   HAFGRTR_EL2_AMEVTYPER10_EL0	|
		   HAFGRTR_EL2_AMEVCNTR115_EL0	|
		   HAFGRTR_EL2_AMEVCNTR114_EL0	|
		   HAFGRTR_EL2_AMEVCNTR113_EL0	|
		   HAFGRTR_EL2_AMEVCNTR112_EL0	|
		   HAFGRTR_EL2_AMEVCNTR111_EL0	|
		   HAFGRTR_EL2_AMEVCNTR110_EL0	|
		   HAFGRTR_EL2_AMEVCNTR19_EL0	|
		   HAFGRTR_EL2_AMEVCNTR18_EL0	|
		   HAFGRTR_EL2_AMEVCNTR17_EL0	|
		   HAFGRTR_EL2_AMEVCNTR16_EL0	|
		   HAFGRTR_EL2_AMEVCNTR15_EL0	|
		   HAFGRTR_EL2_AMEVCNTR14_EL0	|
		   HAFGRTR_EL2_AMEVCNTR13_EL0	|
		   HAFGRTR_EL2_AMEVCNTR12_EL0	|
		   HAFGRTR_EL2_AMEVCNTR11_EL0	|
		   HAFGRTR_EL2_AMEVCNTR10_EL0	|
		   HAFGRTR_EL2_AMCNTEN1		|
		   HAFGRTR_EL2_AMCNTEN0		|
		   HAFGRTR_EL2_AMEVCNTR03_EL0	|
		   HAFGRTR_EL2_AMEVCNTR02_EL0	|
		   HAFGRTR_EL2_AMEVCNTR01_EL0	|
		   HAFGRTR_EL2_AMEVCNTR00_EL0,
		   FEAT_AMUv1),
};

static const DECLARE_FEAT_MAP_FGT(hafgrtr_desc, hafgrtr_masks,
				  hafgrtr_feat_map, FEAT_FGT);
779
/* Per-bit feature dependencies for HFGITR2_EL2 (second FGT generation). */
static const struct reg_bits_to_feat_map hfgitr2_feat_map[] = {
	NEEDS_FEAT(HFGITR2_EL2_nDCCIVAPS, FEAT_PoPS),
	NEEDS_FEAT(HFGITR2_EL2_TSBCSYNC, FEAT_TRBEv1p1)
};

static const DECLARE_FEAT_MAP_FGT(hfgitr2_desc, hfgitr2_masks,
				  hfgitr2_feat_map, FEAT_FGT2);
787
/* Per-bit feature dependencies for HFGRTR2_EL2 (second FGT generation). */
static const struct reg_bits_to_feat_map hfgrtr2_feat_map[] = {
	NEEDS_FEAT(HFGRTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGRTR2_EL2_nERXGSR_EL1, FEAT_RASv2),
	NEEDS_FEAT(HFGRTR2_EL2_nACTLRALIAS_EL1	|
		   HFGRTR2_EL2_nACTLRMASK_EL1	|
		   HFGRTR2_EL2_nCPACRALIAS_EL1	|
		   HFGRTR2_EL2_nCPACRMASK_EL1	|
		   HFGRTR2_EL2_nSCTLR2MASK_EL1	|
		   HFGRTR2_EL2_nSCTLRALIAS2_EL1	|
		   HFGRTR2_EL2_nSCTLRALIAS_EL1	|
		   HFGRTR2_EL2_nSCTLRMASK_EL1	|
		   HFGRTR2_EL2_nTCR2ALIAS_EL1	|
		   HFGRTR2_EL2_nTCR2MASK_EL1	|
		   HFGRTR2_EL2_nTCRALIAS_EL1	|
		   HFGRTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGRTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgrtr2_desc, hfgrtr2_masks,
				  hfgrtr2_feat_map, FEAT_FGT2);
809
/* Per-bit feature dependencies for HFGWTR2_EL2 (second FGT generation). */
static const struct reg_bits_to_feat_map hfgwtr2_feat_map[] = {
	NEEDS_FEAT(HFGWTR2_EL2_nPFAR_EL1, FEAT_PFAR),
	NEEDS_FEAT(HFGWTR2_EL2_nACTLRALIAS_EL1	|
		   HFGWTR2_EL2_nACTLRMASK_EL1	|
		   HFGWTR2_EL2_nCPACRALIAS_EL1	|
		   HFGWTR2_EL2_nCPACRMASK_EL1	|
		   HFGWTR2_EL2_nSCTLR2MASK_EL1	|
		   HFGWTR2_EL2_nSCTLRALIAS2_EL1	|
		   HFGWTR2_EL2_nSCTLRALIAS_EL1	|
		   HFGWTR2_EL2_nSCTLRMASK_EL1	|
		   HFGWTR2_EL2_nTCR2ALIAS_EL1	|
		   HFGWTR2_EL2_nTCR2MASK_EL1	|
		   HFGWTR2_EL2_nTCRALIAS_EL1	|
		   HFGWTR2_EL2_nTCRMASK_EL1,
		   FEAT_SRMASK),
	NEEDS_FEAT(HFGWTR2_EL2_nRCWSMASK_EL1, FEAT_THE),
};

static const DECLARE_FEAT_MAP_FGT(hfgwtr2_desc, hfgwtr2_masks,
				  hfgwtr2_feat_map, FEAT_FGT2);
830
/* Per-bit feature dependencies for HDFGRTR2_EL2 (second FGT generation). */
static const struct reg_bits_to_feat_map hdfgrtr2_feat_map[] = {
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMICFILTR_EL0	|
		   HDFGRTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMUACR_EL1, feat_pmuv3p9),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSSCR_EL1	|
		   HDFGRTR2_EL2_nPMSSDATA,
		   FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGRTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGRTR2_EL2_nSPMACCESSR_EL1	|
		   HDFGRTR2_EL2_nSPMCNTEN	|
		   HDFGRTR2_EL2_nSPMCR_EL0	|
		   HDFGRTR2_EL2_nSPMDEVAFF_EL1	|
		   HDFGRTR2_EL2_nSPMEVCNTRn_EL0	|
		   HDFGRTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGRTR2_EL2_nSPMID		|
		   HDFGRTR2_EL2_nSPMINTEN	|
		   HDFGRTR2_EL2_nSPMOVS		|
		   HDFGRTR2_EL2_nSPMSCR_EL1	|
		   HDFGRTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGRTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGRTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgrtr2_desc, hdfgrtr2_masks,
				  hdfgrtr2_feat_map, FEAT_FGT2);
863
/* Per-bit feature dependencies for HDFGWTR2_EL2 (second FGT generation). */
static const struct reg_bits_to_feat_map hdfgwtr2_feat_map[] = {
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSELR_EL1, FEAT_Debugv8p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMECR_EL1, feat_ebep_pmuv3_ss),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRCITECR_EL1, FEAT_ITE),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMICFILTR_EL0	|
		   HDFGWTR2_EL2_nPMICNTR_EL0,
		   FEAT_PMUv3_ICNTR),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMUACR_EL1	|
		   HDFGWTR2_EL2_nPMZR_EL0,
		   feat_pmuv3p9),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSSCR_EL1, FEAT_PMUv3_SS),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMIAR_EL1, FEAT_SEBEP),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMSDSFR_EL1, feat_spe_fds),
	NEEDS_FEAT(HDFGWTR2_EL2_nPMBMAR_EL1, FEAT_SPE_nVM),
	NEEDS_FEAT(HDFGWTR2_EL2_nSPMACCESSR_EL1	|
		   HDFGWTR2_EL2_nSPMCNTEN	|
		   HDFGWTR2_EL2_nSPMCR_EL0	|
		   HDFGWTR2_EL2_nSPMEVCNTRn_EL0	|
		   HDFGWTR2_EL2_nSPMEVTYPERn_EL0|
		   HDFGWTR2_EL2_nSPMINTEN	|
		   HDFGWTR2_EL2_nSPMOVS		|
		   HDFGWTR2_EL2_nSPMSCR_EL1	|
		   HDFGWTR2_EL2_nSPMSELR_EL0,
		   FEAT_SPMU),
	NEEDS_FEAT(HDFGWTR2_EL2_nMDSTEPOP_EL1, FEAT_STEP2),
	NEEDS_FEAT(HDFGWTR2_EL2_nTRBMPAM_EL1, feat_trbe_mpam),
};

static const DECLARE_FEAT_MAP_FGT(hdfgwtr2_desc, hdfgwtr2_masks,
				  hdfgwtr2_feat_map, FEAT_FGT2);
894
895
/* Per-bit feature dependencies for HCRX_EL2; register gated on FEAT_HCX. */
static const struct reg_bits_to_feat_map hcrx_feat_map[] = {
	NEEDS_FEAT(HCRX_EL2_PACMEn, feat_pauth_lr),
	NEEDS_FEAT(HCRX_EL2_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(HCRX_EL2_GCSEn, FEAT_GCS),
	NEEDS_FEAT(HCRX_EL2_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(HCRX_EL2_EnSDERR, feat_aderr),
	NEEDS_FEAT(HCRX_EL2_TMEA, FEAT_DoubleFault2),
	NEEDS_FEAT(HCRX_EL2_EnSNERR, feat_anerr),
	NEEDS_FEAT(HCRX_EL2_D128En, FEAT_D128),
	NEEDS_FEAT(HCRX_EL2_PTTWI, FEAT_THE),
	NEEDS_FEAT(HCRX_EL2_SCTLR2En, FEAT_SCTLR2),
	NEEDS_FEAT(HCRX_EL2_TCR2En, FEAT_TCR2),
	NEEDS_FEAT(HCRX_EL2_MSCEn	|
		   HCRX_EL2_MCE2,
		   FEAT_MOPS),
	NEEDS_FEAT(HCRX_EL2_CMOW, FEAT_CMOW),
	NEEDS_FEAT(HCRX_EL2_VFNMI	|
		   HCRX_EL2_VINMI	|
		   HCRX_EL2_TALLINT,
		   FEAT_NMI),
	NEEDS_FEAT(HCRX_EL2_SMPME, feat_sme_smps),
	NEEDS_FEAT(HCRX_EL2_FGTnXS	|
		   HCRX_EL2_FnXS,
		   FEAT_XS),
	NEEDS_FEAT(HCRX_EL2_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(HCRX_EL2_EnALS, FEAT_LS64),
	NEEDS_FEAT(HCRX_EL2_EnAS0, FEAT_LS64_ACCDATA),
};


static const DECLARE_FEAT_MAP(hcrx_desc, __HCRX_EL2,
			      hcrx_feat_map, FEAT_HCX);
928
/* Per-bit feature dependencies for HCR_EL2; register gated on AArch64 EL2. */
static const struct reg_bits_to_feat_map hcr_feat_map[] = {
	NEEDS_FEAT(HCR_EL2_TID0, FEAT_AA32EL0),
	/* RW is RES1 when AArch32 EL1 is not implemented */
	NEEDS_FEAT_FLAG(HCR_EL2_RW, AS_RES1, FEAT_AA32EL1),
	NEEDS_FEAT(HCR_EL2_HCD, not_feat_aa64el3),
	NEEDS_FEAT(HCR_EL2_AMO		|
		   HCR_EL2_BSU		|
		   HCR_EL2_CD		|
		   HCR_EL2_DC		|
		   HCR_EL2_FB		|
		   HCR_EL2_FMO		|
		   HCR_EL2_ID		|
		   HCR_EL2_IMO		|
		   HCR_EL2_PTW		|
		   HCR_EL2_SWIO		|
		   HCR_EL2_TACR		|
		   HCR_EL2_TDZ		|
		   HCR_EL2_TGE		|
		   HCR_EL2_TID1		|
		   HCR_EL2_TID2		|
		   HCR_EL2_TID3		|
		   HCR_EL2_TIDCP	|
		   HCR_EL2_TPCP		|
		   HCR_EL2_TPU		|
		   HCR_EL2_TRVM		|
		   HCR_EL2_TSC		|
		   HCR_EL2_TSW		|
		   HCR_EL2_TTLB		|
		   HCR_EL2_TVM		|
		   HCR_EL2_TWE		|
		   HCR_EL2_TWI		|
		   HCR_EL2_VF		|
		   HCR_EL2_VI		|
		   HCR_EL2_VM		|
		   HCR_EL2_VSE,
		   FEAT_AA64EL1),
	NEEDS_FEAT(HCR_EL2_AMVOFFEN, FEAT_AMUv1p1),
	NEEDS_FEAT(HCR_EL2_EnSCXT, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT(HCR_EL2_TICAB	|
		   HCR_EL2_TID4		|
		   HCR_EL2_TOCU,
		   FEAT_EVT),
	NEEDS_FEAT(HCR_EL2_TTLBIS	|
		   HCR_EL2_TTLBOS,
		   FEAT_EVT_TTLBxS),
	NEEDS_FEAT(HCR_EL2_TLOR, FEAT_LOR),
	NEEDS_FEAT(HCR_EL2_ATA		|
		   HCR_EL2_DCT		|
		   HCR_EL2_TID5,
		   FEAT_MTE2),
	NEEDS_FEAT(HCR_EL2_AT		| /* Ignore the original FEAT_NV */
		   HCR_EL2_NV2		|
		   HCR_EL2_NV,
		   feat_nv2),
	NEEDS_FEAT(HCR_EL2_NV1, feat_nv2_e2h0_ni), /* Missing from JSON */
	NEEDS_FEAT(HCR_EL2_API		|
		   HCR_EL2_APK,
		   feat_pauth),
	NEEDS_FEAT(HCR_EL2_TEA		|
		   HCR_EL2_TERR,
		   FEAT_RAS),
	NEEDS_FEAT(HCR_EL2_FIEN, feat_rasv1p1),
	NEEDS_FEAT(HCR_EL2_GPF, FEAT_RME),
	NEEDS_FEAT(HCR_EL2_FWB, FEAT_S2FWB),
	NEEDS_FEAT(HCR_EL2_TWEDEL	|
		   HCR_EL2_TWEDEn,
		   FEAT_TWED),
	/* E2H is unconditionally RES1 when E2H=1 cannot be left */
	NEEDS_FEAT_FLAG(HCR_EL2_E2H, RES1_WHEN_E2H1 | FORCE_RESx),
	FORCE_RES0(HCR_EL2_RES0),
	FORCE_RES1(HCR_EL2_RES1),
};

static const DECLARE_FEAT_MAP(hcr_desc, HCR_EL2,
			      hcr_feat_map, FEAT_AA64EL2);
1002
/*
 * SCTLR2_EL1: feature dependency of each control bit (also used for
 * SCTLR2_EL2 fixed-bit computation, see get_reg_fixed_bits()).
 */
static const struct reg_bits_to_feat_map sctlr2_feat_map[] = {
	NEEDS_FEAT(SCTLR2_EL1_NMEA |
		   SCTLR2_EL1_EASE,
		   FEAT_DoubleFault2),
	NEEDS_FEAT(SCTLR2_EL1_EnADERR, feat_aderr),
	NEEDS_FEAT(SCTLR2_EL1_EnANERR, feat_anerr),
	NEEDS_FEAT(SCTLR2_EL1_EnIDCP128, FEAT_SYSREG128),
	NEEDS_FEAT(SCTLR2_EL1_EnPACM |
		   SCTLR2_EL1_EnPACM0,
		   feat_pauth_lr),
	NEEDS_FEAT(SCTLR2_EL1_CPTA |
		   SCTLR2_EL1_CPTA0 |
		   SCTLR2_EL1_CPTM |
		   SCTLR2_EL1_CPTM0,
		   FEAT_CPA2),
	FORCE_RES0(SCTLR2_EL1_RES0),
	FORCE_RES1(SCTLR2_EL1_RES1),
};

/* The whole register depends on FEAT_SCTLR2 being implemented */
static const DECLARE_FEAT_MAP(sctlr2_desc, SCTLR2_EL1,
			      sctlr2_feat_map, FEAT_SCTLR2);
1024
/*
 * TCR2_EL2: feature dependency of each control bit. Bits flagged
 * REQUIRES_E2H1 are additionally RES0 when the guest cannot set
 * HCR_EL2.E2H.
 */
static const struct reg_bits_to_feat_map tcr2_el2_feat_map[] = {
	NEEDS_FEAT_FLAG(TCR2_EL2_FNG1 |
			TCR2_EL2_FNG0 |
			TCR2_EL2_A2,
			REQUIRES_E2H1, FEAT_ASID2),
	NEEDS_FEAT_FLAG(TCR2_EL2_DisCH1 |
			TCR2_EL2_DisCH0 |
			TCR2_EL2_D128,
			REQUIRES_E2H1, FEAT_D128),
	NEEDS_FEAT_FLAG(TCR2_EL2_AMEC1, REQUIRES_E2H1, FEAT_MEC),
	NEEDS_FEAT(TCR2_EL2_AMEC0, FEAT_MEC),
	NEEDS_FEAT(TCR2_EL2_HAFT, FEAT_HAFT),
	NEEDS_FEAT(TCR2_EL2_PTTWI |
		   TCR2_EL2_PnCH,
		   FEAT_THE),
	NEEDS_FEAT(TCR2_EL2_AIE, FEAT_AIE),
	NEEDS_FEAT(TCR2_EL2_POE |
		   TCR2_EL2_E0POE,
		   FEAT_S1POE),
	NEEDS_FEAT(TCR2_EL2_PIE, FEAT_S1PIE),
	FORCE_RES0(TCR2_EL2_RES0),
	FORCE_RES1(TCR2_EL2_RES1),
};

/* The whole register depends on FEAT_TCR2 being implemented */
static const DECLARE_FEAT_MAP(tcr2_el2_desc, TCR2_EL2,
			      tcr2_el2_feat_map, FEAT_TCR2);
1051
/*
 * SCTLR_EL1: feature dependency of each control bit. AS_RES1 entries
 * become RES1 (not RES0) when the feature is absent, matching the
 * architected reset/RESx behaviour of those bits.
 */
static const struct reg_bits_to_feat_map sctlr_el1_feat_map[] = {
	NEEDS_FEAT(SCTLR_EL1_CP15BEN, FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL1_ITD |
			SCTLR_EL1_SED,
			AS_RES1, FEAT_AA32EL0),
	NEEDS_FEAT(SCTLR_EL1_BT0 |
		   SCTLR_EL1_BT1,
		   FEAT_BTI),
	NEEDS_FEAT(SCTLR_EL1_CMOW, FEAT_CMOW),
	NEEDS_FEAT_FLAG(SCTLR_EL1_TSCXT,
			AS_RES1, feat_csv2_2_csv2_1p2),
	NEEDS_FEAT_FLAG(SCTLR_EL1_EIS |
			SCTLR_EL1_EOS,
			AS_RES1, FEAT_ExS),
	NEEDS_FEAT(SCTLR_EL1_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(SCTLR_EL1_IESB, FEAT_IESB),
	NEEDS_FEAT(SCTLR_EL1_EnALS, FEAT_LS64),
	NEEDS_FEAT(SCTLR_EL1_EnAS0, FEAT_LS64_ACCDATA),
	NEEDS_FEAT(SCTLR_EL1_EnASR, FEAT_LS64_V),
	NEEDS_FEAT(SCTLR_EL1_nAA, FEAT_LSE2),
	NEEDS_FEAT_FLAG(SCTLR_EL1_LSMAOE |
			SCTLR_EL1_nTLSMD,
			AS_RES1, FEAT_LSMAOC),
	NEEDS_FEAT(SCTLR_EL1_EE, FEAT_MixedEnd),
	NEEDS_FEAT(SCTLR_EL1_E0E, feat_mixedendel0),
	NEEDS_FEAT(SCTLR_EL1_MSCEn, FEAT_MOPS),
	NEEDS_FEAT(SCTLR_EL1_ATA0 |
		   SCTLR_EL1_ATA |
		   SCTLR_EL1_TCF0 |
		   SCTLR_EL1_TCF,
		   FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL1_ITFSB, feat_mte_async),
	NEEDS_FEAT(SCTLR_EL1_TCSO0 |
		   SCTLR_EL1_TCSO,
		   FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL1_NMI |
		   SCTLR_EL1_SPINTMASK,
		   FEAT_NMI),
	NEEDS_FEAT_FLAG(SCTLR_EL1_SPAN,
			AS_RES1, FEAT_PAN),
	NEEDS_FEAT(SCTLR_EL1_EPAN, FEAT_PAN3),
	NEEDS_FEAT(SCTLR_EL1_EnDA |
		   SCTLR_EL1_EnDB |
		   SCTLR_EL1_EnIA |
		   SCTLR_EL1_EnIB,
		   feat_pauth),
	NEEDS_FEAT(SCTLR_EL1_EnTP2, FEAT_SME),
	NEEDS_FEAT(SCTLR_EL1_EnRCTX, FEAT_SPECRES),
	NEEDS_FEAT(SCTLR_EL1_DSSBS, FEAT_SSBS),
	NEEDS_FEAT(SCTLR_EL1_TIDCP, FEAT_TIDCP1),
	NEEDS_FEAT(SCTLR_EL1_TWEDEL |
		   SCTLR_EL1_TWEDEn,
		   FEAT_TWED),
	NEEDS_FEAT(SCTLR_EL1_UCI |
		   SCTLR_EL1_WXN |
		   SCTLR_EL1_nTWE |
		   SCTLR_EL1_nTWI |
		   SCTLR_EL1_UCT |
		   SCTLR_EL1_DZE |
		   SCTLR_EL1_I |
		   SCTLR_EL1_UMA |
		   SCTLR_EL1_SA0 |
		   SCTLR_EL1_SA |
		   SCTLR_EL1_C |
		   SCTLR_EL1_A |
		   SCTLR_EL1_M,
		   FEAT_AA64EL1),
	FORCE_RES0(SCTLR_EL1_RES0),
	FORCE_RES1(SCTLR_EL1_RES1),
};

/* The whole register depends on AArch64 EL1 being implemented */
static const DECLARE_FEAT_MAP(sctlr_el1_desc, SCTLR_EL1,
			      sctlr_el1_feat_map, FEAT_AA64EL1);
1125
/*
 * SCTLR_EL2: feature dependency of each control bit. Many bits only
 * exist with HCR_EL2.E2H=1 (REQUIRES_E2H1); some of those are RES1
 * rather than RES0 for the other E2H value (RES1_WHEN_E2H0/1).
 */
static const struct reg_bits_to_feat_map sctlr_el2_feat_map[] = {
	NEEDS_FEAT_FLAG(SCTLR_EL2_CP15BEN,
			RES1_WHEN_E2H0 | REQUIRES_E2H1,
			FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_ITD |
			SCTLR_EL2_SED,
			RES1_WHEN_E2H1 | REQUIRES_E2H1,
			FEAT_AA32EL0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_BT0, REQUIRES_E2H1, FEAT_BTI),
	NEEDS_FEAT(SCTLR_EL2_BT, FEAT_BTI),
	NEEDS_FEAT_FLAG(SCTLR_EL2_CMOW, REQUIRES_E2H1, FEAT_CMOW),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TSCXT,
			RES1_WHEN_E2H1 | REQUIRES_E2H1,
			feat_csv2_2_csv2_1p2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EIS |
			SCTLR_EL2_EOS,
			AS_RES1, FEAT_ExS),
	NEEDS_FEAT(SCTLR_EL2_EnFPM, FEAT_FPMR),
	NEEDS_FEAT(SCTLR_EL2_IESB, FEAT_IESB),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnALS, REQUIRES_E2H1, FEAT_LS64),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnAS0, REQUIRES_E2H1, FEAT_LS64_ACCDATA),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnASR, REQUIRES_E2H1, FEAT_LS64_V),
	NEEDS_FEAT(SCTLR_EL2_nAA, FEAT_LSE2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_LSMAOE |
			SCTLR_EL2_nTLSMD,
			AS_RES1 | REQUIRES_E2H1, FEAT_LSMAOC),
	NEEDS_FEAT(SCTLR_EL2_EE, FEAT_MixedEnd),
	NEEDS_FEAT_FLAG(SCTLR_EL2_E0E, REQUIRES_E2H1, feat_mixedendel0),
	NEEDS_FEAT_FLAG(SCTLR_EL2_MSCEn, REQUIRES_E2H1, FEAT_MOPS),
	NEEDS_FEAT_FLAG(SCTLR_EL2_ATA0 |
			SCTLR_EL2_TCF0,
			REQUIRES_E2H1, FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL2_ATA |
		   SCTLR_EL2_TCF,
		   FEAT_MTE2),
	NEEDS_FEAT(SCTLR_EL2_ITFSB, feat_mte_async),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TCSO0, REQUIRES_E2H1, FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL2_TCSO,
		   FEAT_MTE_STORE_ONLY),
	NEEDS_FEAT(SCTLR_EL2_NMI |
		   SCTLR_EL2_SPINTMASK,
		   FEAT_NMI),
	NEEDS_FEAT_FLAG(SCTLR_EL2_SPAN, AS_RES1 | REQUIRES_E2H1, FEAT_PAN),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EPAN, REQUIRES_E2H1, FEAT_PAN3),
	NEEDS_FEAT(SCTLR_EL2_EnDA |
		   SCTLR_EL2_EnDB |
		   SCTLR_EL2_EnIA |
		   SCTLR_EL2_EnIB,
		   feat_pauth),
	NEEDS_FEAT_FLAG(SCTLR_EL2_EnTP2, REQUIRES_E2H1, FEAT_SME),
	NEEDS_FEAT(SCTLR_EL2_EnRCTX, FEAT_SPECRES),
	NEEDS_FEAT(SCTLR_EL2_DSSBS, FEAT_SSBS),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TIDCP, REQUIRES_E2H1, FEAT_TIDCP1),
	NEEDS_FEAT_FLAG(SCTLR_EL2_TWEDEL |
			SCTLR_EL2_TWEDEn,
			REQUIRES_E2H1, FEAT_TWED),
	NEEDS_FEAT_FLAG(SCTLR_EL2_nTWE |
			SCTLR_EL2_nTWI,
			AS_RES1 | REQUIRES_E2H1, FEAT_AA64EL2),
	NEEDS_FEAT_FLAG(SCTLR_EL2_UCI |
			SCTLR_EL2_UCT |
			SCTLR_EL2_DZE |
			SCTLR_EL2_SA0,
			REQUIRES_E2H1, FEAT_AA64EL2),
	NEEDS_FEAT(SCTLR_EL2_WXN |
		   SCTLR_EL2_I |
		   SCTLR_EL2_SA |
		   SCTLR_EL2_C |
		   SCTLR_EL2_A |
		   SCTLR_EL2_M,
		   FEAT_AA64EL2),
	FORCE_RES0(SCTLR_EL2_RES0),
	FORCE_RES1(SCTLR_EL2_RES1),
};

/* The whole register depends on AArch64 EL2 being implemented */
static const DECLARE_FEAT_MAP(sctlr_el2_desc, SCTLR_EL2,
			      sctlr_el2_feat_map, FEAT_AA64EL2);
1203
/*
 * MDCR_EL2: feature dependency of each debug/PMU/trace control bit.
 */
static const struct reg_bits_to_feat_map mdcr_el2_feat_map[] = {
	NEEDS_FEAT(MDCR_EL2_EBWE, FEAT_Debugv8p9),
	NEEDS_FEAT(MDCR_EL2_TDOSA, FEAT_DoubleLock),
	NEEDS_FEAT(MDCR_EL2_PMEE, FEAT_EBEP),
	NEEDS_FEAT(MDCR_EL2_TDCC, FEAT_FGT),
	NEEDS_FEAT(MDCR_EL2_MTPME, FEAT_MTPMU),
	NEEDS_FEAT(MDCR_EL2_HPME |
		   MDCR_EL2_HPMN |
		   MDCR_EL2_TPMCR |
		   MDCR_EL2_TPM,
		   FEAT_PMUv3),
	NEEDS_FEAT(MDCR_EL2_HPMD, feat_pmuv3p1),
	NEEDS_FEAT(MDCR_EL2_HCCD |
		   MDCR_EL2_HLP,
		   feat_pmuv3p5),
	NEEDS_FEAT(MDCR_EL2_HPMFZO, feat_pmuv3p7),
	NEEDS_FEAT(MDCR_EL2_PMSSE, FEAT_PMUv3_SS),
	NEEDS_FEAT(MDCR_EL2_E2PB |
		   MDCR_EL2_TPMS,
		   FEAT_SPE),
	NEEDS_FEAT(MDCR_EL2_HPMFZS, FEAT_SPEv1p2),
	NEEDS_FEAT(MDCR_EL2_EnSPM, FEAT_SPMU),
	NEEDS_FEAT(MDCR_EL2_EnSTEPOP, FEAT_STEP2),
	NEEDS_FEAT(MDCR_EL2_E2TB, FEAT_TRBE),
	NEEDS_FEAT(MDCR_EL2_TTRF, FEAT_TRF),
	NEEDS_FEAT(MDCR_EL2_TDA |
		   MDCR_EL2_TDE |
		   MDCR_EL2_TDRA,
		   FEAT_AA64EL1),
	FORCE_RES0(MDCR_EL2_RES0),
	FORCE_RES1(MDCR_EL2_RES1),
};

/* The whole register depends on AArch64 EL2 being implemented */
static const DECLARE_FEAT_MAP(mdcr_el2_desc, MDCR_EL2,
			      mdcr_el2_feat_map, FEAT_AA64EL2);
1239
/*
 * VTCR_EL2: feature dependency of each stage-2 translation control bit.
 */
static const struct reg_bits_to_feat_map vtcr_el2_feat_map[] = {
	NEEDS_FEAT(VTCR_EL2_HDBSS, FEAT_HDBSS),
	NEEDS_FEAT(VTCR_EL2_HAFT, FEAT_HAFT),
	NEEDS_FEAT(VTCR_EL2_TL0 |
		   VTCR_EL2_TL1 |
		   VTCR_EL2_AssuredOnly |
		   VTCR_EL2_GCSH,
		   FEAT_THE),
	NEEDS_FEAT(VTCR_EL2_D128, FEAT_D128),
	NEEDS_FEAT(VTCR_EL2_S2POE, FEAT_S2POE),
	NEEDS_FEAT(VTCR_EL2_S2PIE, FEAT_S2PIE),
	NEEDS_FEAT(VTCR_EL2_SL2 |
		   VTCR_EL2_DS,
		   feat_lpa2),
	NEEDS_FEAT(VTCR_EL2_NSA |
		   VTCR_EL2_NSW,
		   FEAT_SEL2),
	NEEDS_FEAT(VTCR_EL2_HWU62 |
		   VTCR_EL2_HWU61 |
		   VTCR_EL2_HWU60 |
		   VTCR_EL2_HWU59,
		   FEAT_HPDS2),
	/* HA/HD gate on the ID_AA64MMFR1_EL1.HAFDBS field directly */
	NEEDS_FEAT(VTCR_EL2_HD, ID_AA64MMFR1_EL1, HAFDBS, DBM),
	NEEDS_FEAT(VTCR_EL2_HA, ID_AA64MMFR1_EL1, HAFDBS, AF),
	NEEDS_FEAT(VTCR_EL2_VS, feat_vmid16),
	NEEDS_FEAT(VTCR_EL2_PS |
		   VTCR_EL2_TG0 |
		   VTCR_EL2_SH0 |
		   VTCR_EL2_ORGN0 |
		   VTCR_EL2_IRGN0 |
		   VTCR_EL2_SL0 |
		   VTCR_EL2_T0SZ,
		   FEAT_AA64EL1),
	FORCE_RES0(VTCR_EL2_RES0),
	FORCE_RES1(VTCR_EL2_RES1),
};

/* The whole register depends on AArch64 EL2 being implemented */
static const DECLARE_FEAT_MAP(vtcr_el2_desc, VTCR_EL2,
			      vtcr_el2_feat_map, FEAT_AA64EL2);
1279
check_feat_map(const struct reg_bits_to_feat_map * map,int map_size,u64 resx,const char * str)1280 static void __init check_feat_map(const struct reg_bits_to_feat_map *map,
1281 int map_size, u64 resx, const char *str)
1282 {
1283 u64 mask = 0;
1284
1285 /*
1286 * Don't account for FORCE_RESx that are architectural, and
1287 * therefore part of the resx parameter. Other FORCE_RESx bits
1288 * are implementation choices, and therefore accounted for.
1289 */
1290 for (int i = 0; i < map_size; i++)
1291 if (!((map[i].flags & FORCE_RESx) && (map[i].bits & resx)))
1292 mask |= map[i].bits;
1293
1294 if (mask != ~resx)
1295 kvm_err("Undefined %s behaviour, bits %016llx\n",
1296 str, mask ^ ~resx);
1297 }
1298
reg_feat_map_bits(const struct reg_bits_to_feat_map * map)1299 static u64 reg_feat_map_bits(const struct reg_bits_to_feat_map *map)
1300 {
1301 return map->flags & MASKS_POINTER ? (map->masks->mask | map->masks->nmask) : map->bits;
1302 }
1303
/*
 * Sanity-check a register descriptor: its per-bit feature map must
 * cover every bit that isn't already RESx at the whole-register level.
 */
static void __init check_reg_desc(const struct reg_feat_map_desc *r)
{
	check_feat_map(r->bit_feat_map, r->bit_feat_map_sz,
		       ~reg_feat_map_bits(&r->feat_map), r->name);
}
1309
/*
 * Boot-time verification that every register descriptor in this file
 * fully covers its register. Complains via kvm_err() on any gap.
 */
void __init check_feature_map(void)
{
	check_reg_desc(&hfgrtr_desc);
	check_reg_desc(&hfgwtr_desc);
	check_reg_desc(&hfgitr_desc);
	check_reg_desc(&hdfgrtr_desc);
	check_reg_desc(&hdfgwtr_desc);
	check_reg_desc(&hafgrtr_desc);
	check_reg_desc(&hfgrtr2_desc);
	check_reg_desc(&hfgwtr2_desc);
	check_reg_desc(&hfgitr2_desc);
	check_reg_desc(&hdfgrtr2_desc);
	check_reg_desc(&hdfgwtr2_desc);
	check_reg_desc(&hcrx_desc);
	check_reg_desc(&hcr_desc);
	check_reg_desc(&sctlr2_desc);
	check_reg_desc(&tcr2_el2_desc);
	check_reg_desc(&sctlr_el1_desc);
	check_reg_desc(&sctlr_el2_desc);
	check_reg_desc(&mdcr_el2_desc);
	check_reg_desc(&vtcr_el2_desc);
}
1332
idreg_feat_match(struct kvm * kvm,const struct reg_bits_to_feat_map * map)1333 static bool idreg_feat_match(struct kvm *kvm, const struct reg_bits_to_feat_map *map)
1334 {
1335 u64 regval = kvm->arch.id_regs[map->regidx];
1336 u64 regfld = (regval >> map->shift) & GENMASK(map->width - 1, 0);
1337
1338 if (map->sign) {
1339 s64 sfld = sign_extend64(regfld, map->width - 1);
1340 s64 slim = sign_extend64(map->lo_lim, map->width - 1);
1341 return sfld >= slim;
1342 } else {
1343 return regfld >= map->lo_lim;
1344 }
1345 }
1346
/*
 * Walk @map and accumulate the RES0/RES1 masks resulting from features
 * that are not available to this guest.
 *
 * @require: flags that must all be present for an entry to be considered
 * @exclude: flags that disqualify an entry if any is present
 *
 * A non-matching entry contributes its bits as RES1 when flagged
 * AS_RES1, or when the E2H state selects RES1_WHEN_E2H0/E2H1; it
 * contributes them as RES0 otherwise. FORCE_RESx entries never match,
 * and REQUIRES_E2H1 additionally demands that the guest is not E2H0.
 */
static struct resx compute_resx_bits(struct kvm *kvm,
				     const struct reg_bits_to_feat_map *map,
				     int map_size,
				     unsigned long require,
				     unsigned long exclude)
{
	bool e2h0 = kvm_has_feat(kvm, FEAT_E2H0);
	struct resx resx = {};

	for (int i = 0; i < map_size; i++) {
		bool match;

		if ((map[i].flags & require) != require)
			continue;

		if (map[i].flags & exclude)
			continue;

		/* FORCE_RESx is unconditional; otherwise evaluate the feature */
		if (map[i].flags & FORCE_RESx)
			match = false;
		else if (map[i].flags & CALL_FUNC)
			match = map[i].match(kvm);
		else
			match = idreg_feat_match(kvm, &map[i]);

		if (map[i].flags & REQUIRES_E2H1)
			match &= !e2h0;

		if (!match) {
			u64 bits = reg_feat_map_bits(&map[i]);

			if ((map[i].flags & AS_RES1) ||
			    (e2h0 && (map[i].flags & RES1_WHEN_E2H0)) ||
			    (!e2h0 && (map[i].flags & RES1_WHEN_E2H1)))
				resx.res1 |= bits;
			else
				resx.res0 |= bits;
		}
	}

	return resx;
}
1389
/*
 * Compute the effective RES0/RES1 masks for a whole register: the
 * per-bit feature dependencies, the architectural RESx bits (when the
 * descriptor carries an fgt_masks pointer), and the register-level
 * feature dependency itself.
 */
static struct resx compute_reg_resx_bits(struct kvm *kvm,
					 const struct reg_feat_map_desc *r,
					 unsigned long require,
					 unsigned long exclude)
{
	struct resx resx;

	resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
				 require, exclude);

	if (r->feat_map.flags & MASKS_POINTER) {
		resx.res0 |= r->feat_map.masks->res0;
		resx.res1 |= r->feat_map.masks->res1;
	}

	/*
	 * If the register itself was not valid, all the non-RESx bits are
	 * now considered RES0 (this matches the behaviour of registers such
	 * as SCTLR2 and TCR2). Weed out any potential (though unlikely)
	 * overlap with RES1 bits coming from the previous computation.
	 */
	resx.res0 |= compute_resx_bits(kvm, &r->feat_map, 1, require, exclude).res0;
	resx.res1 &= ~resx.res0;

	return resx;
}
1416
compute_fgu_bits(struct kvm * kvm,const struct reg_feat_map_desc * r)1417 static u64 compute_fgu_bits(struct kvm *kvm, const struct reg_feat_map_desc *r)
1418 {
1419 struct resx resx;
1420
1421 /*
1422 * If computing FGUs, we collect the unsupported feature bits as
1423 * RESx bits, but don't take the actual RESx bits or register
1424 * existence into account -- we're not computing bits for the
1425 * register itself.
1426 */
1427 resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
1428 0, NEVER_FGU);
1429
1430 return resx.res0 | resx.res1;
1431 }
1432
/*
 * Populate kvm->arch.fgu[@fgt] with the bits that must UNDEF for this
 * guest. Read/write register pairs sharing a group (e.g. HFGRTR/HFGWTR)
 * are merged into a single FGU mask.
 */
void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
{
	u64 val = 0;

	switch (fgt) {
	case HFGRTR_GROUP:
		val |= compute_fgu_bits(kvm, &hfgrtr_desc);
		val |= compute_fgu_bits(kvm, &hfgwtr_desc);
		break;
	case HFGITR_GROUP:
		val |= compute_fgu_bits(kvm, &hfgitr_desc);
		break;
	case HDFGRTR_GROUP:
		val |= compute_fgu_bits(kvm, &hdfgrtr_desc);
		val |= compute_fgu_bits(kvm, &hdfgwtr_desc);
		break;
	case HAFGRTR_GROUP:
		val |= compute_fgu_bits(kvm, &hafgrtr_desc);
		break;
	case HFGRTR2_GROUP:
		val |= compute_fgu_bits(kvm, &hfgrtr2_desc);
		val |= compute_fgu_bits(kvm, &hfgwtr2_desc);
		break;
	case HFGITR2_GROUP:
		val |= compute_fgu_bits(kvm, &hfgitr2_desc);
		break;
	case HDFGRTR2_GROUP:
		val |= compute_fgu_bits(kvm, &hdfgrtr2_desc);
		val |= compute_fgu_bits(kvm, &hdfgwtr2_desc);
		break;
	default:
		/* Unknown group id is a host bug */
		BUG();
	}

	kvm->arch.fgu[fgt] = val;
}
1469
/*
 * Return the effective RES0/RES1 masks for @reg as seen by this guest,
 * combining feature availability with architectural RESx bits.
 * SCTLR2_EL1 and SCTLR2_EL2 share the same bit layout and descriptor.
 */
struct resx get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg)
{
	struct resx resx;

	switch (reg) {
	case HFGRTR_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgrtr_desc, 0, 0);
		break;
	case HFGWTR_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgwtr_desc, 0, 0);
		break;
	case HFGITR_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgitr_desc, 0, 0);
		break;
	case HDFGRTR_EL2:
		resx = compute_reg_resx_bits(kvm, &hdfgrtr_desc, 0, 0);
		break;
	case HDFGWTR_EL2:
		resx = compute_reg_resx_bits(kvm, &hdfgwtr_desc, 0, 0);
		break;
	case HAFGRTR_EL2:
		resx = compute_reg_resx_bits(kvm, &hafgrtr_desc, 0, 0);
		break;
	case HFGRTR2_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgrtr2_desc, 0, 0);
		break;
	case HFGWTR2_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgwtr2_desc, 0, 0);
		break;
	case HFGITR2_EL2:
		resx = compute_reg_resx_bits(kvm, &hfgitr2_desc, 0, 0);
		break;
	case HDFGRTR2_EL2:
		resx = compute_reg_resx_bits(kvm, &hdfgrtr2_desc, 0, 0);
		break;
	case HDFGWTR2_EL2:
		resx = compute_reg_resx_bits(kvm, &hdfgwtr2_desc, 0, 0);
		break;
	case HCRX_EL2:
		resx = compute_reg_resx_bits(kvm, &hcrx_desc, 0, 0);
		/* Architectural RES1 bits apply on top of the feature map */
		resx.res1 |= __HCRX_EL2_RES1;
		break;
	case HCR_EL2:
		resx = compute_reg_resx_bits(kvm, &hcr_desc, 0, 0);
		break;
	case SCTLR2_EL1:
	case SCTLR2_EL2:
		resx = compute_reg_resx_bits(kvm, &sctlr2_desc, 0, 0);
		break;
	case TCR2_EL2:
		resx = compute_reg_resx_bits(kvm, &tcr2_el2_desc, 0, 0);
		break;
	case SCTLR_EL1:
		resx = compute_reg_resx_bits(kvm, &sctlr_el1_desc, 0, 0);
		break;
	case SCTLR_EL2:
		resx = compute_reg_resx_bits(kvm, &sctlr_el2_desc, 0, 0);
		break;
	case MDCR_EL2:
		resx = compute_reg_resx_bits(kvm, &mdcr_el2_desc, 0, 0);
		break;
	case VTCR_EL2:
		resx = compute_reg_resx_bits(kvm, &vtcr_el2_desc, 0, 0);
		break;
	default:
		/* Unhandled register: warn once, report no fixed bits */
		WARN_ON_ONCE(1);
		resx = (typeof(resx)){};
		break;
	}

	return resx;
}
1542
/*
 * Map an FGT register to its global fgt_masks descriptor. @reg must be
 * a compile-time constant: any other register is rejected at build
 * time via BUILD_BUG_ON().
 */
static __always_inline struct fgt_masks *__fgt_reg_to_masks(enum vcpu_sysreg reg)
{
	switch (reg) {
	case HFGRTR_EL2:
		return &hfgrtr_masks;
	case HFGWTR_EL2:
		return &hfgwtr_masks;
	case HFGITR_EL2:
		return &hfgitr_masks;
	case HDFGRTR_EL2:
		return &hdfgrtr_masks;
	case HDFGWTR_EL2:
		return &hdfgwtr_masks;
	case HAFGRTR_EL2:
		return &hafgrtr_masks;
	case HFGRTR2_EL2:
		return &hfgrtr2_masks;
	case HFGWTR2_EL2:
		return &hfgwtr2_masks;
	case HFGITR2_EL2:
		return &hfgitr2_masks;
	case HDFGRTR2_EL2:
		return &hdfgrtr2_masks;
	case HDFGWTR2_EL2:
		return &hdfgwtr2_masks;
	default:
		BUILD_BUG_ON(1);
	}
}
1572
__compute_fgt(struct kvm_vcpu * vcpu,enum vcpu_sysreg reg)1573 static __always_inline void __compute_fgt(struct kvm_vcpu *vcpu, enum vcpu_sysreg reg)
1574 {
1575 u64 fgu = vcpu->kvm->arch.fgu[__fgt_reg_to_group_id(reg)];
1576 struct fgt_masks *m = __fgt_reg_to_masks(reg);
1577 u64 clear = 0, set = 0, val = m->nmask;
1578
1579 set |= fgu & m->mask;
1580 clear |= fgu & m->nmask;
1581
1582 if (is_nested_ctxt(vcpu)) {
1583 u64 nested = __vcpu_sys_reg(vcpu, reg);
1584 set |= nested & m->mask;
1585 clear |= ~nested & m->nmask;
1586 }
1587
1588 val |= set;
1589 val &= ~clear;
1590 *vcpu_fgt(vcpu, reg) = val;
1591 }
1592
__compute_hfgwtr(struct kvm_vcpu * vcpu)1593 static void __compute_hfgwtr(struct kvm_vcpu *vcpu)
1594 {
1595 __compute_fgt(vcpu, HFGWTR_EL2);
1596
1597 if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
1598 *vcpu_fgt(vcpu, HFGWTR_EL2) |= HFGWTR_EL2_TCR_EL1;
1599 }
1600
__compute_hdfgwtr(struct kvm_vcpu * vcpu)1601 static void __compute_hdfgwtr(struct kvm_vcpu *vcpu)
1602 {
1603 __compute_fgt(vcpu, HDFGWTR_EL2);
1604
1605 if (is_hyp_ctxt(vcpu))
1606 *vcpu_fgt(vcpu, HDFGWTR_EL2) |= HDFGWTR_EL2_MDSCR_EL1;
1607 }
1608
/*
 * Recompute all fine-grained trap registers for @vcpu on load. The
 * FGT2 set is only computed on hosts that implement FEAT_FGT2.
 */
void kvm_vcpu_load_fgt(struct kvm_vcpu *vcpu)
{
	if (!cpus_have_final_cap(ARM64_HAS_FGT))
		return;

	__compute_fgt(vcpu, HFGRTR_EL2);
	__compute_hfgwtr(vcpu);
	__compute_fgt(vcpu, HFGITR_EL2);
	__compute_fgt(vcpu, HDFGRTR_EL2);
	__compute_hdfgwtr(vcpu);
	__compute_fgt(vcpu, HAFGRTR_EL2);

	if (!cpus_have_final_cap(ARM64_HAS_FGT2))
		return;

	__compute_fgt(vcpu, HFGRTR2_EL2);
	__compute_fgt(vcpu, HFGWTR2_EL2);
	__compute_fgt(vcpu, HFGITR2_EL2);
	__compute_fgt(vcpu, HDFGRTR2_EL2);
	__compute_fgt(vcpu, HDFGWTR2_EL2);
}
1630