1 /*
2 * Copyright (c) 2019-2026, Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7 #ifndef ARCH_FEATURES_H
8 #define ARCH_FEATURES_H
9
10 #include <stdbool.h>
11
12 #include <arch_helpers.h>
13 #include <common/feat_detect.h>
14 #include <lib/cpus/errata.h>
15 #include <lib/el3_runtime/context_mgmt.h>
16 #include <lib/el3_runtime/cpu_data.h>
17
#if ENABLE_RME
/*
 * Bitmask of security states (worlds) in which a feature's ID register
 * fields remain visible; worlds outside the mask see the feature's fields
 * as unimplemented. One bit per CPU_CONTEXT_* index.
 *
 * All bits use unsigned literals ("1u") so the masks never involve
 * signed shift semantics (previously FEAT_ENABLE_REALM/SECURE/NS used a
 * signed "1", inconsistent with the composite masks).
 */
#define FEAT_ENABLE_ALL_WORLDS \
	((1u << CPU_CONTEXT_SECURE) | \
	 (1u << CPU_CONTEXT_NS) | \
	 (1u << CPU_CONTEXT_REALM))
#define FEAT_ENABLE_REALM	(1u << CPU_CONTEXT_REALM)
#else
#define FEAT_ENABLE_ALL_WORLDS \
	((1u << CPU_CONTEXT_SECURE) | \
	 (1u << CPU_CONTEXT_NS))
/* Without RME there is no Realm world: empty mask. */
#define FEAT_ENABLE_REALM	U(0)
#endif

#define FEAT_ENABLE_SECURE	(1u << CPU_CONTEXT_SECURE)
#define FEAT_ENABLE_NS		(1u << CPU_CONTEXT_NS)
33
/*
 * Extract an ID register field: shift @reg right by @feat bit positions and
 * apply @mask. Every argument is parenthesised in the expansion so callers
 * may pass compound expressions (e.g. a mask built with "|") without
 * operator-precedence surprises — previously @mask was expanded bare.
 */
#define ISOLATE_FIELD(reg, feat, mask) \
	((unsigned int)(((reg) >> (feat)) & (mask)))
36
/*
 * True when a feature's ID register fields must be hidden for @world:
 * either the feature's build-time guard is 0, or @world's bit is clear in
 * the @enabled_worlds bitmask (see FEAT_ENABLE_* above).
 */
#define SHOULD_ID_FIELD_DISABLE(guard, enabled_worlds, world) \
	(((guard) == 0U) || ((((enabled_worlds) >> (world)) & 1U) == 0U))
39
40
/*
 * Generate is_<name>_supported(): resolves the tri-state build guard at
 * compile time where possible — false for FEAT_STATE_DISABLED, true for
 * FEAT_STATE_ALWAYS — and otherwise falls back to runtime detection via
 * read_func() (the dynamic FEAT_STATE case).
 */
#define CREATE_FEATURE_SUPPORTED(name, read_func, guard) \
__attribute__((always_inline)) \
static inline bool is_ ## name ## _supported(void) \
{ \
	if ((guard) == FEAT_STATE_DISABLED) { \
		return false; \
	} \
	if ((guard) == FEAT_STATE_ALWAYS) { \
		return true; \
	} \
	return read_func(); \
}
53
54 /*
55 * CREATE_IDREG_UPDATE and CREATE_PERCPU_IDREG_UPDATE are two macros that
56 * generate the update_feat_abc_idreg_field() function based on how its
57 * corresponding ID register is cached.
58 * The function disables ID register fields related to a feature if the build
59 * flag for that feature is 0 or if the feature should be disabled for that
60 * world. If the particular field has to be disabled, its field in the cached
61 * ID register is set to 0.
62 *
63 * Note: For most ID register fields, a value of 0 represents
64 * the Unimplemented state, and hence we use this macro to show features
65 * disabled in EL3 as unimplemented to lower ELs. However, certain feature's
66 * ID Register fields (like ID_AA64MMFR4_EL1.E2H0) deviate from this convention,
67 * where 0 does not represent Unimplemented.
68 * For those features, a custom update_feat_abc_idreg_field()
69 * needs to be created. This custom function should set the field to the
70 * feature's unimplemented state value if the feature is disabled in EL3.
71 *
72 * For example:
73 *
74 * __attribute__((always_inline))
75 * static inline void update_feat_abc_idreg_field(size_t security_state)
76 * {
77 * if (SHOULD_ID_FIELD_DISABLE(guard, enabled_worlds, security_state)) {
78 * per_world_context_t *per_world_ctx =
79 * &per_world_context[security_state];
80 * perworld_idregs_t *perworld_idregs = &(per_world_ctx->idregs);
81 *
82 * perworld_idregs->idreg &=
83 * ~((u_register_t)mask << idfield);
84 * perworld_idregs->idreg |=
85 * (((u_register_t)<unimplemented state value> & mask) << idfield);
86 * }
87 * }
88 */
89
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
/*
 * Per-world cached ID register update: clears the feature's field in the
 * per-world ID register cache when the feature must be hidden from
 * @security_state. @mask and @idfield are parenthesised in the expansion
 * (previously expanded bare) so compound expressions are safe.
 */
#define CREATE_IDREG_UPDATE(name, idreg, idfield, mask, guard, enabled_worlds) \
__attribute__((always_inline)) \
static inline void update_ ## name ## _idreg_field(size_t security_state) \
{ \
	if (SHOULD_ID_FIELD_DISABLE(guard, enabled_worlds, security_state)) { \
		per_world_context_t *per_world_ctx = \
			&per_world_context[security_state]; \
		perworld_idregs_t *perworld_idregs = &(per_world_ctx->idregs); \
		perworld_idregs->idreg &= \
			~((u_register_t)(mask) << (idfield)); \
	} \
}
/*
 * Same as CREATE_IDREG_UPDATE but for ID registers cached per-CPU rather
 * than per-world (e.g. debug/PMU registers that can differ across cores).
 */
#define CREATE_PERCPU_IDREG_UPDATE(name, idreg, idfield, mask, guard, \
				   enabled_worlds) \
__attribute__((always_inline)) \
static inline void update_ ## name ## _idreg_field(size_t security_state) \
{ \
	if (SHOULD_ID_FIELD_DISABLE(guard, enabled_worlds, security_state)) { \
		percpu_idregs_t *percpu_idregs = \
			&(get_cpu_data(idregs[security_state])); \
		percpu_idregs->idreg &= \
			~((u_register_t)(mask) << (idfield)); \
	} \
}
#else
/* Without FEAT_IDTE3 in BL31 the update helpers expand to nothing. */
#define CREATE_IDREG_UPDATE(name, idreg, idfield, mask, guard, enabled_worlds)
#define CREATE_PERCPU_IDREG_UPDATE(name, idreg, idfield, mask, guard, \
				   enabled_worlds)
#endif
118
/*
 * Generate is_<name>_present(): reads the live ID register and reports
 * whether the field value is at least @idval (ID register fields encode
 * feature versions as monotonically increasing values). The redundant
 * "? true : false" on the comparison has been dropped and @idval is
 * parenthesised for macro hygiene.
 */
#define _CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
__attribute__((always_inline)) \
static inline bool is_ ## name ## _present(void) \
{ \
	return ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= (idval); \
}
126
/*
 * Presence check plus a per-world cached ID register update; the update
 * guard is hard-wired to 1U (always apply world-mask filtering).
 */
#define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval, \
			       enabled_worlds) \
	_CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
	CREATE_IDREG_UPDATE(name, idreg, idfield, mask, 1U, enabled_worlds)

/* As above, but the ID register is cached per-CPU rather than per-world. */
#define CREATE_PERCPU_FEATURE_PRESENT(name, idreg, idfield, mask, idval, \
				      enabled_worlds) \
	_CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
	CREATE_PERCPU_IDREG_UPDATE(name, idreg, idfield, mask, 1U, \
				   enabled_worlds)

/*
 * Full set: is_<name>_present() (hardware), is_<name>_supported()
 * (build guard + hardware) and update_<name>_idreg_field() (per-world).
 */
#define CREATE_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard, \
			     enabled_worlds) \
	CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval, \
			       enabled_worlds) \
	CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)

/* Full set with the per-CPU cached ID register update variant. */
#define CREATE_PERCPU_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard, \
				    enabled_worlds) \
	CREATE_PERCPU_FEATURE_PRESENT(name, idreg, idfield, mask, idval, \
				      enabled_worlds) \
	CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)
149
150 /* +----------------------------+
151 * | Features supported |
152 * +----------------------------+
153 * | GENTIMER |
154 * +----------------------------+
155 * | FEAT_PAN |
156 * +----------------------------+
157 * | FEAT_VHE |
158 * +----------------------------+
159 * | FEAT_TTCNP |
160 * +----------------------------+
161 * | FEAT_UAO |
162 * +----------------------------+
163 * | FEAT_PACQARMA3 |
164 * +----------------------------+
165 * | FEAT_PAUTH |
166 * +----------------------------+
167 * | FEAT_TTST |
168 * +----------------------------+
169 * | FEAT_BTI |
170 * +----------------------------+
171 * | FEAT_MTE2 |
172 * +----------------------------+
173 * | FEAT_SSBS |
174 * +----------------------------+
175 * | FEAT_NMI |
176 * +----------------------------+
177 * | FEAT_GCS |
178 * +----------------------------+
179 * | FEAT_EBEP |
180 * +----------------------------+
181 * | FEAT_SEBEP |
182 * +----------------------------+
183 * | FEAT_SEL2 |
184 * +----------------------------+
185 * | FEAT_TWED |
186 * +----------------------------+
187 * | FEAT_FGT |
188 * +----------------------------+
189 * | FEAT_EC/ECV2 |
190 * +----------------------------+
191 * | FEAT_RNG |
192 * +----------------------------+
193 * | FEAT_TCR2 |
194 * +----------------------------+
195 * | FEAT_S2POE |
196 * +----------------------------+
197 * | FEAT_S1POE |
198 * +----------------------------+
199 * | FEAT_S2PIE |
200 * +----------------------------+
201 * | FEAT_S1PIE |
202 * +----------------------------+
203 * | FEAT_AMU/AMUV1P1 |
204 * +----------------------------+
205 * | FEAT_MPAM |
206 * +----------------------------+
207 * | FEAT_HCX |
208 * +----------------------------+
209 * | FEAT_RNG_TRAP |
210 * +----------------------------+
211 * | FEAT_RME |
212 * +----------------------------+
213 * | FEAT_SB |
214 * +----------------------------+
215 * | FEAT_CSV2_2/CSV2_3 |
216 * +----------------------------+
217 * | FEAT_SPE |
218 * +----------------------------+
219 * | FEAT_SVE |
220 * +----------------------------+
221 * | FEAT_RAS |
222 * +----------------------------+
223 * | FEAT_DIT |
224 * +----------------------------+
225 * | FEAT_SYS_REG_TRACE |
226 * +----------------------------+
227 * | FEAT_TRF |
228 * +----------------------------+
229 * | FEAT_NV2 |
230 * +----------------------------+
231 * | FEAT_BRBE |
232 * +----------------------------+
233 * | FEAT_TRBE |
234 * +----------------------------+
235 * | FEAT_SME/SME2 |
236 * +----------------------------+
237 * | FEAT_PMUV3 |
238 * +----------------------------+
239 * | FEAT_MTPMU |
240 * +----------------------------+
241 * | FEAT_FGT2 |
242 * +----------------------------+
243 * | FEAT_THE |
244 * +----------------------------+
245 * | FEAT_SCTLR2 |
246 * +----------------------------+
247 * | FEAT_D128 |
248 * +----------------------------+
249 * | FEAT_LS64_ACCDATA |
250 * +----------------------------+
251 * | FEAT_FPMR |
252 * +----------------------------+
253 * | FEAT_MOPS |
254 * +----------------------------+
255 * | FEAT_PAUTH_LR |
256 * +----------------------------+
257 * | FEAT_FGWTE3 |
258 * +----------------------------+
259 * | FEAT_MPAM_PE_BW_CTRL |
260 * +----------------------------+
261 * | FEAT_CPA2 |
262 * +----------------------------+
263 * | FEAT_AIE |
264 * +----------------------------+
265 * | FEAT_PFAR |
266 * +----------------------------+
267 * | FEAT_RME_GPC2 |
268 * +----------------------------+
269 * | FEAT_RME_GDI |
270 * +----------------------------+
271 * | FEAT_IDTE3 |
272 * +----------------------------+
273 * | FEAT_UINJ |
274 * +----------------------------+
275 * | FEAT_LSE |
276 * +----------------------------+
277 * | FEAT_MORELLO |
278 * +----------------------------+
279 */
280
__attribute__((always_inline))
static inline bool is_armv7_gentimer_present(void)
{
	/*
	 * Armv8-A mandates the Generic Timer, so no ID register probe is
	 * needed: report it unconditionally.
	 */
	return true;
}
287
288 /* FEAT_PAN: Privileged access never */
289 CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
290 ID_AA64MMFR1_EL1_PAN_MASK, 1U, ENABLE_FEAT_PAN,
291 FEAT_ENABLE_ALL_WORLDS)
292
293 /* FEAT_VHE: Virtualization Host Extensions */
294 CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
295 ID_AA64MMFR1_EL1_VHE_MASK, 1U, ENABLE_FEAT_VHE,
296 FEAT_ENABLE_ALL_WORLDS)
297
298 /* FEAT_TTCNP: Translation table common not private */
299 CREATE_FEATURE_PRESENT(feat_ttcnp, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_CNP_SHIFT,
300 ID_AA64MMFR2_EL1_CNP_MASK, 1U,
301 FEAT_ENABLE_ALL_WORLDS)
302
303 /* FEAT_UAO: User access override */
304 CREATE_FEATURE_PRESENT(feat_uao, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_UAO_SHIFT,
305 ID_AA64MMFR2_EL1_UAO_MASK, 1U,
306 FEAT_ENABLE_ALL_WORLDS)
307
308 /* If any of the fields is not zero, QARMA3 algorithm is present */
309 CREATE_FEATURE_PRESENT(feat_pacqarma3, id_aa64isar2_el1, 0,
310 ((ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
311 (ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT)), 1U,
312 FEAT_ENABLE_ALL_WORLDS)
313
/* FEAT_PAUTH: Pointer Authentication */
__attribute__((always_inline))
static inline bool is_feat_pauth_present(void)
{
	/* All four ID_AA64ISAR1_EL1 PAuth fields: GPI, GPA, API, APA. */
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * If any of the fields is not zero or QARMA3 is present,
	 * PAuth is present
	 */
	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
		is_feat_pacqarma3_present());
}
/* Two independent guards share the same hardware probe. */
CREATE_FEATURE_SUPPORTED(feat_pauth, is_feat_pauth_present, ENABLE_PAUTH)
CREATE_FEATURE_SUPPORTED(ctx_pauth, is_feat_pauth_present, CTX_INCLUDE_PAUTH_REGS)
333
/* FEAT_CRYPTO: SIMD Crypto Extensions */
__attribute__((always_inline))
static inline bool is_feat_crypto_present(void)
{
	uint64_t mask_id_aa64isar0 =
		(ID_AA64ISAR0_AES_MASK << ID_AA64ISAR0_AES_SHIFT) |
		(ID_AA64ISAR0_SHA1_MASK << ID_AA64ISAR0_SHA1_SHIFT) |
		(ID_AA64ISAR0_SHA2_MASK << ID_AA64ISAR0_SHA2_SHIFT);

	/*
	 * Check whether any of the AES, SHA1 or SHA2 extensions is present.
	 */
	return ((read_id_aa64isar0_el1() & mask_id_aa64isar0) != 0U);
}
CREATE_FEATURE_SUPPORTED(feat_crypto, is_feat_crypto_present, ENABLE_FEAT_CRYPTO)
349
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
/*
 * Custom ID register update for FEAT_PAUTH: the feature spans four
 * ID_AA64ISAR1_EL1 fields (GPI/GPA/API/APA) plus the QARMA3 fields in
 * ID_AA64ISAR2_EL1 (APA3/GPA3), so the generic CREATE_IDREG_UPDATE macro
 * cannot express it. The fields are cleared (shown as unimplemented to
 * lower ELs) only when BOTH PAuth guards agree the world must not see
 * the feature.
 */
__attribute__((always_inline))
static inline void update_feat_pauth_idreg_field(size_t security_state)
{
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * Bug fix: these masks were previously shifted by *_MASK instead
	 * of *_SHIFT, so the wrong bits of ID_AA64ISAR2_EL1 were cleared.
	 */
	uint64_t mask_id_aa64isar2 =
		(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT) |
		(ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT);

	per_world_context_t *per_world_ctx = &per_world_context[security_state];
	perworld_idregs_t *perworld_idregs =
		&(per_world_ctx->idregs);

	if ((SHOULD_ID_FIELD_DISABLE(ENABLE_PAUTH, FEAT_ENABLE_NS,
				     security_state)) &&
	    (SHOULD_ID_FIELD_DISABLE(CTX_INCLUDE_PAUTH_REGS,
				     FEAT_ENABLE_ALL_WORLDS,
				     security_state))) {
		perworld_idregs->id_aa64isar1_el1 &= ~(mask_id_aa64isar1);
		perworld_idregs->id_aa64isar2_el1 &= ~(mask_id_aa64isar2);
	}
}
#endif
378
/*
 * FEAT_PAUTH_LR
 * This feature has a non-standard discovery method so define this function
 * manually, then use the CREATE_FEATURE_SUPPORTED macro with it. This
 * feature is enabled with ENABLE_PAUTH when present.
 */
__attribute__((always_inline))
static inline bool is_feat_pauth_lr_present(void)
{
	/*
	 * FEAT_PAUTH_LR support is indicated by up to 3 fields; if one or
	 * more of these is 0b0110 then the feature is present:
	 * 1) id_aa64isar1_el1.api
	 * 2) id_aa64isar1_el1.apa
	 * 3) id_aa64isar2_el1.apa3
	 */
	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_API_SHIFT, ID_AA64ISAR1_API_MASK) == 0b0110) {
		return true;
	}
	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_APA_SHIFT, ID_AA64ISAR1_APA_MASK) == 0b0110) {
		return true;
	}
	if (ISOLATE_FIELD(read_id_aa64isar2_el1(), ID_AA64ISAR2_APA3_SHIFT, ID_AA64ISAR2_APA3_MASK) == 0b0110) {
		return true;
	}
	return false;
}
CREATE_FEATURE_SUPPORTED(feat_pauth_lr, is_feat_pauth_lr_present, ENABLE_FEAT_PAUTH_LR)
407
408 /* FEAT_TTST: Small translation tables */
409 CREATE_FEATURE_PRESENT(feat_ttst, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_ST_SHIFT,
410 ID_AA64MMFR2_EL1_ST_MASK, 1U,
411 FEAT_ENABLE_ALL_WORLDS)
412
413 /* FEAT_BTI: Branch target identification */
414 CREATE_FEATURE_FUNCS(feat_bti, id_aa64pfr1_el1, ID_AA64PFR1_EL1_BT_SHIFT,
415 ID_AA64PFR1_EL1_BT_MASK, BTI_IMPLEMENTED, ENABLE_BTI,
416 FEAT_ENABLE_ALL_WORLDS)
417
418 /* FEAT_MTE2: Memory tagging extension */
419 CREATE_FEATURE_FUNCS(feat_mte2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_MTE_SHIFT,
420 ID_AA64PFR1_EL1_MTE_MASK, MTE_IMPLEMENTED_ELX, ENABLE_FEAT_MTE2,
421 FEAT_ENABLE_SECURE | FEAT_ENABLE_NS)
422
423 /* FEAT_SSBS: Speculative store bypass safe */
424 CREATE_FEATURE_PRESENT(feat_ssbs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SSBS_SHIFT,
425 ID_AA64PFR1_EL1_SSBS_MASK, 1U,
426 FEAT_ENABLE_ALL_WORLDS)
427
428 /* FEAT_NMI: Non-maskable interrupts */
429 CREATE_FEATURE_PRESENT(feat_nmi, id_aa64pfr1_el1, ID_AA64PFR1_EL1_NMI_SHIFT,
430 ID_AA64PFR1_EL1_NMI_MASK, NMI_IMPLEMENTED,
431 FEAT_ENABLE_ALL_WORLDS)
432
433 /* FEAT_EBEP */
434 CREATE_PERCPU_FEATURE_FUNCS(feat_ebep, id_aa64dfr1_el1, ID_AA64DFR1_EBEP_SHIFT,
435 ID_AA64DFR1_EBEP_MASK, 1U, ENABLE_FEAT_EBEP,
436 FEAT_ENABLE_ALL_WORLDS)
437
438 /* FEAT_SEBEP */
439 CREATE_PERCPU_FEATURE_PRESENT(feat_sebep, id_aa64dfr0_el1, ID_AA64DFR0_SEBEP_SHIFT,
440 ID_AA64DFR0_SEBEP_MASK, SEBEP_IMPLEMENTED,
441 FEAT_ENABLE_ALL_WORLDS)
442
443 /* FEAT_SEL2: Secure EL2 */
444 CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
445 ID_AA64PFR0_SEL2_MASK, 1U, ENABLE_FEAT_SEL2,
446 FEAT_ENABLE_ALL_WORLDS)
447
448 /* FEAT_TWED: Delayed trapping of WFE */
449 CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
450 ID_AA64MMFR1_EL1_TWED_MASK, 1U, ENABLE_FEAT_TWED,
451 FEAT_ENABLE_ALL_WORLDS)
452
453 /* FEAT_FGT: Fine-grained traps */
454 CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
455 ID_AA64MMFR0_EL1_FGT_MASK, 1U, ENABLE_FEAT_FGT,
456 FEAT_ENABLE_ALL_WORLDS)
457
458 /* FEAT_FGT2: Fine-grained traps extended */
459 CREATE_FEATURE_FUNCS(feat_fgt2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
460 ID_AA64MMFR0_EL1_FGT_MASK, FGT2_IMPLEMENTED, ENABLE_FEAT_FGT2,
461 FEAT_ENABLE_ALL_WORLDS)
462
463 /* FEAT_FGWTE3: Fine-grained write traps EL3 */
464 CREATE_FEATURE_FUNCS(feat_fgwte3, id_aa64mmfr4_el1, ID_AA64MMFR4_EL1_FGWTE3_SHIFT,
465 ID_AA64MMFR4_EL1_FGWTE3_MASK, FGWTE3_IMPLEMENTED,
466 ENABLE_FEAT_FGWTE3, FEAT_ENABLE_ALL_WORLDS)
467
468 /* FEAT_ECV: Enhanced Counter Virtualization */
469 CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
470 ID_AA64MMFR0_EL1_ECV_MASK, 1U, ENABLE_FEAT_ECV,
471 FEAT_ENABLE_ALL_WORLDS)
472 CREATE_FEATURE_FUNCS(feat_ecv_v2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
473 ID_AA64MMFR0_EL1_ECV_MASK, ID_AA64MMFR0_EL1_ECV_SELF_SYNCH,
474 ENABLE_FEAT_ECV, FEAT_ENABLE_ALL_WORLDS)
475
476 /* FEAT_RNG: Random number generator */
477 CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
478 ID_AA64ISAR0_RNDR_MASK, 1U, ENABLE_FEAT_RNG,
479 FEAT_ENABLE_ALL_WORLDS)
480
481 /* FEAT_TCR2: Support TCR2_ELx regs */
482 CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
483 ID_AA64MMFR3_EL1_TCRX_MASK, 1U, ENABLE_FEAT_TCR2,
484 FEAT_ENABLE_ALL_WORLDS)
485
486 /* FEAT_S2POE */
487 CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
488 ID_AA64MMFR3_EL1_S2POE_MASK, 1U, ENABLE_FEAT_S2POE,
489 FEAT_ENABLE_ALL_WORLDS)
490
491 /* FEAT_S1POE */
492 CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
493 ID_AA64MMFR3_EL1_S1POE_MASK, 1U, ENABLE_FEAT_S1POE,
494 FEAT_ENABLE_ALL_WORLDS)
495
496 __attribute__((always_inline))
497 static inline bool is_feat_sxpoe_supported(void)
498 {
499 return is_feat_s1poe_supported() || is_feat_s2poe_supported();
500 }
501
502 /* FEAT_S2PIE */
503 CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
504 ID_AA64MMFR3_EL1_S2PIE_MASK, 1U, ENABLE_FEAT_S2PIE,
505 FEAT_ENABLE_ALL_WORLDS)
506
507 /* FEAT_S1PIE */
508 CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
509 ID_AA64MMFR3_EL1_S1PIE_MASK, 1U, ENABLE_FEAT_S1PIE,
510 FEAT_ENABLE_ALL_WORLDS)
511
/* FEAT_THE: Translation Hardening Extension (exposed to NS world only). */
CREATE_FEATURE_FUNCS(feat_the, id_aa64pfr1_el1, ID_AA64PFR1_EL1_THE_SHIFT,
		     ID_AA64PFR1_EL1_THE_MASK, THE_IMPLEMENTED, ENABLE_FEAT_THE,
		     FEAT_ENABLE_NS)
516
517 /* FEAT_SCTLR2 */
518 CREATE_FEATURE_FUNCS(feat_sctlr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_SCTLR2_SHIFT,
519 ID_AA64MMFR3_EL1_SCTLR2_MASK, SCTLR2_IMPLEMENTED,
520 ENABLE_FEAT_SCTLR2,
521 FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
522
523 /* FEAT_D128 */
524 CREATE_FEATURE_FUNCS(feat_d128, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_D128_SHIFT,
525 ID_AA64MMFR3_EL1_D128_MASK, D128_IMPLEMENTED,
526 ENABLE_FEAT_D128, FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
527
528 /* FEAT_RME_GPC2 */
529 _CREATE_FEATURE_PRESENT(feat_rme_gpc2, id_aa64pfr0_el1,
530 ID_AA64PFR0_FEAT_RME_SHIFT, ID_AA64PFR0_FEAT_RME_MASK,
531 RME_GPC2_IMPLEMENTED)
532
533 /* FEAT_RME_GDI */
534 CREATE_FEATURE_FUNCS(feat_rme_gdi, id_aa64mmfr4_el1,
535 ID_AA64MMFR4_EL1_RME_GDI_SHIFT,
536 ID_AA64MMFR4_EL1_RME_GDI_MASK, RME_GDI_IMPLEMENTED,
537 ENABLE_FEAT_RME_GDI, FEAT_ENABLE_ALL_WORLDS)
538
539 /* FEAT_FPMR */
540 CREATE_FEATURE_FUNCS(feat_fpmr, id_aa64pfr2_el1, ID_AA64PFR2_EL1_FPMR_SHIFT,
541 ID_AA64PFR2_EL1_FPMR_MASK, FPMR_IMPLEMENTED,
542 ENABLE_FEAT_FPMR, FEAT_ENABLE_NS)
543 /* FEAT_MOPS */
544 CREATE_FEATURE_FUNCS(feat_mops, id_aa64isar2_el1, ID_AA64ISAR2_EL1_MOPS_SHIFT,
545 ID_AA64ISAR2_EL1_MOPS_MASK, MOPS_IMPLEMENTED,
546 ENABLE_FEAT_MOPS, FEAT_ENABLE_ALL_WORLDS)
547
548 __attribute__((always_inline))
549 static inline bool is_feat_sxpie_supported(void)
550 {
551 return is_feat_s1pie_supported() || is_feat_s2pie_supported();
552 }
553
554 /* FEAT_GCS: Guarded Control Stack */
555 CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
556 ID_AA64PFR1_EL1_GCS_MASK, 1U, ENABLE_FEAT_GCS,
557 FEAT_ENABLE_ALL_WORLDS)
558
559 /* FEAT_AMU: Activity Monitors Extension */
560 CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
561 ID_AA64PFR0_AMU_MASK, 1U, ENABLE_FEAT_AMU,
562 FEAT_ENABLE_NS)
563
/*
 * Auxiliary counters for FEAT_AMU: present when AMCFGR_EL0.NCG (counter
 * group count) is non-zero. No ID register update variant is generated.
 */
_CREATE_FEATURE_PRESENT(feat_amu_aux, amcfgr_el0,
			AMCFGR_EL0_NCG_SHIFT, AMCFGR_EL0_NCG_MASK, 1U)

CREATE_FEATURE_SUPPORTED(feat_amu_aux, is_feat_amu_aux_present,
			 ENABLE_AMU_AUXILIARY_COUNTERS)
570
571 /* FEAT_AMUV1P1: AMU Extension v1.1 */
572 CREATE_FEATURE_FUNCS(feat_amuv1p1, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
573 ID_AA64PFR0_AMU_MASK, ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1,
574 FEAT_ENABLE_NS)
575
/*
 * MPAM version is composed from two ID register fields
 * (ID_AA64PFR0_EL1.MPAM as the upper nibble, ID_AA64PFR1_EL1.MPAM_frac as
 * the lower):
 *
 * 0x00: None Armv8.0 or later
 * 0x01: v0.1 Armv8.4 or later
 * 0x10: v1.0 Armv8.2 or later
 * 0x11: v1.1 Armv8.4 or later
 *
 * This function returns true when the composed version is non-zero,
 * i.e. when any MPAM version is implemented.
 */
__attribute__((always_inline))
static inline bool is_feat_mpam_present(void)
{
	unsigned int ret = (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
		((read_id_aa64pfr1_el1() >> ID_AA64PFR1_MPAM_FRAC_SHIFT)
		& ID_AA64PFR1_MPAM_FRAC_MASK));
	return ret;
}

CREATE_FEATURE_SUPPORTED(feat_mpam, is_feat_mpam_present, ENABLE_FEAT_MPAM)
596
597
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
/*
 * Custom ID register update for FEAT_MPAM: the feature spans two cached
 * ID registers (PFR0.MPAM and PFR1.MPAM_frac), so the generic macro does
 * not apply. MPAM is exposed only to the NS and Realm worlds; other
 * worlds see both fields cleared (unimplemented).
 */
__attribute__((always_inline))
static inline void update_feat_mpam_idreg_field(size_t security_state)
{
	if (SHOULD_ID_FIELD_DISABLE(ENABLE_FEAT_MPAM,
		FEAT_ENABLE_NS | FEAT_ENABLE_REALM, security_state)) {
		per_world_context_t *per_world_ctx =
			&per_world_context[security_state];
		perworld_idregs_t *perworld_idregs =
			&(per_world_ctx->idregs);

		/* Clear the major version field in the cached PFR0. */
		perworld_idregs->id_aa64pfr0_el1 &=
			~((u_register_t)ID_AA64PFR0_MPAM_MASK
			<< ID_AA64PFR0_MPAM_SHIFT);

		/* Clear the fractional version field in the cached PFR1. */
		perworld_idregs->id_aa64pfr1_el1 &=
			~((u_register_t)ID_AA64PFR1_MPAM_FRAC_MASK
			<< ID_AA64PFR1_MPAM_FRAC_SHIFT);
	}
}
#endif
619
/* FEAT_MPAM_PE_BW_CTRL: MPAM PE-side bandwidth controls */
__attribute__((always_inline))
static inline bool is_feat_mpam_pe_bw_ctrl_present(void)
{
	/*
	 * MPAMIDR_EL1 is only architecturally valid when MPAM itself is
	 * implemented, so gate the HAS_BW_CTRL bit check on that.
	 */
	if (is_feat_mpam_present()) {
		return ((unsigned long long)(read_mpamidr_el1() &
			MPAMIDR_HAS_BW_CTRL_BIT) != 0U);
	}
	return false;
}

CREATE_FEATURE_SUPPORTED(feat_mpam_pe_bw_ctrl, is_feat_mpam_pe_bw_ctrl_present,
			 ENABLE_FEAT_MPAM_PE_BW_CTRL)
633
634 /*
635 * FEAT_DebugV8P9: Debug extension. This function checks the field 3:0 of
636 * ID_AA64DFR0 Aarch64 Debug Feature Register 0 for the version of
637 * Feat_Debug supported. The value of the field determines feature presence
638 *
639 * 0b0110 - Arm v8.0 debug
640 * 0b0111 - Arm v8.0 debug architecture with Virtualization host extensions
641 * 0x1000 - FEAT_Debugv8p2 is supported
642 * 0x1001 - FEAT_Debugv8p4 is supported
643 * 0x1010 - FEAT_Debugv8p8 is supported
644 * 0x1011 - FEAT_Debugv8p9 is supported
645 *
646 */
647 CREATE_PERCPU_FEATURE_FUNCS(feat_debugv8p9, id_aa64dfr0_el1,
648 ID_AA64DFR0_DEBUGVER_SHIFT, ID_AA64DFR0_DEBUGVER_MASK,
649 DEBUGVER_V8P9_IMPLEMENTED, ENABLE_FEAT_DEBUGV8P9,
650 FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
651
652 /* FEAT_HCX: Extended Hypervisor Configuration Register */
653 CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
654 ID_AA64MMFR1_EL1_HCX_MASK, 1U, ENABLE_FEAT_HCX,
655 FEAT_ENABLE_ALL_WORLDS)
656
657 /* FEAT_RNG_TRAP: Trapping support */
658 CREATE_FEATURE_FUNCS(feat_rng_trap, id_aa64pfr1_el1, ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT,
659 ID_AA64PFR1_EL1_RNDR_TRAP_MASK, RNG_TRAP_IMPLEMENTED, ENABLE_FEAT_RNG_TRAP,
660 FEAT_ENABLE_ALL_WORLDS)
661
662 /* Return the RME version, zero if not supported. */
663 _CREATE_FEATURE_PRESENT(feat_rme, id_aa64pfr0_el1,
664 ID_AA64PFR0_FEAT_RME_SHIFT, ID_AA64PFR0_FEAT_RME_MASK, 1U)
665
666 CREATE_FEATURE_SUPPORTED(feat_rme, is_feat_rme_present, ENABLE_RME)
667
668 /* FEAT_SB: Speculation barrier instruction */
669 CREATE_FEATURE_PRESENT(feat_sb, id_aa64isar1_el1, ID_AA64ISAR1_SB_SHIFT,
670 ID_AA64ISAR1_SB_MASK, 1U,
671 FEAT_ENABLE_ALL_WORLDS)
672
673 /* FEAT_MEC: Memory Encryption Contexts */
674 CREATE_FEATURE_FUNCS(feat_mec, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_MEC_SHIFT,
675 ID_AA64MMFR3_EL1_MEC_MASK, 1U, ENABLE_FEAT_MEC,
676 FEAT_ENABLE_ALL_WORLDS)
677
678 /*
679 * FEAT_CSV2: Cache Speculation Variant 2. This checks bit fields[56-59]
680 * of id_aa64pfr0_el1 register and can be used to check for below features:
681 * FEAT_CSV2_2: Cache Speculation Variant CSV2_2.
682 * FEAT_CSV2_3: Cache Speculation Variant CSV2_3.
683 * 0b0000 - Feature FEAT_CSV2 is not implemented.
684 * 0b0001 - Feature FEAT_CSV2 is implemented, but FEAT_CSV2_2 and FEAT_CSV2_3
685 * are not implemented.
686 * 0b0010 - Feature FEAT_CSV2_2 is implemented but FEAT_CSV2_3 is not
687 * implemented.
688 * 0b0011 - Feature FEAT_CSV2_3 is implemented.
689 */
690
691 CREATE_FEATURE_FUNCS(feat_csv2_2, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
692 ID_AA64PFR0_CSV2_MASK, CSV2_2_IMPLEMENTED, ENABLE_FEAT_CSV2_2,
693 FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
694 CREATE_FEATURE_FUNCS(feat_csv2_3, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
695 ID_AA64PFR0_CSV2_MASK, CSV2_3_IMPLEMENTED, ENABLE_FEAT_CSV2_3,
696 FEAT_ENABLE_ALL_WORLDS)
697
698 /* FEAT_SPE: Statistical Profiling Extension */
699 CREATE_PERCPU_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
700 ID_AA64DFR0_PMS_MASK, 1U, ENABLE_SPE_FOR_NS,
701 FEAT_ENABLE_ALL_WORLDS)
702
703 /* FEAT_SVE: Scalable Vector Extension */
704 CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
705 ID_AA64PFR0_SVE_MASK, 1U, ENABLE_SVE_FOR_NS,
706 FEAT_ENABLE_ALL_WORLDS)
707
708 /* FEAT_RAS: Reliability, Accessibility, Serviceability */
709 CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1, ID_AA64PFR0_RAS_SHIFT,
710 ID_AA64PFR0_RAS_MASK, 1U, ENABLE_FEAT_RAS,
711 FEAT_ENABLE_ALL_WORLDS)
712
713 /* FEAT_DIT: Data Independent Timing instructions */
714 CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1, ID_AA64PFR0_DIT_SHIFT,
715 ID_AA64PFR0_DIT_MASK, 1U, ENABLE_FEAT_DIT,
716 FEAT_ENABLE_ALL_WORLDS)
717
718 /* FEAT_SYS_REG_TRACE */
719 CREATE_PERCPU_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1,
720 ID_AA64DFR0_TRACEVER_SHIFT, ID_AA64DFR0_TRACEVER_MASK,
721 1U, ENABLE_SYS_REG_TRACE_FOR_NS,
722 FEAT_ENABLE_ALL_WORLDS)
723
724 /* FEAT_TRF: TraceFilter */
725 CREATE_PERCPU_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
726 ID_AA64DFR0_TRACEFILT_MASK, 1U, ENABLE_TRF_FOR_NS,
727 FEAT_ENABLE_ALL_WORLDS)
728
729 /* FEAT_NV2: Enhanced Nested Virtualization */
730 CREATE_FEATURE_FUNCS(feat_nv2, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
731 ID_AA64MMFR2_EL1_NV_MASK, NV2_IMPLEMENTED, CTX_INCLUDE_NEVE_REGS,
732 FEAT_ENABLE_ALL_WORLDS)
733
734 /* FEAT_BRBE: Branch Record Buffer Extension */
735 CREATE_PERCPU_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
736 ID_AA64DFR0_BRBE_MASK, 1U, ENABLE_BRBE_FOR_NS,
737 FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
738
/* FEAT_TRBE: Trace Buffer Extension */
_CREATE_FEATURE_PRESENT(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
			ID_AA64DFR0_TRACEBUFFER_MASK, 1U)

CREATE_FEATURE_SUPPORTED(feat_trbe, is_feat_trbe_present, ENABLE_TRBE_FOR_NS)

/*
 * The ID register update guard additionally consults
 * check_if_trbe_disable_affected_core(): on errata-affected cores TRBE is
 * hidden even when ENABLE_TRBE_FOR_NS is set.
 */
CREATE_PERCPU_IDREG_UPDATE(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
			   ID_AA64DFR0_TRACEBUFFER_MASK,
			   ENABLE_TRBE_FOR_NS && !check_if_trbe_disable_affected_core(),
			   FEAT_ENABLE_NS)
749
750 /* FEAT_SME_FA64: Full A64 Instruction support in streaming SVE mode */
751 CREATE_FEATURE_PRESENT(feat_sme_fa64, id_aa64smfr0_el1, ID_AA64SMFR0_EL1_SME_FA64_SHIFT,
752 ID_AA64SMFR0_EL1_SME_FA64_MASK, 1U,
753 FEAT_ENABLE_ALL_WORLDS)
754
755 /* FEAT_SMEx: Scalar Matrix Extension */
756 CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
757 ID_AA64PFR1_EL1_SME_MASK, 1U, ENABLE_SME_FOR_NS,
758 FEAT_ENABLE_ALL_WORLDS)
759
760 CREATE_FEATURE_FUNCS(feat_sme2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
761 ID_AA64PFR1_EL1_SME_MASK, SME2_IMPLEMENTED, ENABLE_SME2_FOR_NS,
762 FEAT_ENABLE_ALL_WORLDS)
763
764 /* FEAT_LS64_ACCDATA: Support for 64-byte EL0 stores with status */
765 CREATE_FEATURE_FUNCS(feat_ls64_accdata, id_aa64isar1_el1, ID_AA64ISAR1_LS64_SHIFT,
766 ID_AA64ISAR1_LS64_MASK, LS64_ACCDATA_IMPLEMENTED,
767 ENABLE_FEAT_LS64_ACCDATA, FEAT_ENABLE_ALL_WORLDS)
768
769 /* FEAT_AIE: Memory Attribute Index Enhancement */
770 CREATE_FEATURE_FUNCS(feat_aie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_AIE_SHIFT,
771 ID_AA64MMFR3_EL1_AIE_MASK, 1U, ENABLE_FEAT_AIE,
772 FEAT_ENABLE_NS)
773
774 /* FEAT_PFAR: Physical Fault Address Register Extension */
775 CREATE_FEATURE_FUNCS(feat_pfar, id_aa64pfr1_el1, ID_AA64PFR1_EL1_PFAR_SHIFT,
776 ID_AA64PFR1_EL1_PFAR_MASK, 1U, ENABLE_FEAT_PFAR,
777 FEAT_ENABLE_NS)
778
779 /* FEAT_IDTE3: Trapping lower EL ID Register access to EL3 */
780 CREATE_FEATURE_FUNCS(feat_idte3, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_IDS_SHIFT,
781 ID_AA64MMFR2_EL1_IDS_MASK, 2U, ENABLE_FEAT_IDTE3,
782 FEAT_ENABLE_ALL_WORLDS)
783
784 /* FEAT_LSE: Atomic instructions */
785 CREATE_FEATURE_FUNCS(feat_lse, id_aa64isar0_el1, ID_AA64ISAR0_ATOMIC_SHIFT,
786 ID_AA64ISAR0_ATOMIC_MASK, 1U, USE_SPINLOCK_CAS,
787 FEAT_ENABLE_ALL_WORLDS)
788
789
790 /*******************************************************************************
791 * Function to get hardware granularity support
792 ******************************************************************************/
793
__attribute__((always_inline))
static inline bool is_feat_tgran4K_present(void)
{
	/*
	 * TGRAN4 values of 8 (0b1000) and above appear to be the
	 * signed-negative "not supported" encodings — NOTE(review): confirm
	 * against the Arm ARM ID_AA64MMFR0_EL1.TGran4 encoding table.
	 */
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			ID_AA64MMFR0_EL1_TGRAN4_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}
801
/* 16KB granule support: unlike TGRAN4/TGRAN64, a simple >= check works. */
CREATE_FEATURE_PRESENT(feat_tgran16K, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_TGRAN16_SHIFT,
		       ID_AA64MMFR0_EL1_TGRAN16_MASK, TGRAN16_IMPLEMENTED,
		       FEAT_ENABLE_ALL_WORLDS)
805
__attribute__((always_inline))
static inline bool is_feat_tgran64K_present(void)
{
	/*
	 * As with TGRAN4: values >= 8 appear to be the signed-negative
	 * "not supported" encodings — NOTE(review): confirm against the
	 * Arm ARM ID_AA64MMFR0_EL1.TGran64 encoding table.
	 */
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			ID_AA64MMFR0_EL1_TGRAN64_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}
813
/* FEAT_PMUV3: any PMU version (PMUVer >= 1). */
_CREATE_FEATURE_PRESENT(feat_pmuv3, id_aa64dfr0_el1, ID_AA64DFR0_PMUVER_SHIFT,
			ID_AA64DFR0_PMUVER_MASK, 1U)

/* FEAT_MTPMU */
__attribute__((always_inline))
static inline bool is_feat_mtpmu_present(void)
{
	/*
	 * Present when the field is non-zero and not the explicit
	 * "not implemented" encoding.
	 */
	unsigned int mtpmu = ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT,
					   ID_AA64DFR0_MTPMU_MASK);
	return (mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED);
}

/*
 * Guarded by DISABLE_MTPMU: the "supported" helper here gates the code
 * that disables MTPMU, not code that uses it.
 */
CREATE_FEATURE_SUPPORTED(feat_mtpmu, is_feat_mtpmu_present, DISABLE_MTPMU)

CREATE_PERCPU_IDREG_UPDATE(feat_mtpmu, id_aa64dfr0_el1, ID_AA64DFR0_MTPMU_SHIFT,
			   ID_AA64DFR0_MTPMU_MASK, DISABLE_MTPMU,
			   FEAT_ENABLE_ALL_WORLDS)
832
833 /*************************************************************************
834 * Function to identify the presence of FEAT_GCIE (GICv5 CPU interface
835 * extension).
836 ************************************************************************/
837 CREATE_FEATURE_FUNCS(feat_gcie, id_aa64pfr2_el1, ID_AA64PFR2_EL1_GCIE_SHIFT,
838 ID_AA64PFR2_EL1_GCIE_MASK, 1U, ENABLE_FEAT_GCIE,
839 FEAT_ENABLE_ALL_WORLDS)
840
841 CREATE_FEATURE_FUNCS(feat_cpa2, id_aa64isar3_el1, ID_AA64ISAR3_EL1_CPA_SHIFT,
842 ID_AA64ISAR3_EL1_CPA_MASK, CPA2_IMPLEMENTED,
843 ENABLE_FEAT_CPA2, FEAT_ENABLE_ALL_WORLDS)
844
845 /* FEAT_UINJ: Injection of Undefined Instruction exceptions */
846 CREATE_FEATURE_FUNCS(feat_uinj, id_aa64pfr2_el1, ID_AA64PFR2_EL1_UINJ_SHIFT,
847 ID_AA64PFR2_EL1_UINJ_MASK, UINJ_IMPLEMENTED,
848 ENABLE_FEAT_UINJ, FEAT_ENABLE_ALL_WORLDS)
849
850 /* FEAT_MORELLO_PRESENT */
851 CREATE_FEATURE_FUNCS(feat_morello, id_aa64pfr1_el1, ID_AA64PFR1_EL1_CE_SHIFT,
852 ID_AA64PFR1_EL1_CE_MASK, MORELLO_EXTENSION_IMPLEMENTED,
853 ENABLE_FEAT_MORELLO, FEAT_ENABLE_ALL_WORLDS)
854 #endif /* ARCH_FEATURES_H */
855