xref: /rk3399_ARM-atf/include/arch/aarch64/arch_features.h (revision d508bab30dfc96fc837ae82bfa0e52f33d617c79)
/*
 * Copyright (c) 2019-2025, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>

#include <arch_helpers.h>
#include <common/feat_detect.h>

#if ENABLE_RME
#define FEAT_ENABLE_ALL_WORLDS			\
	((1u << CPU_CONTEXT_SECURE)	|	\
	(1u << CPU_CONTEXT_NS)		|	\
	(1u << CPU_CONTEXT_REALM))
#define FEAT_ENABLE_REALM		(1 << CPU_CONTEXT_REALM)
#else
#define FEAT_ENABLE_ALL_WORLDS			\
	((1u << CPU_CONTEXT_SECURE)	|	\
	(1u << CPU_CONTEXT_NS))
#define FEAT_ENABLE_REALM		U(0)
#endif

#define FEAT_ENABLE_SECURE		(1 << CPU_CONTEXT_SECURE)
#define FEAT_ENABLE_NS			(1 << CPU_CONTEXT_NS)

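/*
 * Illustrative note: the per-world masks compose by OR-ing the bits above.
 * For example, with ENABLE_RME=1, FEAT_ENABLE_ALL_WORLDS is equivalent to
 * (FEAT_ENABLE_SECURE | FEAT_ENABLE_NS | FEAT_ENABLE_REALM); without RME,
 * the Realm bit is omitted and FEAT_ENABLE_REALM is defined as U(0).
 */
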
#define ISOLATE_FIELD(reg, feat, mask)						\
	((unsigned int)(((reg) >> (feat)) & (mask)))

#define CREATE_FEATURE_SUPPORTED(name, read_func, guard)			\
__attribute__((always_inline))							\
static inline bool is_ ## name ## _supported(void)				\
{										\
	if ((guard) == FEAT_STATE_DISABLED) {					\
		return false;							\
	}									\
	if ((guard) == FEAT_STATE_ALWAYS) {					\
		return true;							\
	}									\
	return read_func();							\
}

#define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)		\
__attribute__((always_inline))							\
static inline bool is_ ## name ## _present(void)				\
{										\
	return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= (idval));	\
}

#define CREATE_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard,		\
			     enabled_worlds)					\
CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)			\
CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)

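/*
 * For illustration (a sketch, not generated code): an invocation such as
 *
 *	CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
 *			     ID_AA64MMFR1_EL1_PAN_MASK, 1U, ENABLE_FEAT_PAN,
 *			     FEAT_ENABLE_ALL_WORLDS)
 *
 * expands roughly to the following pair of helpers:
 *
 *	static inline bool is_feat_pan_present(void)
 *	{
 *		return (ISOLATE_FIELD(read_id_aa64mmfr1_el1(),
 *				      ID_AA64MMFR1_EL1_PAN_SHIFT,
 *				      ID_AA64MMFR1_EL1_PAN_MASK) >= 1U);
 *	}
 *
 *	static inline bool is_feat_pan_supported(void)
 *	{
 *		if ((ENABLE_FEAT_PAN) == FEAT_STATE_DISABLED) {
 *			return false;
 *		}
 *		if ((ENABLE_FEAT_PAN) == FEAT_STATE_ALWAYS) {
 *			return true;
 *		}
 *		return is_feat_pan_present();
 *	}
 *
 * i.e. a build-time guard of FEAT_STATE_DISABLED compiles the check away to
 * false, FEAT_STATE_ALWAYS compiles it to true, and any other value falls
 * back to the run-time ID register check.
 */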

/* +----------------------------+
 * |	Features supported	|
 * +----------------------------+
 * |	GENTIMER		|
 * +----------------------------+
 * |	FEAT_PAN		|
 * +----------------------------+
 * |	FEAT_VHE		|
 * +----------------------------+
 * |	FEAT_TTCNP		|
 * +----------------------------+
 * |	FEAT_UAO		|
 * +----------------------------+
 * |	FEAT_PACQARMA3		|
 * +----------------------------+
 * |	FEAT_PAUTH		|
 * +----------------------------+
 * |	FEAT_TTST		|
 * +----------------------------+
 * |	FEAT_BTI		|
 * +----------------------------+
 * |	FEAT_MTE2		|
 * +----------------------------+
 * |	FEAT_SSBS		|
 * +----------------------------+
 * |	FEAT_NMI		|
 * +----------------------------+
 * |	FEAT_GCS		|
 * +----------------------------+
 * |	FEAT_EBEP		|
 * +----------------------------+
 * |	FEAT_SEBEP		|
 * +----------------------------+
 * |	FEAT_SEL2		|
 * +----------------------------+
 * |	FEAT_TWED		|
 * +----------------------------+
 * |	FEAT_FGT		|
 * +----------------------------+
 * |	FEAT_ECV/ECV2		|
 * +----------------------------+
 * |	FEAT_RNG		|
 * +----------------------------+
 * |	FEAT_TCR2		|
 * +----------------------------+
 * |	FEAT_S2POE		|
 * +----------------------------+
 * |	FEAT_S1POE		|
 * +----------------------------+
 * |	FEAT_S2PIE		|
 * +----------------------------+
 * |	FEAT_S1PIE		|
 * +----------------------------+
 * |	FEAT_AMU/AMUV1P1	|
 * +----------------------------+
 * |	FEAT_MPAM		|
 * +----------------------------+
 * |	FEAT_HCX		|
 * +----------------------------+
 * |	FEAT_RNG_TRAP		|
 * +----------------------------+
 * |	FEAT_RME		|
 * +----------------------------+
 * |	FEAT_SB			|
 * +----------------------------+
 * |	FEAT_CSV2/CSV3		|
 * +----------------------------+
 * |	FEAT_SPE		|
 * +----------------------------+
 * |	FEAT_SVE		|
 * +----------------------------+
 * |	FEAT_RAS		|
 * +----------------------------+
 * |	FEAT_DIT		|
 * +----------------------------+
 * |	FEAT_SYS_REG_TRACE	|
 * +----------------------------+
 * |	FEAT_TRF		|
 * +----------------------------+
 * |	FEAT_NV2		|
 * +----------------------------+
 * |	FEAT_BRBE		|
 * +----------------------------+
 * |	FEAT_TRBE		|
 * +----------------------------+
 * |	FEAT_SME/SME2		|
 * +----------------------------+
 * |	FEAT_PMUV3		|
 * +----------------------------+
 * |	FEAT_MTPMU		|
 * +----------------------------+
 * |	FEAT_FGT2		|
 * +----------------------------+
 * |	FEAT_THE		|
 * +----------------------------+
 * |	FEAT_SCTLR2		|
 * +----------------------------+
 * |	FEAT_D128		|
 * +----------------------------+
 * |	FEAT_LS64_ACCDATA	|
 * +----------------------------+
 * |	FEAT_FPMR		|
 * +----------------------------+
 * |	FEAT_MOPS		|
 * +----------------------------+
 * |	FEAT_PAUTH_LR		|
 * +----------------------------+
 * |	FEAT_FGWTE3		|
 * +----------------------------+
 * |	FEAT_MPAM_PE_BW_CTRL	|
 * +----------------------------+
 * |	FEAT_CPA2		|
 * +----------------------------+
 * |	FEAT_AIE		|
 * +----------------------------+
 * |	FEAT_PFAR		|
 * +----------------------------+
 * |	FEAT_RME_GPC2		|
 * +----------------------------+
 * |	FEAT_RME_GDI		|
 * +----------------------------+
 * |	FEAT_MEC		|
 * +----------------------------+
 * |	FEAT_GCIE		|
 * +----------------------------+
 * |	FEAT_DEBUGV8P9		|
 * +----------------------------+
 */

__attribute__((always_inline))
static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

/* FEAT_PAN: Privileged access never */
CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
		     ID_AA64MMFR1_EL1_PAN_MASK, 1U, ENABLE_FEAT_PAN,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_VHE: Virtualization Host Extensions */
CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
		     ID_AA64MMFR1_EL1_VHE_MASK, 1U, ENABLE_FEAT_VHE,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_TTCNP: Translation table common not private */
CREATE_FEATURE_PRESENT(feat_ttcnp, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_CNP_SHIFT,
			ID_AA64MMFR2_EL1_CNP_MASK, 1U)

/* FEAT_UAO: User access override */
CREATE_FEATURE_PRESENT(feat_uao, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_UAO_SHIFT,
			ID_AA64MMFR2_EL1_UAO_MASK, 1U)

/*
 * If either of the APA3/GPA3 fields is non-zero, the QARMA3 algorithm is
 * present.
 */
CREATE_FEATURE_PRESENT(feat_pacqarma3, id_aa64isar2_el1, 0,
			((ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
			(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT)), 1U)

/* FEAT_PAUTH: Pointer Authentication */
__attribute__((always_inline))
static inline bool is_feat_pauth_present(void)
{
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * PAuth is present if any of these fields is non-zero, or if
	 * QARMA3 is present.
	 */
	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
		is_feat_pacqarma3_present());
}
CREATE_FEATURE_SUPPORTED(feat_pauth, is_feat_pauth_present, ENABLE_PAUTH)
CREATE_FEATURE_SUPPORTED(ctx_pauth, is_feat_pauth_present, CTX_INCLUDE_PAUTH_REGS)
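
/*
 * Illustrative example: a core implementing address authentication with the
 * QARMA5 algorithm reports a non-zero ID_AA64ISAR1_EL1.APA field, so the mask
 * test above is non-zero and is_feat_pauth_present() returns true even though
 * the QARMA3 check fails.
 */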

/*
 * FEAT_PAUTH_LR
 * This feature has a non-standard discovery method, so define the presence
 * check manually and then use the CREATE_FEATURE_SUPPORTED macro with it.
 * This feature is enabled with ENABLE_PAUTH when present.
 */
__attribute__((always_inline))
static inline bool is_feat_pauth_lr_present(void)
{
	/*
	 * FEAT_PAUTH_LR support is indicated by up to three fields; if one or
	 * more of them reads 0b0110 then the feature is present.
	 *   1) ID_AA64ISAR1_EL1.API
	 *   2) ID_AA64ISAR1_EL1.APA
	 *   3) ID_AA64ISAR2_EL1.APA3
	 */
	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_API_SHIFT, ID_AA64ISAR1_API_MASK) == 0b0110) {
		return true;
	}
	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_APA_SHIFT, ID_AA64ISAR1_APA_MASK) == 0b0110) {
		return true;
	}
	if (ISOLATE_FIELD(read_id_aa64isar2_el1(), ID_AA64ISAR2_APA3_SHIFT, ID_AA64ISAR2_APA3_MASK) == 0b0110) {
		return true;
	}
	return false;
}
CREATE_FEATURE_SUPPORTED(feat_pauth_lr, is_feat_pauth_lr_present, ENABLE_FEAT_PAUTH_LR)

/* FEAT_TTST: Small translation tables */
CREATE_FEATURE_PRESENT(feat_ttst, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_ST_SHIFT,
			ID_AA64MMFR2_EL1_ST_MASK, 1U)

/* FEAT_BTI: Branch target identification */
CREATE_FEATURE_FUNCS(feat_bti, id_aa64pfr1_el1, ID_AA64PFR1_EL1_BT_SHIFT,
			ID_AA64PFR1_EL1_BT_MASK, BTI_IMPLEMENTED, ENABLE_BTI,
			FEAT_ENABLE_ALL_WORLDS)

/* FEAT_MTE2: Memory tagging extension */
CREATE_FEATURE_FUNCS(feat_mte2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_MTE_SHIFT,
		     ID_AA64PFR1_EL1_MTE_MASK, MTE_IMPLEMENTED_ELX, ENABLE_FEAT_MTE2,
		     FEAT_ENABLE_SECURE | FEAT_ENABLE_NS)

/* FEAT_SSBS: Speculative store bypass safe */
CREATE_FEATURE_PRESENT(feat_ssbs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SSBS_SHIFT,
			ID_AA64PFR1_EL1_SSBS_MASK, 1U)

/* FEAT_NMI: Non-maskable interrupts */
CREATE_FEATURE_PRESENT(feat_nmi, id_aa64pfr1_el1, ID_AA64PFR1_EL1_NMI_SHIFT,
			ID_AA64PFR1_EL1_NMI_MASK, NMI_IMPLEMENTED)

/* FEAT_EBEP */
CREATE_FEATURE_FUNCS(feat_ebep, id_aa64dfr1_el1, ID_AA64DFR1_EBEP_SHIFT,
		     ID_AA64DFR1_EBEP_MASK, 1U, ENABLE_FEAT_EBEP)

/* FEAT_SEBEP */
CREATE_FEATURE_PRESENT(feat_sebep, id_aa64dfr0_el1, ID_AA64DFR0_SEBEP_SHIFT,
			ID_AA64DFR0_SEBEP_MASK, SEBEP_IMPLEMENTED)

/* FEAT_SEL2: Secure EL2 */
CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
		     ID_AA64PFR0_SEL2_MASK, 1U, ENABLE_FEAT_SEL2,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_TWED: Delayed trapping of WFE */
CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
		     ID_AA64MMFR1_EL1_TWED_MASK, 1U, ENABLE_FEAT_TWED,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_FGT: Fine-grained traps */
CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ID_AA64MMFR0_EL1_FGT_MASK, 1U, ENABLE_FEAT_FGT,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_FGT2: Fine-grained traps extended */
CREATE_FEATURE_FUNCS(feat_fgt2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ID_AA64MMFR0_EL1_FGT_MASK, FGT2_IMPLEMENTED, ENABLE_FEAT_FGT2,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_FGWTE3: Fine-grained write traps EL3 */
CREATE_FEATURE_FUNCS(feat_fgwte3, id_aa64mmfr4_el1, ID_AA64MMFR4_EL1_FGWTE3_SHIFT,
		     ID_AA64MMFR4_EL1_FGWTE3_MASK, FGWTE3_IMPLEMENTED,
		     ENABLE_FEAT_FGWTE3, FEAT_ENABLE_ALL_WORLDS)

/* FEAT_ECV: Enhanced Counter Virtualization */
CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ID_AA64MMFR0_EL1_ECV_MASK, 1U, ENABLE_FEAT_ECV,
		     FEAT_ENABLE_ALL_WORLDS)
CREATE_FEATURE_FUNCS(feat_ecv_v2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ID_AA64MMFR0_EL1_ECV_MASK, ID_AA64MMFR0_EL1_ECV_SELF_SYNCH,
		     ENABLE_FEAT_ECV, FEAT_ENABLE_ALL_WORLDS)

/* FEAT_RNG: Random number generator */
CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
		     ID_AA64ISAR0_RNDR_MASK, 1U, ENABLE_FEAT_RNG,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_TCR2: Support TCR2_ELx regs */
CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
		     ID_AA64MMFR3_EL1_TCRX_MASK, 1U, ENABLE_FEAT_TCR2,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_S2POE */
CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
		     ID_AA64MMFR3_EL1_S2POE_MASK, 1U, ENABLE_FEAT_S2POE,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_S1POE */
CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
		     ID_AA64MMFR3_EL1_S1POE_MASK, 1U, ENABLE_FEAT_S1POE,
		     FEAT_ENABLE_ALL_WORLDS)

__attribute__((always_inline))
static inline bool is_feat_sxpoe_supported(void)
{
	return is_feat_s1poe_supported() || is_feat_s2poe_supported();
}

/* FEAT_S2PIE */
CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
		     ID_AA64MMFR3_EL1_S2PIE_MASK, 1U, ENABLE_FEAT_S2PIE,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_S1PIE */
CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
		     ID_AA64MMFR3_EL1_S1PIE_MASK, 1U, ENABLE_FEAT_S1PIE,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_THE: Translation Hardening Extension */
CREATE_FEATURE_FUNCS(feat_the, id_aa64pfr1_el1, ID_AA64PFR1_EL1_THE_SHIFT,
		     ID_AA64PFR1_EL1_THE_MASK, THE_IMPLEMENTED, ENABLE_FEAT_THE,
		     FEAT_ENABLE_NS)

/* FEAT_SCTLR2 */
CREATE_FEATURE_FUNCS(feat_sctlr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_SCTLR2_SHIFT,
		     ID_AA64MMFR3_EL1_SCTLR2_MASK, SCTLR2_IMPLEMENTED,
		     ENABLE_FEAT_SCTLR2,
		     FEAT_ENABLE_NS | FEAT_ENABLE_REALM)

/* FEAT_D128 */
CREATE_FEATURE_FUNCS(feat_d128, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_D128_SHIFT,
		     ID_AA64MMFR3_EL1_D128_MASK, D128_IMPLEMENTED,
		     ENABLE_FEAT_D128, FEAT_ENABLE_NS | FEAT_ENABLE_REALM)

/* FEAT_RME_GPC2 */
CREATE_FEATURE_PRESENT(feat_rme_gpc2, id_aa64pfr0_el1,
		       ID_AA64PFR0_FEAT_RME_SHIFT, ID_AA64PFR0_FEAT_RME_MASK,
		       RME_GPC2_IMPLEMENTED)

/* FEAT_RME_GDI */
CREATE_FEATURE_FUNCS(feat_rme_gdi, id_aa64mmfr4_el1,
		     ID_AA64MMFR4_EL1_RME_GDI_SHIFT,
		     ID_AA64MMFR4_EL1_RME_GDI_MASK, RME_GDI_IMPLEMENTED,
		     ENABLE_FEAT_RME_GDI)

/* FEAT_FPMR */
CREATE_FEATURE_FUNCS(feat_fpmr, id_aa64pfr2_el1, ID_AA64PFR2_EL1_FPMR_SHIFT,
		     ID_AA64PFR2_EL1_FPMR_MASK, FPMR_IMPLEMENTED,
		     ENABLE_FEAT_FPMR, FEAT_ENABLE_NS)

/* FEAT_MOPS */
CREATE_FEATURE_FUNCS(feat_mops, id_aa64isar2_el1, ID_AA64ISAR2_EL1_MOPS_SHIFT,
		     ID_AA64ISAR2_EL1_MOPS_MASK, MOPS_IMPLEMENTED,
		     ENABLE_FEAT_MOPS, FEAT_ENABLE_ALL_WORLDS)

__attribute__((always_inline))
static inline bool is_feat_sxpie_supported(void)
{
	return is_feat_s1pie_supported() || is_feat_s2pie_supported();
}

/* FEAT_GCS: Guarded Control Stack */
CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
		     ID_AA64PFR1_EL1_GCS_MASK, 1U, ENABLE_FEAT_GCS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ID_AA64PFR0_AMU_MASK, 1U, ENABLE_FEAT_AMU,
		     FEAT_ENABLE_NS)

/* Auxiliary counters for FEAT_AMU */
CREATE_FEATURE_FUNCS(feat_amu_aux, amcfgr_el0, AMCFGR_EL0_NCG_SHIFT,
		     AMCFGR_EL0_NCG_MASK, 1U, ENABLE_AMU_AUXILIARY_COUNTERS)

/* FEAT_AMUV1P1: AMU Extension v1.1 */
CREATE_FEATURE_FUNCS(feat_amuv1p1, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ID_AA64PFR0_AMU_MASK, ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1,
		     FEAT_ENABLE_NS)

/*
 * FEAT_MPAM: Memory Partitioning and Monitoring.
 *
 * The ID_AA64PFR0_EL1.MPAM (major) and ID_AA64PFR1_EL1.MPAM_frac (minor)
 * fields are combined into a single version value:
 *
 * 0x00: MPAM not implemented (Armv8.0 or later)
 * 0x01: MPAM v0.1 (Armv8.4 or later)
 * 0x10: MPAM v1.0 (Armv8.2 or later)
 * 0x11: MPAM v1.1 (Armv8.4 or later)
 *
 * MPAM is present if the combined version is non-zero.
 */
__attribute__((always_inline))
static inline bool is_feat_mpam_present(void)
{
	unsigned int ret = (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
		((read_id_aa64pfr1_el1() >> ID_AA64PFR1_MPAM_FRAC_SHIFT)
			& ID_AA64PFR1_MPAM_FRAC_MASK));
	return ret != 0U;
}

CREATE_FEATURE_SUPPORTED(feat_mpam, is_feat_mpam_present, ENABLE_FEAT_MPAM)
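
/*
 * Worked example (illustrative): ID_AA64PFR0_EL1.MPAM = 0b0001 and
 * ID_AA64PFR1_EL1.MPAM_frac = 0b0001 combine to (0x1 << 4) | 0x1 = 0x11,
 * i.e. MPAM v1.1, so is_feat_mpam_present() returns true.
 */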

/* FEAT_MPAM_PE_BW_CTRL: MPAM PE-side bandwidth controls */
__attribute__((always_inline))
static inline bool is_feat_mpam_pe_bw_ctrl_present(void)
{
	if (is_feat_mpam_present()) {
		return ((unsigned long long)(read_mpamidr_el1() &
				MPAMIDR_HAS_BW_CTRL_BIT) != 0U);
	}
	return false;
}

CREATE_FEATURE_SUPPORTED(feat_mpam_pe_bw_ctrl, is_feat_mpam_pe_bw_ctrl_present,
		ENABLE_FEAT_MPAM_PE_BW_CTRL)

/*
 * FEAT_Debugv8p9: Debug extension. These helpers check field [3:0]
 * (DebugVer) of ID_AA64DFR0_EL1, the AArch64 Debug Feature Register 0,
 * for the debug architecture version implemented:
 *
 * 0b0110 - Armv8.0 debug architecture
 * 0b0111 - Armv8.0 debug architecture with Virtualization Host Extensions
 * 0b1000 - FEAT_Debugv8p2 is supported
 * 0b1001 - FEAT_Debugv8p4 is supported
 * 0b1010 - FEAT_Debugv8p8 is supported
 * 0b1011 - FEAT_Debugv8p9 is supported
 */
CREATE_FEATURE_FUNCS(feat_debugv8p9, id_aa64dfr0_el1, ID_AA64DFR0_DEBUGVER_SHIFT,
		ID_AA64DFR0_DEBUGVER_MASK, DEBUGVER_V8P9_IMPLEMENTED,
		ENABLE_FEAT_DEBUGV8P9, FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
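
/*
 * Note (illustrative): the presence check generated above uses ">=", so only
 * DebugVer values of 0b1011 or higher report FEAT_Debugv8p9 as present.
 */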

/* FEAT_HCX: Extended Hypervisor Configuration Register */
CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
		     ID_AA64MMFR1_EL1_HCX_MASK, 1U, ENABLE_FEAT_HCX,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_RNG_TRAP: Support for trapping RNDR/RNDRRS reads to EL3 */
CREATE_FEATURE_FUNCS(feat_rng_trap, id_aa64pfr1_el1, ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT,
		      ID_AA64PFR1_EL1_RNDR_TRAP_MASK, RNG_TRAP_IMPLEMENTED, ENABLE_FEAT_RNG_TRAP,
		      FEAT_ENABLE_ALL_WORLDS)

/* FEAT_RME: Realm Management Extension */
CREATE_FEATURE_FUNCS(feat_rme, id_aa64pfr0_el1, ID_AA64PFR0_FEAT_RME_SHIFT,
		    ID_AA64PFR0_FEAT_RME_MASK, 1U, ENABLE_RME)

/* FEAT_SB: Speculation barrier instruction */
CREATE_FEATURE_PRESENT(feat_sb, id_aa64isar1_el1, ID_AA64ISAR1_SB_SHIFT,
		       ID_AA64ISAR1_SB_MASK, 1U)

/* FEAT_MEC: Memory Encryption Contexts */
CREATE_FEATURE_FUNCS(feat_mec, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_MEC_SHIFT,
		ID_AA64MMFR3_EL1_MEC_MASK, 1U, ENABLE_FEAT_MEC,
		FEAT_ENABLE_ALL_WORLDS)

/*
 * FEAT_CSV2: Cache Speculation Variant 2. These helpers check bits [59:56]
 * (the CSV2 field) of the id_aa64pfr0_el1 register and can be used to check
 * for the features below:
 * FEAT_CSV2_2: Cache Speculation Variant CSV2_2.
 * FEAT_CSV2_3: Cache Speculation Variant CSV2_3.
 * 0b0000 - Feature FEAT_CSV2 is not implemented.
 * 0b0001 - Feature FEAT_CSV2 is implemented, but FEAT_CSV2_2 and FEAT_CSV2_3
 *          are not implemented.
 * 0b0010 - Feature FEAT_CSV2_2 is implemented but FEAT_CSV2_3 is not
 *          implemented.
 * 0b0011 - Feature FEAT_CSV2_3 is implemented.
 */

CREATE_FEATURE_FUNCS(feat_csv2_2, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
		     ID_AA64PFR0_CSV2_MASK, CSV2_2_IMPLEMENTED, ENABLE_FEAT_CSV2_2,
		     FEAT_ENABLE_NS | FEAT_ENABLE_REALM)
CREATE_FEATURE_FUNCS(feat_csv2_3, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
		     ID_AA64PFR0_CSV2_MASK, CSV2_3_IMPLEMENTED, ENABLE_FEAT_CSV2_3,
		     FEAT_ENABLE_ALL_WORLDS)
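
/*
 * Note (illustrative): because the generated presence checks use ">=", a core
 * reporting CSV2 = 0b0011 satisfies both is_feat_csv2_2_present() and
 * is_feat_csv2_3_present().
 */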

/* FEAT_SPE: Statistical Profiling Extension */
CREATE_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
		     ID_AA64DFR0_PMS_MASK, 1U, ENABLE_SPE_FOR_NS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_SVE: Scalable Vector Extension */
CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
		     ID_AA64PFR0_SVE_MASK, 1U, ENABLE_SVE_FOR_NS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_RAS: Reliability, Availability, Serviceability */
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1, ID_AA64PFR0_RAS_SHIFT,
		     ID_AA64PFR0_RAS_MASK, 1U, ENABLE_FEAT_RAS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_DIT: Data Independent Timing instructions */
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1, ID_AA64PFR0_DIT_SHIFT,
		     ID_AA64PFR0_DIT_MASK, 1U, ENABLE_FEAT_DIT,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_SYS_REG_TRACE: System register trace interface */
CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1, ID_AA64DFR0_TRACEVER_SHIFT,
		    ID_AA64DFR0_TRACEVER_MASK, 1U, ENABLE_SYS_REG_TRACE_FOR_NS,
		    FEAT_ENABLE_ALL_WORLDS)

/* FEAT_TRF: Trace Filter controls */
CREATE_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
		     ID_AA64DFR0_TRACEFILT_MASK, 1U, ENABLE_TRF_FOR_NS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_NV2: Enhanced Nested Virtualization */
CREATE_FEATURE_FUNCS(feat_nv2, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
		     ID_AA64MMFR2_EL1_NV_MASK, NV2_IMPLEMENTED, CTX_INCLUDE_NEVE_REGS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_BRBE: Branch Record Buffer Extension */
CREATE_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
		     ID_AA64DFR0_BRBE_MASK, 1U, ENABLE_BRBE_FOR_NS,
		     FEAT_ENABLE_NS | FEAT_ENABLE_REALM)

/* FEAT_TRBE: Trace Buffer Extension */
CREATE_FEATURE_FUNCS(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
		     ID_AA64DFR0_TRACEBUFFER_MASK, 1U, ENABLE_TRBE_FOR_NS)

/* FEAT_SME_FA64: Full A64 instruction support in Streaming SVE mode */
CREATE_FEATURE_PRESENT(feat_sme_fa64, id_aa64smfr0_el1, ID_AA64SMFR0_EL1_SME_FA64_SHIFT,
		    ID_AA64SMFR0_EL1_SME_FA64_MASK, 1U)

/* FEAT_SME/SME2: Scalable Matrix Extension */
CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ID_AA64PFR1_EL1_SME_MASK, 1U, ENABLE_SME_FOR_NS,
		     FEAT_ENABLE_ALL_WORLDS)

CREATE_FEATURE_FUNCS(feat_sme2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ID_AA64PFR1_EL1_SME_MASK, SME2_IMPLEMENTED, ENABLE_SME2_FOR_NS,
		     FEAT_ENABLE_ALL_WORLDS)

/* FEAT_LS64_ACCDATA: 64-byte stores with status return and ACCDATA_EL1 */
CREATE_FEATURE_FUNCS(feat_ls64_accdata, id_aa64isar1_el1, ID_AA64ISAR1_LS64_SHIFT,
		     ID_AA64ISAR1_LS64_MASK, LS64_ACCDATA_IMPLEMENTED,
		     ENABLE_FEAT_LS64_ACCDATA, FEAT_ENABLE_ALL_WORLDS)

/* FEAT_AIE: Memory Attribute Index Enhancement */
CREATE_FEATURE_FUNCS(feat_aie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_AIE_SHIFT,
		     ID_AA64MMFR3_EL1_AIE_MASK, 1U, ENABLE_FEAT_AIE,
		     FEAT_ENABLE_NS)

/* FEAT_PFAR: Physical Fault Address Registers */
CREATE_FEATURE_FUNCS(feat_pfar, id_aa64pfr1_el1, ID_AA64PFR1_EL1_PFAR_SHIFT,
		     ID_AA64PFR1_EL1_PFAR_MASK, 1U, ENABLE_FEAT_PFAR,
		     FEAT_ENABLE_NS)

/*******************************************************************************
 * Functions to query hardware support for translation granule sizes
 ******************************************************************************/

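/*
 * Note on the checks below: the ID_AA64MMFR0_EL1.TGran4 and TGran64 fields
 * are signed 4-bit values, where non-negative encodings (0x0-0x7) indicate
 * that the granule size is supported and negative encodings (e.g. 0xF, i.e.
 * -1) indicate that it is not. Comparing the raw field value against 8
 * therefore tests the sign bit.
 */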
__attribute__((always_inline))
static inline bool is_feat_tgran4K_present(void)
{
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN4_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}

CREATE_FEATURE_PRESENT(feat_tgran16K, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_TGRAN16_SHIFT,
		       ID_AA64MMFR0_EL1_TGRAN16_MASK, TGRAN16_IMPLEMENTED)

__attribute__((always_inline))
static inline bool is_feat_tgran64K_present(void)
{
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN64_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}

/* FEAT_PMUV3 */
CREATE_FEATURE_PRESENT(feat_pmuv3, id_aa64dfr0_el1, ID_AA64DFR0_PMUVER_SHIFT,
		      ID_AA64DFR0_PMUVER_MASK, 1U)

/* FEAT_MTPMU */
__attribute__((always_inline))
static inline bool is_feat_mtpmu_present(void)
{
	unsigned int mtpmu = ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT,
					   ID_AA64DFR0_MTPMU_MASK);
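	/*
	 * The field reads as 0b0000 or MTPMU_NOT_IMPLEMENTED when FEAT_MTPMU
	 * is not implemented; any other value indicates that it is.
	 */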
	return (mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED);
}

CREATE_FEATURE_SUPPORTED(feat_mtpmu, is_feat_mtpmu_present, DISABLE_MTPMU)

/*************************************************************************
 * Functions to identify the presence of FEAT_GCIE (GICv5 CPU interface
 * extension).
 ************************************************************************/
CREATE_FEATURE_FUNCS(feat_gcie, id_aa64pfr2_el1, ID_AA64PFR2_EL1_GCIE_SHIFT,
		     ID_AA64PFR2_EL1_GCIE_MASK, 1U, ENABLE_FEAT_GCIE,
		     FEAT_ENABLE_ALL_WORLDS)

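/* FEAT_CPA2: Checked Pointer Arithmetic */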
CREATE_FEATURE_FUNCS(feat_cpa2, id_aa64isar3_el1, ID_AA64ISAR3_EL1_CPA_SHIFT,
		     ID_AA64ISAR3_EL1_CPA_MASK, CPA2_IMPLEMENTED,
		     ENABLE_FEAT_CPA2, FEAT_ENABLE_ALL_WORLDS)

#endif /* ARCH_FEATURES_H */