xref: /rk3399_ARM-atf/include/arch/aarch64/arch_features.h (revision d1a1abeca9bcd40d313ead4ae6ad0ee87d5e1f96)
1 /*
2  * Copyright (c) 2019-2025, Arm Limited. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef ARCH_FEATURES_H
8 #define ARCH_FEATURES_H
9 
10 #include <stdbool.h>
11 
12 #include <arch_helpers.h>
13 #include <common/feat_detect.h>
14 
/*
 * Extract the field at bit offset 'feat' from 'reg' and return it as an
 * unsigned int. Every argument is fully parenthesised in the expansion so
 * that composite expressions (e.g. a mask built with '|', as done for
 * feat_pacqarma3 below) expand safely regardless of operator precedence.
 */
#define ISOLATE_FIELD(reg, feat, mask)						\
	((unsigned int)(((reg) >> (feat)) & (mask)))
17 
/*
 * Emit "is_<name>_supported()": returns false when the build-time 'guard'
 * is FEAT_STATE_DISABLED, true when it is FEAT_STATE_ALWAYS, and otherwise
 * defers to the runtime probe 'read_func' (guard states are defined in
 * common/feat_detect.h).
 */
#define CREATE_FEATURE_SUPPORTED(name, read_func, guard)			\
__attribute__((always_inline))							\
static inline bool is_ ## name ## _supported(void)				\
{										\
	if ((guard) == FEAT_STATE_DISABLED) {					\
		return false;							\
	}									\
	if ((guard) == FEAT_STATE_ALWAYS) {					\
		return true;							\
	}									\
	return read_func();							\
}
30 
/*
 * Emit "is_<name>_present()": reads ID register 'idreg', isolates the field
 * at 'idfield'/'mask', and reports presence when the field value is at
 * least 'idval' (>= comparison, so later architecture revisions that bump
 * the field value still report the base feature as present).
 */
#define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)		\
__attribute__((always_inline))							\
static inline bool is_ ## name ## _present(void)				\
{										\
	return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= idval) 	\
		? true : false; 						\
}
38 
/*
 * Emit both helpers for a feature in one go: the hardware probe
 * "is_<name>_present()" and the build-guarded "is_<name>_supported()".
 */
#define CREATE_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard)		\
CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval)			\
CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)
42 
43 
44 /* +----------------------------+
45  * |	Features supported	|
46  * +----------------------------+
47  * |	GENTIMER		|
48  * +----------------------------+
49  * |	FEAT_PAN		|
50  * +----------------------------+
51  * |	FEAT_VHE		|
52  * +----------------------------+
53  * |	FEAT_TTCNP		|
54  * +----------------------------+
55  * |	FEAT_UAO		|
56  * +----------------------------+
57  * |	FEAT_PACQARMA3		|
58  * +----------------------------+
59  * |	FEAT_PAUTH		|
60  * +----------------------------+
61  * |	FEAT_TTST		|
62  * +----------------------------+
63  * |	FEAT_BTI		|
64  * +----------------------------+
65  * |	FEAT_MTE2		|
66  * +----------------------------+
67  * |	FEAT_SSBS		|
68  * +----------------------------+
69  * |	FEAT_NMI		|
70  * +----------------------------+
71  * |	FEAT_GCS		|
72  * +----------------------------+
73  * |	FEAT_EBEP		|
74  * +----------------------------+
75  * |	FEAT_SEBEP		|
76  * +----------------------------+
77  * |	FEAT_SEL2		|
78  * +----------------------------+
79  * |	FEAT_TWED		|
80  * +----------------------------+
81  * |	FEAT_FGT		|
82  * +----------------------------+
 * |	FEAT_ECV/ECV2		|
84  * +----------------------------+
85  * |	FEAT_RNG		|
86  * +----------------------------+
87  * |	FEAT_TCR2		|
88  * +----------------------------+
89  * |	FEAT_S2POE		|
90  * +----------------------------+
91  * |	FEAT_S1POE		|
92  * +----------------------------+
93  * |	FEAT_S2PIE		|
94  * +----------------------------+
95  * |	FEAT_S1PIE		|
96  * +----------------------------+
97  * |	FEAT_AMU/AMUV1P1	|
98  * +----------------------------+
99  * |	FEAT_MPAM		|
100  * +----------------------------+
101  * |	FEAT_HCX		|
102  * +----------------------------+
103  * |	FEAT_RNG_TRAP		|
104  * +----------------------------+
105  * |	FEAT_RME		|
106  * +----------------------------+
107  * |	FEAT_SB			|
108  * +----------------------------+
109  * |	FEAT_CSV2/CSV3		|
110  * +----------------------------+
111  * |	FEAT_SPE		|
112  * +----------------------------+
113  * |	FEAT_SVE		|
114  * +----------------------------+
115  * |	FEAT_RAS		|
116  * +----------------------------+
117  * |	FEAT_DIT		|
118  * +----------------------------+
119  * |	FEAT_SYS_REG_TRACE	|
120  * +----------------------------+
121  * |	FEAT_TRF		|
122  * +----------------------------+
123  * |	FEAT_NV/NV2		|
124  * +----------------------------+
125  * |	FEAT_BRBE		|
126  * +----------------------------+
127  * |	FEAT_TRBE		|
128  * +----------------------------+
129  * |	FEAT_SME/SME2		|
130  * +----------------------------+
131  * |	FEAT_PMUV3		|
132  * +----------------------------+
133  * |	FEAT_MTPMU		|
134  * +----------------------------+
135  * |	FEAT_FGT2		|
136  * +----------------------------+
137  * |	FEAT_THE		|
138  * +----------------------------+
139  * |	FEAT_SCTLR2		|
140  * +----------------------------+
141  * |	FEAT_D128		|
142  * +----------------------------+
143  * |	FEAT_LS64_ACCDATA	|
144  * +----------------------------+
145  * |	FEAT_FPMR		|
146  * +----------------------------+
147  * |	FEAT_MOPS		|
148  * +----------------------------+
149  * |	FEAT_PAUTH_LR		|
150  * +----------------------------+
151  * |    FEAT_FGWTE3             |
152  * +----------------------------+
153  * |	FEAT_MPAM_PE_BW_CTRL	|
154  * +----------------------------+
155  * |    FEAT_CPA2               |
156  * +----------------------------+
157  */
158 
/* GENTIMER: unconditional — kept as a function for interface uniformity. */
__attribute__((always_inline))
static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}
165 
/* FEAT_PAN: Privileged access never */
CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
		     ID_AA64MMFR1_EL1_PAN_MASK, 1U, ENABLE_FEAT_PAN)

/* FEAT_VHE: Virtualization Host Extensions */
CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
		     ID_AA64MMFR1_EL1_VHE_MASK, 1U, ENABLE_FEAT_VHE)

/* FEAT_TTCNP: Translation table common not private */
CREATE_FEATURE_PRESENT(feat_ttcnp, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_CNP_SHIFT,
			ID_AA64MMFR2_EL1_CNP_MASK, 1U)

/* FEAT_UAO: User access override */
CREATE_FEATURE_PRESENT(feat_uao, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_UAO_SHIFT,
			ID_AA64MMFR2_EL1_UAO_MASK, 1U)

/*
 * FEAT_PACQARMA3: QARMA3 pointer-authentication algorithm.
 * The shift passed is 0 and the mask is the union of the (pre-shifted)
 * GPA3 and APA3 fields, so "if any of the fields is not zero, QARMA3
 * algorithm is present".
 */
CREATE_FEATURE_PRESENT(feat_pacqarma3, id_aa64isar2_el1, 0,
			((ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
			(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT)), 1U)
186 
187 /* FEAT_PAUTH: Pointer Authentication */
188 __attribute__((always_inline))
189 static inline bool is_feat_pauth_present(void)
190 {
191 	uint64_t mask_id_aa64isar1 =
192 		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
193 		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
194 		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
195 		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);
196 
197 	/*
198 	 * If any of the fields is not zero or QARMA3 is present,
199 	 * PAuth is present
200 	 */
201 	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
202 		is_feat_pacqarma3_present());
203 }
204 CREATE_FEATURE_SUPPORTED(feat_pauth, is_feat_pauth_present, ENABLE_PAUTH)
205 CREATE_FEATURE_SUPPORTED(ctx_pauth, is_feat_pauth_present, CTX_INCLUDE_PAUTH_REGS)
206 
207 /*
208  * FEAT_PAUTH_LR
209  * This feature has a non-standard discovery method so define this function
210  * manually then call use the CREATE_FEATURE_SUPPORTED macro with it. This
211  * feature is enabled with ENABLE_PAUTH when present.
212  */
213 __attribute__((always_inline))
214 static inline bool is_feat_pauth_lr_present(void)
215 {
216 	/*
217 	 * FEAT_PAUTH_LR support is indicated by up to 3 fields, if one or more
218 	 * of these is 0b0110 then the feature is present.
219 	 *   1) id_aa64isr1_el1.api
220 	 *   2) id_aa64isr1_el1.apa
221 	 *   3) id_aa64isr2_el1.apa3
222 	 */
223 	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_API_SHIFT, ID_AA64ISAR1_API_MASK) == 0b0110) {
224 		return true;
225 	}
226 	if (ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_APA_SHIFT, ID_AA64ISAR1_APA_MASK) == 0b0110) {
227 		return true;
228 	}
229 	if (ISOLATE_FIELD(read_id_aa64isar2_el1(), ID_AA64ISAR2_APA3_SHIFT, ID_AA64ISAR2_APA3_MASK) == 0b0110) {
230 		return true;
231 	}
232 	return false;
233 }
234 CREATE_FEATURE_SUPPORTED(feat_pauth_lr, is_feat_pauth_lr_present, ENABLE_FEAT_PAUTH_LR)
235 
/* FEAT_TTST: Small translation tables */
CREATE_FEATURE_PRESENT(feat_ttst, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_ST_SHIFT,
			ID_AA64MMFR2_EL1_ST_MASK, 1U)

/* FEAT_BTI: Branch target identification */
CREATE_FEATURE_FUNCS(feat_bti, id_aa64pfr1_el1, ID_AA64PFR1_EL1_BT_SHIFT,
			ID_AA64PFR1_EL1_BT_MASK, BTI_IMPLEMENTED, ENABLE_BTI)

/* FEAT_MTE2: Memory tagging extension */
CREATE_FEATURE_FUNCS(feat_mte2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_MTE_SHIFT,
		     ID_AA64PFR1_EL1_MTE_MASK, MTE_IMPLEMENTED_ELX, ENABLE_FEAT_MTE2)

/* FEAT_SSBS: Speculative store bypass safe */
CREATE_FEATURE_PRESENT(feat_ssbs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SSBS_SHIFT,
			ID_AA64PFR1_EL1_SSBS_MASK, 1U)

/* FEAT_NMI: Non-maskable interrupts */
CREATE_FEATURE_PRESENT(feat_nmi, id_aa64pfr1_el1, ID_AA64PFR1_EL1_NMI_SHIFT,
			ID_AA64PFR1_EL1_NMI_MASK, NMI_IMPLEMENTED)

/* FEAT_EBEP: Exception-based event profiling */
CREATE_FEATURE_PRESENT(feat_ebep, id_aa64dfr1_el1, ID_AA64DFR1_EBEP_SHIFT,
			ID_AA64DFR1_EBEP_MASK, EBEP_IMPLEMENTED)

/* FEAT_SEBEP: Synchronous exception-based event profiling */
CREATE_FEATURE_PRESENT(feat_sebep, id_aa64dfr0_el1, ID_AA64DFR0_SEBEP_SHIFT,
			ID_AA64DFR0_SEBEP_MASK, SEBEP_IMPLEMENTED)

/* FEAT_SEL2: Secure EL2 */
CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
		     ID_AA64PFR0_SEL2_MASK, 1U, ENABLE_FEAT_SEL2)

/* FEAT_TWED: Delayed trapping of WFE */
CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
		     ID_AA64MMFR1_EL1_TWED_MASK, 1U, ENABLE_FEAT_TWED)

/* FEAT_FGT: Fine-grained traps */
CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ID_AA64MMFR0_EL1_FGT_MASK, 1U, ENABLE_FEAT_FGT)

/*
 * FEAT_FGT2: Fine-grained traps extended. Reuses the FGT ID field; the
 * higher field value FGT2_IMPLEMENTED indicates the extended traps.
 */
CREATE_FEATURE_FUNCS(feat_fgt2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ID_AA64MMFR0_EL1_FGT_MASK, FGT2_IMPLEMENTED, ENABLE_FEAT_FGT2)

/* FEAT_FGWTE3: Fine-grained write traps EL3 */
CREATE_FEATURE_FUNCS(feat_fgwte3, id_aa64mmfr4_el1, ID_AA64MMFR4_EL1_FGWTE3_SHIFT,
		     ID_AA64MMFR4_EL1_FGWTE3_MASK, FGWTE3_IMPLEMENTED,
		     ENABLE_FEAT_FGWTE3)

/*
 * FEAT_ECV: Enhanced Counter Virtualization. The v2 helper requires the
 * higher field value ID_AA64MMFR0_EL1_ECV_SELF_SYNCH; both share the
 * ENABLE_FEAT_ECV build guard.
 */
CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ID_AA64MMFR0_EL1_ECV_MASK, 1U, ENABLE_FEAT_ECV)
CREATE_FEATURE_FUNCS(feat_ecv_v2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ID_AA64MMFR0_EL1_ECV_MASK, ID_AA64MMFR0_EL1_ECV_SELF_SYNCH, ENABLE_FEAT_ECV)

/* FEAT_RNG: Random number generator */
CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
		     ID_AA64ISAR0_RNDR_MASK, 1U, ENABLE_FEAT_RNG)

/* FEAT_TCR2: Support TCR2_ELx regs */
CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
		     ID_AA64MMFR3_EL1_TCRX_MASK, 1U, ENABLE_FEAT_TCR2)

/* FEAT_S2POE: Stage 2 permission overlay */
CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
		     ID_AA64MMFR3_EL1_S2POE_MASK, 1U, ENABLE_FEAT_S2POE)

/* FEAT_S1POE: Stage 1 permission overlay */
CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
		     ID_AA64MMFR3_EL1_S1POE_MASK, 1U, ENABLE_FEAT_S1POE)
306 
/* Convenience: true when stage 1 and/or stage 2 permission overlay is usable. */
__attribute__((always_inline))
static inline bool is_feat_sxpoe_supported(void)
{
	if (is_feat_s1poe_supported()) {
		return true;
	}
	return is_feat_s2poe_supported();
}
312 
/* FEAT_S2PIE: Stage 2 permission indirection */
CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
		     ID_AA64MMFR3_EL1_S2PIE_MASK, 1U, ENABLE_FEAT_S2PIE)

/* FEAT_S1PIE: Stage 1 permission indirection */
CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
		     ID_AA64MMFR3_EL1_S1PIE_MASK, 1U, ENABLE_FEAT_S1PIE)

/* FEAT_THE: Translation Hardening Extension */
CREATE_FEATURE_FUNCS(feat_the, id_aa64pfr1_el1, ID_AA64PFR1_EL1_THE_SHIFT,
		     ID_AA64PFR1_EL1_THE_MASK, THE_IMPLEMENTED, ENABLE_FEAT_THE)

/* FEAT_SCTLR2: SCTLR2_ELx registers */
CREATE_FEATURE_FUNCS(feat_sctlr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_SCTLR2_SHIFT,
		     ID_AA64MMFR3_EL1_SCTLR2_MASK, SCTLR2_IMPLEMENTED,
		     ENABLE_FEAT_SCTLR2)

/* FEAT_D128: 128-bit translation table descriptors */
CREATE_FEATURE_FUNCS(feat_d128, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_D128_SHIFT,
		     ID_AA64MMFR3_EL1_D128_MASK, D128_IMPLEMENTED,
		     ENABLE_FEAT_D128)

/* FEAT_FPMR: Floating-point mode register */
CREATE_FEATURE_FUNCS(feat_fpmr, id_aa64pfr2_el1, ID_AA64PFR2_EL1_FPMR_SHIFT,
		     ID_AA64PFR2_EL1_FPMR_MASK, FPMR_IMPLEMENTED,
		     ENABLE_FEAT_FPMR)
/* FEAT_MOPS: Standardized memory copy/set instructions */
CREATE_FEATURE_FUNCS(feat_mops, id_aa64isar2_el1, ID_AA64ISAR2_EL1_MOPS_SHIFT,
		     ID_AA64ISAR2_EL1_MOPS_MASK, MOPS_IMPLEMENTED,
		     ENABLE_FEAT_MOPS)
343 
/* Convenience: true when stage 1 and/or stage 2 permission indirection is usable. */
__attribute__((always_inline))
static inline bool is_feat_sxpie_supported(void)
{
	if (is_feat_s1pie_supported()) {
		return true;
	}
	return is_feat_s2pie_supported();
}
349 
/* FEAT_GCS: Guarded Control Stack */
CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
		     ID_AA64PFR1_EL1_GCS_MASK, 1U, ENABLE_FEAT_GCS)

/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ID_AA64PFR0_AMU_MASK, 1U, ENABLE_FEAT_AMU)

/*
 * Auxiliary counters for FEAT_AMU: AMCFGR_EL0.NCG >= 1 indicates at least
 * one auxiliary counter group beyond group 0.
 */
CREATE_FEATURE_FUNCS(feat_amu_aux, amcfgr_el0, AMCFGR_EL0_NCG_SHIFT,
		     AMCFGR_EL0_NCG_MASK, 1U, ENABLE_AMU_AUXILIARY_COUNTERS)

/* FEAT_AMUV1P1: AMU Extension v1.1 (same AMU field, higher value) */
CREATE_FEATURE_FUNCS(feat_amuv1p1, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ID_AA64PFR0_AMU_MASK, ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
365 
366 /*
367  * Return MPAM version:
368  *
369  * 0x00: None Armv8.0 or later
370  * 0x01: v0.1 Armv8.4 or later
371  * 0x10: v1.0 Armv8.2 or later
372  * 0x11: v1.1 Armv8.4 or later
373  *
374  */
375 __attribute__((always_inline))
376 static inline bool is_feat_mpam_present(void)
377 {
378 	unsigned int ret = (unsigned int)((((read_id_aa64pfr0_el1() >>
379 		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
380 		((read_id_aa64pfr1_el1() >> ID_AA64PFR1_MPAM_FRAC_SHIFT)
381 			& ID_AA64PFR1_MPAM_FRAC_MASK));
382 	return ret;
383 }
384 
385 CREATE_FEATURE_SUPPORTED(feat_mpam, is_feat_mpam_present, ENABLE_FEAT_MPAM)
386 
387 
388 
389 /* FEAT_MPAM_PE_BW_CTRL: MPAM PE-side bandwidth controls */
390 __attribute__((always_inline))
391 static inline bool is_feat_mpam_pe_bw_ctrl_present(void)
392 {
393 	if (is_feat_mpam_present()) {
394 		return ((unsigned long long)(read_mpamidr_el1() &
395 				MPAMIDR_HAS_BW_CTRL_BIT) != 0U);
396 	}
397 	return false;
398 }
399 
400 CREATE_FEATURE_SUPPORTED(feat_mpam_pe_bw_ctrl, is_feat_mpam_pe_bw_ctrl_present,
401 		ENABLE_FEAT_MPAM_PE_BW_CTRL)
402 
/*
 * FEAT_DebugV8P9: Debug extension. This function checks the field 3:0 of
 * ID_AA64DFR0 Aarch64 Debug Feature Register 0 for the version of
 * Feat_Debug supported. The value of the field determines feature presence
 * (note: all values below are binary encodings):
 *
 * 0b0110 - Arm v8.0 debug
 * 0b0111 - Arm v8.0 debug architecture with Virtualization host extensions
 * 0b1000 - FEAT_Debugv8p2 is supported
 * 0b1001 - FEAT_Debugv8p4 is supported
 * 0b1010 - FEAT_Debugv8p8 is supported
 * 0b1011 - FEAT_Debugv8p9 is supported
 *
 */
CREATE_FEATURE_FUNCS(feat_debugv8p9, id_aa64dfr0_el1, ID_AA64DFR0_DEBUGVER_SHIFT,
		ID_AA64DFR0_DEBUGVER_MASK, DEBUGVER_V8P9_IMPLEMENTED,
		ENABLE_FEAT_DEBUGV8P9)

/* FEAT_HCX: Extended Hypervisor Configuration Register */
CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
		     ID_AA64MMFR1_EL1_HCX_MASK, 1U, ENABLE_FEAT_HCX)

/* FEAT_RNG_TRAP: Trapping support for RNDR/RNDRRS */
CREATE_FEATURE_FUNCS(feat_rng_trap, id_aa64pfr1_el1, ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT,
		      ID_AA64PFR1_EL1_RNDR_TRAP_MASK, RNG_TRAP_IMPLEMENTED, ENABLE_FEAT_RNG_TRAP)

/*
 * FEAT_RME: Realm Management Extension. These helpers return booleans
 * (present when the RME version field is >= 1), not the version itself.
 */
CREATE_FEATURE_FUNCS(feat_rme, id_aa64pfr0_el1, ID_AA64PFR0_FEAT_RME_SHIFT,
		    ID_AA64PFR0_FEAT_RME_MASK, 1U, ENABLE_RME)

/* FEAT_SB: Speculation barrier instruction */
CREATE_FEATURE_PRESENT(feat_sb, id_aa64isar1_el1, ID_AA64ISAR1_SB_SHIFT,
		       ID_AA64ISAR1_SB_MASK, 1U)

/* FEAT_MEC: Memory Encryption Contexts */
CREATE_FEATURE_FUNCS(feat_mec, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_MEC_SHIFT,
		ID_AA64MMFR3_EL1_MEC_MASK, 1U, ENABLE_FEAT_MEC)
439 
/*
 * FEAT_CSV2: Cache Speculation Variant 2. This checks bit fields[56-59]
 * of id_aa64pfr0_el1 register and can be used to check for below features:
 * FEAT_CSV2_2: Cache Speculation Variant CSV2_2.
 * FEAT_CSV2_3: Cache Speculation Variant CSV2_3.
 * 0b0000 - Feature FEAT_CSV2 is not implemented.
 * 0b0001 - Feature FEAT_CSV2 is implemented, but FEAT_CSV2_2 and FEAT_CSV2_3
 *          are not implemented.
 * 0b0010 - Feature FEAT_CSV2_2 is implemented but FEAT_CSV2_3 is not
 *          implemented.
 * 0b0011 - Feature FEAT_CSV2_3 is implemented.
 */

CREATE_FEATURE_FUNCS(feat_csv2_2, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
		     ID_AA64PFR0_CSV2_MASK, CSV2_2_IMPLEMENTED, ENABLE_FEAT_CSV2_2)
CREATE_FEATURE_FUNCS(feat_csv2_3, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
		     ID_AA64PFR0_CSV2_MASK, CSV2_3_IMPLEMENTED, ENABLE_FEAT_CSV2_3)

/* FEAT_SPE: Statistical Profiling Extension */
CREATE_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
		     ID_AA64DFR0_PMS_MASK, 1U, ENABLE_SPE_FOR_NS)

/* FEAT_SVE: Scalable Vector Extension */
CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
		     ID_AA64PFR0_SVE_MASK, 1U, ENABLE_SVE_FOR_NS)

/* FEAT_RAS: Reliability, Accessibility, Serviceability */
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1, ID_AA64PFR0_RAS_SHIFT,
		     ID_AA64PFR0_RAS_MASK, 1U, ENABLE_FEAT_RAS)

/* FEAT_DIT: Data Independent Timing instructions */
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1, ID_AA64PFR0_DIT_SHIFT,
		     ID_AA64PFR0_DIT_MASK, 1U, ENABLE_FEAT_DIT)

/* FEAT_SYS_REG_TRACE: System-register interface to the trace unit */
CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1, ID_AA64DFR0_TRACEVER_SHIFT,
		    ID_AA64DFR0_TRACEVER_MASK, 1U, ENABLE_SYS_REG_TRACE_FOR_NS)

/* FEAT_TRF: TraceFilter */
CREATE_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
		     ID_AA64DFR0_TRACEFILT_MASK, 1U, ENABLE_TRF_FOR_NS)

/*
 * FEAT_NV/NV2: Nested Virtualization and its enhanced form. Both helpers
 * read the same NV field; NV2 requires the higher NV2_IMPLEMENTED value.
 */
CREATE_FEATURE_FUNCS(feat_nv, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
		     ID_AA64MMFR2_EL1_NV_MASK, 1U, 0U)
CREATE_FEATURE_FUNCS(feat_nv2, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
		     ID_AA64MMFR2_EL1_NV_MASK, NV2_IMPLEMENTED, CTX_INCLUDE_NEVE_REGS)

/* FEAT_BRBE: Branch Record Buffer Extension */
CREATE_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
		     ID_AA64DFR0_BRBE_MASK, 1U, ENABLE_BRBE_FOR_NS)

/* FEAT_TRBE: Trace Buffer Extension */
CREATE_FEATURE_FUNCS(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
		     ID_AA64DFR0_TRACEBUFFER_MASK, 1U, ENABLE_TRBE_FOR_NS)

/* FEAT_SME_FA64: Full A64 Instruction support in streaming SVE mode */
CREATE_FEATURE_PRESENT(feat_sme_fa64, id_aa64smfr0_el1, ID_AA64SMFR0_EL1_SME_FA64_SHIFT,
		    ID_AA64SMFR0_EL1_SME_FA64_MASK, 1U)

/* FEAT_SMEx: Scalable Matrix Extension (SME2 uses the same field, higher value) */
CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ID_AA64PFR1_EL1_SME_MASK, 1U, ENABLE_SME_FOR_NS)

CREATE_FEATURE_FUNCS(feat_sme2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ID_AA64PFR1_EL1_SME_MASK, SME2_IMPLEMENTED, ENABLE_SME2_FOR_NS)

/* FEAT_LS64_ACCDATA: 64-byte load/store accesses with ACCDATA_EL1 */
CREATE_FEATURE_FUNCS(feat_ls64_accdata, id_aa64isar1_el1, ID_AA64ISAR1_LS64_SHIFT,
		     ID_AA64ISAR1_LS64_MASK, LS64_ACCDATA_IMPLEMENTED,
		     ENABLE_FEAT_LS64_ACCDATA)
511 
/*******************************************************************************
 * Function to get hardware granularity support
 ******************************************************************************/

/*
 * 4KB granule support. TGRAN4 is treated as a signed 4-bit ID field
 * (per the Arm ARM): values with the top bit clear (< 8U, i.e.
 * non-negative) indicate the granule is supported.
 */
__attribute__((always_inline))
static inline bool is_feat_tgran4K_present(void)
{
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN4_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}
523 
/* 16KB granule support (unsigned field, >= TGRAN16_IMPLEMENTED). */
CREATE_FEATURE_PRESENT(feat_tgran16K, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_TGRAN16_SHIFT,
		       ID_AA64MMFR0_EL1_TGRAN16_MASK, TGRAN16_IMPLEMENTED)

/*
 * 64KB granule support. TGRAN64 is treated as a signed 4-bit ID field
 * (per the Arm ARM): values with the top bit clear (< 8U) indicate support.
 */
__attribute__((always_inline))
static inline bool is_feat_tgran64K_present(void)
{
	unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN64_SHIFT, ID_REG_FIELD_MASK);
	return (tgranx < 8U);
}
534 
/* FEAT_PMUV3: any implemented PMU version (field >= 1). */
CREATE_FEATURE_PRESENT(feat_pmuv3, id_aa64dfr0_el1, ID_AA64DFR0_PMUVER_SHIFT,
		      ID_AA64DFR0_PMUVER_MASK, 1U)

/*
 * FEAT_MTPMU: present when the MTPMU field is neither zero nor the
 * MTPMU_NOT_IMPLEMENTED sentinel encoding.
 */
__attribute__((always_inline))
static inline bool is_feat_mtpmu_present(void)
{
	unsigned int mtpmu = ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT,
					   ID_AA64DFR0_MTPMU_MASK);
	return (mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED);
}

/* NOTE: the build guard here is DISABLE_MTPMU; its FEAT_STATE semantics
 * are defined by common/feat_detect.h. */
CREATE_FEATURE_SUPPORTED(feat_mtpmu, is_feat_mtpmu_present, DISABLE_MTPMU)
549 
/*************************************************************************
 * Function to identify the presence of FEAT_GCIE (GICv5 CPU interface
 * extension).
 ************************************************************************/
CREATE_FEATURE_FUNCS(feat_gcie, id_aa64pfr2_el1, ID_AA64PFR2_EL1_GCIE_SHIFT,
		     ID_AA64PFR2_EL1_GCIE_MASK, 1U, ENABLE_FEAT_GCIE)

/* FEAT_CPA2: checked pointer arithmetic (CPA field >= CPA2_IMPLEMENTED). */
CREATE_FEATURE_FUNCS(feat_cpa2, id_aa64isar3_el1, ID_AA64ISAR3_EL1_CPA_SHIFT,
		     ID_AA64ISAR3_EL1_CPA_MASK, CPA2_IMPLEMENTED,
		     ENABLE_FEAT_CPA2)
560 
561 #endif /* ARCH_FEATURES_H */
562