xref: /rk3399_ARM-atf/include/arch/aarch64/arch_features.h (revision 758ccb802d4f2a5fe55ec936a21ad4ae8cbd7b4f)
/*
 * Copyright (c) 2019-2024, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>

#include <arch_helpers.h>
#include <common/feat_detect.h>

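/* Extract the ID register field at bit offset 'feat' from the value 'reg'. */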
#define ISOLATE_FIELD(reg, feat)					\
	((unsigned int)(((reg) >> (feat)) & ID_REG_FIELD_MASK))

#define CREATE_FEATURE_FUNCS_VER(name, read_func, idvalue, guard)	\
static inline bool is_ ## name ## _supported(void)			\
{									\
	if ((guard) == FEAT_STATE_DISABLED) {				\
		return false;						\
	}								\
	if ((guard) == FEAT_STATE_ALWAYS) {				\
		return true;						\
	}								\
	return read_func() >= (idvalue);				\
}

#define CREATE_FEATURE_FUNCS(name, idreg, idfield, guard)		\
static unsigned int read_ ## name ## _id_field(void)			\
{									\
	return ISOLATE_FIELD(read_ ## idreg(), idfield);		\
}									\
CREATE_FEATURE_FUNCS_VER(name, read_ ## name ## _id_field, 1U, guard)

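/*
 * Illustrative expansion (a sketch, not part of the build): an invocation such
 * as CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1,
 * ID_AA64MMFR1_EL1_PAN_SHIFT, ENABLE_FEAT_PAN) generates roughly:
 *
 *   static unsigned int read_feat_pan_id_field(void)
 *   {
 *       return ISOLATE_FIELD(read_id_aa64mmfr1_el1(),
 *                            ID_AA64MMFR1_EL1_PAN_SHIFT);
 *   }
 *
 *   static inline bool is_feat_pan_supported(void)
 *   {
 *       if ((ENABLE_FEAT_PAN) == FEAT_STATE_DISABLED) {
 *           return false;
 *       }
 *       if ((ENABLE_FEAT_PAN) == FEAT_STATE_ALWAYS) {
 *           return true;
 *       }
 *       return read_feat_pan_id_field() >= 1U;
 *   }
 */
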
static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
		     ENABLE_FEAT_PAN)
static inline bool is_feat_pan_present(void)
{
	return read_feat_pan_id_field() != 0U;
}

CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
		     ENABLE_FEAT_VHE)

static inline bool is_armv8_2_ttcnp_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}

static inline bool is_feat_uao_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_UAO_SHIFT) &
		ID_AA64MMFR2_EL1_UAO_MASK) != 0U;
}

static inline bool is_feat_pacqarma3_present(void)
{
	uint64_t mask_id_aa64isar2 =
			(ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
			(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT);

	/* If any of these fields is non-zero, the QARMA3 algorithm is present */
	return (read_id_aa64isar2_el1() & mask_id_aa64isar2) != 0U;
}

static inline bool is_armv8_3_pauth_present(void)
{
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * If any of these fields is non-zero, or QARMA3 is present,
	 * then PAuth is present.
	 */
	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
		is_feat_pacqarma3_present());
}

static inline bool is_armv8_4_ttst_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
		ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}

static inline bool is_armv8_5_bti_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}

static inline unsigned int get_armv8_5_mte_support(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
		ID_AA64PFR1_EL1_MTE_MASK);
}

static inline bool is_feat_mte2_present(void)
{
	return get_armv8_5_mte_support() >= MTE_IMPLEMENTED_ELX;
}

static inline bool is_feat_ssbs_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
		ID_AA64PFR1_EL1_SSBS_MASK) != SSBS_UNAVAILABLE;
}

static inline bool is_feat_nmi_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_NMI_SHIFT) &
		ID_AA64PFR1_EL1_NMI_MASK) == NMI_IMPLEMENTED;
}

static inline bool is_feat_gcs_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_GCS_SHIFT) &
		ID_AA64PFR1_EL1_GCS_MASK) == GCS_IMPLEMENTED;
}

static inline bool is_feat_ebep_present(void)
{
	return ((read_id_aa64dfr1_el1() >> ID_AA64DFR1_EBEP_SHIFT) &
		ID_AA64DFR1_EBEP_MASK) == EBEP_IMPLEMENTED;
}

static inline bool is_feat_sebep_present(void)
{
	return ((read_id_aa64dfr0_el1() >> ID_AA64DFR0_SEBEP_SHIFT) &
		ID_AA64DFR0_SEBEP_MASK) == SEBEP_IMPLEMENTED;
}

CREATE_FEATURE_FUNCS_VER(feat_mte2, get_armv8_5_mte_support, MTE_IMPLEMENTED_ELX,
			 ENABLE_FEAT_MTE2)
CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
		     ENABLE_FEAT_SEL2)
CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
		     ENABLE_FEAT_TWED)
CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ENABLE_FEAT_FGT)
CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ENABLE_FEAT_ECV)
CREATE_FEATURE_FUNCS_VER(feat_ecv_v2, read_feat_ecv_id_field,
			 ID_AA64MMFR0_EL1_ECV_SELF_SYNCH, ENABLE_FEAT_ECV)

CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
		     ENABLE_FEAT_RNG)
CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
		     ENABLE_FEAT_TCR2)

CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
		     ENABLE_FEAT_S2POE)
CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
		     ENABLE_FEAT_S1POE)
static inline bool is_feat_sxpoe_supported(void)
{
	return is_feat_s1poe_supported() || is_feat_s2poe_supported();
}

CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
		     ENABLE_FEAT_S2PIE)
CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
		     ENABLE_FEAT_S1PIE)
static inline bool is_feat_sxpie_supported(void)
{
	return is_feat_s1pie_supported() || is_feat_s2pie_supported();
}

/* FEAT_GCS: Guarded Control Stack */
CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
		     ENABLE_FEAT_GCS)

/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ENABLE_FEAT_AMU)
CREATE_FEATURE_FUNCS_VER(feat_amuv1p1, read_feat_amu_id_field,
			 ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)

/*
 * Return the MPAM version, encoded as (major version << 4) | fractional version:
 *
 * 0x00: MPAM not implemented (Armv8.0 or later)
 * 0x01: MPAM v0.1            (Armv8.4 or later)
 * 0x10: MPAM v1.0            (Armv8.2 or later)
 * 0x11: MPAM v1.1            (Armv8.4 or later)
 */
static inline unsigned int read_feat_mpam_version(void)
{
	return (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
				((read_id_aa64pfr1_el1() >>
		ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}

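/*
 * For example, a core reporting ID_AA64PFR0_EL1.MPAM = 0b0001 and
 * ID_AA64PFR1_EL1.MPAM_frac = 0b0001 yields 0x11, i.e. MPAM v1.1.
 */
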
CREATE_FEATURE_FUNCS_VER(feat_mpam, read_feat_mpam_version, 1U,
			 ENABLE_FEAT_MPAM)

/* FEAT_HCX: Extended Hypervisor Configuration Register */
CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
		     ENABLE_FEAT_HCX)

static inline bool is_feat_rng_trap_present(void)
{
	return (((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT) &
			ID_AA64PFR1_EL1_RNDR_TRAP_MASK)
			== ID_AA64PFR1_EL1_RNG_TRAP_SUPPORTED);
}

static inline unsigned int get_armv9_2_feat_rme_support(void)
{
	/*
	 * Return the RME version, or zero if RME is not supported. The return
	 * value can thus be used both as the RME version number and, compared
	 * against zero, to detect the presence of RME.
	 */
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}

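/*
 * Illustrative usage (hypothetical caller, not part of this header):
 *
 *   unsigned int rme_ver = get_armv9_2_feat_rme_support();
 *
 *   if (rme_ver != 0U) {
 *       ... RME is implemented and rme_ver holds its version ...
 *   }
 */
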
/*********************************************************************************
 * Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
 ********************************************************************************/
static inline unsigned int read_feat_sb_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_SB_SHIFT);
}

/*
 * FEAT_CSV2: Cache Speculation Variant 2. This checks bits [59:56] of the
 * id_aa64pfr0_el1 register and can be used to check for the following features:
 * FEAT_CSV2_2: Cache Speculation Variant CSV2_2.
 * FEAT_CSV2_3: Cache Speculation Variant CSV2_3.
 * 0b0000 - FEAT_CSV2 is not implemented.
 * 0b0001 - FEAT_CSV2 is implemented, but FEAT_CSV2_2 and FEAT_CSV2_3
 *          are not implemented.
 * 0b0010 - FEAT_CSV2_2 is implemented but FEAT_CSV2_3 is not implemented.
 * 0b0011 - FEAT_CSV2_3 is implemented.
 */
static inline unsigned int read_feat_csv2_id_field(void)
{
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_CSV2_SHIFT) & ID_AA64PFR0_CSV2_MASK;
}

CREATE_FEATURE_FUNCS_VER(feat_csv2_2, read_feat_csv2_id_field,
			 ID_AA64PFR0_CSV2_2_SUPPORTED, ENABLE_FEAT_CSV2_2)
CREATE_FEATURE_FUNCS_VER(feat_csv2_3, read_feat_csv2_id_field,
			 ID_AA64PFR0_CSV2_3_SUPPORTED, ENABLE_FEAT_CSV2_3)

/* FEAT_SPE: Statistical Profiling Extension */
CREATE_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
		     ENABLE_SPE_FOR_NS)

/* FEAT_SVE: Scalable Vector Extension */
CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
		     ENABLE_SVE_FOR_NS)

/* FEAT_RAS: Reliability, Availability, Serviceability */
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1,
		     ID_AA64PFR0_RAS_SHIFT, ENABLE_FEAT_RAS)

/* FEAT_DIT: Data Independent Timing instructions */
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1,
		     ID_AA64PFR0_DIT_SHIFT, ENABLE_FEAT_DIT)

CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1,
		     ID_AA64DFR0_TRACEVER_SHIFT, ENABLE_SYS_REG_TRACE_FOR_NS)

/* FEAT_TRF: Trace Filter */
CREATE_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
		     ENABLE_TRF_FOR_NS)

/* FEAT_NV2: Enhanced Nested Virtualization */
CREATE_FEATURE_FUNCS(feat_nv, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT, 0)
CREATE_FEATURE_FUNCS_VER(feat_nv2, read_feat_nv_id_field,
			 ID_AA64MMFR2_EL1_NV2_SUPPORTED, CTX_INCLUDE_NEVE_REGS)

/* FEAT_BRBE: Branch Record Buffer Extension */
CREATE_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
		     ENABLE_BRBE_FOR_NS)

/* FEAT_TRBE: Trace Buffer Extension */
CREATE_FEATURE_FUNCS(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
		     ENABLE_TRBE_FOR_NS)

static inline unsigned int read_feat_sme_fa64_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64smfr0_el1(),
			     ID_AA64SMFR0_EL1_SME_FA64_SHIFT);
}

/* FEAT_SMEx: Scalable Matrix Extension */
CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ENABLE_SME_FOR_NS)
CREATE_FEATURE_FUNCS_VER(feat_sme2, read_feat_sme_id_field,
			 ID_AA64PFR1_EL1_SME2_SUPPORTED, ENABLE_SME2_FOR_NS)

/*******************************************************************************
 * Functions to query hardware translation granule support
 ******************************************************************************/
static inline unsigned int read_id_aa64mmfr0_el0_tgran4_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN4_SHIFT);
}

static inline unsigned int read_id_aa64mmfr0_el0_tgran16_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN16_SHIFT);
}

static inline unsigned int read_id_aa64mmfr0_el0_tgran64_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN64_SHIFT);
}

static inline unsigned int read_feat_pmuv3_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_PMUVER_SHIFT);
}

static inline unsigned int read_feat_mtpmu_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT);
}

static inline bool is_feat_mtpmu_supported(void)
{
	if (DISABLE_MTPMU == FEAT_STATE_DISABLED) {
		return false;
	}

	if (DISABLE_MTPMU == FEAT_STATE_ALWAYS) {
		return true;
	}

	unsigned int mtpmu = read_feat_mtpmu_id_field();

	return (mtpmu != 0U) && (mtpmu != ID_AA64DFR0_MTPMU_DISABLED);
}

#endif /* ARCH_FEATURES_H */