xref: /rk3399_ARM-atf/include/arch/aarch64/arch_features.h (revision b0980e584398fc5adc908cd68f1a6deefa943d29)
/*
 * Copyright (c) 2019-2022, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>
#include <stdint.h>

#include <arch_helpers.h>
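
/*
 * Each helper below follows the same pattern: read the relevant ID_AA64*
 * feature register, isolate the field of interest with
 * (read_id_aa64xxx_el1() >> FIELD_SHIFT) & FIELD_MASK, and compare the
 * result against the value that denotes the minimum level of support.
 */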

static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

static inline bool is_armv8_1_pan_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_PAN_SHIFT) &
		ID_AA64MMFR1_EL1_PAN_MASK) != 0U;
}

static inline bool is_armv8_1_vhe_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_VHE_SHIFT) &
		ID_AA64MMFR1_EL1_VHE_MASK) != 0U;
}

static inline bool is_armv8_2_ttcnp_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}

static inline bool is_armv8_3_pauth_present(void)
{
	uint64_t mask = (ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
			(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
			(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
			(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/* If any of the fields is not zero, PAuth is present */
	return (read_id_aa64isar1_el1() & mask) != 0U;
}

static inline bool is_armv8_4_dit_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == 1U;
}

static inline bool is_armv8_4_ttst_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
		ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}

static inline bool is_armv8_5_bti_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}
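
/*
 * Return the raw ID_AA64PFR1_EL1.MTE field value: 0 means FEAT_MTE is not
 * implemented, 1 means instruction-only MTE is available at EL0, and values
 * of 2 or greater indicate full Memory Tagging Extension support.
 */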
static inline unsigned int get_armv8_5_mte_support(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
		ID_AA64PFR1_EL1_MTE_MASK);
}

static inline bool is_armv8_4_sel2_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) == 1ULL;
}

static inline bool is_armv8_6_twed_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_TWED_SHIFT) &
		ID_AA64MMFR1_EL1_TWED_MASK) == ID_AA64MMFR1_EL1_TWED_SUPPORTED);
}

static inline bool is_armv8_6_fgt_present(void)
{
	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_FGT_SHIFT) &
		ID_AA64MMFR0_EL1_FGT_MASK) == ID_AA64MMFR0_EL1_FGT_SUPPORTED;
}
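
/*
 * Return the raw ID_AA64MMFR0_EL1.ECV field value: 0 means Enhanced Counter
 * Virtualization is not implemented, 1 means FEAT_ECV is present, and 2 means
 * the physical counter offset (CNTPOFF_EL2) is additionally supported.
 */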
static inline unsigned long int get_armv8_6_ecv_support(void)
{
	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_ECV_SHIFT) &
		ID_AA64MMFR0_EL1_ECV_MASK);
}

static inline bool is_armv8_5_rng_present(void)
{
	return ((read_id_aa64isar0_el1() >> ID_AA64ISAR0_RNDR_SHIFT) &
		ID_AA64ISAR0_RNDR_MASK) != 0U;
}

static inline bool is_armv8_6_feat_amuv1p1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1P1);
}

/*
 * Return the MPAM version, encoded as (major version << 4) | fractional
 * version:
 *
 * 0x00: not implemented (Armv8.0 or later)
 * 0x01: v0.1 (Armv8.4 or later)
 * 0x10: v1.0 (Armv8.2 or later)
 * 0x11: v1.1 (Armv8.4 or later)
 */
static inline unsigned int get_mpam_version(void)
{
	return (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
				((read_id_aa64pfr1_el1() >>
		ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}
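
/*
 * Illustrative use only: the major version occupies bits [7:4] and the
 * fractional version bits [3:0], so a non-zero return value means some
 * version of MPAM is implemented:
 *
 *   if (get_mpam_version() != 0U) {
 *           ...
 *   }
 */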

static inline bool is_feat_hcx_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_HCX_SHIFT) &
		ID_AA64MMFR1_EL1_HCX_MASK) == ID_AA64MMFR1_EL1_HCX_SUPPORTED);
}

static inline bool is_feat_rng_trap_present(void)
{
	return (((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT) &
			ID_AA64PFR1_EL1_RNDR_TRAP_MASK)
			== ID_AA64PFR1_EL1_RNG_TRAP_SUPPORTED);
}

static inline unsigned int get_armv9_2_feat_rme_support(void)
{
	/*
	 * Return the RME version, or zero if RME is not supported. The return
	 * value can therefore be used directly as the RME version number, or
	 * simply compared against zero to detect RME presence.
	 */
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}
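
/*
 * Illustrative use only: a return value of 1 corresponds to RMEv1, while
 * zero means RME is absent, e.g.
 *
 *   if (get_armv9_2_feat_rme_support() != 0U) {
 *           ...
 *   }
 */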

/*********************************************************************************
 * Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
 ********************************************************************************/
static inline bool is_armv8_0_feat_sb_present(void)
{
	return (((read_id_aa64isar1_el1() >> ID_AA64ISAR1_SB_SHIFT) &
		ID_AA64ISAR1_SB_MASK) == ID_AA64ISAR1_SB_SUPPORTED);
}

/*********************************************************************************
 * Function to identify the presence of FEAT_CSV2_2 (Cache Speculation Variant 2)
 ********************************************************************************/
static inline bool is_armv8_0_feat_csv2_2_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_CSV2_SHIFT) &
		ID_AA64PFR0_CSV2_MASK) == ID_AA64PFR0_CSV2_2_SUPPORTED);
}

/**********************************************************************************
 * Function to identify the presence of FEAT_SPE (Statistical Profiling Extension)
 *********************************************************************************/
static inline bool is_armv8_2_feat_spe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_PMS_SHIFT) &
		ID_AA64DFR0_PMS_MASK) != ID_AA64DFR0_SPE_NOT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_SVE (Scalable Vector Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_sve_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SVE_SHIFT) &
		ID_AA64PFR0_SVE_MASK) == ID_AA64PFR0_SVE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_RAS (Reliability, Availability,
 * and Serviceability Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_ras_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_RAS_SHIFT) &
		ID_AA64PFR0_RAS_MASK) != ID_AA64PFR0_RAS_NOT_SUPPORTED);
}

/**************************************************************************
 * Function to identify the presence of FEAT_DIT (Data Independent Timing)
 *************************************************************************/
static inline bool is_armv8_4_feat_dit_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == ID_AA64PFR0_DIT_SUPPORTED);
}

/*************************************************************************
 * Function to identify the presence of FEAT_TRF (Trace Filter Control)
 ************************************************************************/
static inline bool is_arm8_4_feat_trf_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEFILT_SHIFT) &
		ID_AA64DFR0_TRACEFILT_MASK) == ID_AA64DFR0_TRACEFILT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_AMUv1 (Activity Monitors
 * Extension v1)
 ******************************************************************************/
static inline bool is_armv8_4_feat_amuv1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1);
}

/********************************************************************************
 * Function to identify the level of support for FEAT_NV2 (Enhanced Nested
 * Virtualization): returns the ID_AA64MMFR2_EL1.NV field value, i.e. 0 when
 * nested virtualization is not implemented, 1 for FEAT_NV and 2 for FEAT_NV2.
 *******************************************************************************/
static inline unsigned int get_armv8_4_feat_nv_support(void)
{
	return (((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_NV_SHIFT) &
		ID_AA64MMFR2_EL1_NV_MASK));
}

/*******************************************************************************
 * Function to identify the presence of FEAT_BRBE (Branch Record Buffer
 * Extension)
 ******************************************************************************/
static inline bool is_feat_brbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_BRBE_SHIFT) &
		ID_AA64DFR0_BRBE_MASK) == ID_AA64DFR0_BRBE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_TRBE (Trace Buffer Extension)
 ******************************************************************************/
static inline bool is_feat_trbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEBUFFER_SHIFT) &
		ID_AA64DFR0_TRACEBUFFER_MASK) == ID_AA64DFR0_TRACEBUFFER_SUPPORTED);
}

#endif /* ARCH_FEATURES_H */