xref: /rk3399_ARM-atf/include/services/arm_arch_svc.h (revision 10ecd58093a34e95e2dfad65b1180610f29397cc)
1 /*
2  * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

/*
 * Function IDs for the Arm Architecture Service calls defined by the
 * SMC Calling Convention (Arm DEN0028).
 */
#define SMCCC_VERSION			U(0x80000000)
#define SMCCC_ARCH_FEATURES		U(0x80000001)
#define SMCCC_ARCH_SOC_ID		U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1		U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2		U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3		U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY		U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4		U(0x80000004)

/* Argument values for SMCCC_ARCH_SOC_ID: select version vs. revision */
#define SMCCC_GET_SOC_VERSION		U(0)
#define SMCCC_GET_SOC_REVISION		U(1)
21 
#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

/*
 * Per-feature SCR_EL3 enable bit: each SCR_FEAT_xxx macro resolves to the
 * feature's enable bit when the feature is compiled in, or to 0 so that it
 * drops out of the SCR_EL3_FEATS mask entirely.
 */
#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif
31 
/*
 * SCR_EL3 enable bit for FEAT_FPMR.
 * BUG FIX: the disabled branch previously expanded to nothing
 * (`#define SCR_FEAT_FPMR`), which made SCR_EL3_FEATS below expand to
 * `... | | ...` — a syntax error whenever FEAT_FPMR is not enabled.
 * It must default to (0) like every other SCR_FEAT_xxx macro.
 */
#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
#define SCR_FEAT_FPMR (0)
#endif
37 
/*
 * Remaining per-feature SCR_EL3 enable bits. Same pattern throughout:
 * the feature's enable bit when built in, (0) otherwise, so disabled
 * features contribute nothing to the SCR_EL3_FEATS mask.
 */
#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

/* FEAT_LS64_ACCDATA needs two bits: ADEn and EnAS0 */
#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

/* NOTE: RAS is active-0 (TERR traps when set); see SCR_EL3_FLIPPED below */
#if ENABLE_FEAT_RAS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

/* MEC is deliberately in the IGNORED mask, not FEATS; see SCR_EL3_IGNORED */
#if ENABLE_FEAT_MEC
#define SCR_FEAT_MEC SCR_MECEn_BIT
#else
#define SCR_FEAT_MEC (0)
#endif
133 
/*
 * Platform hooks: a platform may pre-define these to add its own bits to
 * the FEATS/FLIPPED/IGNORED masks. They default to 0 when not overridden.
 */
#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif
160 /*
161  * XYZ_EL3_FEATS - list all bits that are relevant for feature enablement. It's
162  * a constant list based on what features are expected. This relies on the fact
163  * that if the feature is in any way disabled, then the relevant bit will not be
164  * written by context management.
165  *
166  * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
167  * spec always uses active 1 to mean that the feature will not trap.
168  *
169  * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
170  * enablement and should not be reported to lower ELs
171  */
/* Aggregate SCR_EL3 masks (see comment block above for FEATS/FLIPPED/IGNORED
 * semantics). Disabled features contribute 0 and vanish from the mask. */
#define SCR_EL3_FEATS (								\
	SCR_FEAT_FGT2		|						\
	SCR_FEAT_FPMR		|						\
	SCR_FEAT_D128		|						\
	SCR_FEAT_S1PIE		|						\
	SCR_FEAT_SCTLR2		|						\
	SCR_FEAT_TCR2		|						\
	SCR_FEAT_THE		|						\
	SCR_FEAT_SME		|						\
	SCR_FEAT_GCS		|						\
	SCR_FEAT_HCX		|						\
	SCR_FEAT_LS64_ACCDATA	|						\
	SCR_FEAT_AMUv1p1	|						\
	SCR_FEAT_ECV		|						\
	SCR_FEAT_FGT		|						\
	SCR_FEAT_MTE2		|						\
	SCR_FEAT_CSV2_2		|						\
	SCR_APK_BIT		| /* FEAT_Pauth */				\
	SCR_FEAT_RAS		|						\
	SCR_PLAT_FEATS)
/* Bits in FEATS that are active-0 (set means trap/disable) */
#define SCR_EL3_FLIPPED (							\
	SCR_FEAT_RAS		|						\
	SCR_PLAT_FLIPPED)
/* Bits that do not indicate feature availability to lower ELs */
#define SCR_EL3_IGNORED (							\
	SCR_API_BIT		|						\
	SCR_RW_BIT		|						\
	SCR_SIF_BIT		|						\
	SCR_HCE_BIT		|						\
	SCR_FIQ_BIT		|						\
	SCR_IRQ_BIT		|						\
	SCR_NS_BIT		|						\
	SCR_RES1_BITS		|						\
	SCR_FEAT_MEC		|						\
	SCR_PLAT_IGNORED)
/* Build-time sanity: FEATS and IGNORED disjoint; FLIPPED is subset of FEATS */
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
208 
/* Per-feature CPTR_EL3 bits, same enabled-bit-or-zero pattern as SCR above */
#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

/* Aggregate CPTR_EL3 masks; TFP (FP/SIMD trap) is unconditionally relevant */
#define CPTR_EL3_FEATS (							\
	CPTR_SYS_REG_TRACE	|						\
	CPTR_FEAT_AMU		|						\
	CPTR_FEAT_SME		|						\
	TFP_BIT			|						\
	CPTR_FEAT_SVE		|						\
	CPTR_PLAT_FEATS)
/* Trap bits are active-0 from the feature-availability point of view */
#define CPTR_EL3_FLIPPED (							\
	CPTR_SYS_REG_TRACE	|						\
	CPTR_FEAT_AMU		|						\
	TFP_BIT			|						\
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);
246 
/*
 * Some features enables are expressed with more than 1 bit in order to cater
 * for multi world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB(1UL)
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB(1UL)
#else
#define MDCR_FEAT_SPE (0)
#endif

/* Aggregate MDCR_EL3 masks; debug trap bits are unconditionally relevant */
#define MDCR_EL3_FEATS (							\
	MDCR_FEAT_BRBE		|						\
	MDCR_FEAT_FGT		|						\
	MDCR_FEAT_TRBE		|						\
	MDCR_FEAT_TRF		|						\
	MDCR_FEAT_SPE		|						\
	MDCR_TDOSA_BIT		|						\
	MDCR_TDA_BIT		|						\
	MDCR_TPM_BIT		| /* FEAT_PMUv3 */				\
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED (							\
	MDCR_FEAT_FGT		|						\
	MDCR_FEAT_TRF		|						\
	MDCR_TDOSA_BIT		|						\
	MDCR_TDA_BIT		|						\
	MDCR_TPM_BIT		|						\
	MDCR_PLAT_FLIPPED)
/* Upper halves of the 2-bit fields (SBRBE/NSTB/NSPB/SPD32) are ignored,
 * per the "last bit means enabled for this world" convention above */
#define MDCR_EL3_IGNORED (							\
	MDCR_EBWE_BIT		|						\
	MDCR_EnPMSN_BIT		|						\
	MDCR_SBRBE(2UL)		|						\
	MDCR_MTPME_BIT		|						\
	MDCR_NSTBE_BIT		|						\
	MDCR_NSTB(2UL)		|						\
	MDCR_MCCD_BIT		|						\
	MDCR_SCCD_BIT		|						\
	MDCR_SDD_BIT		|						\
	MDCR_SPD32(3UL)		|						\
	MDCR_NSPB(2UL)		|						\
	MDCR_NSPBE_BIT		|						\
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);
317 
/* MPAM3_EL3 masks: only TRAPLOWER is reported (active-0); MPAMEN is ignored */
#define MPAM3_EL3_FEATS		(MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED	(MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED	(MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);

/* The hex representations of these registers' S3 encoding */
#define SCR_EL3_OPCODE  			U(0x1E1100)
#define CPTR_EL3_OPCODE 			U(0x1E1140)
#define MDCR_EL3_OPCODE 			U(0x1E1320)
#define MPAM3_EL3_OPCODE 			U(0x1EA500)

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */
333