/* xref: /rk3399_ARM-atf/include/services/arm_arch_svc.h (revision ab8e9f8410a37f907ad708c74d3d3bed54fbbafa) */
1 /*
2  * Copyright (c) 2018-2026, Arm Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

/*
 * SMCCC Arm Architecture Service function IDs (fast SMC32 range,
 * 0x8000xxxx), as defined by the SMC Calling Convention (DEN0028).
 */
#define SMCCC_VERSION			U(0x80000000)
#define SMCCC_ARCH_FEATURES		U(0x80000001)
#define SMCCC_ARCH_SOC_ID		U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1		U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2		U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3		U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY		U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4		U(0x80000004)

/* Argument to SMCCC_ARCH_SOC_ID selecting which datum to return */
#define SMCCC_GET_SOC_VERSION		U(0)
#define SMCCC_GET_SOC_REVISION		U(1)
#define SMCCC_GET_SOC_NAME		U(2)

/* Length in bytes of the buffer for the SMCCC_GET_SOC_NAME string */
#define SMCCC_SOC_NAME_LEN		U(136)
24 
#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

/*
 * Per-feature SCR_EL3 enable bits. Each SCR_FEAT_<name> macro expands to
 * the feature's SCR_EL3 enable bit(s) when the corresponding build flag is
 * set and to 0 otherwise, so the aggregate SCR_EL3_FEATS mask below can be
 * composed without any further conditionals.
 */
#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif

#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
#define SCR_FEAT_FPMR (0)
#endif

#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

/* SME enablement is per-world, hence the _FOR_NS flag */
#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

/* LS64_ACCDATA needs two enable bits */
#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_TWED
#define SCR_FEAT_TWED SCR_TWEDEn_BIT
#else
#define SCR_FEAT_TWED (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

/*
 * Inverted condition: TERR is a trap bit, so RAS register access is
 * available to lower ELs when trapping is NOT requested. TERR is listed
 * in SCR_EL3_FLIPPED below for the same reason.
 */
#if !RAS_TRAP_NS_ERR_REC_ACCESS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

#if ENABLE_FEAT_MEC
#define SCR_FEAT_MEC SCR_MECEn_BIT
#else
#define SCR_FEAT_MEC (0)
#endif

#if ENABLE_FEAT_AIE
#define SCR_FEAT_AIE SCR_AIEn_BIT
#else
#define SCR_FEAT_AIE (0)
#endif

#if ENABLE_FEAT_PFAR
#define SCR_FEAT_PFAR SCR_PFAREn_BIT
#else
#define SCR_FEAT_PFAR (0)
#endif

/* IDTE3 covers two ID-register trap bits */
#if ENABLE_FEAT_IDTE3
#define SCR_FEAT_IDTE3 (SCR_TID3_BIT | SCR_TID5_BIT)
#else
#define SCR_FEAT_IDTE3 (0)
#endif

#if ENABLE_FEAT_HDBSS
#define SCR_FEAT_HDBSS SCR_HDBSSEn_BIT
#else
#define SCR_FEAT_HDBSS (0)
#endif

#if ENABLE_FEAT_HACDBS
#define SCR_FEAT_HACDBS SCR_HACDBSEn_BIT
#else
#define SCR_FEAT_HACDBS (0)
#endif
172 
/*
 * Platform hooks: a platform may pre-define these macros to contribute
 * extra bits to the FEATS/FLIPPED/IGNORED masks below. Default each one
 * to 0 when the platform provides nothing.
 */
#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif
/*
 * XYZ_EL3_FEATS - list all bits that are relevant for feature enablement. It's
 * a constant list based on what features are expected. This relies on the fact
 * that if the feature is in any way disabled, then the relevant bit will not be
 * written by context management.
 *
 * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
 * spec always uses active 1 to mean that the feature will not trap.
 *
 * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs
 */
#define SCR_EL3_FEATS (								\
	SCR_FEAT_FGT2		|						\
	SCR_FEAT_FPMR		|						\
	SCR_FEAT_MEC		|						\
	SCR_FEAT_D128		|						\
	SCR_FEAT_S1PIE		|						\
	SCR_FEAT_SCTLR2		|						\
	SCR_FEAT_TCR2		|						\
	SCR_FEAT_THE		|						\
	SCR_FEAT_SME		|						\
	SCR_FEAT_GCS		|						\
	SCR_FEAT_HCX		|						\
	SCR_FEAT_LS64_ACCDATA	|						\
	SCR_FEAT_AMUv1p1	|						\
	SCR_FEAT_TWED		|						\
	SCR_FEAT_ECV		|						\
	SCR_FEAT_FGT		|						\
	SCR_FEAT_MTE2		|						\
	SCR_FEAT_CSV2_2		|						\
	SCR_APK_BIT		| /* FEAT_Pauth */				\
	SCR_FEAT_RAS		|						\
	SCR_FEAT_AIE		|						\
	SCR_FEAT_PFAR		|						\
	SCR_FEAT_IDTE3		|						\
	SCR_FEAT_HDBSS		|						\
	SCR_FEAT_HACDBS		|						\
	SCR_PLAT_FEATS)
#define SCR_EL3_FLIPPED (							\
	SCR_FEAT_RAS		|						\
	SCR_PLAT_FLIPPED)
#define SCR_EL3_IGNORED (							\
	SCR_API_BIT		|						\
	SCR_RW_BIT		|						\
	SCR_SIF_BIT		|						\
	SCR_HCE_BIT		|						\
	SCR_FIQ_BIT		|						\
	SCR_IRQ_BIT		|						\
	SCR_NS_BIT		|						\
	SCR_NSE_BIT		|						\
	SCR_RES1_BITS		|						\
	SCR_EEL2_BIT		|						\
	SCR_PLAT_IGNORED)
/* A bit cannot be both a relevant feature bit and an ignored bit */
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
/* Every active-0 (flipped) bit must also be listed as a feature bit */
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
255 
/*
 * Per-feature CPTR_EL3 bits, same scheme as the SCR_FEAT_* macros above:
 * expand to the relevant bit(s) when the feature is built in, else 0.
 * Most CPTR bits are traps (active 0), hence they also appear in
 * CPTR_EL3_FLIPPED below.
 */
#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

/* TFP_BIT (FP/SIMD trap) is unconditionally a feature bit */
#define CPTR_EL3_FEATS (							\
	CPTR_SYS_REG_TRACE	|						\
	CPTR_FEAT_AMU		|						\
	CPTR_FEAT_SME		|						\
	TFP_BIT			|						\
	CPTR_FEAT_SVE		|						\
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED (							\
	CPTR_SYS_REG_TRACE	|						\
	CPTR_FEAT_AMU		|						\
	TFP_BIT			|						\
	CPTR_PLAT_FLIPPED)
/* Every active-0 (flipped) bit must also be listed as a feature bit */
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);
293 
/*
 * Some features enables are expressed with more than 1 bit in order to cater
 * for multi world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
/* Low bit of the 2-bit SBRBE field only; see comment above */
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB_EN_BIT
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB_EN_BIT
#else
#define MDCR_FEAT_SPE (0)
#endif

#if ENABLE_FEAT_DEBUGV8P9
#define MDCR_DEBUGV8P9 MDCR_EBWE_BIT
#else
#define MDCR_DEBUGV8P9 (0)
#endif

#if ENABLE_FEAT_EBEP
#define MDCR_FEAT_EBEP MDCR_PMEE(MDCR_PMEE_CTRL_EL2)
#else
#define MDCR_FEAT_EBEP (0)
#endif

#if ENABLE_FEAT_STEP2
#define MDCR_FEAT_STEP2 MDCR_EnSTEPOP_BIT
#else
#define MDCR_FEAT_STEP2 (0)
#endif
348 
#define MDCR_EL3_FEATS (							\
	MDCR_FEAT_STEP2		|						\
	MDCR_DEBUGV8P9		|						\
	MDCR_FEAT_BRBE		|						\
	MDCR_FEAT_FGT		|						\
	MDCR_FEAT_TRBE		|						\
	MDCR_FEAT_TRF		|						\
	MDCR_FEAT_SPE		|						\
	MDCR_FEAT_EBEP		|						\
	MDCR_TDOSA_BIT		|						\
	MDCR_TDA_BIT		|						\
	MDCR_EnPM2_BIT		|						\
	MDCR_TPM_BIT		| /* FEAT_PMUv3 */				\
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED (							\
	MDCR_FEAT_FGT		|						\
	MDCR_FEAT_TRF		|						\
	MDCR_TDOSA_BIT		|						\
	MDCR_TDA_BIT		|						\
	MDCR_TPM_BIT		|						\
	MDCR_PLAT_FLIPPED)
#define MDCR_EL3_IGNORED (							\
	MDCR_EnPMS3_BIT		|						\
	MDCR_EnPMSN_BIT		|						\
	MDCR_SBRBE(2UL)		| /* upper bit of the 2-bit SBRBE field */	\
	MDCR_MTPME_BIT		|						\
	MDCR_NSTBE_BIT		|						\
	MDCR_NSTB_SS_BIT	|						\
	MDCR_MCCD_BIT		|						\
	MDCR_SCCD_BIT		|						\
	MDCR_SDD_BIT		|						\
	MDCR_SPD32(3UL)		|						\
	MDCR_NSPB_SS_BIT	|						\
	MDCR_NSPBE_BIT		|						\
	MDCR_PLAT_IGNORED)
/* A bit cannot be both a relevant feature bit and an ignored bit */
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
/* Every active-0 (flipped) bit must also be listed as a feature bit */
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);

/* MPAM3_EL3: TRAPLOWER is a trap (active 0); MPAMEN itself is not reported */
#define MPAM3_EL3_FEATS		(MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED	(MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED	(MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);
392 
/*
 * The hex representations of these registers' S3 encoding.
 * NOTE(review): presumably used to identify the register in the
 * SMCCC_ARCH_FEATURE_AVAILABILITY call — confirm against the handler.
 */
#define SCR_EL3_OPCODE  			U(0x1E1100)
#define CPTR_EL3_OPCODE 			U(0x1E1140)
#define MDCR_EL3_OPCODE 			U(0x1E1320)
#define MPAM3_EL3_OPCODE 			U(0x1EA500)

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */
402