/*
 * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_SOC_ID U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4 U(0x80000004)

#define SMCCC_GET_SOC_VERSION U(0)
#define SMCCC_GET_SOC_REVISION U(1)
#define SMCCC_GET_SOC_NAME U(2)

#define SMCCC_SOC_NAME_LEN U(136)

#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif

#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
#define SCR_FEAT_FPMR (0)
#endif

#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_TWED
#define SCR_FEAT_TWED SCR_TWEDEn_BIT
#else
#define SCR_FEAT_TWED (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

#if !RAS_TRAP_NS_ERR_REC_ACCESS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

#if ENABLE_FEAT_MEC
#define SCR_FEAT_MEC SCR_MECEn_BIT
#else
#define SCR_FEAT_MEC (0)
#endif

#if ENABLE_FEAT_AIE
#define SCR_FEAT_AIE SCR_AIEn_BIT
#else
#define SCR_FEAT_AIE (0)
#endif

#if ENABLE_FEAT_PFAR
#define SCR_FEAT_PFAR SCR_PFAREn_BIT
#else
#define SCR_FEAT_PFAR (0)
#endif

#if ENABLE_FEAT_IDTE3
#define SCR_FEAT_IDTE3 (SCR_TID3_BIT | SCR_TID5_BIT)
#else
#define SCR_FEAT_IDTE3 (0)
#endif

#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif

/*
 * XYZ_EL3_FEATS - list of all bits that are relevant for feature enablement.
 * It is a constant list based on which features are expected. This relies on
 * the fact that, if a feature is in any way disabled, the relevant bit will
 * not be written by context management.
 *
 * XYZ_EL3_FLIPPED - bits that are active-0 rather than the usual active-1.
 * The spec always uses active-1 to mean that the feature will not trap.
 *
 * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs.
 */
#define SCR_EL3_FEATS ( \
	SCR_FEAT_FGT2 | \
	SCR_FEAT_FPMR | \
	SCR_FEAT_MEC | \
	SCR_FEAT_D128 | \
	SCR_FEAT_S1PIE | \
	SCR_FEAT_SCTLR2 | \
	SCR_FEAT_TCR2 | \
	SCR_FEAT_THE | \
	SCR_FEAT_SME | \
	SCR_FEAT_GCS | \
	SCR_FEAT_HCX | \
	SCR_FEAT_LS64_ACCDATA | \
	SCR_FEAT_AMUv1p1 | \
	SCR_FEAT_TWED | \
	SCR_FEAT_ECV | \
	SCR_FEAT_FGT | \
	SCR_FEAT_MTE2 | \
	SCR_FEAT_CSV2_2 | \
	SCR_APK_BIT | /* FEAT_Pauth */ \
	SCR_FEAT_RAS | \
	SCR_FEAT_AIE | \
	SCR_FEAT_PFAR | \
	SCR_FEAT_IDTE3 | \
	SCR_PLAT_FEATS)
#define SCR_EL3_FLIPPED ( \
	SCR_FEAT_RAS | \
	SCR_PLAT_FLIPPED)
#define SCR_EL3_IGNORED ( \
	SCR_API_BIT | \
	SCR_RW_BIT | \
	SCR_SIF_BIT | \
	SCR_HCE_BIT | \
	SCR_FIQ_BIT | \
	SCR_IRQ_BIT | \
	SCR_NS_BIT | \
	SCR_NSE_BIT | \
	SCR_RES1_BITS | \
	SCR_EEL2_BIT | \
	SCR_PLAT_IGNORED)
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
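
/*
 * Illustrative sketch only (not part of the interface defined in this file):
 * a hypothetical helper showing how the masks above are intended to combine
 * when reporting SCR_EL3 state through SMCCC_ARCH_FEATURE_AVAILABILITY. The
 * XOR normalises the active-0 (FLIPPED) bits so that a set bit always means
 * "enabled", and the AND restricts the result to feature-relevant bits. Bits
 * in SCR_EL3_IGNORED drop out automatically, since the CASSERT above
 * guarantees they never overlap SCR_EL3_FEATS.
 */
#define ILLUSTRATIVE_SCR_EL3_FEAT_REPORT(scr_el3_val) \
	(((scr_el3_val) ^ SCR_EL3_FLIPPED) & SCR_EL3_FEATS)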

#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

#define CPTR_EL3_FEATS ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	CPTR_FEAT_SME | \
	TFP_BIT | \
	CPTR_FEAT_SVE | \
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	TFP_BIT | \
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);

/*
 * Some feature enables are expressed with more than one bit in order to cater
 * for multi-world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context-switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB_EN_BIT
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB_EN_BIT
#else
#define MDCR_FEAT_SPE (0)
#endif

#if ENABLE_FEAT_DEBUGV8P9
#define MDCR_DEBUGV8P9 MDCR_EBWE_BIT
#else
#define MDCR_DEBUGV8P9 (0)
#endif

#if ENABLE_FEAT_EBEP
#define MDCR_FEAT_EBEP MDCR_PMEE(MDCR_PMEE_CTRL_EL2)
#else
#define MDCR_FEAT_EBEP (0)
#endif

#define MDCR_EL3_FEATS ( \
	MDCR_DEBUGV8P9 | \
	MDCR_FEAT_BRBE | \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRBE | \
	MDCR_FEAT_TRF | \
	MDCR_FEAT_SPE | \
	MDCR_FEAT_EBEP | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_EnPM2_BIT | \
	MDCR_TPM_BIT | /* FEAT_PMUv3 */ \
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED ( \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRF | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | \
	MDCR_PLAT_FLIPPED)
#define MDCR_EL3_IGNORED ( \
	MDCR_EnPMS3_BIT | \
	MDCR_EnPMSN_BIT | \
	MDCR_SBRBE(2UL) | \
	MDCR_MTPME_BIT | \
	MDCR_NSTBE_BIT | \
	MDCR_NSTB_SS_BIT | \
	MDCR_MCCD_BIT | \
	MDCR_SCCD_BIT | \
	MDCR_SDD_BIT | \
	MDCR_SPD32(3UL) | \
	MDCR_NSPB_SS_BIT | \
	MDCR_NSPBE_BIT | \
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);

#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);

/* The hex representations of these registers' S3 encodings */
#define SCR_EL3_OPCODE U(0x1E1100)
#define CPTR_EL3_OPCODE U(0x1E1140)
#define MDCR_EL3_OPCODE U(0x1E1320)
#define MPAM3_EL3_OPCODE U(0x1EA500)
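
/*
 * Illustrative sketch only: the opcodes above pack each register's
 * S<op0>_<op1>_C<CRn>_C<CRm>_<op2> encoding using the field layout of the
 * MRS/MSR instruction encoding (op0 at bit 19, op1 at bit 16, CRn at bit 12,
 * CRm at bit 8, op2 at bit 5). The hypothetical macro below is not used by
 * this header; it only demonstrates the packing. For example, SCR_EL3 is
 * S3_6_C1_C1_0, so:
 *   (3 << 19) | (6 << 16) | (1 << 12) | (1 << 8) | (0 << 5) == 0x1E1100
 */
#define ILLUSTRATIVE_S3_OPCODE(op0, op1, crn, crm, op2) \
	((U(op0) << 19) | (U(op1) << 16) | (U(crn) << 12) | \
	 (U(crm) << 8) | (U(op2) << 5))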

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */