/*
 * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

/* SMCCC Arm Architecture Service function identifiers (SMC Calling Convention) */
#define SMCCC_VERSION				U(0x80000000)
#define SMCCC_ARCH_FEATURES			U(0x80000001)
#define SMCCC_ARCH_SOC_ID			U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1			U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2			U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3			U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY		U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4			U(0x80000004)

/* Argument values for the SMCCC_ARCH_SOC_ID call */
#define SMCCC_GET_SOC_VERSION			U(0)
#define SMCCC_GET_SOC_REVISION			U(1)

#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

/*
 * Per-feature SCR_EL3 enable bits. Each build-time feature flag maps to the
 * SCR_EL3 bit(s) that enable it; when the feature is compiled out, the macro
 * collapses to (0) so the aggregate masks below remain well-formed.
 */
#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif

#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
/*
 * Fix: this was previously defined empty, which made SCR_EL3_FEATS expand to
 * "... | | ..." (a preprocessor/compile error) whenever FEAT_FPMR was
 * disabled. It must be (0) like every other disabled feature.
 */
#define SCR_FEAT_FPMR (0)
#endif

#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_TWED
#define SCR_FEAT_TWED SCR_TWEDEn_BIT
#else
#define SCR_FEAT_TWED (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

/* RAS is "enabled" (not trapped) unless NS error record accesses are trapped */
#if !RAS_TRAP_NS_ERR_REC_ACCESS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

#if ENABLE_FEAT_MEC
#define SCR_FEAT_MEC SCR_MECEn_BIT
#else
#define SCR_FEAT_MEC (0)
#endif

/* Platform hooks: platforms may extend the masks; default to no contribution */
#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif

/*
 * XYZ_EL3_FEATS - list all bits that are relevant for feature enablement. It's
 * a constant list based on what features are expected. This relies on the fact
 * that if the feature is in any way disabled, then the relevant bit will not be
 * written by context management.
 *
 * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
 * spec always uses active 1 to mean that the feature will not trap.
 *
 * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs
 */
#define SCR_EL3_FEATS (						\
	SCR_FEAT_FGT2 |						\
	SCR_FEAT_FPMR |						\
	SCR_FEAT_D128 |						\
	SCR_FEAT_S1PIE |					\
	SCR_FEAT_SCTLR2 |					\
	SCR_FEAT_TCR2 |						\
	SCR_FEAT_THE |						\
	SCR_FEAT_SME |						\
	SCR_FEAT_GCS |						\
	SCR_FEAT_HCX |						\
	SCR_FEAT_LS64_ACCDATA |					\
	SCR_FEAT_AMUv1p1 |					\
	SCR_FEAT_TWED |						\
	SCR_FEAT_ECV |						\
	SCR_FEAT_FGT |						\
	SCR_FEAT_MTE2 |						\
	SCR_FEAT_CSV2_2 |					\
	SCR_APK_BIT | /* FEAT_Pauth */				\
	SCR_FEAT_RAS |						\
	SCR_PLAT_FEATS)
#define SCR_EL3_FLIPPED (					\
	SCR_FEAT_RAS |						\
	SCR_PLAT_FLIPPED)
#define SCR_EL3_IGNORED (					\
	SCR_API_BIT |						\
	SCR_RW_BIT |						\
	SCR_SIF_BIT |						\
	SCR_HCE_BIT |						\
	SCR_FIQ_BIT |						\
	SCR_IRQ_BIT |						\
	SCR_NS_BIT |						\
	SCR_NSE_BIT |						\
	SCR_RES1_BITS |						\
	SCR_FEAT_MEC |						\
	SCR_PLAT_IGNORED)
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);

/* Per-feature CPTR_EL3 enable/trap bits */
#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

#define CPTR_EL3_FEATS (					\
	CPTR_SYS_REG_TRACE |					\
	CPTR_FEAT_AMU |						\
	CPTR_FEAT_SME |						\
	TFP_BIT |						\
	CPTR_FEAT_SVE |						\
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED (					\
	CPTR_SYS_REG_TRACE |					\
	CPTR_FEAT_AMU |						\
	TFP_BIT |						\
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);

/*
 * Some features enables are expressed with more than 1 bit in order to cater
 * for multi world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB_EN_BIT
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB_EN_BIT
#else
#define MDCR_FEAT_SPE (0)
#endif

#define MDCR_EL3_FEATS (					\
	MDCR_FEAT_BRBE |					\
	MDCR_FEAT_FGT |						\
	MDCR_FEAT_TRBE |					\
	MDCR_FEAT_TRF |						\
	MDCR_FEAT_SPE |						\
	MDCR_TDOSA_BIT |					\
	MDCR_TDA_BIT |						\
	MDCR_EnPM2_BIT |					\
	MDCR_TPM_BIT | /* FEAT_PMUv3 */				\
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED (					\
	MDCR_FEAT_FGT |						\
	MDCR_FEAT_TRF |						\
	MDCR_TDOSA_BIT |					\
	MDCR_TDA_BIT |						\
	MDCR_TPM_BIT |						\
	MDCR_PLAT_FLIPPED)
#define MDCR_EL3_IGNORED (					\
	MDCR_EBWE_BIT |						\
	MDCR_EnPMS3_BIT |					\
	MDCR_EnPMSN_BIT |					\
	MDCR_SBRBE(2UL) |					\
	MDCR_MTPME_BIT |					\
	MDCR_NSTBE_BIT |					\
	MDCR_NSTB_SS_BIT |					\
	MDCR_MCCD_BIT |						\
	MDCR_SCCD_BIT |						\
	MDCR_SDD_BIT |						\
	MDCR_SPD32(3UL) |					\
	MDCR_NSPB_SS_BIT |					\
	MDCR_NSPBE_BIT |					\
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);

#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);

/* The hex representations of these registers' S3 encoding */
#define SCR_EL3_OPCODE			U(0x1E1100)
#define CPTR_EL3_OPCODE			U(0x1E1140)
#define MDCR_EL3_OPCODE			U(0x1E1320)
#define MPAM3_EL3_OPCODE		U(0x1EA500)

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */