/*
 * Copyright (c) 2019-2023, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>

#include <arch_helpers.h>
#include <common/feat_detect.h>

#define ISOLATE_FIELD(reg, feat)					\
	((unsigned int)(((reg) >> (feat)) & ID_REG_FIELD_MASK))

#define CREATE_FEATURE_FUNCS_VER(name, read_func, idvalue, guard)	\
static inline bool is_ ## name ## _supported(void)			\
{									\
	if ((guard) == FEAT_STATE_DISABLED) {				\
		return false;						\
	}								\
	if ((guard) == FEAT_STATE_ALWAYS) {				\
		return true;						\
	}								\
	return read_func() >= (idvalue);				\
}

#define CREATE_FEATURE_FUNCS(name, idreg, idfield, guard)		\
static unsigned int read_ ## name ## _id_field(void)			\
{									\
	return ISOLATE_FIELD(read_ ## idreg(), idfield);		\
}									\
CREATE_FEATURE_FUNCS_VER(name, read_ ## name ## _id_field, 1U, guard)
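
/*
 * Illustrative expansion (not part of the build; shown only to make the macro
 * machinery easier to follow). CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1,
 * ID_AA64MMFR1_EL1_PAN_SHIFT, ENABLE_FEAT_PAN) generates roughly:
 *
 *	static unsigned int read_feat_pan_id_field(void)
 *	{
 *		return ISOLATE_FIELD(read_id_aa64mmfr1_el1(),
 *				     ID_AA64MMFR1_EL1_PAN_SHIFT);
 *	}
 *
 *	static inline bool is_feat_pan_supported(void)
 *	{
 *		if ((ENABLE_FEAT_PAN) == FEAT_STATE_DISABLED) {
 *			return false;
 *		}
 *		if ((ENABLE_FEAT_PAN) == FEAT_STATE_ALWAYS) {
 *			return true;
 *		}
 *		return read_feat_pan_id_field() >= (1U);
 *	}
 *
 * i.e. the build-time guard is evaluated first, and the ID register field is
 * only consulted when the feature is configured for run-time detection.
 */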

static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
		     ENABLE_FEAT_PAN)
CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
		     ENABLE_FEAT_VHE)

static inline bool is_armv8_2_ttcnp_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}

static inline bool is_feat_pacqarma3_present(void)
{
	uint64_t mask_id_aa64isar2 =
		(ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
		(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT);

	/* If any of the fields is not zero, QARMA3 algorithm is present */
	return (read_id_aa64isar2_el1() & mask_id_aa64isar2) != 0U;
}

static inline bool is_armv8_3_pauth_present(void)
{
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * If any of the fields is not zero or QARMA3 is present,
	 * PAuth is present
	 */
	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
		is_feat_pacqarma3_present());
}

static inline bool is_armv8_4_ttst_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
		ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}

static inline bool is_armv8_5_bti_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}

static inline unsigned int get_armv8_5_mte_support(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
		ID_AA64PFR1_EL1_MTE_MASK);
}

CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
		     ENABLE_FEAT_SEL2)
CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
		     ENABLE_FEAT_TWED)
CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
		     ENABLE_FEAT_FGT)
CREATE_FEATURE_FUNCS(feat_mte_perm, id_aa64pfr2_el1,
		     ID_AA64PFR2_EL1_MTEPERM_SHIFT, ENABLE_FEAT_MTE_PERM)
CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
		     ENABLE_FEAT_ECV)
CREATE_FEATURE_FUNCS_VER(feat_ecv_v2, read_feat_ecv_id_field,
			 ID_AA64MMFR0_EL1_ECV_SELF_SYNCH, ENABLE_FEAT_ECV)

CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
		     ENABLE_FEAT_RNG)
CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
		     ENABLE_FEAT_TCR2)

CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
		     ENABLE_FEAT_S2POE)
CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
		     ENABLE_FEAT_S1POE)
static inline bool is_feat_sxpoe_supported(void)
{
	return is_feat_s1poe_supported() || is_feat_s2poe_supported();
}

CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
		     ENABLE_FEAT_S2PIE)
CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
		     ENABLE_FEAT_S1PIE)
static inline bool is_feat_sxpie_supported(void)
{
	return is_feat_s1pie_supported() || is_feat_s2pie_supported();
}

/* FEAT_GCS: Guarded Control Stack */
CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
		     ENABLE_FEAT_GCS)

/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
		     ENABLE_FEAT_AMU)
CREATE_FEATURE_FUNCS_VER(feat_amuv1p1, read_feat_amu_id_field,
			 ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)

/*
 * Return MPAM version:
 *
 * 0x00: None	Armv8.0 or later
 * 0x01: v0.1	Armv8.4 or later
 * 0x10: v1.0	Armv8.2 or later
 * 0x11: v1.1	Armv8.4 or later
 *
 */
static inline unsigned int read_feat_mpam_version(void)
{
	return (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
			((read_id_aa64pfr1_el1() >>
		ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}

CREATE_FEATURE_FUNCS_VER(feat_mpam, read_feat_mpam_version, 1U,
			 ENABLE_MPAM_FOR_LOWER_ELS)
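
/*
 * Worked example (illustrative): an implementation reporting
 * ID_AA64PFR0_EL1.MPAM == 0x1 and ID_AA64PFR1_EL1.MPAM_frac == 0x1 makes
 * read_feat_mpam_version() return (0x1 << 4) | 0x1 == 0x11, i.e. MPAM v1.1,
 * and is_feat_mpam_supported() then reports true for any non-zero version,
 * subject to the ENABLE_MPAM_FOR_LOWER_ELS build guard above.
 */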

/* FEAT_HCX: Extended Hypervisor Configuration Register */
CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
		     ENABLE_FEAT_HCX)

static inline bool is_feat_rng_trap_present(void)
{
	return (((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT) &
		ID_AA64PFR1_EL1_RNDR_TRAP_MASK)
		== ID_AA64PFR1_EL1_RNG_TRAP_SUPPORTED);
}

static inline unsigned int get_armv9_2_feat_rme_support(void)
{
	/*
	 * Return the RME version, or zero if RME is not supported. The return
	 * value can therefore be used both as the RME version number and as a
	 * presence check by comparing it against zero.
	 */
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}

/*********************************************************************************
 * Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
 ********************************************************************************/
static inline unsigned int read_feat_sb_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_SB_SHIFT);
}

/* FEAT_CSV2_2: Cache Speculation Variant 2 */
CREATE_FEATURE_FUNCS(feat_csv2, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT, 0)
CREATE_FEATURE_FUNCS_VER(feat_csv2_2, read_feat_csv2_id_field,
			 ID_AA64PFR0_CSV2_2_SUPPORTED, ENABLE_FEAT_CSV2_2)

/* FEAT_SPE: Statistical Profiling Extension */
CREATE_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
		     ENABLE_SPE_FOR_NS)

/* FEAT_SVE: Scalable Vector Extension */
CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
		     ENABLE_SVE_FOR_NS)

/* FEAT_RAS: Reliability, Availability, Serviceability */
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1,
		     ID_AA64PFR0_RAS_SHIFT, ENABLE_FEAT_RAS)

/* FEAT_DIT: Data Independent Timing instructions */
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1,
		     ID_AA64PFR0_DIT_SHIFT, ENABLE_FEAT_DIT)

CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1,
		     ID_AA64DFR0_TRACEVER_SHIFT, ENABLE_SYS_REG_TRACE_FOR_NS)

/* FEAT_TRF: Trace Filter */
CREATE_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
		     ENABLE_TRF_FOR_NS)

/* FEAT_NV2: Enhanced Nested Virtualization */
CREATE_FEATURE_FUNCS(feat_nv, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT, 0)
CREATE_FEATURE_FUNCS_VER(feat_nv2, read_feat_nv_id_field,
			 ID_AA64MMFR2_EL1_NV2_SUPPORTED, CTX_INCLUDE_NEVE_REGS)

/* FEAT_BRBE: Branch Record Buffer Extension */
CREATE_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
		     ENABLE_BRBE_FOR_NS)

/* FEAT_TRBE: Trace Buffer Extension */
CREATE_FEATURE_FUNCS(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
		     ENABLE_TRBE_FOR_NS)

static inline unsigned int read_feat_sme_fa64_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64smfr0_el1(),
			     ID_AA64SMFR0_EL1_SME_FA64_SHIFT);
}
/* FEAT_SMEx: Scalable Matrix Extension */
CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
		     ENABLE_SME_FOR_NS)
CREATE_FEATURE_FUNCS_VER(feat_sme2, read_feat_sme_id_field,
			 ID_AA64PFR1_EL1_SME2_SUPPORTED, ENABLE_SME2_FOR_NS)

/*******************************************************************************
 * Functions to get hardware translation granule support
 ******************************************************************************/

static inline unsigned int read_id_aa64mmfr0_el0_tgran4_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN4_SHIFT);
}

static inline unsigned int read_id_aa64mmfr0_el0_tgran16_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN16_SHIFT);
}

static inline unsigned int read_id_aa64mmfr0_el0_tgran64_field(void)
{
	return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
			     ID_AA64MMFR0_EL1_TGRAN64_SHIFT);
}
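
/*
 * Example use of the granule readers (hypothetical caller, not part of this
 * header). Per the Arm ARM, ID_AA64MMFR0_EL1.TGran4 reads as 0xF when the 4KB
 * translation granule is not implemented, and as 0x0 (or 0x1 when 52-bit
 * addresses are also supported) when it is:
 *
 *	if (read_id_aa64mmfr0_el0_tgran4_field() != 0xFU) {
 *		... the 4KB translation granule can be used ...
 *	}
 */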

static inline unsigned int read_feat_pmuv3_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_PMUVER_SHIFT);
}

static inline unsigned int read_feat_mtpmu_id_field(void)
{
	return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT);
}

static inline bool is_feat_mtpmu_supported(void)
{
	if (DISABLE_MTPMU == FEAT_STATE_DISABLED) {
		return false;
	}

	if (DISABLE_MTPMU == FEAT_STATE_ALWAYS) {
		return true;
	}

	unsigned int mtpmu = read_feat_mtpmu_id_field();

	return (mtpmu != 0U) && (mtpmu != ID_AA64DFR0_MTPMU_DISABLED);
}

#endif /* ARCH_FEATURES_H */