/*
 * Copyright (c) 2019-2022, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>

#include <arch_helpers.h>
#include <common/feat_detect.h>

static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

static inline bool is_armv8_1_pan_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_PAN_SHIFT) &
		ID_AA64MMFR1_EL1_PAN_MASK) != 0U;
}

static inline bool is_armv8_1_vhe_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_VHE_SHIFT) &
		ID_AA64MMFR1_EL1_VHE_MASK) != 0U;
}

static inline bool is_armv8_2_ttcnp_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}

static inline bool is_feat_pacqarma3_present(void)
{
	uint64_t mask_id_aa64isar2 =
		(ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
		(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT);

	/* If any of the fields is not zero, the QARMA3 algorithm is present */
	return (read_id_aa64isar2_el1() & mask_id_aa64isar2) != 0U;
}

static inline bool is_armv8_3_pauth_present(void)
{
	uint64_t mask_id_aa64isar1 =
		(ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
		(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
		(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
		(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/*
	 * If any of the fields is not zero, or QARMA3 is present,
	 * PAuth is present.
	 */
	return ((read_id_aa64isar1_el1() & mask_id_aa64isar1) != 0U ||
		is_feat_pacqarma3_present());
}

static inline bool is_armv8_4_dit_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == 1U;
}

static inline bool is_armv8_4_ttst_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
		ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}

static inline bool is_armv8_5_bti_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}

static inline unsigned int get_armv8_5_mte_support(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
		ID_AA64PFR1_EL1_MTE_MASK);
}

static inline bool is_armv8_4_sel2_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) == 1ULL;
}

static inline bool is_armv8_6_twed_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_TWED_SHIFT) &
		ID_AA64MMFR1_EL1_TWED_MASK) == ID_AA64MMFR1_EL1_TWED_SUPPORTED);
}

static inline unsigned int read_feat_fgt_id_field(void)
{
	return (read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_FGT_SHIFT) &
		ID_AA64MMFR0_EL1_FGT_MASK;
}

static inline bool is_feat_fgt_supported(void)
{
	if (ENABLE_FEAT_FGT == FEAT_STATE_DISABLED) {
		return false;
	}

	if (ENABLE_FEAT_FGT == FEAT_STATE_ALWAYS) {
		return true;
	}

	return read_feat_fgt_id_field() != 0U;
}
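
/*
 * Illustrative sketch only, not part of the TF-A API: a hypothetical
 * caller-side check showing the intent of the FEAT_STATE pattern used by
 * is_feat_fgt_supported() above. When ENABLE_FEAT_FGT is FEAT_STATE_DISABLED
 * or FEAT_STATE_ALWAYS, the condition below folds to a compile-time constant,
 * so the guarded setup is either dropped entirely or emitted unconditionally;
 * for any other setting the ID register is probed at runtime. The function
 * name is a placeholder.
 */
static inline void example_fgt_init_sketch(void)
{
	if (is_feat_fgt_supported()) {
		/* A real caller would program the HFG*_EL2 trap registers here. */
	}
}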

static inline unsigned long int get_armv8_6_ecv_support(void)
{
	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_ECV_SHIFT) &
		ID_AA64MMFR0_EL1_ECV_MASK);
}

static inline bool is_armv8_5_rng_present(void)
{
	return ((read_id_aa64isar0_el1() >> ID_AA64ISAR0_RNDR_SHIFT) &
		ID_AA64ISAR0_RNDR_MASK) != 0U;
}

static inline bool is_armv8_6_feat_amuv1p1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1P1);
}

/*
 * Return the MPAM version:
 *
 * 0x00: None (Armv8.0 or later)
 * 0x01: v0.1 (Armv8.4 or later)
 * 0x10: v1.0 (Armv8.2 or later)
 * 0x11: v1.1 (Armv8.4 or later)
 */
static inline unsigned int get_mpam_version(void)
{
	return (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
		((read_id_aa64pfr1_el1() >>
		ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}
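
/*
 * Illustrative sketch only, not part of the TF-A API: a hypothetical helper
 * showing how the packed value returned by get_mpam_version() above decodes,
 * assuming the 0x00/0x01/0x10/0x11 encoding documented there (major version
 * in bits [7:4], fractional version in bits [3:0]).
 */
static inline bool example_mpam_present_sketch(void)
{
	unsigned int mpam_ver = get_mpam_version();

	/* 0x00 means MPAM is absent; e.g. 0x10 decodes as v1.0, 0x11 as v1.1. */
	return mpam_ver != 0U;
}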

static inline bool is_feat_hcx_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_HCX_SHIFT) &
		ID_AA64MMFR1_EL1_HCX_MASK) == ID_AA64MMFR1_EL1_HCX_SUPPORTED);
}

static inline bool is_feat_rng_trap_present(void)
{
	return (((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT) &
		ID_AA64PFR1_EL1_RNDR_TRAP_MASK)
		== ID_AA64PFR1_EL1_RNG_TRAP_SUPPORTED);
}

static inline unsigned int get_armv9_2_feat_rme_support(void)
{
	/*
	 * Return the RME version, zero if not supported. The return value can
	 * be used both as the RME version number and compared against zero to
	 * detect RME presence.
	 */
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}

/*********************************************************************************
 * Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
 ********************************************************************************/
static inline bool is_armv8_0_feat_sb_present(void)
{
	return (((read_id_aa64isar1_el1() >> ID_AA64ISAR1_SB_SHIFT) &
		ID_AA64ISAR1_SB_MASK) == ID_AA64ISAR1_SB_SUPPORTED);
}

/*********************************************************************************
 * Function to identify the presence of FEAT_CSV2_2 (Cache Speculation Variant 2)
 ********************************************************************************/
static inline bool is_armv8_0_feat_csv2_2_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_CSV2_SHIFT) &
		ID_AA64PFR0_CSV2_MASK) == ID_AA64PFR0_CSV2_2_SUPPORTED);
}

/**********************************************************************************
 * Function to identify the presence of FEAT_SPE (Statistical Profiling Extension)
 *********************************************************************************/
static inline bool is_armv8_2_feat_spe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_PMS_SHIFT) &
		ID_AA64DFR0_PMS_MASK) != ID_AA64DFR0_SPE_NOT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_SVE (Scalable Vector Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_sve_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SVE_SHIFT) &
		ID_AA64PFR0_SVE_MASK) == ID_AA64PFR0_SVE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_RAS (Reliability, Availability,
 * and Serviceability Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_ras_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_RAS_SHIFT) &
		ID_AA64PFR0_RAS_MASK) != ID_AA64PFR0_RAS_NOT_SUPPORTED);
}

/**************************************************************************
 * Function to identify the presence of FEAT_DIT (Data Independent Timing)
 *************************************************************************/
static inline bool is_armv8_4_feat_dit_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == ID_AA64PFR0_DIT_SUPPORTED);
}

/*************************************************************************
 * Function to identify the presence of FEAT_TRF (Trace Filter Control)
 ************************************************************************/
static inline bool is_arm8_4_feat_trf_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEFILT_SHIFT) &
		ID_AA64DFR0_TRACEFILT_MASK) == ID_AA64DFR0_TRACEFILT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_AMUv1 (Activity Monitors
 * Extension v1)
 ******************************************************************************/
static inline bool is_armv8_4_feat_amuv1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1);
}

/********************************************************************************
 * Function to identify the presence of FEAT_NV2 (Enhanced Nested Virtualization
 * Support)
 *******************************************************************************/
static inline unsigned int get_armv8_4_feat_nv_support(void)
{
	return (((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_NV_SHIFT) &
		ID_AA64MMFR2_EL1_NV_MASK));
}

/*******************************************************************************
 * Function to identify the presence of FEAT_BRBE (Branch Record Buffer
 * Extension)
 ******************************************************************************/
static inline bool is_feat_brbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_BRBE_SHIFT) &
		ID_AA64DFR0_BRBE_MASK) == ID_AA64DFR0_BRBE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_TRBE (Trace Buffer Extension)
 ******************************************************************************/
static inline bool is_feat_trbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEBUFFER_SHIFT) &
		ID_AA64DFR0_TRACEBUFFER_MASK) == ID_AA64DFR0_TRACEBUFFER_SUPPORTED);
}

#endif /* ARCH_FEATURES_H */
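
/*
 * Illustrative usage sketch only, not part of the TF-A API and deliberately
 * compiled out: it shows how a caller (normally in a .c file rather than in
 * this header) could consume get_armv9_2_feat_rme_support() in the two ways
 * its comment describes, as a presence check and as a version number. The
 * function name is a hypothetical placeholder.
 */
#if 0
static inline bool example_rme_present_sketch(void)
{
	unsigned int rme_ver = get_armv9_2_feat_rme_support();

	/* Zero means RME is not implemented; any other value is its version. */
	return rme_ver != 0U;
}
#endif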