/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

#if !DYNAMIC_WORKAROUND_CVE_2018_3639
#error Cortex A76 requires DYNAMIC_WORKAROUND_CVE_2018_3639=1
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x2
	mrs	x3, esr_el3
	mov_imm	w2, \_esr_el3_val
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
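
	/*
	 * Vector table used while the dynamic CVE-2018-3639 mitigation is
	 * active. cortex_a76_reset_func installs it in VBAR_EL3 when the PE
	 * does not implement SSBS. Exceptions taken from the current EL fall
	 * through to the default runtime handlers, while exceptions taken
	 * from lower ELs first apply the mitigation via
	 * apply_cve_2018_3639_wa.
	 */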

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
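
	/*
	 * Each errata workaround function below expects the CPU variant and
	 * revision in x0, as returned by cpu_get_rev_var. It calls the
	 * corresponding check_errata_* function and applies the fix only if
	 * the check reports that this revision is affected.
	 */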

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	/* Applies to revisions <= r1p0 */
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	/* Applies to revisions <= r2p0 */
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	/* Applies to revisions <= r2p0 */
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639
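
	/*
	 * CVE-2018-3639 mitigation disable hook, registered through
	 * declare_cpu_ops_wa below. apply_cve_2018_3639_wa stores the address
	 * of this function in the CPU context so that `el3_exit` invokes it
	 * when the lower EL has requested the mitigation to be disabled.
	 */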
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
	cbnz	x0, 1f

	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required to apply the mitigation against
	 * CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif

1:
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif
	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn