/*
 * Copyright (c) 2026, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <wa_cve_2025_0647_cpprctx.h>

	.globl	wa_cve_2025_0647_instruction_patch
	.globl	wa_cve_2025_0647_do_cpp_wa
	.globl	wa_cve_2025_0647_execute_cpp_el3

/*
 * wa_cve_2025_0647_instruction_patch
 *
 * Function to enable EL3 traps for all CPP RCTX instruction calls from lower
 * ELs to address CVE-2025-0647.
 *
 * Argument
 *	x0 - bit[3]   flag to use T32 opcode format
 *	     bit[2:0] patch slot to use
 *
 * Clobbers
 *	x0 - x3
 */
func wa_cve_2025_0647_instruction_patch
	/* Check bit 3 to see if we need the T32 opcode format. */
	tbnz	x0, #WA_USE_T32_OPCODE_SHIFT, use_t32_opcode

	/* A64 CPP RCTX opcode and mask (the mask ignores the Rt field). */
	ldr	x2, =0x00D50B73E0
	ldr	x3, =0x00FFFFFFE0
	b	apply_patch
use_t32_opcode:
	/* T32 CPP RCTX opcode and mask. */
	ldr	x2, =0x00EE670DF3
	ldr	x3, =0x00FFFF0FFF

apply_patch:
	/* Select the requested patch slot and program the opcode/mask pair. */
	and	x1, x0, #WA_PATCH_SLOT_MASK
	msr	WA_CPUPSELR_EL3, x1
	msr	WA_CPUPOR_EL3, x2
	msr	WA_CPUPMR_EL3, x3

	/* Enable the patch so that matching instructions trap to EL3. */
	ldr	x1, =0x800002001FF
	msr	WA_CPUPCR_EL3, x1
	isb

	ret
endfunc wa_cve_2025_0647_instruction_patch

/*
 * wa_cve_2025_0647_do_cpp_wa
 *
 * This function is called by the trap handler when CPP RCTX is trapped from
 * lower ELs, and by the EL3 API when the workaround is enabled. It performs
 * the core workaround procedure for the CPP RCTX bug. The CPP RCTX
 * instruction usually takes an argument in the form of a register, but that
 * is ignored for this workaround.
 *
 * Arguments
 *	x0 - Config flags for the workaround
 *	     bit[0] - indicates context is a trap handler and should ERET when done
 *	     bit[1] - perform the LS RCG AlwaysOn workaround
 *
 * Clobbers
 *	x0 - x5
 *
 * Register Purposes
 *	x0 - Config flags
 *	x1 - Backup SCR_EL3
 *	x2 - Backup CPUACTLR2
 *	x3 - Backup CPUACTLR
 *	x4 - Backup CPUECTLR
 *	x5 - Scratch register
 */
func wa_cve_2025_0647_do_cpp_wa
	/* Synchronize profiling, trace and outstanding memory accesses. */
	psb	csync
	tsb	csync
	dsb	osh

	/* Stash SCR_EL3 so we can restore it later. */
	mrs	x1, SCR_EL3

	/*
	 * On some cores, disabling hardware prefetch can result in a
	 * deadlock. Setting this bit enables LS RCG AlwaysOn, which prevents
	 * the issue at the expense of increased power consumption for the
	 * duration of this handler.
	 */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_enable
	mrs	x2, WA_CPUACTLR2_EL1
	orr	x5, x2, #BIT(29)
	msr	WA_CPUACTLR2_EL1, x5
	isb
skip_ls_rcg_alwayson_enable:

	/* Disable branch prediction and stash CPUACTLR_EL1 in x3. */
	mrs	x3, WA_CPUACTLR_EL1
	orr	x5, x3, #BIT(0)
	msr	WA_CPUACTLR_EL1, x5

	/* Disable hardware prefetch and stash CPUECTLR_EL1 in x4. */
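	/*
	 * Note: branch prediction and hardware prefetch stay in this state
	 * for the whole CPP and TLBI sequence that follows; the saved
	 * CPUACTLR_EL1 and CPUECTLR_EL1 values are restored once the
	 * invalidation is complete.
	 */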
	mrs	x4, WA_CPUECTLR_EL1
	orr	x5, x4, #BIT(15)
	msr	WA_CPUECTLR_EL1, x5

	isb

	/*
	 * Execute the CPP instruction for EL3 / root state.
	 * The Xt value encodes {GVMID, NSE, NS, EL, GASID} in bits
	 * {48, 27, 26, 25:24, 16} respectively; the calls below only
	 * rewrite bits [31:16].
	 * EL3 (root): 0x000100000b010000 {GVMID,NSE,NS,EL,GASID} = {1,1,0,11,1}
	 */
	movz	x5, #0x0001, LSL #48
	movk	x5, #0x0B01, LSL #16
	cpp	rctx, x5

#if ENABLE_RME
	/*
	 * Execute CPP instructions for realm state.
	 * RL-EL2: 0x000100000e010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,10,1}
	 */
	movk	x5, #0x0E01, LSL #16
	cpp	rctx, x5

	/* RL-EL1: 0x000100000d010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,01,1} */
	movk	x5, #0x0D01, LSL #16
	cpp	rctx, x5

	/* RL-EL0: 0x000100000c010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,00,1} */
	movk	x5, #0x0C01, LSL #16
	cpp	rctx, x5
#endif /* ENABLE_RME */

	/*
	 * Execute CPP instructions for non-secure state.
	 * NS-EL2: 0x0001000006010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,10,1}
	 */
	movk	x5, #0x0601, LSL #16
	cpp	rctx, x5

	/* NS-EL1: 0x0001000005010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,01,1} */
	movk	x5, #0x0501, LSL #16
	cpp	rctx, x5

	/* NS-EL0: 0x0001000004010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,00,1} */
	movk	x5, #0x0401, LSL #16
	cpp	rctx, x5

	/*
	 * Execute CPP instructions for secure state.
	 * S-EL1: 0x0001000001010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,01,1}
	 */
	movk	x5, #0x0101, LSL #16
	cpp	rctx, x5

	/* S-EL0: 0x0001000000010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,00,1} */
	movk	x5, #0x0001, LSL #16
	cpp	rctx, x5

	/* Check secure EL2 presence. */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_cpp

	/* S-EL2: 0x0001000002010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,10,1} */
	movk	x5, #0x0201, LSL #16
	cpp	rctx, x5

el3_handler_skip_sel2_cpp:
	dsb	sy

	/* EL3 / root state TLBI */
	tlbi	alle3

#if ENABLE_RME
	/* Realm state TLBI {NSE,NS} = {1,1} */
	orr	x5, x1, #SCR_NS_BIT
	orr	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2
#endif /* ENABLE_RME */

	/* Non-secure state TLBI {NSE,NS} = {0,1} */
	orr	x5, x1, #SCR_NS_BIT
	bic	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2

	/* Secure state TLBI {NSE,NS} = {0,0} */
	bic	x5, x5, #SCR_NS_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1

	/* Check if we need to invalidate for S-EL2. */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_tlbi
	tlbi	alle2

el3_handler_skip_sel2_tlbi:
	/* Clean up and restore register values. */
	dsb	sy
	msr	SCR_EL3, x1

	/* Restore ECTLR and ACTLR values. */
	msr	WA_CPUACTLR_EL1, x3
	msr	WA_CPUECTLR_EL1, x4

	isb

	/* Restore ACTLR2 if needed. */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_disable
	msr	WA_CPUACTLR2_EL1, x2
	isb
skip_ls_rcg_alwayson_disable:

	/* Skip ERET if this is not an exception handler call. */
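	/*
	 * In the trap handler case, the GP registers clobbered above were
	 * saved in the EL3 context frame on exception entry; they are
	 * reloaded from that frame below and ELR_EL3 is advanced past the
	 * trapped CPP RCTX instruction before the ERET.
	 */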
	tbz	x0, #WA_IS_TRAP_HANDLER_BIT, skip_eret

	/* Update ELR_EL3 to skip the triggering instruction. */
	mrs	x5, ELR_EL3
	add	x5, x5, #4
	msr	ELR_EL3, x5

	/* Restore context and ERET. */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	exception_return

skip_eret:
	ret
endfunc wa_cve_2025_0647_do_cpp_wa

/*
 * wa_cve_2025_0647_execute_cpp_el3
 *
 * If a CPP RCTX instruction is needed in EL3 firmware, this API can be used.
 * It performs the workaround steps without the overhead of a trap and
 * exception handling, and it simplifies the code since nested exceptions are
 * generally not supported in EL3 outside of specific circumstances.
 *
 * Arguments
 *	x0 - CPP RCTX argument to use when the workaround is not needed. This
 *	     argument is ignored on systems with the workaround enabled, since
 *	     the workaround procedure does not use the argument and performs
 *	     CPP RCTX for all contexts. It is kept for compatibility in
 *	     multi-core systems where some cores might need this workaround
 *	     and others do not.
 *
 * Clobbers
 *	x0 - x7
 */
func wa_cve_2025_0647_execute_cpp_el3
	mov	x7, x0
	mov	x6, lr

	/* Get the CPU ops so we can access the trap handler. */
	bl	get_cpu_ops_ptr
	mov	lr, x6
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/* If no handler exists, skip the workaround as it is not enabled. */
	cbz	x0, skip_wa

	/*
	 * The EL3 handler expects x1 to contain EC=0x1F when handling a trap,
	 * so clear x1 so it knows it came from this API instead.
	 */
	mov	x1, #0

	br	x0

skip_wa:
	cpp	rctx, x7
	ret
endfunc wa_cve_2025_0647_execute_cpp_el3
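/*
 * Usage note (illustrative only): from EL3 C code the API above would be
 * reached through a prototype such as the hypothetical declaration below,
 * with the CPP RCTX argument passed in x0. The parameter name and the header
 * it would live in are assumptions, not taken from this file.
 *
 *	void wa_cve_2025_0647_execute_cpp_el3(u_register_t cpp_rctx_arg);
 */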