xref: /rk3399_ARM-atf/lib/cpus/aarch64/wa_cve_2025_0647_cpprctx.S (revision 47d1aa275adaf0a30b188fcc47b946277b7592d9)
/*
 * Copyright (c) 2026, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <wa_cve_2025_0647_cpprctx.h>

	.globl	wa_cve_2025_0647_instruction_patch
	.globl	wa_cve_2025_0647_do_cpp_wa
	.globl	wa_cve_2025_0647_execute_cpp_el3

/*
 * wa_cve_2025_0647_instruction_patch
 *
 * Function to enable EL3 traps for all CPP RCTX instruction calls from lower
 * ELs to address CVE-2025-0647.
 *
 * Argument
 *   x0 - bit[3]   flag to use T32 opcode format
 *        bit[2:0] patch slot to use
 *
 * Clobbers
 *   x0 - x3
 */
func wa_cve_2025_0647_instruction_patch
	/*
	 * Pick the opcode/mask pair matching the CPP RCTX encoding the lower
	 * EL will execute: T32 when bit[3] of x0 is set, A64 otherwise. The
	 * mask leaves the instruction's register-operand field as don't-care
	 * so any source register is trapped.
	 */
	tbz	x0, #WA_USE_T32_OPCODE_SHIFT, 1f

	/* T32 encoding of CPP RCTX and its match mask. */
	ldr	x2, =0x00EE670DF3
	ldr	x3, =0x00FFFF0FFF
	b	2f
1:
	/* A64 encoding of CPP RCTX and its match mask. */
	ldr	x2, =0x00D50B73E0
	ldr	x3, =0x00FFFFFFE0
2:
	/* Select the requested patch slot and program opcode + match mask. */
	and	x1, x0, #WA_PATCH_SLOT_MASK
	msr	WA_CPUPSELR_EL3, x1
	msr	WA_CPUPOR_EL3, x2
	msr	WA_CPUPMR_EL3, x3

	/* Arm the slot so matching instructions from lower ELs trap to EL3. */
	ldr	x1, =0x800002001FF
	msr	WA_CPUPCR_EL3, x1
	isb

	ret
endfunc wa_cve_2025_0647_instruction_patch

/*
 * wa_cve_2025_0647_do_cpp_wa
 *
 * This function is called by the trap handler when CPP RCTX is trapped from
 * lower ELs, and is also used by the EL3 API when the workaround is enabled.
 * It performs the core workaround procedure for the CPP RCTX bug. The CPP RCTX
 * instruction usually takes an argument in the form of a register, but that is
 * ignored for this workaround.
 *
 * Arguments
 *   x0 - Config flags for the workaround
 *        bit[0] - indicates context is a trap handler and should ERET when done
 *        bit[1] - perform LS RCG AlwaysOn workaround
 *
 * Clobbers
 *   x0 - x5
 *
 * Register Purposes
 *   x0 - Config flags
 *   x1 - Backup SCR_EL3
 *   x2 - Backup CPUACTLR2
 *   x3 - Backup CPUACTLR
 *   x4 - Backup CPUECTLR
 *   x5 - Scratch register
 */
func wa_cve_2025_0647_do_cpp_wa
	/* give the assembler access to the cpp rctx instruction */
	.arch_extension predres
	/*
	 * Synchronize outstanding profiling/trace data and complete all
	 * earlier memory accesses before the workaround sequence begins.
	 */
	psb_csync
	tsb_csync
	dsb	osh

	/* Stash SCR_EL3 so we can restore it later. */
	mrs	x1, SCR_EL3

	/*
	 * There is an issue on some cores where disabling hardware prefetch can
	 * result in a deadlock, setting this bit enables LS RCG AlwaysOn which
	 * will prevent this issue, at the expense of increased power consumption
	 * for the duration of this handler.
	 */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_enable
	mrs	x2, WA_CPUACTLR2_EL1	/* x2 = saved CPUACTLR2 for restore */
	orr	x5, x2, #BIT(29)
	msr	WA_CPUACTLR2_EL1, x5
	isb
skip_ls_rcg_alwayson_enable:

	/* Disable branch prediction and stash CPUACTLR_EL1 in x3. */
	mrs	x3, WA_CPUACTLR_EL1
	orr	x5, x3, #BIT(0)
	msr	WA_CPUACTLR_EL1, x5

	/* Disable hardware prefetch and stash CPUECTLR_EL1 in x4. */
	mrs	x4, WA_CPUECTLR_EL1
	orr	x5, x4, #BIT(15)
	msr	WA_CPUECTLR_EL1, x5

	/* Make both control register writes take effect before any CPP RCTX. */
	isb

	/*
	 * Execute CPP instruction for EL3 / root state
	 * EL3_rt: 0x000100000b010000 {GVMID,NSE,NS,EL,GASID} = {1,1,0,11,1}
	 *
	 * x5 holds the RCTX context descriptor; only bits[31:16] differ
	 * between the target contexts below, so each subsequent context is
	 * selected with a single movk.
	 */
	movz	x5, #0x0001, LSL #48
	movk	x5, #0x0B01, LSL #16
	cpp	rctx, x5

#if ENABLE_RMM
	/*
	 * Execute CPP instructions for realm state
	 * RL-EL2: 0x000100000e010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,10,1}
	 */
	movk	x5, #0x0E01, LSL #16
	cpp	rctx, x5

	/* RL-EL1: 0x000100000d010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,01,1} */
	movk	x5, #0x0D01, LSL #16
	cpp	rctx, x5

	/* RL-EL0: 0x000100000c010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,00,1} */
	movk	x5, #0x0C01, LSL #16
	cpp	rctx, x5
#endif /* ENABLE_RMM */

	/*
	 * Execute CPP instructions for non-secure state
	 * EL2_ns: 0x0001000006010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,10,1}
	 */
	movk	x5, #0x0601, LSL #16
	cpp	rctx, x5

	/* NS-EL1: 0x0001000005010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,01,1} */
	movk	x5, #0x0501, LSL #16
	cpp	rctx, x5

	/* NS-EL0: 0x0001000004010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,00,1} */
	movk	x5, #0x0401, LSL #16
	cpp	rctx, x5

	/*
	 * Execute CPP instructions for secure state
	 * EL1_s: 0x0001000001010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,01,1}
	 */
	movk	x5, #0x0101, LSL #16
	cpp	rctx, x5

	/* S-EL0: 0x0001000000010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,00,1} */
	movk	x5, #0x0001, LSL #16
	cpp	rctx, x5

	/* Check secure EL2 presence (from the SCR_EL3 value stashed in x1). */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_cpp

	/* S-EL2: 0x0001000002010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,10,1} */
	movk	x5, #0x0201, LSL #16
	cpp	rctx, x5

el3_handler_skip_sel2_cpp:
	/* Complete every CPP RCTX before the TLB invalidations below. */
	dsb	sy

	/* EL3 / root state TLBI */
	tlbi	alle3

#if ENABLE_RMM
	/*
	 * Realm state TLBI {NSE,NS} = {1,1}
	 * The EL1/EL2 TLBIs issued below apply to the security state selected
	 * by SCR_EL3.{NSE,NS}, hence the SCR_EL3 switch before each group.
	 */
	orr	x5, x1, #SCR_NS_BIT
	orr	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2
#endif /* ENABLE_RMM */

	/* Non-secure state TLBI {NSE,NS} = {0,1} */
	orr	x5, x1, #SCR_NS_BIT
	bic	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2

	/* Secure state TLBI {NSE,NS} = {0,0} */
	bic	x5, x5, #SCR_NS_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1

	/* Check if we need to invalidate for S-EL2. */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_tlbi
	tlbi	alle2

el3_handler_skip_sel2_tlbi:
	/* Clean up and restore register values. */
	dsb	sy
	msr	SCR_EL3, x1

	/* Restore ECTLR and ACTLR values. */
	msr	WA_CPUACTLR_EL1, x3
	msr	WA_CPUECTLR_EL1, x4

	isb

	/* Restore ACTLR2 if needed. */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_disable
	msr	WA_CPUACTLR2_EL1, x2
	isb
skip_ls_rcg_alwayson_disable:

	/* Skip ERET if this is not an exception handler call. */
	tbz	x0, #WA_IS_TRAP_HANDLER_BIT, skip_eret

	/*
	 * Update ELR_EL3 to skip the triggering instruction
	 * NOTE(review): assumes the trapped encoding is 4 bytes; the A64 and
	 * 32-bit T32 CPP RCTX encodings both are — confirm no 16-bit case.
	 */
	mrs	x5, ELR_EL3
	add	x5, x5, #4
	msr	ELR_EL3, x5

	/*
	 * Restore context and ERET
	 * Reload the GP registers this routine clobbered (x0-x5), plus the
	 * link register (presumably saved by the exception vector — verify),
	 * from the EL3 context frame on sp.
	 */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	exception_return

skip_eret:
	ret
endfunc wa_cve_2025_0647_do_cpp_wa

/*
 * wa_cve_2025_0647_execute_cpp_el3
 *
 * If a CPP RCTX instruction is needed in EL3 firmware, this API can be used.
 * It performs the workaround steps without requiring a trap or exception
 * handling overhead, and simplifies the code since we do not generally support
 * nested exceptions in EL3 outside of specific circumstances.
 *
 * Arguments
 *   x0 - CPP RCTX argument to use when the workaround is not needed. This
 *        argument is ignored on systems with the workaround enabled, since
 *        the workaround procedure does not use the argument and does CPP RCTX
 *        for all contexts. It is here for compatibility in multi-core
 *        systems where some cores might need this workaround and others do not.
 *
 * Clobbers
 *   x0 - x7
 */
func wa_cve_2025_0647_execute_cpp_el3
	/*
	 * Preserve the caller's CPP RCTX argument and return address; both
	 * live in registers the upcoming call is free to clobber.
	 */
	mov	x6, lr
	mov	x7, x0

	/* Look up this core's errata trap handler through its cpu_ops. */
	bl	get_cpu_ops_ptr
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]
	mov	lr, x6

	/*
	 * A missing handler means the workaround is not enabled on this
	 * core: issue a plain CPP RCTX with the caller-supplied argument.
	 */
	cbnz	x0, 1f
	cpp	rctx, x7
	ret

1:
	/*
	 * The handler distinguishes a trap entry (x1 carries EC=0x1F) from
	 * this direct API path, so clear x1 to select the API route. Tail-
	 * call the handler; it returns to our caller via the restored lr.
	 */
	mov	x1, #0
	br	x0
endfunc wa_cve_2025_0647_execute_cpp_el3
284