/*
 * Copyright (c) 2026, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <wa_cve_2025_0647_cpprctx.h>

	.globl	wa_cve_2025_0647_instruction_patch
	.globl	wa_cve_2025_0647_do_cpp_wa
	.globl	wa_cve_2025_0647_execute_cpp_el3

/*
 * wa_cve_2025_0647_instruction_patch
 *
 * Function to enable EL3 traps for all CPP RCTX instruction calls from lower
 * ELs to address CVE-2025-0647. Uses the implementation-defined CPU
 * instruction-patching registers (CPUPSELR/CPUPOR/CPUPMR/CPUPCR) to match the
 * CPP RCTX encoding and redirect it to EL3.
 *
 * Argument
 *   x0 - bit[3]   flag to use T32 opcode format
 *        bit[2:0] patch slot to use
 *
 * Clobbers
 *   x0 - x3
 */
func wa_cve_2025_0647_instruction_patch
	/* Check bit 3 to see if we need T32 opcode format. */
	tbnz	x0, #WA_USE_T32_OPCODE_SHIFT, use_t32_opcode
	/*
	 * A64 CPP RCTX opcode (x2) and match mask (x3). The mask keeps all
	 * opcode bits except bits[4:0], so the patch matches regardless of
	 * which Xt register the instruction names (bits[4:0] encode Rt in the
	 * A64 SYS encoding).
	 */
	ldr	x2, =0x00D50B73E0
	ldr	x3, =0x00FFFFFFE0
	b	apply_patch
use_t32_opcode:
	/*
	 * T32 form of the instruction (CP15 coprocessor encoding). The mask
	 * leaves bits[15:12] as don't-care — presumably the Rt field of the
	 * T32 encoding; TODO confirm against the Arm ARM encoding diagram.
	 */
	ldr	x2, =0x00EE670DF3
	ldr	x3, =0x00FFFF0FFF

apply_patch:
	/* Select the requested patch slot (low bits of x0). */
	and	x1, x0, #WA_PATCH_SLOT_MASK
	msr	WA_CPUPSELR_EL3, x1
	/* Program the opcode to match and the bit-mask for the comparison. */
	msr	WA_CPUPOR_EL3, x2
	msr	WA_CPUPMR_EL3, x3
	/*
	 * Arm the patch. The control value is implementation defined; per this
	 * function's purpose it configures matching instructions from lower
	 * ELs to trap to EL3.
	 */
	ldr	x1, =0x800002001FF
	msr	WA_CPUPCR_EL3, x1
	/* Ensure the patch configuration takes effect before returning. */
	isb

	ret
endfunc wa_cve_2025_0647_instruction_patch
50*416b8613SJohn Powell
/*
 * wa_cve_2025_0647_do_cpp_wa
 *
 * This function is also called by the trap handler when CPP RCTX is trapped
 * from lower ELs and also used by the EL3 API when the workaround is enabled.
 * It performs the core workaround procedure for the CPP RCTX bug. The CPP RCTX
 * instruction usually takes an argument in the form of a register but that is
 * ignored for this workaround: the procedure issues CPP RCTX for every
 * security state / EL context instead, then invalidates the TLBs for each
 * security state.
 *
 * Arguments
 *   x0 - Config flags for the workaround
 *        bit[0] - indicates context is a trap handler and should ERET when done
 *        bit[1] - perform ls rcg alwayson workaround
 *
 * Clobbers
 *   x0 - x5
 *
 * Register Purposes
 *   x0 - Config flags
 *   x1 - Backup SCR_EL3
 *   x2 - Backup CPUACTLR2
 *   x3 - Backup CPUACTLR
 *   x4 - Backup CPUECTLR
 *   x5 - Scratch register
 *
 * When bit[0] of x0 is set, the caller must have saved x0-x5 and x30 in the
 * CTX_GPREGS area at the top of the EL3 stack (sp), as this function restores
 * them from there before exception_return.
 */
func wa_cve_2025_0647_do_cpp_wa
	/*
	 * Drain profiling (SPE) and trace buffer operations, then ensure all
	 * prior memory accesses in the outer shareable domain complete before
	 * the workaround starts reconfiguring the CPU.
	 */
	psb	csync
	tsb	csync
	dsb	osh

	/* Stash SCR_EL3 so we can restore it later. */
	mrs	x1, SCR_EL3

	/*
	 * There is an issue on some cores where disabling hardware prefetch can
	 * result in a deadlock, setting this bit enables LS RCG AlwaysOn which
	 * will prevent this issue, at the expense of increased power consumption
	 * for the duration of this handler.
	 */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_enable
	mrs	x2, WA_CPUACTLR2_EL1
	orr	x5, x2, #BIT(29)	/* LS RCG AlwaysOn enable bit */
	msr	WA_CPUACTLR2_EL1, x5
	isb
skip_ls_rcg_alwayson_enable:

	/* Disable branch prediction and stash CPUACTLR_EL1 in x3. */
	mrs	x3, WA_CPUACTLR_EL1
	orr	x5, x3, #BIT(0)
	msr	WA_CPUACTLR_EL1, x5

	/* Disable hardware prefetch and stash CPUECTLR_EL1 in x4. */
	mrs	x4, WA_CPUECTLR_EL1
	orr	x5, x4, #BIT(15)
	msr	WA_CPUECTLR_EL1, x5

	/* Make the CPUACTLR/CPUECTLR changes visible before issuing CPP. */
	isb

	/*
	 * Execute CPP instruction for EL3 / root state.
	 * The x5 argument encodes the target context; only the 16-bit field at
	 * bits[31:16] changes between contexts below, so movk is used to
	 * rewrite just that field for each subsequent call.
	 * EL3_rt: 0x000100000b010000 {GVMID,NSE,NS,EL,GASID} = {1,1,0,11,1}
	 */
	movz	x5, #0x0001, LSL #48
	movk	x5, #0x0B01, LSL #16
	cpp	rctx, x5

#if ENABLE_RME
	/*
	 * Execute CPP instructions for realm state
	 * RL-EL2: 0x000100000e010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,10,1}
	 */
	movk	x5, #0x0E01, LSL #16
	cpp	rctx, x5

	/* RL-EL1: 0x000100000d010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,01,1} */
	movk	x5, #0x0D01, LSL #16
	cpp	rctx, x5

	/* RL-EL0: 0x000100000c010000 {GVMID,NSE,NS,EL,GASID} = {1,1,1,00,1} */
	movk	x5, #0x0C01, LSL #16
	cpp	rctx, x5
#endif /* ENABLE_RME */

	/*
	 * Execute CPP instructions for non-secure state
	 * EL2_ns: 0x0001000006010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,10,1}
	 */
	movk	x5, #0x0601, LSL #16
	cpp	rctx, x5

	/* NS-EL1: 0x0001000005010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,01,1} */
	movk	x5, #0x0501, LSL #16
	cpp	rctx, x5

	/* NS-EL0: 0x0001000004010000 {GVMID,NSE,NS,EL,GASID} = {1,0,1,00,1} */
	movk	x5, #0x0401, LSL #16
	cpp	rctx, x5

	/*
	 * Execute CPP instructions for secure state
	 * EL1_s: 0x0001000001010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,01,1}
	 */
	movk	x5, #0x0101, LSL #16
	cpp	rctx, x5

	/* S-EL0: 0x0001000000010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,00,1} */
	movk	x5, #0x0001, LSL #16
	cpp	rctx, x5

	/* Check secure EL2 presence (SCR_EL3.EEL2 from the saved copy). */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_cpp

	/* S-EL2: 0x0001000002010000 {GVMID,NSE,NS,EL,GASID} = {1,0,0,10,1} */
	movk	x5, #0x0201, LSL #16
	cpp	rctx, x5

el3_handler_skip_sel2_cpp:
	/* Complete all CPP operations before the TLB invalidations below. */
	dsb	sy

	/* EL3 / root state TLBI */
	tlbi	alle3

	/*
	 * The EL1/EL2 TLBI instructions below target whichever security state
	 * SCR_EL3.{NSE,NS} currently selects, so SCR_EL3 is rewritten (from
	 * the backup in x1) before each group and restored afterwards.
	 */
#if ENABLE_RME
	/* Realm state TLBI {NSE,NS} = {1,1} */
	orr	x5, x1, #SCR_NS_BIT
	orr	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2
#endif /* ENABLE_RME */

	/* Non-secure state TLBI {NSE,NS} = {0,1} */
	orr	x5, x1, #SCR_NS_BIT
	bic	x5, x5, #SCR_NSE_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1
	tlbi	alle2

	/* Secure state TLBI {NSE,NS} = {0,0} */
	bic	x5, x5, #SCR_NS_BIT
	msr	SCR_EL3, x5
	isb
	tlbi	alle1

	/* Check if we need to invalidate for S-EL2. */
	tbz	x1, #SCR_EEL2_SHIFT, el3_handler_skip_sel2_tlbi
	tlbi	alle2

el3_handler_skip_sel2_tlbi:
	/* Clean up and restore register values. */
	dsb	sy
	msr	SCR_EL3, x1

	/* Restore ECTLR and ACTLR values. */
	msr	WA_CPUACTLR_EL1, x3
	msr	WA_CPUECTLR_EL1, x4

	isb

	/* Restore ACTLR2 if needed. */
	tbz	x0, #WA_LS_RCG_EN_BIT, skip_ls_rcg_alwayson_disable
	msr	WA_CPUACTLR2_EL1, x2
	isb
skip_ls_rcg_alwayson_disable:

	/* Skip ERET if this is not an exception handler call. */
	tbz	x0, #WA_IS_TRAP_HANDLER_BIT, skip_eret

	/*
	 * Update ELR_EL3 to skip the triggering instruction.
	 * NOTE(review): the fixed +4 step assumes the trapped encoding is
	 * always 4 bytes (true for A64; presumed true for the trapped T32
	 * form as well — confirm).
	 */
	mrs	x5, ELR_EL3
	add	x5, x5, #4
	msr	ELR_EL3, x5

	/* Restore context and ERET */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	exception_return

skip_eret:
	ret
endfunc wa_cve_2025_0647_do_cpp_wa
239*416b8613SJohn Powell
/*
 * wa_cve_2025_0647_execute_cpp_el3
 *
 * If a CPP RCTX instruction is needed in EL3 firmware this API can be used. It
 * performs the workaround steps without requiring a trap or exception handling
 * overhead and simplifies the code since we do not generally support nested
 * exceptions in EL3 outside of specific circumstances.
 *
 * Arguments
 *   x0 - CPP RCTX argument to use when the workaround is not needed, this
 *        argument is ignored on systems with the workaround enabled since
 *        the workaround procedure does not use the argument and does CPP RCTX
 *        for all contexts. This is here for compatibility in multi-core
 *        systems where some cores might need this workaround and others do not.
 *
 * Clobbers
 *   x0 - x7
 */
func wa_cve_2025_0647_execute_cpp_el3
	/*
	 * Preserve the CPP argument (x7) and the return address (x6) across
	 * the get_cpu_ops_ptr call, which clobbers x0 and lr.
	 */
	mov	x7, x0
	mov	x6, lr

	/* Get the CPU ops so we can access the trap handler. */
	bl	get_cpu_ops_ptr
	mov	lr, x6
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/* If no handler exists, skip the workaround as its not enabled. */
	cbz	x0, skip_wa

	/*
	 * The EL3 handler expects x1 to contain EC=0x1F when handling a trap,
	 * so clear x1 so it knows it came from this API instead.
	 */
	mov	x1, #0

	/*
	 * Tail-call the handler; it returns directly to this function's
	 * caller via the lr restored above.
	 */
	br	x0

skip_wa:
	/* Workaround not enabled on this core: issue the plain CPP RCTX. */
	cpp	rctx, x7
	ret
endfunc wa_cve_2025_0647_execute_cpp_el3
282