xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision bb7b85a397f4eddef84b2deaa8f3f7a66cb3a09b)
1/*
2 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7#include <arch.h>
8#include <asm_macros.S>
9#include <assert_macros.S>
10#include <context.h>
11#include <el3_common_macros.S>
12
13#if CTX_INCLUDE_EL2_REGS
14	.global	el2_sysregs_context_save_common
15	.global	el2_sysregs_context_restore_common
16#if ENABLE_SPE_FOR_LOWER_ELS
17	.global	el2_sysregs_context_save_spe
18	.global	el2_sysregs_context_restore_spe
19#endif /* ENABLE_SPE_FOR_LOWER_ELS */
20#if CTX_INCLUDE_MTE_REGS
21	.global	el2_sysregs_context_save_mte
22	.global	el2_sysregs_context_restore_mte
23#endif /* CTX_INCLUDE_MTE_REGS */
24#if ENABLE_MPAM_FOR_LOWER_ELS
25	.global	el2_sysregs_context_save_mpam
26	.global	el2_sysregs_context_restore_mpam
27#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
28#if ENABLE_FEAT_ECV
29	.global	el2_sysregs_context_save_ecv
30	.global	el2_sysregs_context_restore_ecv
31#endif /* ENABLE_FEAT_ECV */
32#if ENABLE_FEAT_VHE
33	.global	el2_sysregs_context_save_vhe
34	.global	el2_sysregs_context_restore_vhe
35#endif /* ENABLE_FEAT_VHE */
36#if RAS_EXTENSION
37	.global	el2_sysregs_context_save_ras
38	.global	el2_sysregs_context_restore_ras
39#endif /* RAS_EXTENSION */
40#if CTX_INCLUDE_NEVE_REGS
41	.global	el2_sysregs_context_save_nv2
42	.global	el2_sysregs_context_restore_nv2
43#endif /* CTX_INCLUDE_NEVE_REGS */
44#if ENABLE_TRF_FOR_NS
45	.global	el2_sysregs_context_save_trf
46	.global	el2_sysregs_context_restore_trf
47#endif /* ENABLE_TRF_FOR_NS */
48#if ENABLE_FEAT_CSV2_2
49	.global	el2_sysregs_context_save_csv2
50	.global	el2_sysregs_context_restore_csv2
51#endif /* ENABLE_FEAT_CSV2_2 */
52#if ENABLE_FEAT_HCX
53	.global	el2_sysregs_context_save_hcx
54	.global	el2_sysregs_context_restore_hcx
55#endif /* ENABLE_FEAT_HCX */
56#endif /* CTX_INCLUDE_EL2_REGS */
57
58	.global	el1_sysregs_context_save
59	.global	el1_sysregs_context_restore
60#if CTX_INCLUDE_FPREGS
61	.global	fpregs_context_save
62	.global	fpregs_context_restore
63#endif /* CTX_INCLUDE_FPREGS */
64	.global	prepare_el3_entry
65	.global	restore_gp_pmcr_pauth_regs
66	.global save_and_update_ptw_el1_sys_regs
67	.global	el3_exit
68
69#if CTX_INCLUDE_EL2_REGS
70
71/* -----------------------------------------------------
72 * The following functions strictly follow the AArch64
73 * PCS to use x9-x16 (temporary caller-saved registers)
74 * to save/restore EL2 system register context.
75 * el2_sysregs_context_save/restore_common functions
76 * save and restore registers that are common to all
77 * configurations. The rest of the functions save and
78 * restore EL2 system registers that are present when a
79 * particular feature is enabled. All functions assume
80 * that 'x0' is pointing to an 'el2_sys_regs' structure
81 * where the register context will be saved/restored.
82 *
83 * The following registers are not saved/restored:
84 * AMEVCNTVOFF0<n>_EL2
85 * AMEVCNTVOFF1<n>_EL2
86 * ICH_AP0R<n>_EL2
87 * ICH_AP1R<n>_EL2
88 * ICH_LR<n>_EL2
89 * -----------------------------------------------------
90 */
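/* -----------------------------------------------------
 * Note on the save/restore pattern used below: system
 * registers are read in pairs and written out with a
 * single 'stp', which relies on the corresponding CTX_*
 * offsets (defined in context.h) occupying adjacent
 * 8-byte slots. As an illustration, the first pair saved
 * by el2_sysregs_context_save_common is:
 *
 *	mrs	x9, actlr_el2
 *	mrs	x10, afsr0_el2
 *	stp	x9, x10, [x0, #CTX_ACTLR_EL2]
 *
 * which places the AFSR0_EL2 value at offset
 * CTX_ACTLR_EL2 + 8 in the structure.
 * -----------------------------------------------------
 */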
91func el2_sysregs_context_save_common
92	mrs	x9, actlr_el2
93	mrs	x10, afsr0_el2
94	stp	x9, x10, [x0, #CTX_ACTLR_EL2]
95
96	mrs	x11, afsr1_el2
97	mrs	x12, amair_el2
98	stp	x11, x12, [x0, #CTX_AFSR1_EL2]
99
100	mrs	x13, cnthctl_el2
101	mrs	x14, cntvoff_el2
102	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
103
104	mrs	x15, cptr_el2
105	str	x15, [x0, #CTX_CPTR_EL2]
106
107#if CTX_INCLUDE_AARCH32_REGS
108	mrs	x16, dbgvcr32_el2
109	str	x16, [x0, #CTX_DBGVCR32_EL2]
110#endif /* CTX_INCLUDE_AARCH32_REGS */
111
112	mrs	x9, elr_el2
113	mrs	x10, esr_el2
114	stp	x9, x10, [x0, #CTX_ELR_EL2]
115
116	mrs	x11, far_el2
117	mrs	x12, hacr_el2
118	stp	x11, x12, [x0, #CTX_FAR_EL2]
119
120	mrs	x13, hcr_el2
121	mrs	x14, hpfar_el2
122	stp	x13, x14, [x0, #CTX_HCR_EL2]
123
124	mrs	x15, hstr_el2
125	mrs	x16, ICC_SRE_EL2
126	stp	x15, x16, [x0, #CTX_HSTR_EL2]
127
128	mrs	x9, ICH_HCR_EL2
129	mrs	x10, ICH_VMCR_EL2
130	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
131
132	mrs	x11, mair_el2
133	mrs	x12, mdcr_el2
134	stp	x11, x12, [x0, #CTX_MAIR_EL2]
135
136	mrs	x14, sctlr_el2
137	str	x14, [x0, #CTX_SCTLR_EL2]
138
139	mrs	x15, spsr_el2
140	mrs	x16, sp_el2
141	stp	x15, x16, [x0, #CTX_SPSR_EL2]
142
143	mrs	x9, tcr_el2
144	mrs	x10, tpidr_el2
145	stp	x9, x10, [x0, #CTX_TCR_EL2]
146
147	mrs	x11, ttbr0_el2
148	mrs	x12, vbar_el2
149	stp	x11, x12, [x0, #CTX_TTBR0_EL2]
150
151	mrs	x13, vmpidr_el2
152	mrs	x14, vpidr_el2
153	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]
154
155	mrs	x15, vtcr_el2
156	mrs	x16, vttbr_el2
157	stp	x15, x16, [x0, #CTX_VTCR_EL2]
158	ret
159endfunc el2_sysregs_context_save_common
160
161func el2_sysregs_context_restore_common
162	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
163	msr	actlr_el2, x9
164	msr	afsr0_el2, x10
165
166	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
167	msr	afsr1_el2, x11
168	msr	amair_el2, x12
169
170	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
171	msr	cnthctl_el2, x13
172	msr	cntvoff_el2, x14
173
174	ldr	x15, [x0, #CTX_CPTR_EL2]
175	msr	cptr_el2, x15
176
177#if CTX_INCLUDE_AARCH32_REGS
178	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
179	msr	dbgvcr32_el2, x16
180#endif /* CTX_INCLUDE_AARCH32_REGS */
181
182	ldp	x9, x10, [x0, #CTX_ELR_EL2]
183	msr	elr_el2, x9
184	msr	esr_el2, x10
185
186	ldp	x11, x12, [x0, #CTX_FAR_EL2]
187	msr	far_el2, x11
188	msr	hacr_el2, x12
189
190	ldp	x13, x14, [x0, #CTX_HCR_EL2]
191	msr	hcr_el2, x13
192	msr	hpfar_el2, x14
193
194	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
195	msr	hstr_el2, x15
196	msr	ICC_SRE_EL2, x16
197
198	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
199	msr	ICH_HCR_EL2, x9
200	msr	ICH_VMCR_EL2, x10
201
202	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
203	msr	mair_el2, x11
204	msr	mdcr_el2, x12
205
206	ldr	x14, [x0, #CTX_SCTLR_EL2]
207	msr	sctlr_el2, x14
208
209	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
210	msr	spsr_el2, x15
211	msr	sp_el2, x16
212
213	ldp	x9, x10, [x0, #CTX_TCR_EL2]
214	msr	tcr_el2, x9
215	msr	tpidr_el2, x10
216
217	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
218	msr	ttbr0_el2, x11
219	msr	vbar_el2, x12
220
221	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
222	msr	vmpidr_el2, x13
223	msr	vpidr_el2, x14
224
225	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
226	msr	vtcr_el2, x15
227	msr	vttbr_el2, x16
228	ret
229endfunc el2_sysregs_context_restore_common
230
231#if ENABLE_SPE_FOR_LOWER_ELS
232func el2_sysregs_context_save_spe
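	/*
	 * PMSCR_EL2 (Statistical Profiling Control Register, EL2) is
	 * saved only when FEAT_SPE is supported.
	 */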
233	mrs	x13, PMSCR_EL2
234	str	x13, [x0, #CTX_PMSCR_EL2]
235	ret
236endfunc el2_sysregs_context_save_spe
237
238func el2_sysregs_context_restore_spe
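	/*
	 * PMSCR_EL2 register is restored only when FEAT_SPE is supported.
	 */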
239	ldr	x13, [x0, #CTX_PMSCR_EL2]
240	msr	PMSCR_EL2, x13
241	ret
242endfunc el2_sysregs_context_restore_spe
243#endif /* ENABLE_SPE_FOR_LOWER_ELS */
244
245#if CTX_INCLUDE_MTE_REGS
246func el2_sysregs_context_save_mte
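	/*
	 * TFSR_EL2 (Tag Fault Status Register) is saved only when
	 * FEAT_MTE2 is supported.
	 */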
247	mrs	x9, TFSR_EL2
248	str	x9, [x0, #CTX_TFSR_EL2]
249	ret
250endfunc el2_sysregs_context_save_mte
251
252func el2_sysregs_context_restore_mte
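	/*
	 * TFSR_EL2 register is restored only when FEAT_MTE2 is supported.
	 */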
253	ldr	x9, [x0, #CTX_TFSR_EL2]
254	msr	TFSR_EL2, x9
255	ret
256endfunc el2_sysregs_context_restore_mte
257#endif /* CTX_INCLUDE_MTE_REGS */
258
259#if ENABLE_MPAM_FOR_LOWER_ELS
260func el2_sysregs_context_save_mpam
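	/*
	 * The EL2 MPAM registers are saved only when MPAM (FEAT_MPAM)
	 * is supported.
	 */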
261	mrs	x10, MPAM2_EL2
262	str	x10, [x0, #CTX_MPAM2_EL2]
263
264	mrs	x11, MPAMHCR_EL2
265	mrs	x12, MPAMVPM0_EL2
266	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
267
268	mrs	x13, MPAMVPM1_EL2
269	mrs	x14, MPAMVPM2_EL2
270	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
271
272	mrs	x15, MPAMVPM3_EL2
273	mrs	x16, MPAMVPM4_EL2
274	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
275
276	mrs	x9, MPAMVPM5_EL2
277	mrs	x10, MPAMVPM6_EL2
278	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
279
280	mrs	x11, MPAMVPM7_EL2
281	mrs	x12, MPAMVPMV_EL2
282	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
283	ret
284endfunc el2_sysregs_context_save_mpam
285
286func el2_sysregs_context_restore_mpam
287	ldr	x10, [x0, #CTX_MPAM2_EL2]
288	msr	MPAM2_EL2, x10
289
290	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
291	msr	MPAMHCR_EL2, x11
292	msr	MPAMVPM0_EL2, x12
293
294	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
295	msr	MPAMVPM1_EL2, x13
296	msr	MPAMVPM2_EL2, x14
297
298	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
299	msr	MPAMVPM3_EL2, x15
300	msr	MPAMVPM4_EL2, x16
301
302	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
303	msr	MPAMVPM5_EL2, x9
304	msr	MPAMVPM6_EL2, x10
305
306	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
307	msr	MPAMVPM7_EL2, x11
308	msr	MPAMVPMV_EL2, x12
309	ret
310endfunc el2_sysregs_context_restore_mpam
311#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
312
313#if ENABLE_FEAT_ECV
314func el2_sysregs_context_save_ecv
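	/*
	 * CNTPOFF_EL2 (physical counter offset) is saved only when
	 * FEAT_ECV is supported.
	 */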
315	mrs	x11, CNTPOFF_EL2
316	str	x11, [x0, #CTX_CNTPOFF_EL2]
317	ret
318endfunc el2_sysregs_context_save_ecv
319
320func el2_sysregs_context_restore_ecv
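	/*
	 * CNTPOFF_EL2 register is restored only when FEAT_ECV is supported.
	 */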
321	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
322	msr	CNTPOFF_EL2, x11
323	ret
324endfunc el2_sysregs_context_restore_ecv
325#endif /* ENABLE_FEAT_ECV */
326
327#if ENABLE_FEAT_VHE
328func el2_sysregs_context_save_vhe
329	/*
330	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
331	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
332	 */
333	mrs	x9, contextidr_el2
334	mrs	x10, ttbr1_el2
335	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
336	ret
337endfunc el2_sysregs_context_save_vhe
338
339func el2_sysregs_context_restore_vhe
340	/*
341	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
342	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
343	 */
344	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
345	msr	contextidr_el2, x9
346	msr	ttbr1_el2, x10
347	ret
348endfunc el2_sysregs_context_restore_vhe
349#endif /* ENABLE_FEAT_VHE */
350
351#if RAS_EXTENSION
352func el2_sysregs_context_save_ras
353	/*
354	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
355	 * FEAT_RAS is supported.
356	 */
357	mrs	x11, vdisr_el2
358	mrs	x12, vsesr_el2
359	stp	x11, x12, [x0, #CTX_VDISR_EL2]
360	ret
361endfunc el2_sysregs_context_save_ras
362
363func el2_sysregs_context_restore_ras
364	/*
365	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
366	 * is supported.
367	 */
368	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
369	msr	vdisr_el2, x11
370	msr	vsesr_el2, x12
371	ret
372endfunc el2_sysregs_context_restore_ras
373#endif /* RAS_EXTENSION */
374
375#if CTX_INCLUDE_NEVE_REGS
376func el2_sysregs_context_save_nv2
377	/*
378	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
379	 */
380	mrs	x16, vncr_el2
381	str	x16, [x0, #CTX_VNCR_EL2]
382	ret
383endfunc el2_sysregs_context_save_nv2
384
385func el2_sysregs_context_restore_nv2
386	/*
387	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
388	 */
389	ldr	x16, [x0, #CTX_VNCR_EL2]
390	msr	vncr_el2, x16
391	ret
392endfunc el2_sysregs_context_restore_nv2
393#endif /* CTX_INCLUDE_NEVE_REGS */
394
395#if ENABLE_TRF_FOR_NS
396func el2_sysregs_context_save_trf
397	/*
398	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
399	 */
400	mrs	x12, TRFCR_EL2
401	str	x12, [x0, #CTX_TRFCR_EL2]
402	ret
403endfunc el2_sysregs_context_save_trf
404
405func el2_sysregs_context_restore_trf
406	/*
407	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
408	 */
409	ldr	x12, [x0, #CTX_TRFCR_EL2]
410	msr	TRFCR_EL2, x12
411	ret
412endfunc el2_sysregs_context_restore_trf
413#endif /* ENABLE_TRF_FOR_NS */
414
415#if ENABLE_FEAT_CSV2_2
416func el2_sysregs_context_save_csv2
417	/*
418	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
419	 */
420	mrs	x13, scxtnum_el2
421	str	x13, [x0, #CTX_SCXTNUM_EL2]
422	ret
423endfunc el2_sysregs_context_save_csv2
424
425func el2_sysregs_context_restore_csv2
426	/*
427	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
428	 */
429	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
430	msr	scxtnum_el2, x13
431	ret
432endfunc el2_sysregs_context_restore_csv2
433#endif /* ENABLE_FEAT_CSV2_2 */
434
435#if ENABLE_FEAT_HCX
436func el2_sysregs_context_save_hcx
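	/*
	 * HCRX_EL2 (Extended Hypervisor Configuration Register) is
	 * saved only when FEAT_HCX is supported.
	 */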
437	mrs	x14, hcrx_el2
438	str	x14, [x0, #CTX_HCRX_EL2]
439	ret
440endfunc el2_sysregs_context_save_hcx
441
442func el2_sysregs_context_restore_hcx
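	/*
	 * HCRX_EL2 register is restored only when FEAT_HCX is supported.
	 */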
443	ldr	x14, [x0, #CTX_HCRX_EL2]
444	msr	hcrx_el2, x14
445	ret
446endfunc el2_sysregs_context_restore_hcx
447#endif /* ENABLE_FEAT_HCX */
448#endif /* CTX_INCLUDE_EL2_REGS */
449
450/* ------------------------------------------------------------------
451 * The following function strictly follows the AArch64 PCS to use
452 * x9-x17 (temporary caller-saved registers) to save EL1 system
453 * register context. It assumes that 'x0' is pointing to an
454 * 'el1_sys_regs' structure where the register context will be saved.
455 * ------------------------------------------------------------------
456 */
457func el1_sysregs_context_save
458
459	mrs	x9, spsr_el1
460	mrs	x10, elr_el1
461	stp	x9, x10, [x0, #CTX_SPSR_EL1]
462
463#if !ERRATA_SPECULATIVE_AT
464	mrs	x15, sctlr_el1
465	mrs	x16, tcr_el1
466	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
467#endif /* ERRATA_SPECULATIVE_AT */
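	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
	 * are not saved here; they are handled separately by
	 * save_and_update_ptw_el1_sys_regs later in this file.
	 */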
468
469	mrs	x17, cpacr_el1
470	mrs	x9, csselr_el1
471	stp	x17, x9, [x0, #CTX_CPACR_EL1]
472
473	mrs	x10, sp_el1
474	mrs	x11, esr_el1
475	stp	x10, x11, [x0, #CTX_SP_EL1]
476
477	mrs	x12, ttbr0_el1
478	mrs	x13, ttbr1_el1
479	stp	x12, x13, [x0, #CTX_TTBR0_EL1]
480
481	mrs	x14, mair_el1
482	mrs	x15, amair_el1
483	stp	x14, x15, [x0, #CTX_MAIR_EL1]
484
485	mrs	x16, actlr_el1
486	mrs	x17, tpidr_el1
487	stp	x16, x17, [x0, #CTX_ACTLR_EL1]
488
489	mrs	x9, tpidr_el0
490	mrs	x10, tpidrro_el0
491	stp	x9, x10, [x0, #CTX_TPIDR_EL0]
492
493	mrs	x13, par_el1
494	mrs	x14, far_el1
495	stp	x13, x14, [x0, #CTX_PAR_EL1]
496
497	mrs	x15, afsr0_el1
498	mrs	x16, afsr1_el1
499	stp	x15, x16, [x0, #CTX_AFSR0_EL1]
500
501	mrs	x17, contextidr_el1
502	mrs	x9, vbar_el1
503	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
504
505	/* Save AArch32 system registers if the build has instructed so */
506#if CTX_INCLUDE_AARCH32_REGS
507	mrs	x11, spsr_abt
508	mrs	x12, spsr_und
509	stp	x11, x12, [x0, #CTX_SPSR_ABT]
510
511	mrs	x13, spsr_irq
512	mrs	x14, spsr_fiq
513	stp	x13, x14, [x0, #CTX_SPSR_IRQ]
514
515	mrs	x15, dacr32_el2
516	mrs	x16, ifsr32_el2
517	stp	x15, x16, [x0, #CTX_DACR32_EL2]
518#endif /* CTX_INCLUDE_AARCH32_REGS */
519
520	/* Save NS timer registers if the build has instructed so */
521#if NS_TIMER_SWITCH
522	mrs	x10, cntp_ctl_el0
523	mrs	x11, cntp_cval_el0
524	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
525
526	mrs	x12, cntv_ctl_el0
527	mrs	x13, cntv_cval_el0
528	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
529
530	mrs	x14, cntkctl_el1
531	str	x14, [x0, #CTX_CNTKCTL_EL1]
532#endif /* NS_TIMER_SWITCH */
533
534	/* Save MTE system registers if the build has instructed so */
535#if CTX_INCLUDE_MTE_REGS
536	mrs	x15, TFSRE0_EL1
537	mrs	x16, TFSR_EL1
538	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]
539
540	mrs	x9, RGSR_EL1
541	mrs	x10, GCR_EL1
542	stp	x9, x10, [x0, #CTX_RGSR_EL1]
543#endif /* CTX_INCLUDE_MTE_REGS */
544
545	ret
546endfunc el1_sysregs_context_save
547
548/* ------------------------------------------------------------------
549 * The following function strictly follows the AArch64 PCS to use
550 * x9-x17 (temporary caller-saved registers) to restore EL1 system
551 * register context. It assumes that 'x0' is pointing to an
552 * 'el1_sys_regs' structure from where the register context will be
553 * restored.
554 * ------------------------------------------------------------------
555 */
556func el1_sysregs_context_restore
557
558	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
559	msr	spsr_el1, x9
560	msr	elr_el1, x10
561
562#if !ERRATA_SPECULATIVE_AT
563	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
564	msr	sctlr_el1, x15
565	msr	tcr_el1, x16
566#endif /* ERRATA_SPECULATIVE_AT */
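	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
	 * are not restored here; the restore_ptw_el1_sys_regs macro on
	 * the el3_exit path takes care of them.
	 */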
567
568	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
569	msr	cpacr_el1, x17
570	msr	csselr_el1, x9
571
572	ldp	x10, x11, [x0, #CTX_SP_EL1]
573	msr	sp_el1, x10
574	msr	esr_el1, x11
575
576	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
577	msr	ttbr0_el1, x12
578	msr	ttbr1_el1, x13
579
580	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
581	msr	mair_el1, x14
582	msr	amair_el1, x15
583
584	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
585	msr	actlr_el1, x16
586	msr	tpidr_el1, x17
587
588	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
589	msr	tpidr_el0, x9
590	msr	tpidrro_el0, x10
591
592	ldp	x13, x14, [x0, #CTX_PAR_EL1]
593	msr	par_el1, x13
594	msr	far_el1, x14
595
596	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
597	msr	afsr0_el1, x15
598	msr	afsr1_el1, x16
599
600	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
601	msr	contextidr_el1, x17
602	msr	vbar_el1, x9
603
604	/* Restore AArch32 system registers if the build has instructed so */
605#if CTX_INCLUDE_AARCH32_REGS
606	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
607	msr	spsr_abt, x11
608	msr	spsr_und, x12
609
610	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
611	msr	spsr_irq, x13
612	msr	spsr_fiq, x14
613
614	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
615	msr	dacr32_el2, x15
616	msr	ifsr32_el2, x16
617#endif /* CTX_INCLUDE_AARCH32_REGS */
618
619	/* Restore NS timer registers if the build has instructed so */
620#if NS_TIMER_SWITCH
621	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
622	msr	cntp_ctl_el0, x10
623	msr	cntp_cval_el0, x11
624
625	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
626	msr	cntv_ctl_el0, x12
627	msr	cntv_cval_el0, x13
628
629	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
630	msr	cntkctl_el1, x14
631#endif /* NS_TIMER_SWITCH */
632
633	/* Restore MTE system registers if the build has instructed so */
634#if CTX_INCLUDE_MTE_REGS
635	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
636	msr	TFSRE0_EL1, x11
637	msr	TFSR_EL1, x12
638
639	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
640	msr	RGSR_EL1, x13
641	msr	GCR_EL1, x14
642#endif /* CTX_INCLUDE_MTE_REGS */
643
644	/* No explicit ISB required here as ERET covers it */
645	ret
646endfunc el1_sysregs_context_restore
647
648/* ------------------------------------------------------------------
649 * The following function strictly follows the AArch64 PCS, using
650 * x9-x17 (temporary caller-saved registers)
651 * to save floating point register context. It assumes that 'x0' is
652 * pointing to a 'fp_regs' structure where the register context will
653 * be saved.
654 *
655 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
656 * However, Trusted Firmware currently neither uses the VFP registers
657 * nor sets this trap, so CPTR_EL3.TFP is assumed to be clear.
658 *
659 * TODO: Revisit when VFP is used in secure world
660 * ------------------------------------------------------------------
661 */
662#if CTX_INCLUDE_FPREGS
663func fpregs_context_save
664	stp	q0, q1, [x0, #CTX_FP_Q0]
665	stp	q2, q3, [x0, #CTX_FP_Q2]
666	stp	q4, q5, [x0, #CTX_FP_Q4]
667	stp	q6, q7, [x0, #CTX_FP_Q6]
668	stp	q8, q9, [x0, #CTX_FP_Q8]
669	stp	q10, q11, [x0, #CTX_FP_Q10]
670	stp	q12, q13, [x0, #CTX_FP_Q12]
671	stp	q14, q15, [x0, #CTX_FP_Q14]
672	stp	q16, q17, [x0, #CTX_FP_Q16]
673	stp	q18, q19, [x0, #CTX_FP_Q18]
674	stp	q20, q21, [x0, #CTX_FP_Q20]
675	stp	q22, q23, [x0, #CTX_FP_Q22]
676	stp	q24, q25, [x0, #CTX_FP_Q24]
677	stp	q26, q27, [x0, #CTX_FP_Q26]
678	stp	q28, q29, [x0, #CTX_FP_Q28]
679	stp	q30, q31, [x0, #CTX_FP_Q30]
680
681	mrs	x9, fpsr
682	str	x9, [x0, #CTX_FP_FPSR]
683
684	mrs	x10, fpcr
685	str	x10, [x0, #CTX_FP_FPCR]
686
687#if CTX_INCLUDE_AARCH32_REGS
688	mrs	x11, fpexc32_el2
689	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
690#endif /* CTX_INCLUDE_AARCH32_REGS */
691	ret
692endfunc fpregs_context_save
693
694/* ------------------------------------------------------------------
695 * The following function strictly follows the AArch64 PCS, using x9-x17
696 * (temporary caller-saved registers) to
697 * restore floating point register context. It assumes that 'x0' is
698 * pointing to a 'fp_regs' structure from where the register context
699 * will be restored.
700 *
701 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
702 * However, Trusted Firmware currently neither uses the VFP registers
703 * nor sets this trap, so CPTR_EL3.TFP is assumed to be clear.
704 *
705 * TODO: Revisit when VFP is used in secure world
706 * ------------------------------------------------------------------
707 */
708func fpregs_context_restore
709	ldp	q0, q1, [x0, #CTX_FP_Q0]
710	ldp	q2, q3, [x0, #CTX_FP_Q2]
711	ldp	q4, q5, [x0, #CTX_FP_Q4]
712	ldp	q6, q7, [x0, #CTX_FP_Q6]
713	ldp	q8, q9, [x0, #CTX_FP_Q8]
714	ldp	q10, q11, [x0, #CTX_FP_Q10]
715	ldp	q12, q13, [x0, #CTX_FP_Q12]
716	ldp	q14, q15, [x0, #CTX_FP_Q14]
717	ldp	q16, q17, [x0, #CTX_FP_Q16]
718	ldp	q18, q19, [x0, #CTX_FP_Q18]
719	ldp	q20, q21, [x0, #CTX_FP_Q20]
720	ldp	q22, q23, [x0, #CTX_FP_Q22]
721	ldp	q24, q25, [x0, #CTX_FP_Q24]
722	ldp	q26, q27, [x0, #CTX_FP_Q26]
723	ldp	q28, q29, [x0, #CTX_FP_Q28]
724	ldp	q30, q31, [x0, #CTX_FP_Q30]
725
726	ldr	x9, [x0, #CTX_FP_FPSR]
727	msr	fpsr, x9
728
729	ldr	x10, [x0, #CTX_FP_FPCR]
730	msr	fpcr, x10
731
732#if CTX_INCLUDE_AARCH32_REGS
733	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
734	msr	fpexc32_el2, x11
735#endif /* CTX_INCLUDE_AARCH32_REGS */
736
737	/*
738	 * No explicit ISB required here as ERET to
739	 * switch to secure EL1 or non-secure world
740	 * covers it
741	 */
742
743	ret
744endfunc fpregs_context_restore
745#endif /* CTX_INCLUDE_FPREGS */
746
747	/*
748	 * Set SCR_EL3.EA bit so that External Aborts and SErrors are taken to EL3
749	 */
750	.macro enable_serror_at_el3
751	mrs     x8, scr_el3
752	orr     x8, x8, #SCR_EA_BIT
753	msr     scr_el3, x8
754	.endm
755
756	/*
757	 * Set the PSTATE bits not set when the exception was taken as
758	 * described in the AArch64.TakeException() pseudocode function
759	 * in ARM DDI 0487F.c page J1-7635 to a default value.
760	 */
761	.macro set_unset_pstate_bits
762	/*
763	 * If Data Independent Timing (DIT) functionality is implemented,
764	 * always enable DIT in EL3
765	 */
766#if ENABLE_FEAT_DIT
767	mov     x8, #DIT_BIT
768	msr     DIT, x8
769#endif /* ENABLE_FEAT_DIT */
770	.endm /* set_unset_pstate_bits */
771
772/* ------------------------------------------------------------------
773 * The following macro is used to save and restore all the general
774 * purpose and ARMv8.3-PAuth (if enabled) registers.
775 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
776 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
777 * does not need to be saved/restored during a world switch.
778 *
779 * Ideally we would only save and restore the callee saved registers
780 * when a world switch occurs but that type of implementation is more
781 * complex. So currently we will always save and restore these
782 * registers on entry and exit of EL3.
783 * clobbers: x18
784 * ------------------------------------------------------------------
785 */
786	.macro save_gp_pmcr_pauth_regs
787	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
788	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
789	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
790	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
791	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
792	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
793	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
794	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
795	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
796	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
797	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
798	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
799	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
800	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
801	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
802	mrs	x18, sp_el0
803	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
804
805	/* ----------------------------------------------------------
806	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
807	 * has failed.
808	 *
809	 * MDCR_EL3:
810	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
811	 * counting at EL3.
812	 * SCCD bit set: Secure Cycle Counter Disable; prohibits
813	 * PMCCNTR_EL0 from counting in Secure state.
814	 * If these bits are not set, FEAT_PMUv3p5/FEAT_PMUv3p7 is not
815	 * implemented and PMCR_EL0 must be saved in the non-secure
816	 * context.
817	 * ----------------------------------------------------------
818	 */
819	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
820	mrs	x9, mdcr_el3
821	tst	x9, x10
822	bne	1f
823
824	/* ----------------------------------------------------------
825	 * If control reaches here, the Secure Cycle Counter
826	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 or in
827	 * Secure state, so PMCR_EL0 must be saved before a world
828	 * switch.
829	 * ----------------------------------------------------------
830	 */
831	mrs	x9, pmcr_el0
832
833	/* Check caller's security state */
834	mrs	x10, scr_el3
835	tst	x10, #SCR_NS_BIT
836	beq	2f
837
838	/* Save PMCR_EL0 if called from Non-secure state */
839	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
840
841	/* Disable cycle counter when event counting is prohibited */
8422:	orr	x9, x9, #PMCR_EL0_DP_BIT
843	msr	pmcr_el0, x9
844	isb
8451:
846#if CTX_INCLUDE_PAUTH_REGS
847	/* ----------------------------------------------------------
848 	 * Save the ARMv8.3-PAuth keys as they are not banked
849 	 * by exception level
850	 * ----------------------------------------------------------
851	 */
852	add	x19, sp, #CTX_PAUTH_REGS_OFFSET
853
854	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
855	mrs	x21, APIAKeyHi_EL1
856	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
857	mrs	x23, APIBKeyHi_EL1
858	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
859	mrs	x25, APDAKeyHi_EL1
860	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
861	mrs	x27, APDBKeyHi_EL1
862	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
863	mrs	x29, APGAKeyHi_EL1
864
865	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
866	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
867	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
868	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
869	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
870#endif /* CTX_INCLUDE_PAUTH_REGS */
871	.endm /* save_gp_pmcr_pauth_regs */
872
873/* -----------------------------------------------------------------
874 * This function saves the context and sets the PSTATE to a known
875 * state, preparing entry to EL3.
876 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
877 * registers.
878 * It then sets any of the PSTATE bits that are not set by hardware,
879 * according to the AArch64.TakeException pseudocode in the Arm
880 * Architecture Reference Manual, to a default value for EL3.
881 * clobbers: x17
882 * -----------------------------------------------------------------
883 */
884func prepare_el3_entry
885	save_gp_pmcr_pauth_regs
886	enable_serror_at_el3
887	/*
888	 * Set the PSTATE bits not set by the AArch64.TakeException
889	 * pseudocode to their default values.
890	 */
891	set_unset_pstate_bits
892	ret
893endfunc prepare_el3_entry
894
895/* ------------------------------------------------------------------
896 * This function restores the ARMv8.3-PAuth registers (if enabled),
897 * PMCR_EL0 (where applicable) and all general purpose registers except
898 * x30 from the CPU context. x30 must be explicitly restored by the caller.
899 * ------------------------------------------------------------------
900 */
901func restore_gp_pmcr_pauth_regs
902#if CTX_INCLUDE_PAUTH_REGS
903 	/* Restore the ARMv8.3 PAuth keys */
904	add	x10, sp, #CTX_PAUTH_REGS_OFFSET
905
906	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
907	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
908	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
909	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
910	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */
911
912	msr	APIAKeyLo_EL1, x0
913	msr	APIAKeyHi_EL1, x1
914	msr	APIBKeyLo_EL1, x2
915	msr	APIBKeyHi_EL1, x3
916	msr	APDAKeyLo_EL1, x4
917	msr	APDAKeyHi_EL1, x5
918	msr	APDBKeyLo_EL1, x6
919	msr	APDBKeyHi_EL1, x7
920	msr	APGAKeyLo_EL1, x8
921	msr	APGAKeyHi_EL1, x9
922#endif /* CTX_INCLUDE_PAUTH_REGS */
923
924	/* ----------------------------------------------------------
925	 * Restore PMCR_EL0 when returning to Non-secure state if
926	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
927	 * ARMv8.5-PMU is implemented.
928	 * ----------------------------------------------------------
929	 */
930	mrs	x0, scr_el3
931	tst	x0, #SCR_NS_BIT
932	beq	2f
933
934	/* ----------------------------------------------------------
935	 * Back to Non-secure state.
936	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
937	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
938	 * PMCR_EL0 should be restored from non-secure context.
939	 * ----------------------------------------------------------
940	 */
941	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
942	mrs	x0, mdcr_el3
943	tst	x0, x1
944	bne	2f
945	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
946	msr	pmcr_el0, x0
9472:
948	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
949	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
950	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
951	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
952	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
953	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
954	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
955	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
956	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
957	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
958	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
959	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
960	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
961	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
962	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
963	msr	sp_el0, x28
964	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
965	ret
966endfunc restore_gp_pmcr_pauth_regs
967
968/*
969 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
970 * registers and update them so that stage 1 and stage 2 page table
971 * walks are disabled.
972 */
973func save_and_update_ptw_el1_sys_regs
974	/* ----------------------------------------------------------
975	 * Save only sctlr_el1 and tcr_el1 registers
976	 * ----------------------------------------------------------
977	 */
978	mrs	x29, sctlr_el1
979	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
980	mrs	x29, tcr_el1
981	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]
982
983	/* ------------------------------------------------------------
984	 * The steps below must be performed in this order to disable
985	 * page table walks for the lower ELs (EL1 and EL0). The first
986	 * step disables stage 1 page table walks; the second step
987	 * forces the page table walker to honour the TCR_EL1.EPDx
988	 * bits when performing address translation. The ISB ensures
989	 * that the CPU performs these two steps in order.
990	 *
991	 * 1. Set the TCR_EL1.EPDx bits to disable stage 1 page table
992	 *    walks.
993	 * 2. Set the SCTLR_EL1.M bit to avoid an identity (flat) mapping
994	 *    via stage 2 and force the TCR_EL1.EPDx bits to be used by
995	 *    the page table walker.
996	 * ------------------------------------------------------------
997	 */
998	orr	x29, x29, #(TCR_EPD0_BIT)
999	orr	x29, x29, #(TCR_EPD1_BIT)
1000	msr	tcr_el1, x29
1001	isb
1002	mrs	x29, sctlr_el1
1003	orr	x29, x29, #SCTLR_M_BIT
1004	msr	sctlr_el1, x29
1005	isb
1006
1007	ret
1008endfunc save_and_update_ptw_el1_sys_regs
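/*
 * The corresponding restore of SCTLR_EL1 and TCR_EL1 is performed by
 * the restore_ptw_el1_sys_regs macro, invoked from el3_exit below
 * before returning to the lower EL.
 */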
1009
1010/* ------------------------------------------------------------------
1011 * This routine assumes that SP_EL3 is pointing to a valid
1012 * context structure from where the gp regs and other special
1013 * registers can be retrieved.
1014 * ------------------------------------------------------------------
1015 */
1016func el3_exit
1017#if ENABLE_ASSERTIONS
1018	/* el3_exit assumes SP_EL0 on entry */
1019	mrs	x17, spsel
1020	cmp	x17, #MODE_SP_EL0
1021	ASM_ASSERT(eq)
1022#endif /* ENABLE_ASSERTIONS */
1023
1024	/* ----------------------------------------------------------
1025	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
1026	 * will be used for handling the next SMC.
1027	 * Then switch to SP_EL3.
1028	 * ----------------------------------------------------------
1029	 */
1030	mov	x17, sp
1031	msr	spsel, #MODE_SP_ELX
1032	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
1033
1034#if IMAGE_BL31
1035	/* ----------------------------------------------------------
1036	 * Restore CPTR_EL3.
1037	 * ZCR is only restored if SVE is supported and enabled.
1038	 * Synchronization is required before zcr_el3 is addressed.
1039	 * ----------------------------------------------------------
1040	 */
1041	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
1042	msr	cptr_el3, x19
1043
1044	ands	x19, x19, #CPTR_EZ_BIT
1045	beq	sve_not_enabled
1046
1047	isb
1048	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
1049sve_not_enabled:
1050#endif /* IMAGE_BL31 */
1051
1052#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
1053	/* ----------------------------------------------------------
1054	 * Restore mitigation state as it was on entry to EL3
1055	 * ----------------------------------------------------------
1056	 */
1057	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
1058	cbz	x17, 1f
1059	blr	x17
10601:
1061#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */
1062
1063#if IMAGE_BL31 && RAS_EXTENSION
1064	/* ----------------------------------------------------------
1065	 * Issue Error Synchronization Barrier to synchronize SErrors
1066	 * before exiting EL3. We're running with EAs unmasked, so
1067	 * any synchronized errors would be taken immediately;
1068	 * therefore no need to inspect DISR_EL1 register.
1069 	 * ----------------------------------------------------------
1070	 */
1071	esb
1072#else
1073	dsb	sy
1074#endif /* IMAGE_BL31 && RAS_EXTENSION */
1075
1076	/* ----------------------------------------------------------
1077	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
1078	 * ----------------------------------------------------------
1079	 */
1080	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
1081	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
1082	msr	scr_el3, x18
1083	msr	spsr_el3, x16
1084	msr	elr_el3, x17
1085
1086	restore_ptw_el1_sys_regs
1087
1088	/* ----------------------------------------------------------
1089	 * Restore the general purpose registers (including x30), PMCR_EL0
1090	 * and the ARMv8.3-PAuth registers.
1091	 * Exit EL3 via ERET to a lower exception level.
1092 	 * ----------------------------------------------------------
1093 	 */
1094	bl	restore_gp_pmcr_pauth_regs
1095	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
1096
1097#ifdef IMAGE_BL31
1098	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
1099#endif /* IMAGE_BL31 */
1100
1101	exception_return
1102
1103endfunc el3_exit
1104