xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision 601e2d4325a7def628990f4a25889f374c81ca06)
/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_spe
	.global	el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_mpam
	.global	el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_FGT
	.global	el2_sysregs_context_save_fgt
	.global	el2_sysregs_context_restore_fgt
#endif /* ENABLE_FEAT_FGT */
#if ENABLE_FEAT_ECV
	.global	el2_sysregs_context_save_ecv
	.global	el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
	.global	el2_sysregs_context_save_vhe
	.global	el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
	.global	el2_sysregs_context_save_trf
	.global	el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
	.global	el2_sysregs_context_save_csv2
	.global	el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#if ENABLE_FEAT_HCX
	.global	el2_sysregs_context_save_hcx
	.global	el2_sysregs_context_restore_hcx
#endif /* ENABLE_FEAT_HCX */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The remaining functions save and
 * restore the EL2 system registers that are present only
 * when a particular feature is enabled. All functions
 * assume that 'x0' points to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if ENABLE_SPE_FOR_LOWER_ELS
func el2_sysregs_context_save_spe
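	/*
	 * PMSCR_EL2 register is saved only when FEAT_SPE is supported.
	 */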
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
	ret
endfunc el2_sysregs_context_save_spe

func el2_sysregs_context_restore_spe
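	/*
	 * PMSCR_EL2 register is restored only when FEAT_SPE is supported.
	 */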
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
	ret
endfunc el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
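	/*
	 * TFSR_EL2 register is saved only when FEAT_MTE is supported.
	 */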
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
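	/*
	 * TFSR_EL2 register is restored only when FEAT_MTE is supported.
	 */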
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
func el2_sysregs_context_save_mpam
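	/*
	 * MPAM EL2 registers are saved only when FEAT_MPAM is supported.
	 */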
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	ret
endfunc el2_sysregs_context_save_mpam

func el2_sysregs_context_restore_mpam
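	/*
	 * MPAM EL2 registers are restored only when FEAT_MPAM is supported.
	 */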
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
	ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
func el2_sysregs_context_save_fgt
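	/*
	 * Fine-grained trap registers are saved only when FEAT_FGT is
	 * supported. HAFGRTR_EL2 additionally requires FEAT_AMUv1.
	 */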
	mrs	x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	ret
endfunc el2_sysregs_context_save_fgt

func el2_sysregs_context_restore_fgt
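	/*
	 * Fine-grained trap registers are restored only when FEAT_FGT is
	 * supported. HAFGRTR_EL2 additionally requires FEAT_AMUv1.
	 */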
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
	ret
endfunc el2_sysregs_context_restore_fgt
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
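	/*
	 * CNTPOFF_EL2 register is saved only when FEAT_ECV is supported.
	 */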
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
	ret
endfunc el2_sysregs_context_save_ecv

func el2_sysregs_context_restore_ecv
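	/*
	 * CNTPOFF_EL2 register is restored only when FEAT_ECV is supported.
	 */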
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
	ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	ret
endfunc el2_sysregs_context_save_vhe

func el2_sysregs_context_restore_vhe
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
	ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
func el2_sysregs_context_save_trf
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
	ret
endfunc el2_sysregs_context_save_trf

func el2_sysregs_context_restore_trf
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
	ret
endfunc el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
	ret
endfunc el2_sysregs_context_save_csv2

func el2_sysregs_context_restore_csv2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
	ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
func el2_sysregs_context_save_hcx
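	/*
	 * HCRX_EL2 register is saved only when FEAT_HCX is supported.
	 */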
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
	ret
endfunc el2_sysregs_context_save_hcx

func el2_sysregs_context_restore_hcx
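	/*
	 * HCRX_EL2 register is restored only when FEAT_HCX is supported.
	 */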
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
	ret
endfunc el2_sysregs_context_restore_hcx
#endif /* ENABLE_FEAT_HCX */
#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set that trap
 * in Trusted Firmware, so we assume CPTR_EL3.TFP is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set that trap
 * in Trusted Firmware, so we assume CPTR_EL3.TFP is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we always save and restore these registers
 * on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable. Prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 or
	 * in Secure state.
	 * PMCR_EL0 therefore needs to be saved before a world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing for entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets any PSTATE bits that are not set by hardware,
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual, to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * forces the page table walker to honour the TCR_EL1.EPDx
	 * bits during address translation. The ISB ensures that the
	 * CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Set the MMU enable bit to avoid an identity mapping via
	 *    stage 2 and force the page table walker to use the
	 *    TCR_EL1.EPDx bits.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
1147