xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision 9a905a7d86867bab8a5d9befd40a67a6ab9aaea2)
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * el2_sysregs_context_save/restore_common save and
 * restore the registers that are common to all
 * configurations. The remaining functions save and
 * restore the EL2 system registers that are only
 * present when a particular feature is enabled. All
 * functions assume that 'x0' points to an
 * 'el2_sys_regs' structure where the register context
 * will be saved/restored.
 *
 * The following registers are not covered:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
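/* -----------------------------------------------------
 * Illustrative call site (a sketch only, not lifted
 * from the callers, which live in the C context
 * management code; CTX_EL2_SYSREGS_OFFSET is assumed to
 * be the context.h offset of the EL2 area within the
 * cpu_context structure):
 *
 *	add	x0, x1, #CTX_EL2_SYSREGS_OFFSET
 *	bl	el2_sysregs_context_save_common
 *
 * where x1 is assumed to hold the base of the context
 * being saved into.
 * -----------------------------------------------------
 */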
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
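
	/*
	 * Note: with ERRATA_SPECULATIVE_AT enabled, SCTLR_EL1 and
	 * TCR_EL1 are deliberately skipped here; they are saved (and
	 * modified) by save_and_update_ptw_el1_sys_regs further down
	 * in this file.
	 */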

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
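/* ------------------------------------------------------------------
 * A minimal sketch of how a caller could make sure the trap is
 * clear before using these helpers (illustrative only; TFP_BIT is
 * assumed to be the arch.h name for CPTR_EL3.TFP):
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 * ------------------------------------------------------------------
 */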
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB is required here, as the ERET used to
	 * switch to secure EL1 or the non-secure world covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm
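
	/*
	 * Note: this macro clobbers x8; it is invoked from
	 * prepare_el3_entry below.
	 */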

	/*
	 * Set to a default value the PSTATE bits that are not set by
	 * hardware when an exception is taken, as described in the
	 * AArch64.TakeException() pseudocode function in Arm DDI
	 * 0487F.c, page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
	mov     x8, #DIT_BIT
	msr     DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU); if so, PMCCNTR_EL0 need not
 * be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to, and exit from, EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable; prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If neither bit is set, FEAT_PMUv3p5/p7 is not implemented
	 * and PMCR_EL0 must be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
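	/*
	 * Flow: if either bit is already set, PMCCNTR_EL0 cannot
	 * count in those states, so the PMCR_EL0 handling below is
	 * skipped (branch to label 1). Otherwise PMCR_EL0 is read,
	 * saved to the context if the caller came from Non-secure
	 * state, and PMCR_EL0.DP is set so the cycle counter stops
	 * while event counting is prohibited.
	 */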
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 or in
	 * Secure state, so PMCR_EL0 must be saved before the world
	 * switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing for entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any PSTATE bits that are
 * not set by hardware, according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
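/* -----------------------------------------------------------------
 * Note: this is expected to be invoked from the EL3 exception entry
 * paths (presumably the runtime exception vectors) before control
 * is handed over to the C handlers.
 * -----------------------------------------------------------------
 */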
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state, if
	 * the Secure Cycle Counter was not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed; if so, FEAT_PMUv3p5/p7 is not implemented and
	 * PMCR_EL0 must be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update them so that stage 1 and stage 2 page table
 * walks are disabled.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * forces the page table walker to use the TCR_EL1.EPDx bits
	 * for address translation. The ISB ensures that the CPU
	 * performs the two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walks by
	 *    stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
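
/*
 * The values stashed above are written back by the
 * restore_ptw_el1_sys_regs macro, which el3_exit invokes below just
 * before its final exception return.
 */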

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
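/* ------------------------------------------------------------------
 * Illustrative use (a sketch, not a verbatim call site): a service's
 * SMC handler that has placed its return values in the saved x0-x3
 * slots of the context can resume the lower EL with a plain branch:
 *
 *	b	el3_exit
 * ------------------------------------------------------------------
 */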
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit