/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
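	/*
	 * Note: HAFGRTR_EL2 is only saved when FEAT_AMUv1 is enabled; it
	 * shares a context pair with HDFGRTR_EL2, so the store below is
	 * either a register pair or a single register depending on the
	 * build.
	 */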
	mrs	x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_save


/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
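	/*
	 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
	 * are saved separately by save_and_update_ptw_el1_sys_regs below.
	 */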

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' is pointing
 * to an 'fp_regs' structure where the register context will be
 * saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' is pointing
 * to an 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov     x8, #DIT_BIT
	msr     DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is left enabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented and, if called from
 * Non-secure state, saves PMCR_EL0 and disables the cycle counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be saved in non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
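	/*
	 * If either bit reads back as set, FEAT_PMUv3p5/7 is implemented
	 * and the earlier initialization succeeded, so the PMCR_EL0
	 * handling below is skipped.
	 */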
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
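	/*
	 * Secure caller (SCR_EL3.NS == 0): skip saving PMCR_EL0 but still
	 * disable the cycle counter at label 2 below.
	 */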
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be restored from non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
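	/*
	 * x28 is used as a scratch register to restore SP_EL0 and is then
	 * reloaded, together with x29, from the saved context.
	 */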
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second step
	 * ensures that the page table walker uses the TCR_EL1.EPDx
	 * bits for address translation. The ISB between them ensures
	 * the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid an identity mapping via
	 *    stage 2 and force TCR_EL1.EPDx to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
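	/*
	 * ZCR_EL3 is written via its raw encoding (S3_6_C1_C2_0),
	 * presumably so the file still assembles with toolchains that do
	 * not know the SVE register names.
	 */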
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

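	/*
	 * The restore_ptw_el1_sys_regs macro restores the SCTLR_EL1 and
	 * TCR_EL1 values saved by save_and_update_ptw_el1_sys_regs when
	 * ERRATA_SPECULATIVE_AT is enabled.
	 */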
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
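	/*
	 * x30 is restored last, directly from the saved context, since the
	 * 'bl' above has clobbered it.
	 */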
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
