/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_spe
	.global	el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_mpam
	.global	el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_ECV
	.global	el2_sysregs_context_save_ecv
	.global	el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
	.global	el2_sysregs_context_save_vhe
	.global	el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
	.global	el2_sysregs_context_save_trf
	.global	el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
	.global	el2_sysregs_context_save_csv2
	.global	el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore the EL2 system registers that are present when
 * a particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if ENABLE_SPE_FOR_LOWER_ELS
func el2_sysregs_context_save_spe
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
	ret
endfunc el2_sysregs_context_save_spe

func el2_sysregs_context_restore_spe
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
	ret
endfunc el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
func el2_sysregs_context_save_mpam
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	ret
endfunc el2_sysregs_context_save_mpam

func el2_sysregs_context_restore_mpam
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
	ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
	ret
endfunc el2_sysregs_context_save_ecv

func el2_sysregs_context_restore_ecv
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
	ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	ret
endfunc el2_sysregs_context_save_vhe

func el2_sysregs_context_restore_vhe
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
	ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
func el2_sysregs_context_save_trf
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
	ret
endfunc el2_sysregs_context_save_trf

func el2_sysregs_context_restore_trf
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
	ret
endfunc el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
	ret
endfunc el2_sysregs_context_save_csv2

func el2_sysregs_context_restore_csv2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
	ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use the VFP registers nor set that
 * trap in Trusted Firmware, so the bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use the VFP registers nor set that
 * trap in Trusted Firmware, so the bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov     x8, #DIT_BIT
	msr     DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
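	/*
	 * x18 has already been saved to the context above, so it can
	 * now be clobbered as a temporary to capture SP_EL0 (hence
	 * the "clobbers: x18" note in the macro header).
	 */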
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable. Prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be saved in the Non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f
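	/*
	 * Either bit being set means cycle counting is already
	 * prohibited at EL3/in Secure state, so the PMCR_EL0 handling
	 * below is skipped.
	 */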

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state. Hence, PMCR_EL0 needs to be saved before
	 * the world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any of the PSTATE bits
 * that are not set by hardware, according to the
 * AArch64.TakeException() pseudocode in the Arm Architecture
 * Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException()
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * has failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the Non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
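	/*
	 * x28 is used as a temporary to restore SP_EL0 before its own
	 * value is reloaded from the context.
	 */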
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and then update these EL1 registers to disable stage 1
 * and stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * ensures that the page table walker honours the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
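	/*
	 * ZCR_EL3 is written using its generic encoding (S3_6_C1_C2_0),
	 * presumably so the file still assembles with toolchains that
	 * do not recognise the ZCR_EL3 mnemonic.
	 */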
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

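	/*
	 * restore_ptw_el1_sys_regs (see context.h) is the counterpart
	 * of save_and_update_ptw_el1_sys_regs: with ERRATA_SPECULATIVE_AT
	 * it restores SCTLR_EL1 and TCR_EL1 from the context, otherwise
	 * it expands to nothing.
	 */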
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
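	/*
	 * Clear the flag recording that execution is currently in EL3.
	 * This flag is presumably used by the EL3 exception handling
	 * code to detect whether an exception was taken from EL3 itself.
	 */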
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit