/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
	mrs	x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
#endif /* RAS_EXTENSION */

#if ENABLE_FEAT_SEL2
	/*
	 * VSTCR_EL2 and VSTTBR_EL2 registers are saved only
	 * when FEAT_SEL2 is supported.
	 */
	mrs	x13, vstcr_el2
	mrs	x14, vsttbr_el2
	stp	x13, x14, [x0, #CTX_VSTCR_EL2]
#endif /* ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2
	/*
	 * SDER32_EL2 register is saved only when EL2 and EL1 are
	 * capable of using AArch32 and FEAT_SEL2 is supported.
	 */
	mrs	x15, sder32_el2
	str	x15, [x0, #CTX_SDER32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_save
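
/* -----------------------------------------------------
 * Illustrative call sequence (a sketch only, not part of
 * the runtime flow in this file): the caller must point
 * 'x0' at the EL2 system register area of the CPU context
 * before branching here, for example (register usage and
 * the offset name are assumptions for illustration):
 *
 *	add	x0, x9, #CTX_EL2_SYSREGS_OFFSET	// x9 = context base (assumed)
 *	bl	el2_sysregs_context_save
 *
 * In TF-A this is normally driven from the C context
 * management code rather than called directly; the
 * matching el2_sysregs_context_restore below expects the
 * same layout in 'x0'.
 * -----------------------------------------------------
 */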


/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to restore the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
#endif /* RAS_EXTENSION */

#if ENABLE_FEAT_SEL2
	/*
	 * VSTCR_EL2 and VSTTBR_EL2 registers are restored only when FEAT_SEL2
	 * is supported.
	 */
	ldp	x13, x14, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x13
	msr	vsttbr_el2, x14
#endif /* ENABLE_FEAT_SEL2 */

415	/*
416	 * SDER32_EL2 register is restored only when EL2 and EL1 capable of using
417	 * Aarch32 and FEAT_SEL2 is supported.
418	 */
419	ldr	x15, [x0, #CTX_SDER32_EL2]
420	msr	sder32_el2, x15
421#endif /* CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
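
/* ------------------------------------------------------------------
 * Illustrative usage (a sketch only): both EL1 functions above take
 * the base of an 'el1_sys_regs' area in 'x0'. A hypothetical caller
 * holding the CPU context base in x9 could do:
 *
 *	add	x0, x9, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 *
 * In TF-A these functions are normally invoked from the C context
 * management code during world switches rather than directly from
 * assembly.
 * ------------------------------------------------------------------
 */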

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using
 * x9-x17 (temporary caller-saved registers according to the AArch64
 * PCS) to save the floating point register context. It assumes that
 * 'x0' points to a 'fp_regs' structure where the register context
 * will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using x9-x17
 * (temporary caller-saved registers according to the AArch64 PCS) to
 * restore the floating point register context. It assumes that 'x0'
 * points to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set to their default values the PSTATE bits that are not set by
	 * hardware when an exception is taken, as described in the
	 * AArch64.TakeException() pseudocode function in Arm DDI 0487F.c,
	 * page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, and the
 * call came from Non-secure state, PMCR_EL0 is saved and the Cycle
 * Counter is disabled.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we always save and restore these registers
 * on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */
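
/* ------------------------------------------------------------------
 * Note: the macro above assumes SP_EL3 points to the base of the
 * current CPU's 'cpu_context_t', so that CTX_GPREGS_OFFSET,
 * CTX_EL3STATE_OFFSET and CTX_PAUTH_REGS_OFFSET select the right
 * sub-structures. A simplified sketch of that layout (per context.h,
 * shown here for orientation only, other areas are build dependent):
 *
 *	cpu_context:
 *		gpregs_ctx	at CTX_GPREGS_OFFSET
 *		el3state_ctx	at CTX_EL3STATE_OFFSET
 *		...
 * ------------------------------------------------------------------
 */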

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing for entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any of the PSTATE bits
 * that are not set by hardware, according to the
 * AArch64.TakeException pseudocode in the Arm Architecture
 * Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
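
/* -----------------------------------------------------------------
 * Usage note (informational): prepare_el3_entry is expected to be
 * called while 'sp' points to the current CPU context, since the
 * save_gp_pmcr_pauth_regs macro it invokes stores everything
 * relative to 'sp'. A sketch of a hypothetical EL3 exception
 * handler built around it:
 *
 *	bl	prepare_el3_entry
 *	// ... handle the SMC or interrupt ...
 *	b	el3_exit
 * -----------------------------------------------------------------
 */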

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step ensures that the stage 1 page table walk is disabled;
	 * the second ensures that the page table walker uses the
	 * TCR_EL1.EPDx bits to perform address translation. The ISB
	 * ensures that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid an identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
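
/*
 * Note: the inverse of the sequence above is performed by the
 * restore_ptw_el1_sys_regs macro, which el3_exit below invokes and
 * which, when ERRATA_SPECULATIVE_AT is enabled, is expected to put
 * back the SCTLR_EL1 and TCR_EL1 values saved here before the ERET
 * to the lower EL.
 */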

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We are running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
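
/* ------------------------------------------------------------------
 * Usage note (informational): el3_exit is typically reached with a
 * direct branch at the end of SMC or interrupt handling, once the
 * EL3 runtime stack is back on SP_EL0 and the C handler has
 * populated the return context, e.g. (sketch only):
 *
 *	// ... C handler has written the return values into the context
 *	b	el3_exit
 *
 * Control then leaves EL3 via the exception_return at the end of the
 * function above.
 * ------------------------------------------------------------------
 */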