/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using only x9-x16 (temporary caller-saved
 * registers) to save/restore the EL2 system register
 * context. The el2_sysregs_context_save/restore_common
 * functions save and restore the registers that are
 * common to all configurations. The remaining functions
 * save and restore the EL2 system registers that are
 * present only when a particular feature is enabled.
 * All functions assume that 'x0' points to an
 * 'el2_sys_regs' structure where the register context
 * will be saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
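/* -----------------------------------------------------
 * Illustration only: a minimal C sketch of how an EL3
 * context-management caller could combine the routines
 * above, saving the common registers first and then the
 * feature-specific ones. The el2_sysregs_t type and the
 * save_el2_sysregs() wrapper are hypothetical names for
 * this example; only the el2_sysregs_context_save_*
 * functions below are actually defined in this file.
 *
 *   typedef struct el2_sysregs el2_sysregs_t;
 *
 *   void el2_sysregs_context_save_common(el2_sysregs_t *regs);
 *   void el2_sysregs_context_save_mte(el2_sysregs_t *regs);
 *   void el2_sysregs_context_save_ras(el2_sysregs_t *regs);
 *   void el2_sysregs_context_save_nv2(el2_sysregs_t *regs);
 *
 *   static void save_el2_sysregs(el2_sysregs_t *regs)
 *   {
 *   	el2_sysregs_context_save_common(regs);
 *   #if CTX_INCLUDE_MTE_REGS
 *   	el2_sysregs_context_save_mte(regs);
 *   #endif
 *   #if RAS_EXTENSION
 *   	el2_sysregs_context_save_ras(regs);
 *   #endif
 *   #if CTX_INCLUDE_NEVE_REGS
 *   	el2_sysregs_context_save_nv2(regs);
 *   #endif
 *   }
 * -----------------------------------------------------
 */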
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using only
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an 'el1_sys_regs'
 * structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
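/* ------------------------------------------------------------------
 * Illustration only: the stp/ldp pairs used throughout these routines
 * rely on the context offsets from context.h naming adjacent 8-byte
 * slots, so one instruction covers two registers. A hypothetical C
 * view of the first pair (the field names here are illustrative, the
 * real layout comes from context.h):
 *
 *   #include <stdint.h>
 *
 *   typedef struct el1_sysregs {
 *   	uint64_t spsr_el1;	// at offset CTX_SPSR_EL1
 *   	uint64_t elr_el1;	// at offset CTX_SPSR_EL1 + 8U
 *   	// ... remaining registers follow in save order ...
 *   } el1_sysregs_t;
 * ------------------------------------------------------------------
 */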
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using only
 * x9-x17 (temporary caller-saved registers) to restore the EL1 system
 * register context. It assumes that 'x0' points to an 'el1_sys_regs'
 * structure from where the register context will be restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using only
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to an 'fp_regs'
 * structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
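/* ------------------------------------------------------------------
 * Illustration only: a caller that did not want to rely on the
 * assumption above could clear the trap before using these routines.
 * A C sketch, where the read_/write_ accessors, TFP_BIT, ctx and
 * get_fpregs_ctx() are hypothetical names for this example:
 *
 *   uint64_t cptr = read_cptr_el3();
 *
 *   if ((cptr & TFP_BIT) != 0U) {
 *   	write_cptr_el3(cptr & ~TFP_BIT);	// stop trapping FP/SIMD accesses
 *   	isb();					// make the new CPTR_EL3 value visible
 *   }
 *   fpregs_context_save(get_fpregs_ctx(ctx));
 * ------------------------------------------------------------------
 */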
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using only
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to an 'fp_regs'
 * structure from where the register context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB is required here, as the ERET used to
	 * switch to secure EL1 or the non-secure world covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov     x8, #DIT_BIT
	msr     DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we always save and restore these registers on
 * entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable; prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
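	/* ----------------------------------------------------------
	 * Illustration only: the check and save below, written as a
	 * C sketch. The read_/write_ accessors and the ctx pointer
	 * are hypothetical names for this example:
	 *
	 *   if ((read_mdcr_el3() & (MDCR_SCCD_BIT | MDCR_MCCD_BIT)) == 0U) {
	 *   	uint64_t pmcr = read_pmcr_el0();
	 *
	 *   	if ((read_scr_el3() & SCR_NS_BIT) != 0U)
	 *   		ctx->pmcr_el0 = pmcr;	// save only for Non-secure callers
	 *
	 *   	write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT);	// stop the cycle counter
	 *   	isb();
	 *   }
	 * ----------------------------------------------------------
	 */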
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state.
	 * Therefore, PMCR_EL0 needs to be saved before the world
	 * switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any PSTATE bits that are
 * not set by hardware according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walk.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The order below must be followed to disable the page table
	 * walk for the lower ELs (EL1 and EL0). The first step ensures
	 * that the stage 1 page table walk is disabled, and the second
	 * step ensures that the page table walker uses the TCR_EL1.EPDx
	 * bits for address translation. The ISB ensures that the CPU
	 * performs these two steps in order (see the illustrative C
	 * sketch after this comment).
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
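	/* ------------------------------------------------------------
	 * Illustration only: the same two-step sequence as a C sketch,
	 * with hypothetical read_/write_ accessors:
	 *
	 *   write_tcr_el1(read_tcr_el1() | TCR_EPD0_BIT | TCR_EPD1_BIT);
	 *   isb();	// step 1 must take effect before step 2
	 *   write_sctlr_el1(read_sctlr_el1() | SCTLR_M_BIT);
	 *   isb();
	 * ------------------------------------------------------------
	 */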
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
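	/* ----------------------------------------------------------
	 * Illustration only, as a C sketch with hypothetical
	 * accessors and ctx fields:
	 *
	 *   write_cptr_el3(ctx->cptr_el3);
	 *
	 *   if ((ctx->cptr_el3 & CPTR_EZ_BIT) != 0U) {
	 *   	isb();				// let the CPTR_EL3 write settle
	 *   	write_zcr_el3(ctx->zcr_el3);	// only then access ZCR_EL3
	 *   }
	 * ----------------------------------------------------------
	 */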
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit