xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision f2de48cb143c20ccd7a9c141df3d34cae74049de)
/*
 * Copyright (c) 2013-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit
#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
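/*
 * Registers are read in pairs and stored with stp at the CTX_*_EL2
 * offsets defined in context.h, which describe the layout of the
 * el2_sys_regs structure.
 */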
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif
	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif

#if ENABLE_FEAT_FGT
	mrs	x13, HDFGRTR_EL2
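	/* HAFGRTR_EL2 is only present when FEAT_AMUv1 is implemented */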
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
#endif

#if ENABLE_FEAT_ECV
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x12, contextidr_el2
	str	x12, [x0, #CTX_CONTEXTIDR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x13, sder32_el2
	str	x13, [x0, #CTX_SDER32_EL2]
#endif
	mrs	x14, ttbr1_el2
	mrs	x15, vdisr_el2
	stp	x14, x15, [x0, #CTX_TTBR1_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x9, vsesr_el2
	mrs	x10, vstcr_el2
	stp	x9, x10, [x0, #CTX_VSESR_EL2]

	mrs	x11, vsttbr_el2
	mrs	x12, TRFCR_EL2
	stp	x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to restore the EL2 system register context. It
 * assumes that 'x0' points to an 'el2_sys_regs'
 * structure from where the register context will be
 * restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif
	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif

#if ENABLE_FEAT_FGT
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif

#if ENABLE_FEAT_ECV
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldr	x12, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x12

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x13, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x13
#endif
	ldp	x14, x15, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x14
	msr	vdisr_el2, x15

#if CTX_INCLUDE_NEVE_REGS
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x9
	msr	vstcr_el2, x10

	ldp	x11, x12, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x11
	msr	TRFCR_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an 'el1_sys_regs'
 * structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set to a default value the PSTATE bits that are not set when an
	 * exception is taken, as described in the AArch64.TakeException()
	 * pseudocode function in ARM DDI 0487F.c, page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not disabled,
 * PMCR_EL0 is saved when called from Non-secure state and the
 * Cycle Counter is then disabled.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
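	/* 1: MDCR_EL3.SCCD/MCCD took effect, so no PMCR_EL0 handling is needed */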
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any PSTATE bits that are
 * not set by hardware, according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
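	/* x28 is used as a scratch register to restore SP_EL0 and is reloaded below */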
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for the lower ELs (EL1 and EL0). The
	 * first step disables the stage 1 page table walk, and the
	 * second step ensures that the page table walker uses the
	 * TCR_EL1.EPDx bits to perform address translation. The ISB
	 * ensures that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
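	/* Step 1: disable stage 1 page table walks via TCR_EL1.EPD0/EPD1 */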
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
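	/* Step 2: set SCTLR_EL1.M so the TCR_EL1.EPDx bits are used by the walker */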
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP registers and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
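	/*
	 * zcr_el3 is written via its encoded name (S3_6_C1_C2_0),
	 * presumably so that this file still assembles with toolchains
	 * that do not recognise the SVE register names.
	 */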
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
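	/* Clear the flag that marks this CPU as executing in EL3 */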
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit