xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision 6401776747a70023e5b49c5a9a528292e97fce0e)
/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif
	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif

#if ENABLE_FEAT_FGT
	mrs	x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
#endif

#if ENABLE_FEAT_ECV
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x12, contextidr_el2
	str	x12, [x0, #CTX_CONTEXTIDR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x13, sder32_el2
	str	x13, [x0, #CTX_SDER32_EL2]
#endif
	mrs	x14, ttbr1_el2
	mrs	x15, vdisr_el2
	stp	x14, x15, [x0, #CTX_TTBR1_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x9, vsesr_el2
	mrs	x10, vstcr_el2
	stp	x9, x10, [x0, #CTX_VSESR_EL2]

	mrs	x11, vsttbr_el2
	mrs	x12, TRFCR_EL2
	stp	x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif

	ret
endfunc el2_sysregs_context_save


/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to restore the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif
	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif

#if ENABLE_FEAT_FGT
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif

#if ENABLE_FEAT_ECV
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldr	x12, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x12

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x13, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x13
#endif
	ldp	x14, x15, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x14
	msr	vdisr_el2, x15

#if CTX_INCLUDE_NEVE_REGS
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x9
	msr	vstcr_el2, x10

	ldp	x11, x12, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x11
	msr	TRFCR_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

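	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * saved by save_and_update_ptw_el1_sys_regs below instead of here.
	 */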
#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to include them */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build is configured to switch them */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build is configured to include them */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

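	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * restored later via the restore_ptw_el1_sys_regs macro in el3_exit
	 * rather than here.
	 */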
#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to include them */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build is configured to switch them */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build is configured to include them */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save the floating point register context. It assumes that 'x0'
 * is pointing to a 'fp_regs' structure where the register context
 * will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore the floating point register context. It assumes that 'x0'
 * is pointing to a 'fp_regs' structure from where the register
 * context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter has been disabled
 * in MDCR_EL3 (possible when ARMv8.5-PMU is implemented); if it has
 * not, PMCR_EL0 is saved when called from Non-secure state and the
 * cycle counter is then disabled.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
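	/* Save SP_EL0, using x18 as scratch (the clobber noted above) */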
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
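	/* Restore SP_EL0 first, reusing x28 as scratch before x28/x29 are reloaded */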
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second makes
	 * the page table walker use the TCR_EL1.EPDx bits to perform
	 * address translation. The ISB ensures that the CPU performs
	 * these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the page table walker to honour the
	 *    TCR_EL1.EPDx bits.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
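	/*
	 * Counterpart of save_and_update_ptw_el1_sys_regs: with
	 * ERRATA_SPECULATIVE_AT enabled this restores the SCTLR_EL1 and
	 * TCR_EL1 values saved there, otherwise it expands to nothing.
	 */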
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
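	/* x30 is not restored by restore_gp_pmcr_pauth_regs; reload it here */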
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore there is no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
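	/* Clear the 'is in EL3' flag in the context before leaving EL3 */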
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit