xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision 0a0a7a9ac82cb79af91f098cedc69cc67bca3978)
/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

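/* -----------------------------------------------------
 * Illustrative call site (a sketch, not taken from this
 * file): the context management code is expected to pass
 * a pointer into the per-CPU context, along the lines of
 *
 *	el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
 *
 * where 'ctx' is a cpu_context_t pointer and
 * get_el2_sysregs_ctx is the accessor from context.h.
 * Treat the exact caller as an assumption; only the x0
 * contract described above is authoritative.
 * -----------------------------------------------------
 */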
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, fpexc32_el2
	mrs	x17, hacr_el2
	stp	x16, x17, [x0, #CTX_FPEXC32_EL2]

	mrs	x9, hcr_el2
	mrs	x10, hpfar_el2
	stp	x9, x10, [x0, #CTX_HCR_EL2]

	mrs	x11, hstr_el2
	mrs	x12, ICC_SRE_EL2
	stp	x11, x12, [x0, #CTX_HSTR_EL2]

	mrs	x13, ICH_HCR_EL2
	mrs	x14, ICH_VMCR_EL2
	stp	x13, x14, [x0, #CTX_ICH_HCR_EL2]

	mrs	x15, mair_el2
	mrs	x16, mdcr_el2
	stp	x15, x16, [x0, #CTX_MAIR_EL2]

	mrs	x17, PMSCR_EL2
	mrs	x9, sctlr_el2
	stp	x17, x9, [x0, #CTX_PMSCR_EL2]

	mrs	x10, spsr_el2
	mrs	x11, sp_el2
	stp	x10, x11, [x0, #CTX_SPSR_EL2]

	mrs	x12, tcr_el2
	mrs	x13, tpidr_el2
	stp	x12, x13, [x0, #CTX_TCR_EL2]

	mrs	x14, ttbr0_el2
	mrs	x15, vbar_el2
	stp	x14, x15, [x0, #CTX_TTBR0_EL2]

	mrs	x16, vmpidr_el2
	mrs	x17, vpidr_el2
	stp	x16, x17, [x0, #CTX_VMPIDR_EL2]

	mrs	x9, vtcr_el2
	mrs	x10, vttbr_el2
	stp	x9, x10, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x11, TFSR_EL2
	str	x11, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif
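
	/*
	 * Note: the fine-grained trap registers above (HFGRTR_EL2,
	 * HFGWTR_EL2, HFGITR_EL2, HDFGRTR_EL2, HDFGWTR_EL2,
	 * HAFGRTR_EL2) come with FEAT_FGT and CNTPOFF_EL2 with
	 * FEAT_ECV; both features are mandatory from Armv8.6, hence
	 * the ARM_ARCH_AT_LEAST(8, 6) build-time guard rather than a
	 * runtime feature probe.
	 */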

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif
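
	/*
	 * Note: the CNTHPS and CNTHVS timers above are the Secure EL2
	 * variants introduced with FEAT_SEL2, while VNCR_EL2
	 * (FEAT_NV2) and TRFCR_EL2 (FEAT_TRF) are likewise Armv8.4
	 * additions, hence the single ARM_ARCH_AT_LEAST(8, 4) guard
	 * around this block.
	 */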

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif
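
	/*
	 * Note: SCXTNUM_EL2 holds the speculation context number
	 * added with FEAT_CSV2_2 in the Armv8.5 timeframe.
	 */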

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_FPEXC32_EL2]
	msr	fpexc32_el2, x16
	msr	hacr_el2, x17

	ldp	x9, x10, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x9
	msr	hpfar_el2, x10

	ldp	x11, x12, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x11
	msr	ICC_SRE_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x13
	msr	ICH_VMCR_EL2, x14

	ldp	x15, x16, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x15
	msr	mdcr_el2, x16

	ldp	x17, x9, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x17
	msr	sctlr_el2, x9

	ldp	x10, x11, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x10
	msr	sp_el2, x11

	ldp	x12, x13, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x12
	msr	tpidr_el2, x13

	ldp	x14, x15, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x14
	msr	vbar_el2, x15

	ldp	x16, x17, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x16
	msr	vpidr_el2, x17

	ldp	x9, x10, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x9
	msr	vttbr_el2, x10

#if CTX_INCLUDE_MTE_REGS
	ldr	x11, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x11
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
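/* ------------------------------------------------------------------
 * Illustrative call site (a sketch, not part of this file): the
 * context management code wraps this save/restore pair roughly as
 *
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
 *	...
 *	el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx));
 *
 * with 'ctx' a cpu_context_t pointer for the target security state
 * and get_el1_sysregs_ctx the accessor from context.h. Treat the
 * exact wrapper as an assumption; only the x0 contract above is
 * authoritative.
 * ------------------------------------------------------------------
 */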
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif
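
	/*
	 * Note: DACR32_EL2 and IFSR32_EL2 are the AArch64 views of the
	 * AArch32 DACR and IFSR registers, so they are only meaningful
	 * when lower ELs can run in AArch32 (CTX_INCLUDE_AARCH32_REGS).
	 */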

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor enables this trap, so the bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
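/* ------------------------------------------------------------------
 * A minimal sketch (an assumption, not current TF-A behaviour) of
 * what a caller would have to do before using these helpers if
 * CPTR_EL3.TFP were ever set, i.e. untrap FP/SIMD accesses first:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 *
 * TFP_BIT here is the CPTR_EL3.TFP mask defined in arch.h.
 * ------------------------------------------------------------------
 */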
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor enables this trap, so the bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function saves all the general purpose and, if
 * enabled, the ARMv8.3-PAuth registers.
 * It also checks whether the Secure Cycle Counter has been disabled
 * via MDCR_EL3 (possible only when ARMv8.5-PMU is implemented); if
 * it has not, PMCR_EL0 is saved when called from Non-secure state
 * and the Cycle Counter is then disabled.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * These are implemented as functions rather than macros so that each
 * exception vector's code fits within its 32-instruction slot.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization that set MDCR_EL3.SCCD
	 * to 1 failed, meaning that ARMv8.5-PMU is not implemented,
	 * the bit reads as zero and PMCR_EL0 should therefore be
	 * saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET
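
	/*
	 * x19-x29 can be used as scratch here: their original values
	 * were stored to the context by the stp sequence above and
	 * will be reloaded from it by restore_gp_pmcr_pauth_regs.
	 */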

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter was not disabled in MDCR_EL3, which
	 * is only possible when ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization that set MDCR_EL3.SCCD
	 * to 1 failed, meaning that ARMv8.5-PMU is not implemented
	 * and PMCR_EL0 should therefore be restored from the
	 * Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
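	/*
	 * x28 is used as scratch to restore SP_EL0 first; its own
	 * value is reloaded from the context immediately afterwards.
	 */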
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore the mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore the general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
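	/*
	 * exception_return is the TF-A macro (defined in
	 * asm_macros.S) that performs the final ERET; on builds where
	 * it matters it may also place a speculation barrier after
	 * the ERET. Treat that barrier detail as an assumption of
	 * this note; the definition lives in asm_macros.S.
	 */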
	exception_return

endfunc el3_exit