/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

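	/* Save MTE system registers if the build has instructed so */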
#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

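	/* Save MPAM EL2 registers if the build has instructed so */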
#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

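	/* Save Armv8.6 fine-grained trap registers and the physical counter offset */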
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

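	/* Save EL2 registers that are only context-switched from Armv8.4 onwards */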
#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

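	/* SCXTNUM_EL2 is only architected from Armv8.5 onwards */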
#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

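	/* Restore MTE system registers if the build has instructed so */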
#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

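	/* Restore MPAM EL2 registers if the build has instructed so */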
#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

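	/* Restore Armv8.6 fine-grained trap registers and the physical counter offset */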
#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

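	/* Restore EL2 registers that are only context-switched from Armv8.4 onwards */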
#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

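	/* SCXTNUM_EL2 is only architected from Armv8.5 onwards */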
#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

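	/*
	 * When the speculative AT errata workaround is enabled, SCTLR_EL1
	 * and TCR_EL1 are saved by save_and_update_ptw_el1_sys_regs instead.
	 */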
#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

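	/*
	 * When the speculative AT errata workaround is enabled, SCTLR_EL1
	 * and TCR_EL1 are restored via restore_ptw_el1_sys_regs in el3_exit.
	 */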
#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not disabled
 * and the call is made from Non-secure state, PMCR_EL0 is saved and
 * the Cycle Counter is disabled.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * forces the page table walker to use the TCR_EL1.EPDx bits
	 * when performing address translation. The ISB ensures that
	 * the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable page table walks
	 *    by stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force the TCR_EL1.EPDx bits to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore the general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit