/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

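/*
 * Note (assumption): each 'stp' below writes two registers to consecutive
 * 8-byte slots, so it relies on the corresponding CTX_* offsets defined in
 * context.h being adjacent (e.g. CTX_AFSR0_EL2 directly following
 * CTX_ACTLR_EL2).
 */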
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

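	/*
	 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
	 * are not saved here; they are handled by
	 * save_and_update_ptw_el1_sys_regs() further below.
	 */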
#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

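/*
 * Illustrative note: a caller performing a world switch is expected to pair
 * the two functions above, first calling el1_sysregs_context_save with x0
 * pointing at the outgoing world's 'el1_sys_regs' area, then calling
 * el1_sysregs_context_restore with x0 pointing at the incoming world's area.
 */
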
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so this bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so this bit is assumed to be cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, the
 * function saves PMCR_EL0 when called from Non-secure state and then
 * disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * These are functions rather than macros so that their invocation
 * fits within the 32 instructions available per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that stage 1 and stage 2
 * page table walks are disabled.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for lower ELs (EL1 and EL0). The first step
	 * disables stage 1 page table walks, and the second step makes
	 * the page table walker use the TCR_EL1.EPDx bits to perform
	 * address translation. The ISB ensures that the CPU performs
	 * these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walks by
	 *    stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the GP registers and other special registers
 * can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
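
	/*
	 * restore_ptw_el1_sys_regs is expected to restore the SCTLR_EL1 and
	 * TCR_EL1 values saved by save_and_update_ptw_el1_sys_regs() when
	 * ERRATA_SPECULATIVE_AT is enabled, and to be a no-op otherwise.
	 */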
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore the general purpose registers (including x30),
	 * PMCR_EL0 and the ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore there is no need to inspect the DISR_EL1
	 * register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
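	/*
	 * exception_return is assumed to expand to the ERET sequence (plus any
	 * speculation barrier required after it); see asm_macros.S.
	 */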
	exception_return

endfunc el3_exit