/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/el3_runtime/context_el2.h>
#include <lib/el3_runtime/cpu_data.h>
#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets (see the usage example after these
 * definitions).
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)
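
/*
 * Illustrative example (a sketch, assuming SP_EL3 points at the base of the
 * 'cpu_context'): assembler code that saves the incoming general purpose
 * registers would use these offsets roughly as follows:
 *
 *	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 */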

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets (see the example after this block). Note
 * that some of the registers are only 32 bits wide but are stored as 64-bit
 * values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
/* Constants required to support nested exception handling in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x38)
/*
 * General purpose flag used to save various EL3 states.
 * FFH mode: used to identify whether a nested exception is being handled.
 * KFH mode: used as a counter value.
 */
#define CTX_NESTED_EA_FLAG	U(0x40)
#if FFH_SUPPORT
 #define CTX_SAVED_ESR_EL3	U(0x48)
 #define CTX_SAVED_SPSR_EL3	U(0x50)
 #define CTX_SAVED_GPREG_LR	U(0x58)
 #define CTX_EL3STATE_END	U(0x60) /* Align to the next 16 byte boundary */
#else
 #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */
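
/*
 * Illustrative example (a sketch, assuming SP_EL3 points at the 'cpu_context'
 * base): assembler code reaches a saved EL3 register by combining the section
 * offset with the register offset, e.g.
 *
 *	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
 */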

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el1_sys_regs'
 * structure at their correct offsets. Note that some of the registers are
 * only 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1		U(0x0)
#define CTX_ELR_EL1		U(0x8)
#define CTX_SCTLR_EL1		U(0x10)
#define CTX_TCR_EL1		U(0x18)
#define CTX_CPACR_EL1		U(0x20)
#define CTX_CSSELR_EL1		U(0x28)
#define CTX_SP_EL1		U(0x30)
#define CTX_ESR_EL1		U(0x38)
#define CTX_TTBR0_EL1		U(0x40)
#define CTX_TTBR1_EL1		U(0x48)
#define CTX_MAIR_EL1		U(0x50)
#define CTX_AMAIR_EL1		U(0x58)
#define CTX_ACTLR_EL1		U(0x60)
#define CTX_TPIDR_EL1		U(0x68)
#define CTX_TPIDR_EL0		U(0x70)
#define CTX_TPIDRRO_EL0		U(0x78)
#define CTX_PAR_EL1		U(0x80)
#define CTX_FAR_EL1		U(0x88)
#define CTX_AFSR0_EL1		U(0x90)
#define CTX_AFSR1_EL1		U(0x98)
#define CTX_CONTEXTIDR_EL1	U(0xa0)
#define CTX_VBAR_EL1		U(0xa8)
#define CTX_MDCCINT_EL1		U(0xb0)
#define CTX_MDSCR_EL1		U(0xb8)

#define CTX_AARCH64_END		U(0xc0) /* Align to the next 16 byte boundary */

/*
 * If the platform is AArch64-only, there is no need to save and restore these
 * AArch32 registers.
 */
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SPSR_ABT		(CTX_AARCH64_END + U(0x0))
#define CTX_SPSR_UND		(CTX_AARCH64_END + U(0x8))
#define CTX_SPSR_IRQ		(CTX_AARCH64_END + U(0x10))
#define CTX_SPSR_FIQ		(CTX_AARCH64_END + U(0x18))
#define CTX_DACR32_EL2		(CTX_AARCH64_END + U(0x20))
#define CTX_IFSR32_EL2		(CTX_AARCH64_END + U(0x28))
#define CTX_AARCH32_END		(CTX_AARCH64_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_AARCH32_END		CTX_AARCH64_END
#endif /* CTX_INCLUDE_AARCH32_REGS */

/*
 * If the timer registers aren't saved and restored, we don't have to reserve
 * space for them in the context.
 */
#if NS_TIMER_SWITCH
#define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
#define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
#define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
#define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
#define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
#define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */

#if ENABLE_FEAT_MTE2
#define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
#define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
#define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
#define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))
#define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20)) /* Align to the next 16 byte boundary */
#else
#define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
#endif /* ENABLE_FEAT_MTE2 */

#if ENABLE_FEAT_RAS
#define CTX_DISR_EL1		(CTX_MTE_REGS_END + U(0x0))
#define CTX_RAS_REGS_END	(CTX_MTE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_RAS_REGS_END	CTX_MTE_REGS_END
#endif /* ENABLE_FEAT_RAS */

#if ENABLE_FEAT_S1PIE
#define CTX_PIRE0_EL1		(CTX_RAS_REGS_END + U(0x0))
#define CTX_PIR_EL1		(CTX_RAS_REGS_END + U(0x8))
#define CTX_S1PIE_REGS_END	(CTX_RAS_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1PIE_REGS_END	CTX_RAS_REGS_END
#endif /* ENABLE_FEAT_S1PIE */

#if ENABLE_FEAT_S1POE
#define CTX_POR_EL1		(CTX_S1PIE_REGS_END + U(0x0))
#define CTX_S1POE_REGS_END	(CTX_S1PIE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1POE_REGS_END	CTX_S1PIE_REGS_END
#endif /* ENABLE_FEAT_S1POE */

#if ENABLE_FEAT_S2POE
#define CTX_S2POR_EL1		(CTX_S1POE_REGS_END + U(0x0))
#define CTX_S2POE_REGS_END	(CTX_S1POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S2POE_REGS_END	CTX_S1POE_REGS_END
#endif /* ENABLE_FEAT_S2POE */

#if ENABLE_FEAT_TCR2
#define CTX_TCR2_EL1		(CTX_S2POE_REGS_END + U(0x0))
#define CTX_TCR2_REGS_END	(CTX_S2POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_TCR2_REGS_END	CTX_S2POE_REGS_END
#endif /* ENABLE_FEAT_TCR2 */

/*
 * End of system registers.
 */
#define CTX_EL1_SYSREGS_END	CTX_TCR2_REGS_END

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */
#else
#define CTX_FPREGS_END		U(0)
#endif /* CTX_INCLUDE_FPREGS */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers related to ARMv8.2-MPAM.
 ******************************************************************************/
#define CTX_MPAM_REGS_OFFSET	(CTX_PAUTH_REGS_OFFSET + CTX_PAUTH_REGS_END)
#if CTX_INCLUDE_MPAM_REGS
#define CTX_MPAM2_EL2		U(0x0)
#define CTX_MPAMHCR_EL2		U(0x8)
#define CTX_MPAMVPM0_EL2	U(0x10)
#define CTX_MPAMVPM1_EL2	U(0x18)
#define CTX_MPAMVPM2_EL2	U(0x20)
#define CTX_MPAMVPM3_EL2	U(0x28)
#define CTX_MPAMVPM4_EL2	U(0x30)
#define CTX_MPAMVPM5_EL2	U(0x38)
#define CTX_MPAMVPM6_EL2	U(0x40)
#define CTX_MPAMVPM7_EL2	U(0x48)
#define CTX_MPAMVPMV_EL2	U(0x50)
#define CTX_MPAM_REGS_END	U(0x60)
#else
#define CTX_MPAM_REGS_END	U(0x0)
#endif /* CTX_INCLUDE_MPAM_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_ZCR_EL3			U(0x8)
#define CTX_MPAM3_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x18)
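
/*
 * Note: these offsets are expected to mirror the layout of the
 * 'per_world_context' structure defined later in this header (ctx_cptr_el3 at
 * 0x0, ctx_zcr_el3 at 0x8 and ctx_mpam3_el3 at 0x10).
 */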

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	}  __aligned(16) name##_t
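
/*
 * For example (illustrative only), DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL)
 * expands to:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[32];
 *	} __aligned(16) gp_regs_t;
 *
 * since CTX_GPREGS_END (0x100) shifted right by DWORD_SHIFT (3) yields 32
 * 64-bit entries.
 */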

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)

#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_MPAM_REGS
# define CTX_MPAM_REGS_ALL	(CTX_MPAM_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the remaining
 * callee-saved registers if they are used by the C runtime, and the
 * assembler does not touch them. However, on a world switch during
 * exception handling the callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 EL1 system register context structure for preserving the
 * architectural state during world switches.
 */
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits.
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/* Registers associated with ARMv8.2-MPAM */
#if CTX_INCLUDE_MPAM_REGS
DEFINE_REG_STRUCT(mpam, CTX_MPAM_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets.
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
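
/*
 * Illustrative example (assuming a valid 'cpu_context_t *ctx' and the
 * get_*_ctx() accessors defined further down):
 *
 *	uint64_t scr_el3 = read_ctx_reg(get_el3state_ctx(ctx), CTX_SCR_EL3);
 *	write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, 0);
 */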

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library will be used to ensure that
 * SP_EL3 always points to an instance of this structure at exception
 * entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
	el1_sysregs_t el1_sysregs_ctx;

#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if CTX_INCLUDE_MPAM_REGS
	mpam_t	mpam_ctx;
#endif

#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif

} cpu_context_t;

/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_zcr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
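
/*
 * Illustrative example (a sketch, assuming the CPU_CONTEXT_* indices provided
 * by cpu_data.h): the array is indexed by security state, so every CPU
 * entering a given world observes the same per-world EL3 settings, e.g.
 *
 *	per_world_context[CPU_CONTEXT_NS].ctx_cptr_el3
 */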

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif
#if CTX_INCLUDE_MPAM_REGS
# define get_mpam_ctx(h)	(&((cpu_context_t *) h)->mpam_ctx)
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler's and the compiler's views of the structure
 * member offsets match.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx),
	assert_core_context_el1_sys_offset_mismatch);

#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx),
	assert_core_context_fp_offset_mismatch);
#endif /* CTX_INCLUDE_FPREGS */

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

#if CTX_INCLUDE_MPAM_REGS
CASSERT(CTX_MPAM_REGS_OFFSET == __builtin_offsetof(cpu_context_t, mpam_ctx),
	assert_core_context_mpam_offset_mismatch);
#endif /* CTX_INCLUDE_MPAM_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters of an AAPCS64 call, i.e. x0-x7 (see the example following
 * these macros).
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
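
/*
 * Illustrative example (func_id and arg1-arg3 are hypothetical locals): a
 * service that wants the lower EL to resume with x0-x3 populated could do:
 *
 *	set_aapcs_args3(ctx, func_id, arg1, arg2, arg3);
 */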

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);
#endif
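
/*
 * Illustrative example (assuming a valid 'cpu_context_t *ctx'): callers are
 * expected to pass the FP context embedded in the cpu_context, e.g.
 *
 *	fpregs_context_save(get_fpregs_ctx(ctx));
 *	...
 *	fpregs_context_restore(get_fpregs_ctx(ctx));
 */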

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */