xref: /rk3399_ARM-atf/include/lib/el3_runtime/aarch64/context.h (revision 123002f9171384d976d95935b7f566740d69cc68)
/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/el3_runtime/context_el2.h>
#include <lib/el3_runtime/cpu_data.h>
#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)

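/*
 * Illustrative note (not part of the upstream header): each offset above is
 * the byte offset of a 64-bit slot inside 'gp_regs', so consecutive registers
 * differ by 8. Assembler code typically saves register pairs directly against
 * these offsets, along the lines of the sketch below (assuming SP points to
 * the current cpu_context_t):
 *
 *	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 */
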
/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are
 * only 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_MDCR_EL3		U(0x38)
/* Constants required to support nested exception handling in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x40)
/*
 * General purpose flag used to save various EL3 states.
 * FFH mode: used to identify whether a nested exception is being handled.
 * KFH mode: used as a counter value.
 */
#define CTX_NESTED_EA_FLAG	U(0x48)
#if FFH_SUPPORT
 #define CTX_SAVED_ESR_EL3	U(0x50)
 #define CTX_SAVED_SPSR_EL3	U(0x58)
 #define CTX_SAVED_GPREG_LR	U(0x60)
 #define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
 #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

/*******************************************************************************
 * Constants that allow assembler code to access members of the
 * 'el1_sys_regs' structure at their correct offsets. Note that some of the
 * registers are only 32 bits wide but are stored as 64-bit values for
 * convenience.
 ******************************************************************************/
#define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1		U(0x0)
#define CTX_ELR_EL1		U(0x8)
#define CTX_SCTLR_EL1		U(0x10)
#define CTX_TCR_EL1		U(0x18)
#define CTX_CPACR_EL1		U(0x20)
#define CTX_CSSELR_EL1		U(0x28)
#define CTX_SP_EL1		U(0x30)
#define CTX_ESR_EL1		U(0x38)
#define CTX_TTBR0_EL1		U(0x40)
#define CTX_TTBR1_EL1		U(0x48)
#define CTX_MAIR_EL1		U(0x50)
#define CTX_AMAIR_EL1		U(0x58)
#define CTX_ACTLR_EL1		U(0x60)
#define CTX_TPIDR_EL1		U(0x68)
#define CTX_TPIDR_EL0		U(0x70)
#define CTX_TPIDRRO_EL0		U(0x78)
#define CTX_PAR_EL1		U(0x80)
#define CTX_FAR_EL1		U(0x88)
#define CTX_AFSR0_EL1		U(0x90)
#define CTX_AFSR1_EL1		U(0x98)
#define CTX_CONTEXTIDR_EL1	U(0xa0)
#define CTX_VBAR_EL1		U(0xa8)
#define CTX_MDCCINT_EL1		U(0xb0)
#define CTX_MDSCR_EL1		U(0xb8)

#define CTX_AARCH64_END		U(0xc0) /* Align to the next 16 byte boundary */

/*
 * If the platform is AArch64-only, there is no need to save and restore these
 * AArch32 registers.
 */
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SPSR_ABT		(CTX_AARCH64_END + U(0x0))
#define CTX_SPSR_UND		(CTX_AARCH64_END + U(0x8))
#define CTX_SPSR_IRQ		(CTX_AARCH64_END + U(0x10))
#define CTX_SPSR_FIQ		(CTX_AARCH64_END + U(0x18))
#define CTX_DACR32_EL2		(CTX_AARCH64_END + U(0x20))
#define CTX_IFSR32_EL2		(CTX_AARCH64_END + U(0x28))
#define CTX_AARCH32_END		(CTX_AARCH64_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_AARCH32_END		CTX_AARCH64_END
#endif /* CTX_INCLUDE_AARCH32_REGS */

/*
 * If the timer registers aren't saved and restored, we don't have to reserve
 * space for them in the context.
 */
#if NS_TIMER_SWITCH
#define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
#define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
#define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
#define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
#define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
#define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */

#if ENABLE_FEAT_MTE2
#define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
#define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
#define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
#define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))
#define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20)) /* Align to the next 16 byte boundary */
#else
#define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
#endif /* ENABLE_FEAT_MTE2 */

#if ENABLE_FEAT_RAS
#define CTX_DISR_EL1		(CTX_MTE_REGS_END + U(0x0))
#define CTX_RAS_REGS_END	(CTX_MTE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_RAS_REGS_END	CTX_MTE_REGS_END
#endif /* ENABLE_FEAT_RAS */

#if ENABLE_FEAT_S1PIE
#define CTX_PIRE0_EL1		(CTX_RAS_REGS_END + U(0x0))
#define CTX_PIR_EL1		(CTX_RAS_REGS_END + U(0x8))
#define CTX_S1PIE_REGS_END	(CTX_RAS_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1PIE_REGS_END	CTX_RAS_REGS_END
#endif /* ENABLE_FEAT_S1PIE */

#if ENABLE_FEAT_S1POE
#define CTX_POR_EL1		(CTX_S1PIE_REGS_END + U(0x0))
#define CTX_S1POE_REGS_END	(CTX_S1PIE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1POE_REGS_END	CTX_S1PIE_REGS_END
#endif /* ENABLE_FEAT_S1POE */

#if ENABLE_FEAT_S2POE
#define CTX_S2POR_EL1		(CTX_S1POE_REGS_END + U(0x0))
#define CTX_S2POE_REGS_END	(CTX_S1POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S2POE_REGS_END	CTX_S1POE_REGS_END
#endif /* ENABLE_FEAT_S2POE */

#if ENABLE_FEAT_TCR2
#define CTX_TCR2_EL1		(CTX_S2POE_REGS_END + U(0x0))
#define CTX_TCR2_REGS_END	(CTX_S2POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_TCR2_REGS_END	CTX_S2POE_REGS_END
#endif /* ENABLE_FEAT_TCR2 */

#if ENABLE_TRF_FOR_NS
#define CTX_TRFCR_EL1		(CTX_TCR2_REGS_END + U(0x0))
#define CTX_TRF_REGS_END	(CTX_TCR2_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_TRF_REGS_END	CTX_TCR2_REGS_END
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
#define CTX_SCXTNUM_EL0		(CTX_TRF_REGS_END + U(0x0))
#define CTX_SCXTNUM_EL1		(CTX_TRF_REGS_END + U(0x8))
#define CTX_CSV2_2_REGS_END	(CTX_TRF_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_CSV2_2_REGS_END	CTX_TRF_REGS_END
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_GCS
#define CTX_GCSCR_EL1		(CTX_CSV2_2_REGS_END + U(0x0))
#define CTX_GCSCRE0_EL1		(CTX_CSV2_2_REGS_END + U(0x8))
#define CTX_GCSPR_EL1		(CTX_CSV2_2_REGS_END + U(0x10))
#define CTX_GCSPR_EL0		(CTX_CSV2_2_REGS_END + U(0x18))
#define CTX_GCS_REGS_END	(CTX_CSV2_2_REGS_END + U(0x20)) /* Align to the next 16 byte boundary */
#else
#define CTX_GCS_REGS_END	CTX_CSV2_2_REGS_END
#endif /* ENABLE_FEAT_GCS */

/*
 * End of EL1 system registers.
 */
#define CTX_EL1_SYSREGS_END	CTX_GCS_REGS_END

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */
#else
#define CTX_FPREGS_END		U(0)
#endif /* CTX_INCLUDE_FPREGS */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_ZCR_EL3			U(0x8)
#define CTX_MPAM3_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x18)

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)

#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

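/*
 * Illustrative expansion (not part of the upstream header): CTX_GPREG_ALL
 * evaluates to CTX_GPREGS_END >> DWORD_SHIFT, i.e. 0x100 >> 3 == 32, so
 * DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) below produces roughly:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[32];
 *	} __aligned(16) gp_regs_t;
 */
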
/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the callee-saved
 * registers it uses in the C runtime and the assembler does not touch the
 * rest. However, on a world switch during exception handling, the
 * callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 EL1 system register context structure for preserving the
 * architectural state during world switches.
 */
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits.
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets.
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))

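/*
 * Illustrative usage (not part of the upstream header): a saved register is
 * read or written through the structure pointer and its assembler offset.
 * 'state' below is assumed to be an el3_state_t pointer obtained via
 * get_el3state_ctx() further down, and SCR_NS_BIT is assumed to be available
 * from arch.h.
 *
 *	uint64_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_SCR_EL3, scr_el3 | SCR_NS_BIT);
 */
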
/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library ensures that SP_EL3 always points
 * to an instance of this structure at exception entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
	el1_sysregs_t el1_sysregs_ctx;

#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif

} cpu_context_t;

/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_zcr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif

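/*
 * Illustrative usage (not part of the upstream header): the accessors above
 * are typically combined with write_ctx_reg(). 'ctx' is assumed to be a
 * cpu_context_t pointer, for example one returned by the context management
 * library.
 *
 *	gp_regs_t *gpregs = get_gpregs_ctx(ctx);
 *	write_ctx_reg(gpregs, CTX_GPREG_X0, 0ULL);
 */
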
/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler's and the compiler's views of the offsets of
 * the structure members are the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx),
	assert_core_context_el1_sys_offset_mismatch);

#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx),
	assert_core_context_fp_offset_mismatch);
#endif /* CTX_INCLUDE_FPREGS */

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an aapcs_64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)

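/*
 * Illustrative usage (not part of the upstream header): populating the first
 * two AAPCS64 argument registers of a context before returning to a lower
 * exception level. 'ctx', 'ret_x0' and 'ret_x1' are hypothetical names used
 * only for this sketch.
 *
 *	set_aapcs_args1(ctx, ret_x0, ret_x1);
 */
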
/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);
#endif
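
/*
 * Illustrative usage (not part of the upstream header): when
 * CTX_INCLUDE_FPREGS is enabled, the FP/SIMD state of a context is typically
 * saved before a switch and restored afterwards, e.g.
 *
 *	fpregs_context_save(get_fpregs_ctx(ctx));
 *	...
 *	fpregs_context_restore(get_fpregs_ctx(ctx));
 *
 * where 'ctx' is assumed to be a cpu_context_t pointer.
 */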

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */