xref: /rk3399_ARM-atf/include/lib/el3_runtime/aarch64/context.h (revision 308ebfa18859c89c8b630c1c130e7002095e875f)
/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/el3_runtime/context_el1.h>
#include <lib/el3_runtime/context_el2.h>
#include <lib/el3_runtime/cpu_data.h>
#include <lib/el3_runtime/simd_ctx.h>
#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_MDCR_EL3		U(0x38)
/* Constants required to support nested exception handling in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x40)
/*
 * General-purpose flag used to save various EL3 states:
 * FFH mode : used to indicate whether a nested exception is being handled
 * KFH mode : used as a counter value
 */
#define CTX_NESTED_EA_FLAG	U(0x48)
#if FFH_SUPPORT
 #define CTX_SAVED_ESR_EL3	U(0x50)
 #define CTX_SAVED_SPSR_EL3	U(0x58)
 #define CTX_SAVED_GPREG_LR	U(0x60)
 #define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
 #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_FPREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */
#else
#define CTX_FPREGS_END		U(0)
#endif /* CTX_INCLUDE_FPREGS */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * This is essential because, with the EL1 and EL2 context registers decoupled,
 * both will not be present for a given build configuration.
 * As the ERRATA_SPECULATIVE_AT workaround requires the SCTLR_EL1 and TCR_EL1
 * registers independently of the above logic, explicit context entries need to
 * be reserved for these registers.
 *
 * NOTE: Based on this we end up with the following configurations, depending
 * on the presence of the errata workaround and the inclusion of EL1 or EL2
 * context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context| Memory allocation(Sctlr_el1,Tcr_el1)|
 * ============================================================================
 * |        0              |      0     |            None                     |
 * |        0              |      1     |    EL1 C-Context structure          |
 * |        1              |      0     |    Errata Context Offset Entries    |
 * |        1              |      1     |    Errata Context Offset Entries    |
 * ============================================================================
 *
 * In the above table, ERRATA_SPECULATIVE_AT=1 with EL1_Context=0 implies that
 * only the EL2 context is present, and memory for the SCTLR_EL1 and TCR_EL1
 * registers is reserved explicitly here under the ERRATA_SPECULATIVE_AT build
 * flag.
 *
 * When EL1_Context=1 and ERRATA_SPECULATIVE_AT=1, the SCTLR_EL1 and TCR_EL1
 * registers are modified by the errata workaround early in the code flow,
 * before the EL1 context save and restore operations, so their context memory
 * is still reserved explicitly under the errata logic here. These registers
 * are not part of the EL1 context save and restore routines.
 *
 * Only when ERRATA_SPECULATIVE_AT=0 and EL1_Context=1 are SCTLR_EL1 and
 * TCR_EL1 part of the EL1 context structure (context_el1.h).
 * -----------------------------------------------------------------------------
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
#define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END		U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_ZCR_EL3			U(0x8)
#define CTX_MPAM3_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x18)

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	}  __aligned(16) name##_t

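/*
 * For illustration only (a sketch, not part of the header's definitions):
 * DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) below is expected to expand
 * roughly to a 16-byte aligned array of 64-bit slots that assembler code
 * indexes using the CTX_GPREG_* byte offsets defined above:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREGS_END >> DWORD_SHIFT];
 *	} __aligned(16) gp_regs_t;
 */
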
/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)

#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18 and
 * lr are saved, as the compiler is expected to preserve the callee-saved
 * registers if they are used by the C runtime, and the assembler does not
 * touch the rest. In the case of a world switch during exception handling,
 * however, the callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with the ERRATA_SPECULATIVE_AT workaround */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library is used to ensure that SP_EL3
 * always points to an instance of this structure at exception entry
 * and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;

#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

	el1_sysregs_t el1_sysregs_ctx;

#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif

} cpu_context_t;

/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_zcr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
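
/*
 * Illustrative sketch only: the per-world array is indexed by the security
 * state of the world being managed. The index macros used below
 * (CPU_CONTEXT_SECURE, CPU_CONTEXT_NS) are assumed to be provided by
 * <lib/el3_runtime/cpu_data.h>; check that header for the exact definitions.
 *
 *	uint64_t ns_cptr = per_world_context[CPU_CONTEXT_NS].ctx_cptr_el3;
 *	per_world_context[CPU_CONTEXT_SECURE].ctx_mpam3_el3 = 0ULL;
 */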

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif
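
/*
 * Usage sketch only (not part of this header): combining the accessor macros
 * with read_ctx_reg()/write_ctx_reg() to inspect or patch a saved context.
 * The handle 'ctx' is assumed to come from the context management library
 * (e.g. cm_get_context()); see context_mgmt.h for the actual API.
 *
 *	void *ctx = cm_get_context(NON_SECURE);
 *	uint64_t elr = read_ctx_reg(get_el3state_ctx(ctx), CTX_ELR_EL3);
 *	write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, 0ULL);
 */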

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and compiler views of the structure member
 * offsets are the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx),
	assert_core_context_fp_offset_mismatch);
#endif /* CTX_INCLUDE_FPREGS */

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an AAPCS64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
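
/*
 * Usage sketch only (not part of this header): populating the first three
 * AAPCS64 argument registers of a saved lower-EL context before returning
 * to it. The 'ctx' handle and the 'arg1'/'arg2' values are hypothetical.
 *
 *	set_aapcs_args2(ctx, 0ULL, arg1, arg2);
 */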

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(simd_regs_t *regs);
void fpregs_context_restore(simd_regs_t *regs);
#endif

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */