/* xref: /rk3399_ARM-atf/include/lib/el3_runtime/aarch64/context.h (revision a1032beb656d78d1cffc97fa64c961d098b23b48) */
1 /*
2  * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef CONTEXT_H
8 #define CONTEXT_H
9 
10 #if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
11 #include <lib/el3_runtime/context_el2.h>
12 #else
/**
 * EL1 context is required when:
 * IMAGE_BL1 || ((!CTX_INCLUDE_EL2_REGS) && IMAGE_BL31)
 */
17 #include <lib/el3_runtime/context_el1.h>
18 #endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */
19 
20 #include <lib/el3_runtime/cpu_data.h>
21 #include <lib/el3_runtime/simd_ctx.h>
22 #include <lib/utils_def.h>
23 #include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */
24 
25 /*******************************************************************************
26  * Constants that allow assembler code to access members of and the 'gp_regs'
27  * structure at their correct offsets.
28  ******************************************************************************/
29 #define CTX_GPREGS_OFFSET	U(0x0)
30 #define CTX_GPREG_X0		U(0x0)
31 #define CTX_GPREG_X1		U(0x8)
32 #define CTX_GPREG_X2		U(0x10)
33 #define CTX_GPREG_X3		U(0x18)
34 #define CTX_GPREG_X4		U(0x20)
35 #define CTX_GPREG_X5		U(0x28)
36 #define CTX_GPREG_X6		U(0x30)
37 #define CTX_GPREG_X7		U(0x38)
38 #define CTX_GPREG_X8		U(0x40)
39 #define CTX_GPREG_X9		U(0x48)
40 #define CTX_GPREG_X10		U(0x50)
41 #define CTX_GPREG_X11		U(0x58)
42 #define CTX_GPREG_X12		U(0x60)
43 #define CTX_GPREG_X13		U(0x68)
44 #define CTX_GPREG_X14		U(0x70)
45 #define CTX_GPREG_X15		U(0x78)
46 #define CTX_GPREG_X16		U(0x80)
47 #define CTX_GPREG_X17		U(0x88)
48 #define CTX_GPREG_X18		U(0x90)
49 #define CTX_GPREG_X19		U(0x98)
50 #define CTX_GPREG_X20		U(0xa0)
51 #define CTX_GPREG_X21		U(0xa8)
52 #define CTX_GPREG_X22		U(0xb0)
53 #define CTX_GPREG_X23		U(0xb8)
54 #define CTX_GPREG_X24		U(0xc0)
55 #define CTX_GPREG_X25		U(0xc8)
56 #define CTX_GPREG_X26		U(0xd0)
57 #define CTX_GPREG_X27		U(0xd8)
58 #define CTX_GPREG_X28		U(0xe0)
59 #define CTX_GPREG_X29		U(0xe8)
60 #define CTX_GPREG_LR		U(0xf0)
61 #define CTX_GPREG_SP_EL0	U(0xf8)
62 #define CTX_GPREGS_END		U(0x100)
63 
64 /*******************************************************************************
65  * Constants that allow assembler code to access members of and the 'el3_state'
66  * structure at their correct offsets. Note that some of the registers are only
67  * 32-bits wide but are stored as 64-bit values for convenience
68  ******************************************************************************/
69 #define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
70 #define CTX_SCR_EL3		U(0x0)
71 #define CTX_RUNTIME_SP		U(0x8)
72 #define CTX_SPSR_EL3		U(0x10)
73 #define CTX_ELR_EL3		U(0x18)
74 #define CTX_PMCR_EL0		U(0x20)
75 #define CTX_IS_IN_EL3		U(0x28)
76 #define CTX_MDCR_EL3		U(0x30)
77 /* Constants required in supporting nested exception in EL3 */
78 #define CTX_SAVED_ELR_EL3	U(0x38)
79 /*
80  * General purpose flag, to save various EL3 states
81  * FFH mode : Used to identify if handling nested exception
82  * KFH mode : Used as counter value
83  */
84 #define CTX_NESTED_EA_FLAG	U(0x40)
85 #if FFH_SUPPORT
86  #define CTX_SAVED_ESR_EL3	U(0x48)
87  #define CTX_SAVED_SPSR_EL3	U(0x50)
88  #define CTX_SAVED_GPREG_LR	U(0x58)
89  #define CTX_DOUBLE_FAULT_ESR	U(0x60)
90  #define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
91 #else
92  #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
93 #endif /* FFH_SUPPORT */
94 
95 
96 /*******************************************************************************
97  * Registers related to CVE-2018-3639
98  ******************************************************************************/
99 #define CTX_CVE_2018_3639_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
100 #define CTX_CVE_2018_3639_DISABLE	U(0)
101 #define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */
102 
103 /*******************************************************************************
104  * Registers related to ERRATA_SPECULATIVE_AT
105  *
106  * This is essential as with EL1 and EL2 context registers being decoupled,
107  * both will not be present for a given build configuration.
108  * As ERRATA_SPECULATIVE_AT errata requires SCTLR_EL1 and TCR_EL1 registers
109  * independent of the above logic, we need explicit context entries to be
110  * reserved for these registers.
111  *
112  * NOTE: Based on this we end up with following different configurations depending
113  * on the presence of errata and inclusion of EL1 or EL2 context.
114  *
115  * ============================================================================
116  * | ERRATA_SPECULATIVE_AT | EL1 context| Memory allocation(Sctlr_el1,Tcr_el1)|
117  * ============================================================================
118  * |        0              |      0     |            None                     |
119  * |        0              |      1     |    EL1 C-Context structure          |
120  * |        1              |      0     |    Errata Context Offset Entries    |
121  * |        1              |      1     |    Errata Context Offset Entries    |
122  * ============================================================================
123  *
124  * In the above table, when ERRATA_SPECULATIVE_AT=1, EL1_Context=0, it implies
125  * there is only EL2 context and memory for SCTLR_EL1 and TCR_EL1 registers is
126  * reserved explicitly under ERRATA_SPECULATIVE_AT build flag here.
127  *
128  * In situations when EL1_Context=1 and  ERRATA_SPECULATIVE_AT=1, since SCTLR_EL1
129  * and TCR_EL1 registers will be modified under errata and it happens at the
130  * early in the codeflow prior to el1 context (save and restore operations),
131  * context memory still will be reserved under the errata logic here explicitly.
132  * These registers will not be part of EL1 context save & restore routines.
133  *
134  * Only when ERRATA_SPECULATIVE_AT=0, EL1_Context=1, for this combination,
135  * SCTLR_EL1 and TCR_EL1 will be part of EL1 context structure (context_el1.h)
136  * -----------------------------------------------------------------------------
137  ******************************************************************************/
138 #define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
139 #if ERRATA_SPECULATIVE_AT
140 #define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
141 #define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
142 #define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
143 #else
144 #define CTX_ERRATA_SPEC_AT_END		U(0x0)
145 #endif /* ERRATA_SPECULATIVE_AT */
146 
147 /*******************************************************************************
148  * Registers related to ARMv8.3-PAuth.
149  ******************************************************************************/
150 #define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
151 #if CTX_INCLUDE_PAUTH_REGS
152 #define CTX_PACIAKEY_LO		U(0x0)
153 #define CTX_PACIAKEY_HI		U(0x8)
154 #define CTX_PACIBKEY_LO		U(0x10)
155 #define CTX_PACIBKEY_HI		U(0x18)
156 #define CTX_PACDAKEY_LO		U(0x20)
157 #define CTX_PACDAKEY_HI		U(0x28)
158 #define CTX_PACDBKEY_LO		U(0x30)
159 #define CTX_PACDBKEY_HI		U(0x38)
160 #define CTX_PACGAKEY_LO		U(0x40)
161 #define CTX_PACGAKEY_HI		U(0x48)
162 #define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
163 #else
164 #define CTX_PAUTH_REGS_END	U(0)
165 #endif /* CTX_INCLUDE_PAUTH_REGS */
166 
167 /*******************************************************************************
168  * Registers initialised in a per-world context.
169  ******************************************************************************/
170 #define CTX_CPTR_EL3			U(0x0)
171 #define CTX_MPAM3_EL3			U(0x8)
172 #define CTX_PERWORLD_EL3STATE_END	U(0x10)
173 
174 #ifndef __ASSEMBLER__
175 
176 #include <stdint.h>
177 
178 #include <lib/cassert.h>
179 
180 /*
181  * Common constants to help define the 'cpu_context' structure and its
182  * members below.
183  */
184 #define DWORD_SHIFT		U(3)
185 #define DEFINE_REG_STRUCT(name, num_regs)	\
186 	typedef struct name {			\
187 		uint64_t ctx_regs[num_regs];	\
188 	}  __aligned(16) name##_t
189 
190 /* Constants to determine the size of individual context structures */
191 #define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
192 
193 #define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
194 #define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
195 
196 #if ERRATA_SPECULATIVE_AT
197 #define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
198 #endif
199 #if CTX_INCLUDE_PAUTH_REGS
200 # define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
201 #endif
202 
203 /*
204  * AArch64 general purpose register context structure. Usually x0-x18,
205  * lr are saved as the compiler is expected to preserve the remaining
206  * callee saved registers if used by the C runtime and the assembler
207  * does not touch the remaining. But in case of world switch during
208  * exception handling, we need to save the callee registers too.
209  */
210 DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
211 
212 /*
213  * Miscellaneous registers used by EL3 firmware to maintain its state
214  * across exception entries and exits
215  */
216 DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);
217 
218 /* Function pointer used by CVE-2018-3639 dynamic mitigation */
219 DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);
220 
221 /* Registers associated to Errata_Speculative */
222 #if ERRATA_SPECULATIVE_AT
223 DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
224 #endif
225 
226 /* Registers associated to ARMv8.3-PAuth */
227 #if CTX_INCLUDE_PAUTH_REGS
228 DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
229 #endif
230 
231 /*
232  * Macros to access members of any of the above structures using their
233  * offsets
234  */
235 #define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
236 #define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
237 					 = (uint64_t) (val))
238 
239 /*
240  * Top-level context structure which is used by EL3 firmware to preserve
241  * the state of a core at the next lower EL in a given security state and
242  * save enough EL3 meta data to be able to return to that EL and security
243  * state. The context management library will be used to ensure that
244  * SP_EL3 always points to an instance of this structure at exception
245  * entry and exit.
246  */
247 typedef struct cpu_context {
248 	gp_regs_t gpregs_ctx;
249 	el3_state_t el3state_ctx;
250 
251 	cve_2018_3639_t cve_2018_3639_ctx;
252 
253 #if ERRATA_SPECULATIVE_AT
254 	errata_speculative_at_t errata_speculative_at_ctx;
255 #endif
256 
257 #if CTX_INCLUDE_PAUTH_REGS
258 	pauth_t pauth_ctx;
259 #endif
260 
261 #if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
262 	el2_sysregs_t el2_sysregs_ctx;
263 #else
264 	/* El1 context should be included only either for IMAGE_BL1,
265 	 * or for IMAGE_BL31 when CTX_INCLUDE_EL2_REGS=0:
266 	 * When SPMD_SPM_AT_SEL2=1, SPMC at S-EL2 takes care of saving
267 	 * and restoring EL1 registers. In this case, BL31 at EL3 can
268 	 * exclude save and restore of EL1 context registers.
269 	 */
270 	el1_sysregs_t el1_sysregs_ctx;
271 #endif
272 
273 	/* TODO: the CACHE_WRITEBACK_GRANULE alignment is not necessary if this is
274 	 * contained in a per-cpu data structure (i.e. cpu_data_t).
275 	 */
276 } __aligned(CACHE_WRITEBACK_GRANULE) cpu_context_t;
277 
278 /*
279  * Per-World Context.
280  * It stores registers whose values can be shared across CPUs.
281  */
282 typedef struct per_world_context {
283 	uint64_t ctx_cptr_el3;
284 	uint64_t ctx_mpam3_el3;
285 } per_world_context_t;
286 
287 extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
288 
289 /* Macros to access members of the 'cpu_context_t' structure */
290 #define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
291 
292 #if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
293 #define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
294 #else
295 #define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
296 #endif
297 
298 #define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
299 #define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
300 
301 #if ERRATA_SPECULATIVE_AT
302 #define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
303 #endif
304 
305 #if CTX_INCLUDE_PAUTH_REGS
306 # define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
307 #endif
308 
309 /*
310  * Compile time assertions related to the 'cpu_context' structure to
311  * ensure that the assembler and the compiler view of the offsets of
312  * the structure members is the same.
313  */
314 CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
315 	assert_core_context_gp_offset_mismatch);
316 
317 CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
318 	assert_core_context_el3state_offset_mismatch);
319 
320 
321 CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
322 	assert_core_context_cve_2018_3639_offset_mismatch);
323 
324 #if ERRATA_SPECULATIVE_AT
325 CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
326 	assert_core_context_errata_speculative_at_offset_mismatch);
327 #endif
328 
329 #if CTX_INCLUDE_PAUTH_REGS
330 CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
331 	assert_core_context_pauth_offset_mismatch);
332 #endif /* CTX_INCLUDE_PAUTH_REGS */
333 
334 /*
335  * Helper macro to set the general purpose registers that correspond to
336  * parameters in an aapcs_64 call i.e. x0-x7
337  */
338 #define set_aapcs_args0(ctx, x0)				do {	\
339 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
340 	} while (0)
341 #define set_aapcs_args1(ctx, x0, x1)				do {	\
342 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
343 		set_aapcs_args0(ctx, x0);				\
344 	} while (0)
345 #define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
346 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
347 		set_aapcs_args1(ctx, x0, x1);				\
348 	} while (0)
349 #define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
350 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
351 		set_aapcs_args2(ctx, x0, x1, x2);			\
352 	} while (0)
353 #define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
354 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
355 		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
356 	} while (0)
357 #define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
358 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
359 		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
360 	} while (0)
361 #define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
362 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
363 		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
364 	} while (0)
365 #define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
366 		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
367 		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
368 	} while (0)
369 
370 /*******************************************************************************
371  * Function prototypes
372  ******************************************************************************/
373 #if CTX_INCLUDE_FPREGS
374 void fpregs_context_save(simd_regs_t *regs);
375 void fpregs_context_restore(simd_regs_t *regs);
376 #endif
377 
378 /*******************************************************************************
379  * The next four inline functions are required for IMAGE_BL1, as well as for
380  * IMAGE_BL31 for the below combinations.
381  * ============================================================================
382  * | ERRATA_SPECULATIVE_AT| CTX_INCLUDE_EL2_REGS |   Combination              |
383  * ============================================================================
384  * |       0              |       0              |   Valid (EL1 ctx)          |
385  * |______________________|______________________|____________________________|
386  * |                      |                      | Invalid (No Errata/EL1 Ctx)|
387  * |       0              |       1              | Hence commented out.       |
388  * |______________________|______________________|____________________________|
389  * |                      |                      |                            |
390  * |       1              |       0              |   Valid (Errata ctx)       |
391  * |______________________|______________________|____________________________|
392  * |                      |                      |                            |
393  * |       1              |       1              |   Valid (Errata ctx)       |
394  * |______________________|______________________|____________________________|
395  * ============================================================================
396  ******************************************************************************/
397 #if (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS)))
398 
399 static inline void write_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
400 {
401 #if (ERRATA_SPECULATIVE_AT)
402 	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
403 		      CTX_ERRATA_SPEC_AT_SCTLR_EL1, val);
404 #else
405 	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1, val);
406 #endif /* ERRATA_SPECULATIVE_AT */
407 }
408 
409 static inline void write_ctx_tcr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
410 {
411 #if (ERRATA_SPECULATIVE_AT)
412 	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
413 		      CTX_ERRATA_SPEC_AT_TCR_EL1, val);
414 #else
415 	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1, val);
416 #endif /* ERRATA_SPECULATIVE_AT */
417 }
418 
419 static inline u_register_t read_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx)
420 {
421 #if (ERRATA_SPECULATIVE_AT)
422 	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
423 		      CTX_ERRATA_SPEC_AT_SCTLR_EL1);
424 #else
425 	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1);
426 #endif /* ERRATA_SPECULATIVE_AT */
427 }
428 
429 static inline u_register_t read_ctx_tcr_el1_reg_errata(cpu_context_t *ctx)
430 {
431 #if (ERRATA_SPECULATIVE_AT)
432 	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
433 		      CTX_ERRATA_SPEC_AT_TCR_EL1);
434 #else
435 	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1);
436 #endif /* ERRATA_SPECULATIVE_AT */
437 }
438 
439 #endif /* (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS))) */
440 
441 #endif /* __ASSEMBLER__ */
442 
443 #endif /* CONTEXT_H */
444