/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#include <lib/el3_runtime/context_el2.h>
#else
/*
 * EL1 context is required when:
 * IMAGE_BL1 || ((!CTX_INCLUDE_EL2_REGS) && IMAGE_BL31)
 */
#include <lib/el3_runtime/context_el1.h>
#endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */

#include <lib/el3_runtime/simd_ctx.h>
#include <lib/utils_def.h>
#include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */

#define CPU_CONTEXT_SECURE	UL(0)
#define CPU_CONTEXT_NS		UL(1)
#if ENABLE_RME
#define CPU_CONTEXT_REALM	UL(2)
#define CPU_CONTEXT_NUM		UL(3)
#else
#define CPU_CONTEXT_NUM		UL(2)
#endif

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)

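/*
 * Illustrative sketch (not part of this interface): with SP_EL3 pointing
 * at a 'cpu_context' instance, assembler code can reach a saved register
 * by adding the section offset to the register offset, e.g.:
 *
 *	ldr	x0, [sp, #(CTX_GPREGS_OFFSET + CTX_GPREG_X0)]
 *	str	x1, [sp, #(CTX_GPREGS_OFFSET + CTX_GPREG_X1)]
 */
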
/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_RUNTIME_SP		U(0x8)
#define CTX_SPSR_EL3		U(0x10)
#define CTX_ELR_EL3		U(0x18)
#define CTX_PMCR_EL0		U(0x20)
#define CTX_IS_IN_EL3		U(0x28)
#define CTX_MDCR_EL3		U(0x30)
/* Constants required to support nested exception handling in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x38)
/*
 * General purpose flag used to save various EL3 states:
 * FFH mode: used to identify whether a nested exception is being handled
 * KFH mode: used as a counter value
 */
#define CTX_NESTED_EA_FLAG	U(0x40)
#if FFH_SUPPORT
 #define CTX_SAVED_ESR_EL3	U(0x48)
 #define CTX_SAVED_SPSR_EL3	U(0x50)
 #define CTX_SAVED_GPREG_LR	U(0x58)
 #define CTX_DOUBLE_FAULT_ESR	U(0x60)
 #define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
 #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * These entries are essential because, with the EL1 and EL2 context registers
 * decoupled, only one of the two context structures is present in a given
 * build configuration. Since the ERRATA_SPECULATIVE_AT workaround needs
 * SCTLR_EL1 and TCR_EL1 independently of that logic, explicit context entries
 * are reserved for these registers here.
 *
 * NOTE: This results in the following configurations, depending on the
 * presence of the errata workaround and the inclusion of EL1 or EL2 context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context | Memory allocation                  |
 * |                       |             | (SCTLR_EL1, TCR_EL1)               |
 * ============================================================================
 * |          0            |      0      | None                               |
 * |          0            |      1      | EL1 context structure              |
 * |          1            |      0      | Errata context offset entries      |
 * |          1            |      1      | Errata context offset entries      |
 * ============================================================================
 *
 * In the table above, ERRATA_SPECULATIVE_AT=1 with EL1 context=0 implies that
 * only the EL2 context is present, so memory for SCTLR_EL1 and TCR_EL1 is
 * reserved explicitly here, under the ERRATA_SPECULATIVE_AT build flag.
 *
 * When ERRATA_SPECULATIVE_AT=1 and EL1 context=1, SCTLR_EL1 and TCR_EL1 are
 * modified by the errata workaround early in the code flow, before the EL1
 * context save and restore operations, so their context memory is still
 * reserved here under the errata logic. These registers are not part of the
 * EL1 context save and restore routines.
 *
 * Only when ERRATA_SPECULATIVE_AT=0 and EL1 context=1 are SCTLR_EL1 and
 * TCR_EL1 part of the EL1 context structure (context_el1.h).
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
#define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END		U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_MPAM3_EL3			U(0x8)
#define CTX_PERWORLD_EL3STATE_END	U(0x10)

#ifndef __ASSEMBLER__

#include <assert.h>
#include <stdint.h>

#include <common/ep_info.h>
#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t

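/*
 * For instance, DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) expands to:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREG_ALL];
 *	} __aligned(16) gp_regs_t;
 */
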
/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)

#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the callee-saved
 * registers if the C runtime uses them and the assembler does not touch the
 * rest. But in the case of a world switch during exception handling, the
 * callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with the ERRATA_SPECULATIVE_AT workaround */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))

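/*
 * Usage sketch (illustrative only), assuming 'ctx' points to a valid
 * cpu_context_t and SCR_NS_BIT comes from arch.h:
 *
 *	el3_state_t *state = get_el3state_ctx(ctx);
 *	uint64_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_SCR_EL3, scr_el3 | SCR_NS_BIT);
 */
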
/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library will be used to ensure that
 * SP_EL3 always points to an instance of this structure at exception
 * entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;

	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
	el2_sysregs_t el2_sysregs_ctx;
#else
	/*
	 * EL1 context should be included either for IMAGE_BL1, or for
	 * IMAGE_BL31 when CTX_INCLUDE_EL2_REGS=0. When SPMD_SPM_AT_SEL2=1,
	 * the SPMC at S-EL2 takes care of saving and restoring the EL1
	 * registers, so BL31 at EL3 can exclude the save and restore of
	 * the EL1 context registers.
	 */
	el1_sysregs_t el1_sysregs_ctx;
#endif

	/*
	 * TODO: the CACHE_WRITEBACK_GRANULE alignment is not necessary if
	 * this is contained in a per-cpu data structure (i.e. cpu_data_t).
	 */
} __aligned(CACHE_WRITEBACK_GRANULE) cpu_context_t;

/*
 * Per-world context.
 * It stores registers whose values are common to all CPUs within a world
 * (security state).
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

static inline uint8_t get_cpu_context_index(size_t security_state)
{
	if (security_state == SECURE) {
		return CPU_CONTEXT_SECURE;
#if ENABLE_RME
	} else if (security_state == REALM) {
		return CPU_CONTEXT_REALM;
#endif
	} else {
		assert(security_state == NON_SECURE);
		return CPU_CONTEXT_NS;
	}
}

extern per_world_context_t per_world_context[CPU_CONTEXT_NUM];

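/*
 * Illustrative sketch: the per-world entry for a given security state can
 * be looked up through get_cpu_context_index(), e.g.:
 *
 *	per_world_context_t *pwc =
 *		&per_world_context[get_cpu_context_index(security_state)];
 */
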
/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#else
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#endif

#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
#define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an aapcs_64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)

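/*
 * Example (illustrative): staging an AAPCS-style return into a saved
 * context so that x0-x2 hold the given values when the core next enters
 * the EL that owns 'ctx'. 'base_addr' and 'size' are placeholder values.
 *
 *	set_aapcs_args2(ctx, SMC_OK, base_addr, size);
 */
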
/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(simd_regs_t *regs);
void fpregs_context_restore(simd_regs_t *regs);
#endif

/*******************************************************************************
 * The next four inline functions are required for IMAGE_BL1, as well as for
 * IMAGE_BL31 for the combinations below.
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | CTX_INCLUDE_EL2_REGS |   Combination             |
 * ============================================================================
 * |          0            |          0           | Valid (EL1 ctx)           |
 * |_______________________|______________________|___________________________|
 * |                       |                      | Invalid (no errata or EL1 |
 * |          0            |          1           | ctx), hence not compiled  |
 * |                       |                      | in.                       |
 * |_______________________|______________________|___________________________|
 * |          1            |          0           | Valid (errata ctx)        |
 * |_______________________|______________________|___________________________|
 * |          1            |          1           | Valid (errata ctx)        |
 * |_______________________|______________________|___________________________|
 * ============================================================================
 ******************************************************************************/
#if (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS)))

static inline void write_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_SCTLR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline void write_ctx_tcr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_TCR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_SCTLR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_tcr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_TCR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

#endif /* (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS))) */

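/*
 * Usage sketch (illustrative): these helpers hide where SCTLR_EL1 lives
 * in the context, so callers can do a read-modify-write without knowing
 * the errata layout. SCTLR_C_BIT is assumed to come from arch.h.
 *
 *	u_register_t sctlr = read_ctx_sctlr_el1_reg_errata(ctx);
 *	write_ctx_sctlr_el1_reg_errata(ctx, sctlr | SCTLR_C_BIT);
 */
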
#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */