/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#include <lib/el3_runtime/context_el2.h>
#else
/**
 * EL1 context is required when:
 * IMAGE_BL1 || ((!CTX_INCLUDE_EL2_REGS) && IMAGE_BL31)
 */
#include <lib/el3_runtime/context_el1.h>
#endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */

#include <lib/el3_runtime/simd_ctx.h>
#include <lib/utils_def.h>
#include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */

#define CPU_CONTEXT_SECURE UL(0)
#define CPU_CONTEXT_NS UL(1)
#if ENABLE_RME
#define CPU_CONTEXT_REALM UL(2)
#define CPU_CONTEXT_NUM UL(3)
#else
#define CPU_CONTEXT_NUM UL(2)
#endif

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET U(0x0)
#define CTX_GPREG_X0 U(0x0)
#define CTX_GPREG_X1 U(0x8)
#define CTX_GPREG_X2 U(0x10)
#define CTX_GPREG_X3 U(0x18)
#define CTX_GPREG_X4 U(0x20)
#define CTX_GPREG_X5 U(0x28)
#define CTX_GPREG_X6 U(0x30)
#define CTX_GPREG_X7 U(0x38)
#define CTX_GPREG_X8 U(0x40)
#define CTX_GPREG_X9 U(0x48)
#define CTX_GPREG_X10 U(0x50)
#define CTX_GPREG_X11 U(0x58)
#define CTX_GPREG_X12 U(0x60)
#define CTX_GPREG_X13 U(0x68)
#define CTX_GPREG_X14 U(0x70)
#define CTX_GPREG_X15 U(0x78)
#define CTX_GPREG_X16 U(0x80)
#define CTX_GPREG_X17 U(0x88)
#define CTX_GPREG_X18 U(0x90)
#define CTX_GPREG_X19 U(0x98)
#define CTX_GPREG_X20 U(0xa0)
#define CTX_GPREG_X21 U(0xa8)
#define CTX_GPREG_X22 U(0xb0)
#define CTX_GPREG_X23 U(0xb8)
#define CTX_GPREG_X24 U(0xc0)
#define CTX_GPREG_X25 U(0xc8)
#define CTX_GPREG_X26 U(0xd0)
#define CTX_GPREG_X27 U(0xd8)
#define CTX_GPREG_X28 U(0xe0)
#define CTX_GPREG_X29 U(0xe8)
#define CTX_GPREG_LR U(0xf0)
#define CTX_GPREG_SP_EL0 U(0xf8)
#define CTX_GPREGS_END U(0x100)

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET (CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3 U(0x0)
#define CTX_RUNTIME_SP U(0x8)
#define CTX_SPSR_EL3 U(0x10)
#define CTX_ELR_EL3 U(0x18)
#define CTX_PMCR_EL0 U(0x20)
#define CTX_IS_IN_EL3 U(0x28)
#define CTX_MDCR_EL3 U(0x30)
/* Constants required to support nested exception handling in EL3 */
#define CTX_SAVED_ELR_EL3 U(0x38)
/*
 * General-purpose flag used to save various EL3 states:
 * FFH mode: used to identify whether a nested exception is being handled
 * KFH mode: used as a counter value
 */
#define CTX_NESTED_EA_FLAG U(0x40)
#if FFH_SUPPORT
#define CTX_SAVED_ESR_EL3 U(0x48)
#define CTX_SAVED_SPSR_EL3 U(0x50)
#define CTX_SAVED_GPREG_LR U(0x58)
#define CTX_DOUBLE_FAULT_ESR U(0x60)
#define CTX_EL3STATE_END U(0x70) /* Align to the next 16 byte boundary */
#else
#define CTX_EL3STATE_END U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_CVE_2018_3639_DISABLE U(0)
#define CTX_CVE_2018_3639_END U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * These entries are essential because, with the EL1 and EL2 context registers
 * decoupled, both contexts will not be present in a given build configuration.
 * Since the ERRATA_SPECULATIVE_AT workaround requires the SCTLR_EL1 and
 * TCR_EL1 registers independently of that logic, explicit context entries
 * must be reserved for these registers.
 *
 * NOTE: Based on this we end up with the following configurations, depending
 * on the presence of the errata workaround and the inclusion of EL1 or EL2
 * context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context | Memory allocation                  |
 * |                       |             | (SCTLR_EL1, TCR_EL1)               |
 * ============================================================================
 * |           0           |      0      | None                               |
 * |           0           |      1      | EL1 context structure              |
 * |           1           |      0      | Errata context offset entries      |
 * |           1           |      1      | Errata context offset entries      |
 * ============================================================================
 *
 * In the above table, when ERRATA_SPECULATIVE_AT=1 and EL1 context=0, only
 * the EL2 context is present and memory for the SCTLR_EL1 and TCR_EL1
 * registers is reserved explicitly here, under the ERRATA_SPECULATIVE_AT
 * build flag.
 *
 * When EL1 context=1 and ERRATA_SPECULATIVE_AT=1, the SCTLR_EL1 and TCR_EL1
 * registers are modified by the errata workaround early in the code flow,
 * before the EL1 context save and restore operations. Context memory is
 * therefore still reserved here, under the errata logic, and these registers
 * are not part of the EL1 context save and restore routines.
 *
 * Only when ERRATA_SPECULATIVE_AT=0 and EL1 context=1 are SCTLR_EL1 and
 * TCR_EL1 part of the EL1 context structure (context_el1.h).
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET (CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1 U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1 U(0x8)
#define CTX_ERRATA_SPEC_AT_END U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET (CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO U(0x0)
#define CTX_PACIAKEY_HI U(0x8)
#define CTX_PACIBKEY_LO U(0x10)
#define CTX_PACIBKEY_HI U(0x18)
#define CTX_PACDAKEY_LO U(0x20)
#define CTX_PACDAKEY_HI U(0x28)
#define CTX_PACDBKEY_LO U(0x30)
#define CTX_PACDBKEY_HI U(0x38)
#define CTX_PACGAKEY_LO U(0x40)
#define CTX_PACGAKEY_HI U(0x48)
#define CTX_PAUTH_REGS_END U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers related to Morello.
 ******************************************************************************/
#define CTX_DDC_OFFSET (CTX_PAUTH_REGS_OFFSET + CTX_PAUTH_REGS_END)
#if ENABLE_FEAT_MORELLO
#define CTX_DDC_EL0 U(0x0)
#define CTX_DDC_END U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_DDC_END U(0)
#endif /* ENABLE_FEAT_MORELLO */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3 U(0x0)
#define CTX_MPAM3_EL3 U(0x8)
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
#define CTX_IDREGS_EL3 U(0x10)
#define CTX_PERWORLD_EL3STATE_END U(0x78)
#else
#define CTX_PERWORLD_EL3STATE_END U(0x10)
#endif /* ENABLE_FEAT_IDTE3 && IMAGE_BL31 */

#ifndef __ASSEMBLER__

#include <assert.h>
#include <stdint.h>

#include <common/ep_info.h>
#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT U(3)
#define DEFINE_REG_STRUCT(name, num_regs) \
	typedef struct name { \
		uint64_t ctx_regs[num_regs]; \
	} __aligned(16) name##_t

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL (CTX_GPREGS_END >> DWORD_SHIFT)

#define CTX_EL3STATE_ALL (CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL (CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL (CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PAUTH_REGS_ALL (CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

#define CTX_DDC_ALL (CTX_DDC_END >> DWORD_SHIFT)

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the callee-saved
 * registers if the C runtime uses them, and the assembler does not touch the
 * rest. However, on a world switch during exception handling, the
 * callee-saved registers must be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
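
/*
 * Illustrative expansion (a sketch, not additional API): with CTX_GPREG_ALL
 * equal to CTX_GPREGS_END >> DWORD_SHIFT = 0x100 >> 3 = 32, the invocation
 * above expands to:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[32];
 *	} __aligned(16) gp_regs_t;
 *
 * i.e. a 16-byte aligned array of 64-bit slots whose layout matches the
 * assembler-visible CTX_GPREG_* offsets above.
 */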

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ERRATA_SPECULATIVE_AT */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/* Registers associated with Morello */
typedef void *__capability ddc_cap_t;

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset) ((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val) (((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
	= (uint64_t) (val))
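
/*
 * Illustrative usage (a sketch; 'ctx' is assumed to be a handle to a
 * cpu_context_t, e.g. one obtained from the context management library).
 * The accessor macros defined further below locate the sub-structure, and
 * the CTX_* constants select the register slot within it:
 *
 *	el3_state_t *state = get_el3state_ctx(ctx);
 *	u_register_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_SCR_EL3, scr_el3 | SCR_NS_BIT);
 *
 * SCR_NS_BIT is used here purely for illustration; it comes from arch.h.
 */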

#if ENABLE_FEAT_IDTE3
typedef struct perworld_idreg {
	u_register_t id_aa64pfr0_el1;
	u_register_t id_aa64pfr1_el1;
	u_register_t id_aa64pfr2_el1;
	u_register_t id_aa64smfr0_el1;
	u_register_t id_aa64isar0_el1;
	u_register_t id_aa64isar1_el1;
	u_register_t id_aa64isar2_el1;
	u_register_t id_aa64isar3_el1;
	u_register_t id_aa64mmfr0_el1;
	u_register_t id_aa64mmfr1_el1;
	u_register_t id_aa64mmfr2_el1;
	u_register_t id_aa64mmfr3_el1;
	u_register_t id_aa64mmfr4_el1;
} perworld_idregs_t;
#endif

typedef struct world_amu_regs {
	uint64_t amevcntr02_el0;
	uint64_t amevcntr03_el0;
} world_amu_regs_t;

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library is used to ensure that
 * SP_EL3 always points to an instance of this structure at exception
 * entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;

	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

	ddc_cap_t ddc_el0;

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
	el2_sysregs_t el2_sysregs_ctx;
#else
	/* EL1 context should be included only for IMAGE_BL1, or for
	 * IMAGE_BL31 when CTX_INCLUDE_EL2_REGS=0: when SPMD_SPM_AT_SEL2=1,
	 * the SPMC at S-EL2 takes care of saving and restoring the EL1
	 * registers, so BL31 at EL3 can exclude the save and restore of
	 * EL1 context registers.
	 */
	el1_sysregs_t el1_sysregs_ctx;
#endif

	/* TODO: the CACHE_WRITEBACK_GRANULE alignment is not necessary if this is
	 * contained in a per-cpu data structure (i.e. cpu_data_t).
	 */
} __aligned(CACHE_WRITEBACK_GRANULE) cpu_context_t;

/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_mpam3_el3;
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
	perworld_idregs_t idregs;
#endif
} per_world_context_t;

static inline uint8_t get_cpu_context_index(size_t security_state)
{
	if (security_state == SECURE) {
		return CPU_CONTEXT_SECURE;
#if ENABLE_RME
	} else if (security_state == REALM) {
		return CPU_CONTEXT_REALM;
#endif
	} else {
		assert(security_state == NON_SECURE);
		return CPU_CONTEXT_NS;
	}
}
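
/*
 * Illustrative usage (a sketch): the index returned by
 * get_cpu_context_index() is intended for per-world state such as the
 * per_world_context array declared below, e.g.:
 *
 *	per_world_context_t *pwc =
 *		&per_world_context[get_cpu_context_index(NON_SECURE)];
 *	uint64_t cptr = pwc->ctx_cptr_el3;
 */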

extern per_world_context_t per_world_context[CPU_CONTEXT_NUM];

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h) (&((cpu_context_t *) h)->el3state_ctx)

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#define get_el2_sysregs_ctx(h) (&((cpu_context_t *) h)->el2_sysregs_ctx)
#else
#define get_el1_sysregs_ctx(h) (&((cpu_context_t *) h)->el1_sysregs_ctx)
#endif

#define get_gpregs_ctx(h) (&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h) (&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h) (&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
#define get_pauth_ctx(h) (&((cpu_context_t *) h)->pauth_ctx)
#else
#define get_pauth_ctx(h) NULL
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(simd_regs_t *regs);
void fpregs_context_restore(simd_regs_t *regs);
#endif

/*******************************************************************************
 * The next four inline functions are required for IMAGE_BL1, as well as for
 * IMAGE_BL31 for the combinations below.
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT| CTX_INCLUDE_EL2_REGS | Combination                |
 * ============================================================================
 * |          0           |          0           | Valid (EL1 ctx)            |
 * |______________________|______________________|____________________________|
 * |                      |                      | Invalid (no errata/EL1 ctx)|
 * |          0           |          1           | Hence excluded.            |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |          1           |          0           | Valid (errata ctx)         |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |          1           |          1           | Valid (errata ctx)         |
 * |______________________|______________________|____________________________|
 * ============================================================================
 ******************************************************************************/
#if (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS)))

static inline void write_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_SCTLR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline void write_ctx_tcr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_TCR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_SCTLR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_tcr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_TCR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}
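
/*
 * Illustrative usage (a sketch, assuming the usual read_sctlr_el1()/
 * write_sctlr_el1() system register helpers from arch_helpers.h): these
 * accessors hide whether SCTLR_EL1 lives in the errata context entries or
 * in the EL1 context structure, e.g. on save and restore:
 *
 *	write_ctx_sctlr_el1_reg_errata(ctx, read_sctlr_el1());
 *	...
 *	write_sctlr_el1(read_ctx_sctlr_el1_reg_errata(ctx));
 */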

#endif /* (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS))) */

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */