xref: /rk3399_ARM-atf/include/lib/el3_runtime/aarch64/context.h (revision e01ce1ea61368f169f8f827a05ad9d0c5bb06160)
/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#include <lib/el3_runtime/context_el2.h>
#else
/*
 * The EL1 context is required when:
 * IMAGE_BL1 || ((!CTX_INCLUDE_EL2_REGS) && IMAGE_BL31)
 */
#include <lib/el3_runtime/context_el1.h>
#endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */

#include <lib/el3_runtime/simd_ctx.h>
#include <lib/utils_def.h>
#include <platform_def.h> /* For CACHE_WRITEBACK_GRANULE */

#define	CPU_CONTEXT_SECURE	UL(0)
#define	CPU_CONTEXT_NS		UL(1)
#if ENABLE_RME
#define	CPU_CONTEXT_REALM	UL(2)
#define	CPU_CONTEXT_NUM		UL(3)
#else
#define	CPU_CONTEXT_NUM		UL(2)
#endif

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)

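/*
 * Illustrative use from assembler (a sketch only, not the actual save code;
 * it assumes SP points to the base of the current cpu_context, as the EL3
 * runtime arranges on exception entry):
 *
 *	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 */
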
/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are
 * only 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_RUNTIME_SP		U(0x8)
#define CTX_SPSR_EL3		U(0x10)
#define CTX_ELR_EL3		U(0x18)
#define CTX_PMCR_EL0		U(0x20)
#define CTX_IS_IN_EL3		U(0x28)
#define CTX_MDCR_EL3		U(0x30)
/* Constants required to support nested exceptions in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x38)
/*
 * General-purpose flag used to save various EL3 states.
 * FFH mode: used to identify whether a nested exception is being handled.
 * KFH mode: used as a counter value.
 */
#define CTX_NESTED_EA_FLAG	U(0x40)
#if FFH_SUPPORT
 #define CTX_SAVED_ESR_EL3	U(0x48)
 #define CTX_SAVED_SPSR_EL3	U(0x50)
 #define CTX_SAVED_GPREG_LR	U(0x58)
 #define CTX_DOUBLE_FAULT_ESR	U(0x60)
 #define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
 #define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

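/*
 * Illustrative use from assembler (a sketch; again assuming SP points to
 * the base of the current cpu_context):
 *
 *	mrs	x18, scr_el3
 *	str	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
 */
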
/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * This is essential because, with the EL1 and EL2 context registers
 * decoupled, both will not be present in a given build configuration.
 * Since the ERRATA_SPECULATIVE_AT workaround requires the SCTLR_EL1 and
 * TCR_EL1 registers independently of the above logic, explicit context
 * entries need to be reserved for these registers.
 *
 * NOTE: Based on this, we end up with the following configurations
 * depending on the presence of the errata workaround and the inclusion of
 * the EL1 or EL2 context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context| Memory allocation(Sctlr_el1,Tcr_el1)|
 * ============================================================================
 * |        0              |      0     |            None                     |
 * |        0              |      1     |    EL1 C-Context structure          |
 * |        1              |      0     |    Errata Context Offset Entries    |
 * |        1              |      1     |    Errata Context Offset Entries    |
 * ============================================================================
 *
 * In the above table, ERRATA_SPECULATIVE_AT=1 with EL1_Context=0 implies
 * that only the EL2 context is present, so memory for the SCTLR_EL1 and
 * TCR_EL1 registers is reserved explicitly here, under the
 * ERRATA_SPECULATIVE_AT build flag.
 *
 * When EL1_Context=1 and ERRATA_SPECULATIVE_AT=1, the SCTLR_EL1 and TCR_EL1
 * registers are modified by the errata workaround early in the code flow,
 * before the EL1 context save and restore operations, so context memory is
 * still reserved explicitly here under the errata logic. These registers
 * are not part of the EL1 context save and restore routines.
 *
 * Only when ERRATA_SPECULATIVE_AT=0 and EL1_Context=1 are SCTLR_EL1 and
 * TCR_EL1 part of the EL1 context structure (context_el1.h).
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
#define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END		U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_MPAM3_EL3			U(0x8)
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
#define CTX_IDREGS_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x78)
#else
#define CTX_PERWORLD_EL3STATE_END	U(0x10)
#endif /* ENABLE_FEAT_IDTE3 && IMAGE_BL31 */

#ifndef __ASSEMBLER__

#include <assert.h>
#include <stdint.h>

#include <common/ep_info.h>
#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t
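
/*
 * For example, DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) below expands to:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[32];
 *	} __aligned(16) gp_regs_t;
 *
 * since CTX_GPREG_ALL is (0x100 >> 3) = 32 double words.
 */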

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)

#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the remaining
 * callee-saved registers if they are used by the C runtime, and the
 * assembler does not touch them. However, in the case of a world switch
 * during exception handling, the callee-saved registers need to be saved
 * as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits.
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by the CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ERRATA_SPECULATIVE_AT */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets.
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
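
/*
 * Illustrative usage (a sketch; 'ctx' is assumed to be a valid
 * cpu_context_t pointer obtained from the context management library,
 * 'modified_bits' is hypothetical, and get_el3state_ctx() is the accessor
 * macro defined further below):
 *
 *	el3_state_t *state = get_el3state_ctx(ctx);
 *	uint64_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_SCR_EL3, scr_el3 | modified_bits);
 */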

#if ENABLE_FEAT_IDTE3
typedef struct perworld_idreg {
	u_register_t id_aa64pfr0_el1;
	u_register_t id_aa64pfr1_el1;
	u_register_t id_aa64pfr2_el1;
	u_register_t id_aa64smfr0_el1;
	u_register_t id_aa64isar0_el1;
	u_register_t id_aa64isar1_el1;
	u_register_t id_aa64isar2_el1;
	u_register_t id_aa64isar3_el1;
	u_register_t id_aa64mmfr0_el1;
	u_register_t id_aa64mmfr1_el1;
	u_register_t id_aa64mmfr2_el1;
	u_register_t id_aa64mmfr3_el1;
	u_register_t id_aa64mmfr4_el1;
} perworld_idregs_t;
#endif /* ENABLE_FEAT_IDTE3 */

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * to save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library is used to ensure that SP_EL3
 * always points to an instance of this structure at exception entry and
 * exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;

	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
	el2_sysregs_t el2_sysregs_ctx;
#else
	/*
	 * The EL1 context should be included only for IMAGE_BL1, or for
	 * IMAGE_BL31 when CTX_INCLUDE_EL2_REGS=0. When SPMD_SPM_AT_SEL2=1,
	 * the SPMC at S-EL2 takes care of saving and restoring the EL1
	 * registers, so BL31 at EL3 can omit the save and restore of the
	 * EL1 context registers.
	 */
	el1_sysregs_t el1_sysregs_ctx;
#endif

	/*
	 * TODO: the CACHE_WRITEBACK_GRANULE alignment is not necessary if
	 * this is contained in a per-cpu data structure (i.e. cpu_data_t).
	 */
} __aligned(CACHE_WRITEBACK_GRANULE) cpu_context_t;

/*
 * Per-world context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_mpam3_el3;
#if (ENABLE_FEAT_IDTE3 && IMAGE_BL31)
	perworld_idregs_t idregs;
#endif
} per_world_context_t;

static inline uint8_t get_cpu_context_index(size_t security_state)
{
	if (security_state == SECURE) {
		return CPU_CONTEXT_SECURE;
#if ENABLE_RME
	} else if (security_state == REALM) {
		return CPU_CONTEXT_REALM;
#endif
	} else {
		assert(security_state == NON_SECURE);
		return CPU_CONTEXT_NS;
	}
}

extern per_world_context_t per_world_context[CPU_CONTEXT_NUM];
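
/*
 * Illustrative usage (a sketch; 'security_state' is a hypothetical variable
 * holding one of the security state values accepted by
 * get_cpu_context_index() above):
 *
 *	per_world_context_t *pwc =
 *		&per_world_context[get_cpu_context_index(security_state)];
 *	uint64_t cptr_el3 = pwc->ctx_cptr_el3;
 */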

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)

#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
#define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#else
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#endif

#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
#define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler's and the compiler's views of the offsets of
 * the structure members are the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an AAPCS64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)				do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)			do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)		do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
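
/*
 * Illustrative usage (a sketch; 'ctx' and the values passed are
 * hypothetical). This stages x0-x3 in the saved GP register context so
 * that the lower EL sees them as AAPCS64 arguments/return values on the
 * next exception return:
 *
 *	set_aapcs_args3(ctx, ret0, arg1, arg2, arg3);
 */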

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(simd_regs_t *regs);
void fpregs_context_restore(simd_regs_t *regs);
#endif

/*******************************************************************************
 * The following four inline functions are required for IMAGE_BL1, as well
 * as for IMAGE_BL31 for the combinations below.
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT| CTX_INCLUDE_EL2_REGS |   Combination              |
 * ============================================================================
 * |       0              |       0              |   Valid (EL1 ctx)          |
 * |______________________|______________________|____________________________|
 * |                      |                      | Invalid (no errata/EL1 ctx)|
 * |       0              |       1              | Hence compiled out.        |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |       1              |       0              |   Valid (errata ctx)       |
 * |______________________|______________________|____________________________|
 * |                      |                      |                            |
 * |       1              |       1              |   Valid (errata ctx)       |
 * |______________________|______________________|____________________________|
 * ============================================================================
 ******************************************************************************/
#if (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS)))

static inline void write_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_SCTLR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline void write_ctx_tcr_el1_reg_errata(cpu_context_t *ctx, u_register_t val)
{
#if (ERRATA_SPECULATIVE_AT)
	write_ctx_reg(get_errata_speculative_at_ctx(ctx),
		      CTX_ERRATA_SPEC_AT_TCR_EL1, val);
#else
	write_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1, val);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_sctlr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_SCTLR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), sctlr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

static inline u_register_t read_ctx_tcr_el1_reg_errata(cpu_context_t *ctx)
{
#if (ERRATA_SPECULATIVE_AT)
	return read_ctx_reg(get_errata_speculative_at_ctx(ctx),
			    CTX_ERRATA_SPEC_AT_TCR_EL1);
#else
	return read_el1_ctx_common(get_el1_sysregs_ctx(ctx), tcr_el1);
#endif /* ERRATA_SPECULATIVE_AT */
}

#endif /* (IMAGE_BL1 || ((ERRATA_SPECULATIVE_AT) || (!CTX_INCLUDE_EL2_REGS))) */
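
/*
 * Illustrative usage (a sketch; 'ctx' is assumed to be the cpu_context_t
 * of the world being programmed, and the *_val variables are hypothetical).
 * The same calls work whether the values land in the dedicated errata
 * entries or in the EL1 context structure:
 *
 *	write_ctx_sctlr_el1_reg_errata(ctx, sctlr_el1_val);
 *	write_ctx_tcr_el1_reg_errata(ctx, tcr_el1_val);
 */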

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */