/*
 * Copyright (c) 2014-2026, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CPU_DATA_H
#define CPU_DATA_H

#include <platform_def.h>	/* CACHE_WRITEBACK_GRANULE required */

#include <bl31/ehf.h>
#include <context.h>
#include <lib/utils_def.h>
#include <lib/cpus/cpu_ops.h>

#if ENABLE_RUNTIME_INSTRUMENTATION
/* Temporary space to store PMF timestamps from assembly code */
#define CPU_DATA_PMF_TS_COUNT		1
#define CPU_DATA_PMF_TS0_IDX		0
#endif /* ENABLE_RUNTIME_INSTRUMENTATION */

#ifdef __aarch64__
#define CPU_DATA_CPU_CONTEXT_SIZE	(CPU_CONTEXT_NUM * CPU_WORD_SIZE)
#else /* __aarch64__ */
#define CPU_DATA_CPU_CONTEXT_SIZE	0
#endif /* __aarch64__ */
#define CPU_DATA_WARMBOOT_EP_INFO_SIZE	CPU_WORD_SIZE
#define CPU_DATA_WARMBOOT_EP_INFO_ALIGN	CPU_WORD_SIZE
#define CPU_DATA_CPU_OPS_PTR_SIZE	CPU_WORD_SIZE
#define CPU_DATA_CPU_OPS_PTR_ALIGN	CPU_WORD_SIZE
#define CPU_DATA_PSCI_SVC_CPU_DATA_SIZE 12
#define CPU_DATA_PSCI_SVC_CPU_DATA_ALIGN CPU_WORD_SIZE
#if ENABLE_PAUTH
/* uint64_t apiakey[2] */
#define CPU_DATA_APIAKEY_SIZE		16
/* uint64_t alignment */
#define CPU_DATA_APIAKEY_ALIGN		8
#else /* ENABLE_PAUTH */
#define CPU_DATA_APIAKEY_SIZE		0
#define CPU_DATA_APIAKEY_ALIGN		1
#endif /* ENABLE_PAUTH */
#if ENABLE_RUNTIME_INSTRUMENTATION
#define CPU_DATA_CPU_DATA_PMF_TS_SIZE	(CPU_DATA_PMF_TS_COUNT * 8)
/* uint64_t alignment */
#define CPU_DATA_CPU_DATA_PMF_TS_ALIGN	8
#else /* ENABLE_RUNTIME_INSTRUMENTATION */
#define CPU_DATA_CPU_DATA_PMF_TS_SIZE	0
#define CPU_DATA_CPU_DATA_PMF_TS_ALIGN	1
#endif /* ENABLE_RUNTIME_INSTRUMENTATION */
#ifdef PLAT_PCPU_DATA_SIZE
#define CPU_DATA_PLATFORM_CPU_DATA_SIZE	PLAT_PCPU_DATA_SIZE
#define CPU_DATA_PLATFORM_CPU_DATA_ALIGN 1
#else /* PLAT_PCPU_DATA_SIZE */
#define CPU_DATA_PLATFORM_CPU_DATA_SIZE	0
#define CPU_DATA_PLATFORM_CPU_DATA_ALIGN 1
#endif /* PLAT_PCPU_DATA_SIZE */
#if EL3_EXCEPTION_HANDLING
/* buffer space for EHF data is sizeof(pe_exc_data_t) */
#define CPU_DATA_EHF_DATA_SIZE		8
/* hardcoded to 64 bit alignment */
#define CPU_DATA_EHF_DATA_ALIGN		8
#else /* EL3_EXCEPTION_HANDLING */
#define CPU_DATA_EHF_DATA_SIZE		0
#define CPU_DATA_EHF_DATA_ALIGN		1
#endif /* EL3_EXCEPTION_HANDLING */
#if ENABLE_FEAT_IDTE3 && defined(__aarch64__)
#define CPU_DATA_PCPU_IDREGS_SIZE	(16 * CPU_CONTEXT_NUM)
#define CPU_DATA_PCPU_IDREGS_ALIGN	8
#else /* ENABLE_FEAT_IDTE3 && defined(__aarch64__) */
#define CPU_DATA_PCPU_IDREGS_SIZE	0
#define CPU_DATA_PCPU_IDREGS_ALIGN	1
#endif /* ENABLE_FEAT_IDTE3 && defined(__aarch64__) */
/* cpu_data size is the data size rounded up to the platform cache line size */
#define CPU_DATA_SIZE_ALIGN		CACHE_WRITEBACK_GRANULE

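/*
 * Byte offset of each member within cpu_data_t. Each offset is the previous
 * member's offset plus its size, rounded up to the member's alignment with
 * ROUND_UP_2EVAL. These values are visible to assembly (the C struct below is
 * hidden behind the __ASSEMBLER__ guard) and are checked against the actual C
 * layout by the CASSERTs further down.
 */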
#define CPU_DATA_CPU_CONTEXT		0
#define CPU_DATA_WARMBOOT_EP_INFO	ROUND_UP_2EVAL((CPU_DATA_CPU_CONTEXT + CPU_DATA_CPU_CONTEXT_SIZE), CPU_DATA_WARMBOOT_EP_INFO_ALIGN)
#define CPU_DATA_CPU_OPS_PTR		ROUND_UP_2EVAL((CPU_DATA_WARMBOOT_EP_INFO + CPU_DATA_WARMBOOT_EP_INFO_SIZE), CPU_DATA_CPU_OPS_PTR_ALIGN)
#define CPU_DATA_PSCI_SVC_CPU_DATA	ROUND_UP_2EVAL((CPU_DATA_CPU_OPS_PTR + CPU_DATA_CPU_OPS_PTR_SIZE), CPU_DATA_PSCI_SVC_CPU_DATA_ALIGN)
#define CPU_DATA_APIAKEY		ROUND_UP_2EVAL((CPU_DATA_PSCI_SVC_CPU_DATA + CPU_DATA_PSCI_SVC_CPU_DATA_SIZE), CPU_DATA_APIAKEY_ALIGN)
#define CPU_DATA_CPU_DATA_PMF_TS	ROUND_UP_2EVAL((CPU_DATA_APIAKEY + CPU_DATA_APIAKEY_SIZE), CPU_DATA_CPU_DATA_PMF_TS_ALIGN)
#define CPU_DATA_PLATFORM_CPU_DATA	ROUND_UP_2EVAL((CPU_DATA_CPU_DATA_PMF_TS + CPU_DATA_CPU_DATA_PMF_TS_SIZE), CPU_DATA_PLATFORM_CPU_DATA_ALIGN)
#define CPU_DATA_EHF_DATA		ROUND_UP_2EVAL((CPU_DATA_PLATFORM_CPU_DATA + CPU_DATA_PLATFORM_CPU_DATA_SIZE), CPU_DATA_EHF_DATA_ALIGN)
#define CPU_DATA_PCPU_IDREGS		ROUND_UP_2EVAL((CPU_DATA_EHF_DATA + CPU_DATA_EHF_DATA_SIZE), CPU_DATA_PCPU_IDREGS_ALIGN)
#define CPU_DATA_SIZE			ROUND_UP_2EVAL((CPU_DATA_PCPU_IDREGS + CPU_DATA_PCPU_IDREGS_SIZE), CPU_DATA_SIZE_ALIGN)

#ifndef __ASSEMBLER__

#include <assert.h>
#include <stdint.h>

#include <arch_helpers.h>
#include <lib/cassert.h>
#include <lib/per_cpu/per_cpu.h>
#include <lib/psci/psci.h>

#include <platform_def.h>

/*******************************************************************************
 * Function & variable prototypes
 ******************************************************************************/
#if ENABLE_FEAT_IDTE3 && defined(__aarch64__)
typedef struct percpu_idreg {
	u_register_t id_aa64dfr0_el1;
	u_register_t id_aa64dfr1_el1;
} percpu_idregs_t;
#endif /* ENABLE_FEAT_IDTE3 && defined(__aarch64__) */

/*******************************************************************************
 * Cache of frequently used per-cpu data:
 *   Pointers to non-secure, realm, and secure security state contexts
 *   Address of the crash stack
 * It is aligned to the cache line boundary to allow efficient concurrent
 * manipulation of these pointers on different cpus
 *
 * The data structure and the _cpu_data accessors should not be used directly
 * by components that have per-cpu members. The member access macros should be
 * used for this.
 ******************************************************************************/
typedef struct cpu_data {
#ifdef __aarch64__
	void *cpu_context[CPU_CONTEXT_NUM];
#endif /* __aarch64__ */
	entry_point_info_t *warmboot_ep_info;
	struct cpu_ops *cpu_ops_ptr;
	struct psci_cpu_data psci_svc_cpu_data;
#if ENABLE_PAUTH
	uint64_t apiakey[2];
#endif
#if ENABLE_RUNTIME_INSTRUMENTATION
	uint64_t cpu_data_pmf_ts[CPU_DATA_PMF_TS_COUNT];
#endif
#if PLAT_PCPU_DATA_SIZE
	uint8_t platform_cpu_data[PLAT_PCPU_DATA_SIZE];
#endif
#if EL3_EXCEPTION_HANDLING
	pe_exc_data_t ehf_data;
#endif
#if (ENABLE_FEAT_IDTE3 && defined(__aarch64__))
	percpu_idregs_t idregs[CPU_CONTEXT_NUM];
#endif
} __aligned(CACHE_WRITEBACK_GRANULE) cpu_data_t;

PER_CPU_DECLARE(cpu_data_t, percpu_data);

#define CPU_DATA_ASSERT_OFFSET(left, right) \
	CASSERT(CPU_DATA_ ## left == __builtin_offsetof \
		(cpu_data_t, right), \
		assert_cpu_data_ ## right ## _mismatch)

/* verify assembler offsets match data structures */
CPU_DATA_ASSERT_OFFSET(WARMBOOT_EP_INFO, warmboot_ep_info);
CPU_DATA_ASSERT_OFFSET(CPU_OPS_PTR, cpu_ops_ptr);
CPU_DATA_ASSERT_OFFSET(PSCI_SVC_CPU_DATA, psci_svc_cpu_data);
#if ENABLE_PAUTH
CPU_DATA_ASSERT_OFFSET(APIAKEY, apiakey);
#endif
#if ENABLE_RUNTIME_INSTRUMENTATION
CPU_DATA_ASSERT_OFFSET(CPU_DATA_PMF_TS, cpu_data_pmf_ts);
#endif
#if PLAT_PCPU_DATA_SIZE
CPU_DATA_ASSERT_OFFSET(PLATFORM_CPU_DATA, platform_cpu_data);
#endif
#if EL3_EXCEPTION_HANDLING
CPU_DATA_ASSERT_OFFSET(EHF_DATA, ehf_data);
#endif
#if (ENABLE_FEAT_IDTE3 && defined(__aarch64__))
CPU_DATA_ASSERT_OFFSET(PCPU_IDREGS, idregs);
#endif

CASSERT(CPU_DATA_SIZE == sizeof(cpu_data_t),
		assert_cpu_data_size_mismatch);

#ifndef __aarch64__
cpu_data_t *_cpu_data(void);
#endif

/**************************************************************************
 * APIs for initialising and accessing per-cpu data
 *************************************************************************/

void cpu_data_init_cpu_ops(void);

#define get_cpu_data(_m)			PER_CPU_CUR(percpu_data)->_m
#define set_cpu_data(_m, _v)			PER_CPU_CUR(percpu_data)->_m = (_v)
#define get_cpu_data_by_index(_ix, _m)		PER_CPU_BY_INDEX(percpu_data, _ix)->_m
#define set_cpu_data_by_index(_ix, _m, _v)	PER_CPU_BY_INDEX(percpu_data, _ix)->_m = (_v)
/* ((cpu_data_t *)0)->_m is a dummy to get the sizeof the struct member _m */
#define flush_cpu_data(_m)	   flush_dcache_range((uintptr_t)	  \
						&(PER_CPU_CUR(percpu_data)->_m), \
						sizeof(((cpu_data_t *)0)->_m))
#define inv_cpu_data(_m)	   inv_dcache_range((uintptr_t)		  \
						&(PER_CPU_CUR(percpu_data)->_m), \
						sizeof(((cpu_data_t *)0)->_m))
#define flush_cpu_data_by_index(_ix, _m)	\
				   flush_dcache_range((uintptr_t)	  \
					 &(PER_CPU_BY_INDEX(percpu_data, _ix)->_m),  \
						sizeof(((cpu_data_t *)0)->_m))
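
/*
 * Illustrative usage only (names such as "ops" and "core_pos" are examples,
 * not part of this interface): a component with a per-cpu member typically
 * accesses it through the macros above rather than cpu_data_t directly, e.g.
 *
 *   set_cpu_data(cpu_ops_ptr, ops);
 *   flush_cpu_data(cpu_ops_ptr);
 *   ops = get_cpu_data_by_index(core_pos, cpu_ops_ptr);
 *
 * The flush/inv variants are typically used when a member must be observed
 * by a cpu whose data cache may be disabled, for example across power down.
 */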

#endif /* __ASSEMBLER__ */
#endif /* CPU_DATA_H */