/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef RISCV_H
#define RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

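/* __riscv_xlen is predefined by the compiler: 32 on RV32, 64 on RV64 */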
#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

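/* Byte offset of GPR slot x in an XLEN-wide register save area */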
#define REGOFF(x)			((x) * RISCV_XLEN_BYTES)

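/* XLEN-sized store/load mnemonics for assembly code */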
#if __riscv_xlen == 32
#define STR       sw
#define LDR       lw
#else
#define STR       sd
#define LDR       ld
#endif

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31

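/*
 * Select the privilege level OP-TEE runs at: the CSR block, interrupt
 * numbers and trap-return instruction all differ between M-mode and
 * S-mode, so they are derived from CFG_RISCV_M_MODE/CFG_RISCV_S_MODE.
 */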
#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#else
#error Neither CFG_RISCV_M_MODE nor CFG_RISCV_S_MODE is defined
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

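/*
 * Mode-generic CSR numbers: M-mode CSRs live at 0x3xx and their S-mode
 * counterparts at 0x1xx, so shifting the privilege level (PRV_M = 3,
 * PRV_S = 1) into bits [9:8] and OR-ing in the common low offset yields
 * e.g. CSR_XSTATUS == mstatus (0x300) or sstatus (0x100).
 */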
#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)

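/*
 * Mode-generic interrupt numbers: software/timer/external interrupts
 * are causes 1/5/9 in S-mode and 3/7/11 in M-mode, i.e. a fixed base
 * plus the privilege level.
 */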
#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

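/* xie bits: software, timer and external interrupt enable */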
#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)

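/*
 * xstatus bits: IE/PIE are the current/previous interrupt-enable bits
 * for the running mode, SPP is the previous privilege on traps into
 * S-mode, SUM permits S-mode access to user pages and MXR makes
 * execute-only pages readable.
 */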
#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)

#ifndef __ASSEMBLER__

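/* Full memory barrier: order all prior memory accesses before later ones */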
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

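/* Read the tp (thread pointer) register */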
static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

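/* Wait for interrupt: hint the hart to stall until an interrupt is pending */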
static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

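/* Flush all cached address translations on the local hart */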
static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero" : : : "memory");
}

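/* Flush cached translations for a single virtual address on the local hart */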
static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}

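/*
 * Note: after write_satp() switches page tables, an sfence.vma (see
 * flush_tlb() above) is needed before the new translations are
 * guaranteed to be used by this hart.
 */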
/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

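/*
 * The two low bits of xtvec select the trap mode: 0 = direct (all traps
 * vector to BASE), 1 = vectored (interrupts vector to BASE + 4 * cause).
 */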
/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

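/*
 * Atomically set bits in sstatus (csrrs; the old value is read and
 * discarded). Illustrative use in an S-mode build:
 * set_sstatus(CSR_XSTATUS_SUM) lets S-mode code access user pages.
 */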
static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

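/*
 * Illustrative: in an S-mode build, write_sie(read_sie() | CSR_XIE_TIE)
 * enables the supervisor timer interrupt (STIE, bit 5).
 */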
/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

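/*
 * xscratch conventionally holds a pointer to hart-local context and is
 * swapped with a GPR on trap entry.
 */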
/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

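/*
 * Note: uret comes from the draft user-level interrupts (N) extension,
 * which was never ratified; it traps as an illegal instruction on harts
 * that do not implement it.
 */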
static inline __noprof void uret(void)
{
	asm volatile("uret");
}

#endif /*__ASSEMBLER__*/

#endif /*RISCV_H*/