xref: /optee_os/core/arch/riscv/include/riscv.h (revision 750c544ca5b1d498d11b2a16fdfe30411f6ea6ce)
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef RISCV_H
#define RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

#define REGOFF(x)		((x) * RISCV_XLEN_BYTES)

#if __riscv_xlen == 32
#define STR       sw
#define LDR       lw
#else
#define STR       sd
#define LDR       ld
#endif

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
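
/*
 * Illustrative use from assembly (an added example, not part of the
 * original header): LDR/STR and REGOFF/REG_* combine to access one
 * XLEN-sized slot of a register save area, e.g. with sp pointing at
 * the area:
 *
 *	LDR	a0, REGOFF(REG_A0)(sp)
 *	STR	a1, REGOFF(REG_A1)(sp)
 */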

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
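
/*
 * CSR addresses encode the required privilege in bits [9:8], so OR-ing
 * CSR_MODE_BITS (the privilege level shifted left by 8) onto the common
 * low offsets selects the register matching the build: e.g. CSR_XSTATUS
 * is 0x100 (sstatus) with CFG_RISCV_S_MODE and 0x300 (mstatus) with
 * CFG_RISCV_M_MODE.
 */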

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)
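
/*
 * With the mode offset applied, these are the standard interrupt causes:
 * 1/5/9 (SSI/STI/SEI) in S-mode builds, 3/7/11 (MSI/MTI/MEI) in M-mode
 * builds. The same numbers are the enable bit positions in xie.
 */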

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)
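
/* SUM lets S-mode access U-mode pages; MXR makes executable pages readable */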

#ifndef __ASSEMBLER__

#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})
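
/*
 * Illustrative usage (an added example, not part of the original header):
 * set_csr()/clear_csr() return the prior CSR value, so the old state can
 * be restored after atomically enabling the mode-specific timer interrupt:
 *
 *	unsigned long old = set_csr(CSR_XIE, CSR_XIE_TIE);
 *	...
 *	write_csr(CSR_XIE, old);
 */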

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)
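
/* time, cycle and instret are the unprivileged read-only counters (Zicntr) */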

static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
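
/*
 * sfence.vma with rs1 = rs2 = zero orders preceding page-table updates
 * against subsequent translations and flushes all TLB entries; the
 * single-operand form above only flushes entries matching that virtual
 * address.
 */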

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
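
/*
 * On RV64, satp packs MODE[63:60], ASID[59:44] and PPN[43:0]. A minimal
 * sketch (an added example; root_pa is a hypothetical physical address of
 * the root page table) switching to Sv39, whose MODE value is 8:
 *
 *	write_satp(SHIFT_U64(8, 60) | (root_pa >> 12));
 *	flush_tlb();
 */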

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}
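
/*
 * xtvec holds the trap-vector base address in bits [XLEN-1:2] and the
 * vectoring mode in bits [1:0] (0 = direct, 1 = vectored), so the handler
 * must be at least 4-byte aligned.
 */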

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}
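
/*
 * Illustrative usage (an added example, not part of the original header),
 * e.g. in an S-mode build, before accessing user memory:
 *
 *	set_sstatus(CSR_XSTATUS_SUM);
 */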

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}
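
/*
 * Illustrative M-mode setup (an added example, not part of the original
 * header): hand the supervisor software/timer/external interrupts down to
 * S-mode:
 *
 *	write_mideleg(read_mideleg() | BIT64(1) | BIT64(5) | BIT64(9));
 */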

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}
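
/*
 * The mie/sie helpers above name their register explicitly, unlike the
 * CSR_XIE accessors, which follow the privilege mode the core was built
 * for.
 */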

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}
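
/*
 * A common convention (not mandated by this header): xscratch holds the
 * per-hart context pointer while running at a lower privilege level and
 * zero while inside the trap handler, letting the entry code swap it
 * with sp via csrrw.
 */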

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

static inline __noprof void uret(void)
{
	asm volatile("uret");
}
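
/*
 * Each xRET sets pc to xepc and restores the privilege level and the
 * interrupt enable from the xPP/xPIE fields of xstatus. Note that uret
 * comes from the draft N extension (user-level interrupts) and is not
 * part of the ratified privileged spec.
 */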

#endif /*__ASSEMBLER__*/

#endif /*RISCV_H*/