/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef RISCV_H
#define RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

#define REGOFF(x)			((x) * RISCV_XLEN_BYTES)

#if __riscv_xlen == 32
#define STR       sw
#define LDR       lw
#else
#define STR       sd
#define LDR       ld
#endif
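
/*
 * STR/LDR resolve to the register-width store/load mnemonics so that
 * assembly code can save and restore full registers without caring
 * about XLEN. Illustrative use in a .S file:
 *
 *	STR	a0, REGOFF(REG_A0)(sp)	// store a0 into a register frame
 *	LDR	a0, REGOFF(REG_A0)(sp)	// load it back
 */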

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
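
/*
 * The values are the architectural x-register indices from the RISC-V
 * psABI (ra = x1, sp = x2, ..., t6 = x31); presumably only the
 * registers the code references are bound, hence the gaps. Combined
 * with REGOFF() they index a saved-register frame, e.g. REGOFF(REG_A0)
 * is the byte offset of a0 in such a frame.
 */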

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
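
/*
 * The privilege mode selects bits [9:8] of the CSR address, so each
 * CSR_X* macro resolves to the M-mode or S-mode instance of the same
 * register. For example, with PRV_M = 3 and PRV_S = 1:
 *
 *	CSR_XSTATUS = 0x300 (mstatus) when CFG_RISCV_M_MODE
 *	CSR_XSTATUS = 0x100 (sstatus) when CFG_RISCV_S_MODE
 */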

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)
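
/*
 * These match the standard interrupt numbering: software/timer/external
 * interrupts are 1/5/9 for S-mode and 3/7/11 for M-mode, so the same
 * macros yield the right xie/xip bit positions for either privilege
 * level.
 */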

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)
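
/*
 * Likewise for the status register: IE/PIE select MIE/MPIE (bits 3/7)
 * in M-mode or SIE/SPIE (bits 1/5) in S-mode. SPP, SUM and MXR sit at
 * fixed positions (8, 18 and 19) regardless of mode.
 */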

#ifndef __ASSEMBLER__

#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})
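
/*
 * The CSR number must be a compile-time constant because of the "i"
 * constraint. Illustrative use:
 *
 *	unsigned long cause = read_csr(CSR_XCAUSE);
 *	write_csr(CSR_XSCRATCH, 0);
 */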

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})
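
/*
 * set_csr()/clear_csr() atomically set/clear the given bits via
 * csrrs/csrrc and return the previous CSR value, e.g. (illustrative):
 *
 *	unsigned long old = set_csr(CSR_XIE, CSR_XIE_TIE);
 *	clear_csr(CSR_XIE, CSR_XIE_TIE);
 */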

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)
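
/*
 * These read the unprivileged time, cycle and instret counter CSRs
 * (CSR_TIME and friends come from <encoding.h>).
 */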

static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}
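
/*
 * mb() is a full memory barrier: "fence" with no arguments assembles
 * to fence iorw, iorw, and the "memory" clobber also stops the
 * compiler from reordering accesses around it.
 */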

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp;

	asm volatile("mv %0, tp" : "=r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, fp" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}
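
/*
 * Note that "auipc %0, 0" yields the address of the auipc instruction
 * itself, i.e. a PC value inside read_pc() (or inside the caller once
 * inlined), not the caller's return address.
 */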

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
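
/*
 * "sfence.vma zero, zero" orders against all address translations
 * (a full local TLB flush); "sfence.vma rs1" restricts the fence to
 * the virtual address in rs1. The "memory" clobbers keep page-table
 * updates from being reordered past the fence by the compiler.
 */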

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
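
/*
 * satp packs MODE (bits [63:60] on RV64), ASID and the root page-table
 * PPN. Minimal sketch for Sv39 (root_pt_pa is a hypothetical physical
 * address of the root page table, ASID left at 0):
 *
 *	write_satp(SHIFT_U64(8, 60) | (root_pt_pa >> 12));
 *	flush_tlb();
 */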

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}
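
/*
 * set_sstatus() sets the given sstatus bits via csrrs; the previous
 * value is read into x and deliberately discarded. Illustrative use:
 * set_sstatus(CSR_XSTATUS_SUM);
 */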

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}
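
/*
 * medeleg/mideleg delegate synchronous exceptions/interrupts from
 * M-mode to S-mode; each set bit hands the corresponding trap cause
 * to the S-mode handler.
 */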

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}
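
/*
 * uret comes from the unratified N (user-level interrupts) extension;
 * some assemblers may not accept it.
 */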
static inline __noprof void uret(void)
{
	asm volatile("uret");
}

#endif /*__ASSEMBLER__*/

#endif /*RISCV_H*/