xref: /optee_os/core/arch/riscv/include/riscv.h (revision 5c2c0fb31efbeff60960336d7438e810b825d582)
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef RISCV_H
#define RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

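/*
 * __riscv_xlen is predefined by the compiler (32 on RV32, 64 on RV64),
 * so these evaluate to the native register width in bits and bytes.
 */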
#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
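/*
 * Each value is the x-register index behind the ABI name in the RISC-V
 * calling convention, e.g. REG_RA is x1 and REG_A0 is x10.
 */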

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)
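/*
 * The CSR address space encodes the privilege level in bits [9:8], so
 * shifting the mode into that position turns a mode-relative offset
 * into a real CSR number: CSR_XSTATUS below is 0x300 (mstatus) when
 * built for M-mode and 0x100 (sstatus) when built for S-mode.
 */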

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)
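/*
 * These resolve to the standard interrupt numbers for the current mode:
 * SSIP/STIP/SEIP (1/5/9) in S-mode, MSIP/MTIP/MEIP (3/7/11) in M-mode.
 */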

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)
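/*
 * SUM permits S-mode loads/stores to pages marked user-accessible and
 * MXR makes execute-only pages readable; both fields sit at the same
 * bit positions in mstatus and sstatus.
 */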

#ifndef __ASSEMBLER__

/* Read a CSR given its numeric address */
#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

/* Write a CSR given its numeric address */
#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

/* Atomically write a CSR and return its previous value */
#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

/* Atomically set bits in a CSR and return its previous value */
#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

/* Atomically clear bits in a CSR and return its previous value */
#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})
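/*
 * Illustrative use, e.g. briefly masking the mode's timer interrupt
 * and restoring the previous enable state afterwards:
 *
 *	unsigned long old = clear_csr(CSR_XIE, CSR_XIE_TIE);
 *	...
 *	write_csr(CSR_XIE, old);
 */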

/*
 * time, cycle and instret are the read-only unprivileged counters;
 * access from lower privilege levels is gated by mcounteren (and
 * scounteren for U-mode).
 */
#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)

/* Full memory fence: "fence" with no operands orders all accesses */
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

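/*
 * sfence.vma with x0/x0 orders all address-translation updates and in
 * effect flushes the whole TLB; passing a virtual address in rs1
 * narrows the flush to translations for that address.
 */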
static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

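/*
 * Set bits in sstatus via csrrs, discarding the previous value;
 * e.g. set_sstatus(CSR_XSTATUS_SUM) permits S-mode accesses to pages
 * marked user-accessible.
 */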
static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}

static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}
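
/*
 * Illustrative use: with the fixed CFG_RISCV_MTIME_RATE timebase, a
 * tick delta converts to microseconds (for short intervals) as
 *
 *	uint64_t t0 = barrier_read_counter_timer();
 *	...
 *	uint64_t us = (barrier_read_counter_timer() - t0) * 1000000 /
 *		      read_cntfrq();
 */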

#endif /*__ASSEMBLER__*/

#endif /*RISCV_H*/