/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef __RISCV_H
#define __RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdbool.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
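
/*
 * Illustrative note: these indices make it easy to compute XLEN-sized
 * slot offsets in a saved-register area, e.g. the slot holding ra would
 * live at offset REG_RA * RISCV_XLEN_BYTES.
 */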

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
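
/*
 * Bits [9:8] of a CSR address encode the lowest privilege level that may
 * access it, so CSR_MODE_BITS selects the active mode's register bank:
 * e.g. CSR_XSTATUS is mstatus (0x300) with CFG_RISCV_M_MODE and sstatus
 * (0x100) with CFG_RISCV_S_MODE.
 */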

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)
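
/*
 * With CSR_MODE_OFFSET = PRV_S these resolve to the standard supervisor
 * interrupt numbers SSIP/STIP/SEIP (1, 5, 9); with PRV_M they resolve to
 * MSIP/MTIP/MEIP (3, 7, 11).
 */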

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)	/* Permit supervisor user-memory access */
#define CSR_XSTATUS_MXR		BIT(19)	/* Make executable pages readable */

#ifndef __ASSEMBLER__

#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})
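
/*
 * Typical usage (illustrative): enable the timer interrupt for the
 * active privilege mode, then inspect the pending bits.
 *
 *	set_csr(CSR_XIE, CSR_XIE_TIE);
 *	unsigned long pending = read_csr(CSR_XIP);
 */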

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)

static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_gp(void)
{
	unsigned long gp = 0;

	asm volatile("mv %0, gp" : "=&r"(gp));
	return gp;
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp = 0;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

/* Return the address of the auipc instruction itself */
static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

/* Flush all TLB entries for all address spaces */
static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
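
/*
 * On RV64, satp packs MODE (bits 63:60), ASID (bits 59:44) and the root
 * page-table PPN (bits 43:0). Illustrative sketch of switching address
 * spaces, assuming mode_bits, asid and pt_pa are provided by the caller:
 *
 *	write_satp(mode_bits | SHIFT_U64(asid, 44) | (pt_pa >> 12));
 *	flush_tlb();
 */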

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}
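
/*
 * Example (illustrative): set_sstatus(CSR_XSTATUS_SUM) lets S-mode code
 * access pages that are mapped user-accessible.
 */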

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

/* uret requires user-level trap support (the draft N extension) */
static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}
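
/*
 * Example (illustrative): measuring a duration in timer ticks, where
 * do_work() stands in for the code being timed.
 *
 *	uint64_t t0 = barrier_read_counter_timer();
 *	do_work();
 *	uint64_t ticks = barrier_read_counter_timer() - t0;
 */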

/* Return the rate of the timer (mtime) in Hz */
static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}

__noprof bool riscv_detect_csr_seed(void);

#endif /*__ASSEMBLER__*/

#endif /*__RISCV_H*/