xref: /optee_os/core/arch/riscv/include/riscv.h (revision 8cd20f2366f528dfe8f30854a33b473713b4a29e)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright 2022-2023 NXP
4  */
5 
6 #ifndef __RISCV_H
7 #define __RISCV_H
8 
9 #include <compiler.h>
10 #include <encoding.h>
11 #include <stdbool.h>
12 #include <stdint.h>
13 #include <sys/cdefs.h>
14 #include <util.h>
15 
/* Width of a native machine word, taken from the compiler's target ABI */
#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/*
 * Bind registers to their ABI names.
 * Only the registers referenced by assembly and trap-handling code are
 * listed, hence the gaps in the numbering.
 */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
38 
39 #if defined(CFG_RISCV_M_MODE)
40 #define CSR_MODE_OFFSET	PRV_M
41 #define XRET			mret
42 #elif defined(CFG_RISCV_S_MODE)
43 #define CSR_MODE_OFFSET	PRV_S
44 #define XRET			sret
45 #endif
46 
/*
 * The machine- and supervisor-level trap CSRs share their low address
 * bits; the privilege mode sits above them, so OR-ing CSR_MODE_BITS
 * with a common offset selects the CSR variant for the configured mode.
 */
#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

/* Mode-agnostic aliases: xstatus resolves to mstatus or sstatus, etc. */
#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)

/* Software/timer/external interrupt numbers for the configured mode */
#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

/* xie bits enabling the software, timer and external interrupts */
#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)

/* xstatus fields; IE/PIE bit positions depend on the configured mode */
#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)	/* previous privilege level */
#define CSR_XSTATUS_SUM		BIT(18)	/* permit access to U-mode pages */
#define CSR_XSTATUS_MXR		BIT(19)	/* make executable pages readable */
71 
72 #ifndef __ASSEMBLER__
73 
/* Read CSR @csr (a compile-time CSR_* constant) and return its value */
#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

/* Write @val to CSR @csr; "rK" lets small constants use the csrwi form */
#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

/* Atomically write @val to CSR @csr and return the previous value */
#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

/* Atomically set the bits in @bit in CSR @csr; returns the old value */
#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

/* Atomically clear the bits in @bit in CSR @csr; returns the old value */
#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

/* Unprivileged counter CSRs */
#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)
113 
114 static inline __noprof void mb(void)
115 {
116 	asm volatile ("fence" : : : "memory");
117 }
118 
119 static inline __noprof unsigned long read_gp(void)
120 {
121 	unsigned long gp = 0;
122 
123 	asm volatile("mv %0, gp" : "=&r"(gp));
124 	return gp;
125 }
126 
127 static inline __noprof unsigned long read_tp(void)
128 {
129 	unsigned long tp = 0;
130 
131 	asm volatile("mv %0, tp" : "=&r"(tp));
132 	return tp;
133 }
134 
135 static inline __noprof unsigned long read_fp(void)
136 {
137 	unsigned long fp = 0;
138 
139 	asm volatile ("mv %0, s0" : "=r" (fp));
140 
141 	return fp;
142 }
143 
144 static inline __noprof unsigned long read_pc(void)
145 {
146 	unsigned long pc = 0;
147 
148 	asm volatile ("auipc %0, 0" : "=r" (pc));
149 
150 	return pc;
151 }
152 
153 static inline __noprof void wfi(void)
154 {
155 	asm volatile ("wfi");
156 }
157 
/*
 * Spin-loop relax hint: burn a few cycles and, where implemented, yield
 * pipeline resources so contended locks are released faster.
 */
static inline __noprof void riscv_cpu_pause(void)
{
	unsigned long dummy = 0;

	/*
	 * Use a divide instruction to force wait
	 * for multiple CPU cycles.
	 * Note: RISC-V does not raise an exception
	 * on divide by zero.
	 */
	asm volatile ("div %0, %0, zero" : "=r" (dummy));

	/*
	 * Use the encoding of the 'pause' instruction,
	 * thus no need to verify toolchain support for
	 * zihintpause.
	 * On hardware platforms that do not implement
	 * this extension, it will simply serve as a no-op.
	 */
	asm volatile (".4byte 0x100000f"); /* pause */
	barrier();
}
180 
181 static inline __noprof void flush_tlb(void)
182 {
183 	asm volatile("sfence.vma zero, zero");
184 }
185 
/* Invalidate TLB entries translating virtual address @va */
static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
190 
191 /* supervisor address translation and protection */
192 static inline __noprof unsigned long read_satp(void)
193 {
194 	unsigned long satp;
195 
196 	asm volatile("csrr %0, satp" : "=r" (satp));
197 
198 	return satp;
199 }
200 
201 static inline __noprof void write_satp(unsigned long satp)
202 {
203 	asm volatile("csrw satp, %0" : : "r" (satp));
204 }
205 
206 /* machine trap-vector base-address register */
207 static inline __noprof unsigned long read_mtvec(void)
208 {
209 	unsigned long mtvec;
210 
211 	asm volatile("csrr %0, mtvec" : "=r" (mtvec));
212 
213 	return mtvec;
214 }
215 
216 static inline __noprof void write_mtvec(unsigned long mtvec)
217 {
218 	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
219 }
220 
221 /* supervisor trap-vector base-address register */
222 static inline __noprof unsigned long read_stvec(void)
223 {
224 	unsigned long stvec;
225 
226 	asm volatile("csrr %0, stvec" : "=r" (stvec));
227 
228 	return stvec;
229 }
230 
231 static inline __noprof void write_stvec(unsigned long stvec)
232 {
233 	asm volatile("csrw stvec, %0" : : "r" (stvec));
234 }
235 
236 /* machine status register */
237 static inline __noprof unsigned long read_mstatus(void)
238 {
239 	unsigned long mstatus;
240 
241 	asm volatile("csrr %0, mstatus" : "=r" (mstatus));
242 
243 	return mstatus;
244 }
245 
246 static inline __noprof void write_mstatus(unsigned long mstatus)
247 {
248 	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
249 }
250 
251 /* supervisor status register */
252 static inline __noprof unsigned long read_sstatus(void)
253 {
254 	unsigned long sstatus;
255 
256 	asm volatile("csrr %0, sstatus" : "=r" (sstatus));
257 
258 	return sstatus;
259 }
260 
261 static inline __noprof void write_sstatus(unsigned long sstatus)
262 {
263 	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
264 }
265 
266 static inline __noprof void set_sstatus(unsigned long sstatus)
267 {
268 	unsigned long x;
269 
270 	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
271 }
272 
273 /* machine exception delegation */
274 static inline __noprof unsigned long read_medeleg(void)
275 {
276 	unsigned long medeleg;
277 
278 	asm volatile("csrr %0, medeleg" : "=r" (medeleg));
279 
280 	return medeleg;
281 }
282 
283 static inline __noprof void write_medeleg(unsigned long medeleg)
284 {
285 	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
286 }
287 
288 /* machine interrupt delegation */
289 static inline __noprof unsigned long read_mideleg(void)
290 {
291 	unsigned long mideleg;
292 
293 	asm volatile("csrr %0, mideleg" : "=r" (mideleg));
294 
295 	return mideleg;
296 }
297 
298 static inline __noprof void write_mideleg(unsigned long mideleg)
299 {
300 	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
301 }
302 
303 /* machine interrupt-enable register */
304 static inline __noprof unsigned long read_mie(void)
305 {
306 	unsigned long mie;
307 
308 	asm volatile("csrr %0, mie" : "=r" (mie));
309 
310 	return mie;
311 }
312 
313 static inline __noprof void write_mie(unsigned long mie)
314 {
315 	asm volatile("csrw mie, %0" : : "r" (mie));
316 }
317 
318 /* supervisor interrupt-enable register */
319 static inline __noprof unsigned long read_sie(void)
320 {
321 	unsigned long sie;
322 
323 	asm volatile("csrr %0, sie" : "=r" (sie));
324 
325 	return sie;
326 }
327 
328 static inline __noprof void write_sie(unsigned long sie)
329 {
330 	asm volatile("csrw sie, %0" : : "r" (sie));
331 }
332 
333 /* machine exception program counter */
334 static inline __noprof unsigned long read_mepc(void)
335 {
336 	unsigned long mepc;
337 
338 	asm volatile("csrr %0, mepc" : "=r" (mepc));
339 
340 	return mepc;
341 }
342 
343 static inline __noprof void write_mepc(unsigned long mepc)
344 {
345 	asm volatile("csrw mepc, %0" : : "r" (mepc));
346 }
347 
348 /* supervisor exception program counter */
349 static inline __noprof unsigned long read_sepc(void)
350 {
351 	unsigned long sepc;
352 
353 	asm volatile("csrr %0, sepc" : "=r" (sepc));
354 
355 	return sepc;
356 }
357 
358 static inline __noprof void write_sepc(unsigned long sepc)
359 {
360 	asm volatile("csrw sepc, %0" : : "r" (sepc));
361 }
362 
363 /* machine scratch register */
364 static inline __noprof unsigned long read_mscratch(void)
365 {
366 	unsigned long mscratch;
367 
368 	asm volatile("csrr %0, mscratch" : "=r" (mscratch));
369 
370 	return mscratch;
371 }
372 
373 static inline __noprof void write_mscratch(unsigned long mscratch)
374 {
375 	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
376 }
377 
378 /* supervisor scratch register */
379 static inline __noprof unsigned long read_sscratch(void)
380 {
381 	unsigned long sscratch;
382 
383 	asm volatile("csrr %0, sscratch" : "=r" (sscratch));
384 
385 	return sscratch;
386 }
387 
388 static inline __noprof void write_sscratch(unsigned long sscratch)
389 {
390 	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
391 }
392 
/* trap-return instructions */

/* Return from a machine-mode trap */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

/* Return from a supervisor-mode trap */
static inline __noprof void sret(void)
{
	asm volatile("sret");
}

/*
 * NOTE(review): uret comes from the user-level interrupt (N) extension,
 * which was dropped from the ratified spec — confirm this still has
 * callers and that target toolchains/hardware accept it.
 */
static inline __noprof void uret(void)
{
	asm volatile("uret");
}
408 
/* Current 64-bit timer value; implementation provided elsewhere */
__noprof uint64_t read_time(void);

/*
 * Read the timer only after outstanding memory operations have
 * completed, so the sample can be ordered against preceding work.
 */
static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}

/* Fixed timer rate from the build configuration (presumably Hz — TODO confirm) */
static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}
421 
/* Detect whether the entropy "seed" CSR is usable; implemented elsewhere */
__noprof bool riscv_detect_csr_seed(void);
423 
424 #endif /*__ASSEMBLER__*/
425 
426 #endif /*__RISCV_H*/
427