/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef __RISCV_H
#define __RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdbool.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
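
/*
 * These values are the x-register indexes of the integer register file
 * (x0..x31), so REG_A0 == 10 because ABI register a0 is x10. A usage
 * sketch (illustrative; regs[] is a hypothetical saved-GPR array in a
 * trap frame):
 *
 *   unsigned long arg0 = regs[REG_A0];
 */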

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif
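
/*
 * XRET names the trap-return instruction for the configured privilege
 * mode, so assembly shared between M-mode and S-mode builds can end a
 * trap handler with "XRET" and get "mret" or "sret" as appropriate.
 */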

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
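
/*
 * Worked example: with PRV_S == 1, CSR_MODE_BITS == 0x100, so
 * CSR_XSTATUS == 0x100 (sstatus) and CSR_XIE == 0x104 (sie); with
 * PRV_M == 3 they become 0x300 (mstatus) and 0x304 (mie). The same
 * CSR_X* alias thus selects the mode-matching CSR at build time.
 */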

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)
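
/*
 * Example values: in S-mode these are 1/5/9 (SSI/STI/SEI) and in
 * M-mode 3/7/11 (MSI/MTI/MEI), matching the standard interrupt
 * numbering of the privileged spec.
 */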

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)

#define CSR_XCAUSE_INTR_FLAG	BIT64(__riscv_xlen - 1)
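
/*
 * Usage sketch for decoding a trap cause (illustrative only;
 * handle_interrupt()/handle_exception() are hypothetical helpers):
 *
 *   unsigned long cause = read_csr(CSR_XCAUSE);
 *
 *   if (cause & CSR_XCAUSE_INTR_FLAG)
 *           handle_interrupt(cause & ~CSR_XCAUSE_INTR_FLAG);
 *   else
 *           handle_exception(cause);
 */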

#ifndef __ASSEMBLER__

#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})
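
/*
 * Usage sketch (illustrative): read-modify-write a mode-aliased CSR,
 * e.g. permitting supervisor access to user pages via SUM. Note the
 * "i" constraint: the csr argument must be a compile-time constant.
 *
 *   unsigned long s = read_csr(CSR_XSTATUS);
 *
 *   write_csr(CSR_XSTATUS, s | CSR_XSTATUS_SUM);
 */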

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define read_set_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, val)						\
	({								\
		asm volatile ("csrs %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define read_clear_csr(csr, val)					\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define clear_csr(csr, val)						\
	({								\
		asm volatile ("csrc %0, %1" : : "i"(csr), "rK"(val));	\
	})
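
/*
 * Usage sketch (illustrative): the read-and-modify forms allow saving
 * and restoring interrupt state around a critical section:
 *
 *   unsigned long old = read_clear_csr(CSR_XSTATUS, CSR_XSTATUS_IE);
 *
 *   ... critical section ...
 *   set_csr(CSR_XSTATUS, old & CSR_XSTATUS_IE);
 */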

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)
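
/*
 * Example (illustrative): cycle-count a code region. CSR_TIME,
 * CSR_CYCLE and CSR_INSTRET come from <encoding.h>.
 *
 *   unsigned long c0 = rdcycle();
 *
 *   do_work();				// hypothetical workload
 *   unsigned long cycles = rdcycle() - c0;
 */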

static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_gp(void)
{
	unsigned long gp = 0;

	asm volatile("mv %0, gp" : "=&r"(gp));
	return gp;
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp = 0;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void riscv_cpu_pause(void)
{
	unsigned long dummy = 0;

	/*
	 * Use a division to burn multiple CPU cycles. RISC-V does not
	 * raise an exception on divide by zero; the result is simply
	 * all ones. The "+r" constraint marks %0 as both read and
	 * written.
	 */
	asm volatile ("div %0, %0, zero" : "+r" (dummy));

	/*
	 * Emit the raw encoding of the 'pause' instruction so no
	 * toolchain support for Zihintpause is required. On hardware
	 * that does not implement the extension it executes as a
	 * no-op, since it is encoded as a FENCE hint.
	 */
	asm volatile (".4byte 0x100000f"); /* pause */
	barrier();
}
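
/*
 * Usage sketch (illustrative): back off inside a spin loop, e.g.
 *
 *   while (!done)			// 'done' is a hypothetical flag
 *           riscv_cpu_pause();		// set by another hart
 */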

static inline __noprof void flush_tlb(void)
{
	/* "memory" clobber orders the fence against prior accesses */
	asm volatile ("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
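
/*
 * Usage sketch (illustrative): after changing a page-table entry that
 * may be cached in the TLB, flush the stale translation:
 *
 *   pte_update(va, new_pte);	// hypothetical page-table update
 *   flush_tlb_entry(va);
 */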

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
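
/*
 * On RV64, satp packs MODE (bits 63:60, e.g. 8 for Sv39), ASID
 * (bits 59:44) and the root page-table PPN (bits 43:0). A hypothetical
 * Sv39 enable could therefore look like:
 *
 *   write_satp(SHIFT_U64(8, 60) | (root_pa >> 12));
 *   flush_tlb();
 */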

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	/* csrs is csrrs with rd = x0; the read result is not needed */
	asm volatile ("csrs sstatus, %0" : : "rK" (sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

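/*
 * Note: uret belongs to the user-level interrupt (N) extension, which
 * was never ratified; on cores without it this traps as an illegal
 * instruction.
 */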
static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}

static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}
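
/*
 * Example (illustrative): convert a timer delta to microseconds,
 * assuming CFG_RISCV_MTIME_RATE is the timer tick rate in Hz:
 *
 *   uint64_t t0 = barrier_read_counter_timer();
 *
 *   do_work();			// hypothetical workload
 *   uint64_t us = (barrier_read_counter_timer() - t0) * 1000000 /
 *		   read_cntfrq();
 */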

__noprof bool riscv_detect_csr_seed(void);

#endif /*__ASSEMBLER__*/

#endif /*__RISCV_H*/