/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef __RISCV_H
#define __RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdbool.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31
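
/*
 * Note: these values follow the standard RISC-V integer register
 * numbering (a0 is x10, hence REG_A0 == 10), so they can double as
 * indices into an array of registers saved in a trap frame.
 */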

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET			mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET			sret
#endif

#define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)
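
/*
 * The privileged CSR map places most S-mode trap-handling CSRs at
 * 0x1XX and their M-mode counterparts at 0x3XX with identical low
 * offsets. Since PRV_S is 1 and PRV_M is 3, shifting the privilege
 * level left by 8 bits selects the right block: CSR_XSTATUS below
 * resolves to sstatus (0x100) in S-mode and mstatus (0x300) in
 * M-mode.
 */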

#define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
#define CSR_XIE			(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
#define CSR_XEPC		(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
#define CSR_XIP			(CSR_MODE_BITS | 0x044)
#define CSR_XISELECT		(CSR_MODE_BITS | 0x050)
#define CSR_XIREG		(CSR_MODE_BITS | 0x051)
#define CSR_XTOPEI		(CSR_MODE_BITS | 0x05C)
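
/*
 * CSR_XISELECT, CSR_XIREG (indirect CSR access) and CSR_XTOPEI are
 * defined by the RISC-V Advanced Interrupt Architecture (AIA); they
 * resolve to siselect/sireg/stopei in S-mode and to
 * miselect/mireg/mtopei in M-mode.
 */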

#define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT		(CSR_MODE_OFFSET + 8)

#define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE		BIT64(IRQ_XEXT)
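
/*
 * Illustrative usage with the CSR accessors defined below: enable the
 * timer interrupt at the current privilege level with
 * set_csr(CSR_XIE, CSR_XIE_TIE) and mask it again with
 * clear_csr(CSR_XIE, CSR_XIE_TIE).
 */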

#define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP		BIT(8)
#define CSR_XSTATUS_SUM		BIT(18)
#define CSR_XSTATUS_MXR		BIT(19)

#define CSR_XCAUSE_INTR_FLAG	BIT64(__riscv_xlen - 1)
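
/*
 * A sketch of how a trap handler can decode xcause: the MSB
 * distinguishes interrupts from exceptions and the remaining bits
 * hold the cause ID. handle_interrupt()/handle_exception() are
 * hypothetical helpers, not part of this header.
 *
 *	unsigned long cause = read_csr(CSR_XCAUSE);
 *
 *	if (cause & CSR_XCAUSE_INTR_FLAG)
 *		handle_interrupt(cause & ~CSR_XCAUSE_INTR_FLAG);
 *	else
 *		handle_exception(cause);
 */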

#ifndef __ASSEMBLER__

/*
 * Generic CSR accessors. @csr must be a compile-time constant CSR
 * address (the "i" constraint); @val may live in a register or be a
 * 5-bit immediate (the "rK" constraint).
 */
#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define read_set_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, val)						\
	({								\
		asm volatile ("csrs %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define read_clear_csr(csr, val)					\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define clear_csr(csr, val)						\
	({								\
		asm volatile ("csrc %0, %1" : : "i"(csr), "rK"(val));	\
	})
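
/*
 * Example (illustrative): temporarily mask the external interrupt and
 * restore its previous state afterwards.
 *
 *	unsigned long old = read_clear_csr(CSR_XIE, CSR_XIE_EIE);
 *
 *	... critical work ...
 *
 *	set_csr(CSR_XIE, old & CSR_XIE_EIE);
 */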

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)

static inline __noprof void mb(void)
{
	/* Order all earlier loads/stores before all later ones */
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_gp(void)
{
	unsigned long gp = 0;

	asm volatile("mv %0, gp" : "=&r"(gp));
	return gp;
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp = 0;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	/* auipc with a zero immediate yields this instruction's address */
	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void riscv_cpu_pause(void)
{
	unsigned long dummy = 0;

	/*
	 * Use a divide instruction to force a wait of
	 * multiple CPU cycles.
	 * Note: RISC-V does not raise an exception
	 * on divide by zero.
	 */
	asm volatile ("div %0, %0, zero" : "+r" (dummy));

	/*
	 * Emit the raw encoding of the 'pause' instruction,
	 * thus there is no need to verify toolchain support
	 * for Zihintpause. 'pause' is a HINT in the fence
	 * opcode space, so hardware that does not implement
	 * the extension simply executes it as a no-op.
	 */
	asm volatile (".4byte 0x100000f"); /* pause */
	barrier();
}
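
/*
 * Illustrative use of riscv_cpu_pause() in a spin-wait loop, where
 * flag_is_set() stands for a hypothetical predicate polled by the
 * waiting hart:
 *
 *	while (!flag_is_set())
 *		riscv_cpu_pause();
 */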

static inline __noprof void flush_tlb(void)
{
	asm volatile ("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
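
/*
 * Note: sfence.vma only affects the local hart. Flushing stale
 * translations on other harts requires a remote fence, e.g. an IPI
 * or the SBI RFENCE extension.
 */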

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}

static inline __noprof uint32_t read_cntfrq(void)
{
	/* The timer tick rate is fixed at build time */
	return CFG_RISCV_MTIME_RATE;
}
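
/*
 * A sketch of timing a code section with these helpers, converting a
 * tick delta to microseconds (beware of overflow for long intervals):
 *
 *	uint64_t t0 = barrier_read_counter_timer();
 *
 *	... measured work ...
 *
 *	uint64_t us = (barrier_read_counter_timer() - t0) * 1000000 /
 *		      read_cntfrq();
 */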

__noprof bool riscv_detect_csr_seed(void);

#endif /*__ASSEMBLER__*/

#endif /*__RISCV_H*/