xref: /optee_os/core/arch/riscv/include/riscv.h (revision cbaab388b89ff6248a0da3d97dc58622dc758bc1)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright 2022-2023 NXP
4  */
5 
6 #ifndef RISCV_H
7 #define RISCV_H
8 
9 #include <compiler.h>
10 #include <encoding.h>
11 #include <stdint.h>
12 #include <sys/cdefs.h>
13 #include <util.h>
14 
15 #define RISCV_XLEN_BITS		(__riscv_xlen)
16 #define RISCV_XLEN_BYTES	(__riscv_xlen / 8)
17 
18 #define REGOFF(x)			((x) * RISCV_XLEN_BYTES)
19 
20 #if __riscv_xlen == 32
21 #define STR       sw
22 #define LDR       lw
23 #else
24 #define STR       sd
25 #define LDR       ld
26 #endif
27 
28 #if defined(CFG_RISCV_M_MODE)
29 #define CSR_MODE_OFFSET	PRV_M
30 #define XRET			mret
31 #elif defined(CFG_RISCV_S_MODE)
32 #define CSR_MODE_OFFSET	PRV_S
33 #define XRET			sret
34 #endif
35 
36 #define CSR_MODE_BITS		SHIFT_U64(CSR_MODE_OFFSET, 8)
37 
38 #define CSR_XSTATUS		(CSR_MODE_BITS | 0x000)
39 #define CSR_XIE			(CSR_MODE_BITS | 0x004)
40 #define CSR_XTVEC		(CSR_MODE_BITS | 0x005)
41 #define CSR_XSCRATCH		(CSR_MODE_BITS | 0x040)
42 #define CSR_XEPC		(CSR_MODE_BITS | 0x041)
43 #define CSR_XCAUSE		(CSR_MODE_BITS | 0x042)
44 #define CSR_XTVAL		(CSR_MODE_BITS | 0x043)
45 #define CSR_XIP			(CSR_MODE_BITS | 0x044)
46 
47 #define IRQ_XSOFT		(CSR_MODE_OFFSET + 0)
48 #define IRQ_XTIMER		(CSR_MODE_OFFSET + 4)
49 #define IRQ_XEXT		(CSR_MODE_OFFSET + 8)
50 
51 #define CSR_XIE_SIE		BIT64(IRQ_XSOFT)
52 #define CSR_XIE_TIE		BIT64(IRQ_XTIMER)
53 #define CSR_XIE_EIE		BIT64(IRQ_XEXT)
54 
55 #define CSR_XSTATUS_IE		BIT(CSR_MODE_OFFSET + 0)
56 #define CSR_XSTATUS_PIE		BIT(CSR_MODE_OFFSET + 4)
57 #define CSR_XSTATUS_SPP		BIT(8)
58 #define CSR_XSTATUS_SUM		BIT(18)
59 #define CSR_XSTATUS_MXR		BIT(19)
60 
61 #ifndef __ASSEMBLER__
62 
/* Full memory barrier: "fence" with no operands orders all I/O and memory
 * accesses (iorw,iorw) before and after it; the "memory" clobber keeps the
 * compiler from reordering accesses across the asm as well.
 */
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}
67 
68 static inline __noprof unsigned long read_tp(void)
69 {
70 	unsigned long tp;
71 
72 	asm volatile("mv %0, tp" : "=&r"(tp));
73 	return tp;
74 }
75 
/* Wait-for-interrupt: hint that the hart may stall until an interrupt
 * becomes pending.
 */
static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}
80 
81 static inline __noprof void flush_tlb(void)
82 {
83 	asm volatile("sfence.vma zero, zero");
84 }
85 
/* Flush TLB entries matching virtual address @va (all address spaces).
 * The "memory" clobber orders prior page-table stores before the fence.
 */
static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
90 
91 /* supervisor address translation and protection */
92 static inline __noprof unsigned long read_satp(void)
93 {
94 	unsigned long satp;
95 
96 	asm volatile("csrr %0, satp" : "=r" (satp));
97 
98 	return satp;
99 }
100 
/* Write satp (supervisor address translation and protection register). */
static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
105 
106 /* machine trap-vector base-address register */
107 static inline __noprof unsigned long read_mtvec(void)
108 {
109 	unsigned long mtvec;
110 
111 	asm volatile("csrr %0, mtvec" : "=r" (mtvec));
112 
113 	return mtvec;
114 }
115 
/* Write mtvec (machine trap-vector base-address register). */
static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}
120 
121 /* supervisor trap-vector base-address register */
122 static inline __noprof unsigned long read_stvec(void)
123 {
124 	unsigned long stvec;
125 
126 	asm volatile("csrr %0, stvec" : "=r" (stvec));
127 
128 	return stvec;
129 }
130 
/* Write stvec (supervisor trap-vector base-address register). */
static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}
135 
136 /* machine status register */
137 static inline __noprof unsigned long read_mstatus(void)
138 {
139 	unsigned long mstatus;
140 
141 	asm volatile("csrr %0, mstatus" : "=r" (mstatus));
142 
143 	return mstatus;
144 }
145 
/* Write mstatus (machine status register). */
static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}
150 
151 /* supervisor status register */
152 static inline __noprof unsigned long read_sstatus(void)
153 {
154 	unsigned long sstatus;
155 
156 	asm volatile("csrr %0, sstatus" : "=r" (sstatus));
157 
158 	return sstatus;
159 }
160 
/* Write sstatus (supervisor status register), replacing its whole value. */
static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}
165 
166 static inline __noprof void set_sstatus(unsigned long sstatus)
167 {
168 	unsigned long x;
169 
170 	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
171 }
172 
173 /* machine exception delegation */
174 static inline __noprof unsigned long read_medeleg(void)
175 {
176 	unsigned long medeleg;
177 
178 	asm volatile("csrr %0, medeleg" : "=r" (medeleg));
179 
180 	return medeleg;
181 }
182 
/* Write medeleg (machine exception delegation register). */
static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}
187 
188 /* machine interrupt delegation */
189 static inline __noprof unsigned long read_mideleg(void)
190 {
191 	unsigned long mideleg;
192 
193 	asm volatile("csrr %0, mideleg" : "=r" (mideleg));
194 
195 	return mideleg;
196 }
197 
/* Write mideleg (machine interrupt delegation register). */
static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}
202 
203 /* machine interrupt-enable register */
204 static inline __noprof unsigned long read_mie(void)
205 {
206 	unsigned long mie;
207 
208 	asm volatile("csrr %0, mie" : "=r" (mie));
209 
210 	return mie;
211 }
212 
/* Write mie (machine interrupt-enable register). */
static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}
217 
218 /* supervisor interrupt-enable register */
219 static inline __noprof unsigned long read_sie(void)
220 {
221 	unsigned long sie;
222 
223 	asm volatile("csrr %0, sie" : "=r" (sie));
224 
225 	return sie;
226 }
227 
/* Write sie (supervisor interrupt-enable register). */
static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}
232 
233 /* machine exception program counter */
234 static inline __noprof unsigned long read_mepc(void)
235 {
236 	unsigned long mepc;
237 
238 	asm volatile("csrr %0, mepc" : "=r" (mepc));
239 
240 	return mepc;
241 }
242 
/* Write mepc (machine exception program counter). */
static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}
247 
248 /* supervisor exception program counter */
249 static inline __noprof unsigned long read_sepc(void)
250 {
251 	unsigned long sepc;
252 
253 	asm volatile("csrr %0, sepc" : "=r" (sepc));
254 
255 	return sepc;
256 }
257 
/* Write sepc (supervisor exception program counter). */
static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}
262 
263 /* machine scratch register */
264 static inline __noprof unsigned long read_mscratch(void)
265 {
266 	unsigned long mscratch;
267 
268 	asm volatile("csrr %0, mscratch" : "=r" (mscratch));
269 
270 	return mscratch;
271 }
272 
/* Write mscratch (machine scratch register). */
static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}
277 
278 /* supervisor scratch register */
279 static inline __noprof unsigned long read_sscratch(void)
280 {
281 	unsigned long sscratch;
282 
283 	asm volatile("csrr %0, sscratch" : "=r" (sscratch));
284 
285 	return sscratch;
286 }
287 
/* Write sscratch (supervisor scratch register). */
static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}
292 
293 /* trap-return instructions */
/* Execute mret (return from a machine-mode trap). */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}
298 
/* Execute sret (return from a supervisor-mode trap). */
static inline __noprof void sret(void)
{
	asm volatile("sret");
}
303 
/*
 * Execute uret (user-mode trap return).
 * NOTE(review): uret belongs to the withdrawn N extension and is not
 * implemented on most harts — confirm any remaining callers.
 */
static inline __noprof void uret(void)
{
	asm volatile("uret");
}
308 
309 #endif /*__ASSEMBLER__*/
310 
311 #endif /*RISCV_H*/
312