xref: /optee_os/core/arch/riscv/include/riscv.h (revision 1c025012fb55e89f8c2afc4358e58238da0db089)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright 2022 NXP
4  */
5 
6 #ifndef RISCV_H
7 #define RISCV_H
8 
9 #include <compiler.h>
10 #include <encoding.h>
11 #include <stdint.h>
12 #include <sys/cdefs.h>
13 #include <util.h>
14 
/*
 * Width of the base integer registers: __riscv_xlen is predefined by
 * the compiler (32 on RV32, 64 on RV64).
 */
#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Byte offset of the x-th XLEN-sized slot in a saved-register frame */
#define REGOFF(x)			((x) * RISCV_XLEN_BYTES)

/* Natural-width store/load mnemonics for use in assembly sources */
#if __riscv_xlen == 32
#define STR       sw
#define LDR       lw
#else
#define STR       sd
#define LDR       ld
#endif
27 
28 #ifndef __ASSEMBLER__
29 
/* Full memory barrier: orders all prior memory accesses before later ones */
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}
34 
35 static inline __noprof unsigned long read_tp(void)
36 {
37 	unsigned long tp;
38 
39 	asm volatile("mv %0, tp" : "=&r"(tp));
40 	return tp;
41 }
42 
/* Wait for interrupt: stall the hart until an interrupt is pending */
static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}
47 
48 static inline __noprof void flush_tlb(void)
49 {
50 	asm volatile("sfence.vma zero, zero");
51 }
52 
/* Flush TLB entries matching virtual address @va on the current hart */
static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
57 
58 /* supervisor address translation and protection */
59 static inline __noprof unsigned long read_satp(void)
60 {
61 	unsigned long satp;
62 
63 	asm volatile("csrr %0, satp" : "=r" (satp));
64 
65 	return satp;
66 }
67 
68 static inline __noprof void write_satp(unsigned long satp)
69 {
70 	asm volatile("csrw satp, %0" : : "r" (satp));
71 }
72 
73 /* machine trap-vector base-address register */
74 static inline __noprof unsigned long read_mtvec(void)
75 {
76 	unsigned long mtvec;
77 
78 	asm volatile("csrr %0, mtvec" : "=r" (mtvec));
79 
80 	return mtvec;
81 }
82 
83 static inline __noprof void write_mtvec(unsigned long mtvec)
84 {
85 	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
86 }
87 
88 /* supervisor trap-vector base-address register */
89 static inline __noprof unsigned long read_stvec(void)
90 {
91 	unsigned long stvec;
92 
93 	asm volatile("csrr %0, stvec" : "=r" (stvec));
94 
95 	return stvec;
96 }
97 
98 static inline __noprof void write_stvec(unsigned long stvec)
99 {
100 	asm volatile("csrw stvec, %0" : : "r" (stvec));
101 }
102 
103 /* machine status register */
104 static inline __noprof unsigned long read_mstatus(void)
105 {
106 	unsigned long mstatus;
107 
108 	asm volatile("csrr %0, mstatus" : "=r" (mstatus));
109 
110 	return mstatus;
111 }
112 
113 static inline __noprof void write_mstatus(unsigned long mstatus)
114 {
115 	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
116 }
117 
118 /* supervisor status register */
119 static inline __noprof unsigned long read_sstatus(void)
120 {
121 	unsigned long sstatus;
122 
123 	asm volatile("csrr %0, sstatus" : "=r" (sstatus));
124 
125 	return sstatus;
126 }
127 
128 static inline __noprof void write_sstatus(unsigned long sstatus)
129 {
130 	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
131 }
132 
133 static inline __noprof void set_sstatus(unsigned long sstatus)
134 {
135 	unsigned long x;
136 
137 	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
138 }
139 
140 /* machine exception delegation */
141 static inline __noprof unsigned long read_medeleg(void)
142 {
143 	unsigned long medeleg;
144 
145 	asm volatile("csrr %0, medeleg" : "=r" (medeleg));
146 
147 	return medeleg;
148 }
149 
150 static inline __noprof void write_medeleg(unsigned long medeleg)
151 {
152 	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
153 }
154 
155 /* machine interrupt delegation */
156 static inline __noprof unsigned long read_mideleg(void)
157 {
158 	unsigned long mideleg;
159 
160 	asm volatile("csrr %0, mideleg" : "=r" (mideleg));
161 
162 	return mideleg;
163 }
164 
165 static inline __noprof void write_mideleg(unsigned long mideleg)
166 {
167 	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
168 }
169 
170 /* machine interrupt-enable register */
171 static inline __noprof unsigned long read_mie(void)
172 {
173 	unsigned long mie;
174 
175 	asm volatile("csrr %0, mie" : "=r" (mie));
176 
177 	return mie;
178 }
179 
180 static inline __noprof void write_mie(unsigned long mie)
181 {
182 	asm volatile("csrw mie, %0" : : "r" (mie));
183 }
184 
185 /* supervisor interrupt-enable register */
186 static inline __noprof unsigned long read_sie(void)
187 {
188 	unsigned long sie;
189 
190 	asm volatile("csrr %0, sie" : "=r" (sie));
191 
192 	return sie;
193 }
194 
195 static inline __noprof void write_sie(unsigned long sie)
196 {
197 	asm volatile("csrw sie, %0" : : "r" (sie));
198 }
199 
200 /* machine exception program counter */
201 static inline __noprof unsigned long read_mepc(void)
202 {
203 	unsigned long mepc;
204 
205 	asm volatile("csrr %0, mepc" : "=r" (mepc));
206 
207 	return mepc;
208 }
209 
210 static inline __noprof void write_mepc(unsigned long mepc)
211 {
212 	asm volatile("csrw mepc, %0" : : "r" (mepc));
213 }
214 
215 /* supervisor exception program counter */
216 static inline __noprof unsigned long read_sepc(void)
217 {
218 	unsigned long sepc;
219 
220 	asm volatile("csrr %0, sepc" : "=r" (sepc));
221 
222 	return sepc;
223 }
224 
225 static inline __noprof void write_sepc(unsigned long sepc)
226 {
227 	asm volatile("csrw sepc, %0" : : "r" (sepc));
228 }
229 
230 /* machine scratch register */
231 static inline __noprof unsigned long read_mscratch(void)
232 {
233 	unsigned long mscratch;
234 
235 	asm volatile("csrr %0, mscratch" : "=r" (mscratch));
236 
237 	return mscratch;
238 }
239 
240 static inline __noprof void write_mscratch(unsigned long mscratch)
241 {
242 	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
243 }
244 
245 /* supervisor scratch register */
246 static inline __noprof unsigned long read_sscratch(void)
247 {
248 	unsigned long sscratch;
249 
250 	asm volatile("csrr %0, sscratch" : "=r" (sscratch));
251 
252 	return sscratch;
253 }
254 
255 static inline __noprof void write_sscratch(unsigned long sscratch)
256 {
257 	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
258 }
259 
/* trap-return instructions */

/* Return from a machine-mode trap (mret instruction) */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

/* Return from a supervisor-mode trap (sret instruction) */
static inline __noprof void sret(void)
{
	asm volatile("sret");
}

/*
 * Return from a user-mode trap (uret instruction).
 * NOTE(review): uret belongs to the unratified N extension and was
 * dropped from the ratified privileged spec — confirm the target
 * actually implements it before relying on this.
 */
static inline __noprof void uret(void)
{
	asm volatile("uret");
}
275 
276 #endif /*__ASSEMBLER__*/
277 
278 #endif /*RISCV_H*/
279