/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef __RISCV_H
#define __RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdbool.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

/* The stack pointer is always kept 16-byte aligned */
#define STACK_ALIGNMENT 16

#define RISCV_XLEN_BITS (__riscv_xlen)
#define RISCV_XLEN_BYTES (__riscv_xlen / 8)

/* Map ABI register names to their register numbers */
#define REG_RA 1
#define REG_SP 2
#define REG_GP 3
#define REG_TP 4
#define REG_T0 5
#define REG_T2 7
#define REG_S0 8
#define REG_S1 9
#define REG_A0 10
#define REG_A1 11
#define REG_A2 12
#define REG_A3 13
#define REG_A5 15
#define REG_A7 17
#define REG_S2 18
#define REG_S11 27
#define REG_T3 28
#define REG_T6 31

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET		mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET		sret
#endif

#define CSR_MODE_BITS	SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS	(CSR_MODE_BITS | 0x000)
#define CSR_XIE		(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC	(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH	(CSR_MODE_BITS | 0x040)
#define CSR_XEPC	(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE	(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL	(CSR_MODE_BITS | 0x043)
#define CSR_XIP		(CSR_MODE_BITS | 0x044)
#define CSR_XISELECT	(CSR_MODE_BITS | 0x050)
#define CSR_XIREG	(CSR_MODE_BITS | 0x051)
#define CSR_XTOPEI	(CSR_MODE_BITS | 0x05C)
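
/*
 * Worked example of the scheme above: with CFG_RISCV_S_MODE,
 * CSR_MODE_OFFSET is PRV_S (1), so CSR_XSTATUS = SHIFT_U64(1, 8) | 0x000
 * = 0x100, the CSR number of sstatus. With CFG_RISCV_M_MODE it is
 * SHIFT_U64(3, 8) | 0x000 = 0x300, i.e. mstatus.
 */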

#define IRQ_XSOFT	(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER	(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT	(CSR_MODE_OFFSET + 8)
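/*
 * In S-mode these resolve to the standard interrupt numbers 1 (SSI),
 * 5 (STI) and 9 (SEI); in M-mode to 3 (MSI), 7 (MTI) and 11 (MEI).
 */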

#define CSR_XIE_SIE	BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE	BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE	BIT64(IRQ_XEXT)

#define CSR_XSTATUS_IE	BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE	BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP	BIT(8)
#define CSR_XSTATUS_SUM	BIT(18)
#define CSR_XSTATUS_MXR	BIT(19)

#define CSR_XCAUSE_INTR_FLAG	BIT64(__riscv_xlen - 1)

#ifndef __ASSEMBLER__

#define read_csr(csr) \
	({ \
		unsigned long __tmp; \
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr)); \
		__tmp; \
	})

#define write_csr(csr, val) \
	({ \
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val)); \
	})

#define swap_csr(csr, val) \
	({ \
		unsigned long __tmp; \
		asm volatile ("csrrw %0, %1, %2" \
			      : "=r"(__tmp) : "i"(csr), "rK"(val)); \
		__tmp; \
	})

#define read_set_csr(csr, val) \
	({ \
		unsigned long __tmp; \
		asm volatile ("csrrs %0, %1, %2" \
			      : "=r"(__tmp) : "i"(csr), "rK"(val)); \
		__tmp; \
	})

#define set_csr(csr, val) \
	({ \
		asm volatile ("csrs %0, %1" : : "i"(csr), "rK"(val)); \
	})

#define read_clear_csr(csr, val) \
	({ \
		unsigned long __tmp; \
		asm volatile ("csrrc %0, %1, %2" \
			      : "=r"(__tmp) : "i"(csr), "rK"(val)); \
		__tmp; \
	})

#define clear_csr(csr, val) \
	({ \
		asm volatile ("csrc %0, %1" : : "i"(csr), "rK"(val)); \
	})
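
/*
 * Usage sketch (illustrative, not part of this API): atomically mask
 * interrupts around a critical section, then restore the previous
 * interrupt-enable state.
 *
 * unsigned long old = read_clear_csr(CSR_XSTATUS, CSR_XSTATUS_IE);
 * ... critical section ...
 * set_csr(CSR_XSTATUS, old & CSR_XSTATUS_IE);
 */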

#define rdtime()	read_csr(CSR_TIME)
#define rdcycle()	read_csr(CSR_CYCLE)
#define rdinstret()	read_csr(CSR_INSTRET)

static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_gp(void)
{
	unsigned long gp = 0;

	asm volatile("mv %0, gp" : "=&r"(gp));
	return gp;
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp = 0;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void riscv_cpu_pause(void)
{
	unsigned long dummy = 0;

	/*
	 * Use a divide instruction to force a wait of multiple CPU
	 * cycles. Note: RISC-V does not raise an exception on divide
	 * by zero.
	 */
	asm volatile ("div %0, %0, zero" : "=r" (dummy));

	/*
	 * Emit the raw encoding of the 'pause' instruction so that no
	 * toolchain support for the Zihintpause extension is required.
	 * 'pause' is a HINT encoding, so harts that do not implement
	 * the extension execute it as a no-op.
	 */
	asm volatile (".4byte 0x100000f"); /* pause */
	barrier();
}
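
/*
 * Illustrative use, assuming a hypothetical 'volatile bool ready' flag
 * set by another hart:
 *
 * while (!ready)
 *	riscv_cpu_pause();
 */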

static inline __noprof void flush_tlb(void)
{
	asm volatile("sfence.vma zero, zero");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}
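
/*
 * Sketch: after modifying the page table entry for a virtual address,
 * flush its stale translation before the address is used again.
 *
 * update_pte(va, new_pte);	(hypothetical helper)
 * flush_tlb_entry(va);
 */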

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
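
/*
 * Sketch (RV64 Sv39, per the privileged spec): a satp value packs MODE
 * (bits 63:60, 8 for Sv39), ASID (bits 59:44) and the physical page
 * number of the root page table (bits 43:0). With hypothetical 'asid'
 * and 'root_pt_pa' values:
 *
 * write_satp(SHIFT_U64(8, 60) | SHIFT_U64(asid, 44) |
 *	      (root_pt_pa >> 12));
 */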

/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb(); /* Get timer value after pending operations have completed */
	return read_time();
}

static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}
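
/*
 * Sketch, assuming CFG_RISCV_MTIME_RATE is the timer frequency in Hz:
 * measure an elapsed time in microseconds.
 *
 * uint64_t t0 = barrier_read_counter_timer();
 * ... work ...
 * uint64_t us = (barrier_read_counter_timer() - t0) * 1000000 /
 *		 read_cntfrq();
 */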

__noprof bool riscv_detect_csr_seed(void);

#endif /*__ASSEMBLER__*/

#endif /*__RISCV_H*/