/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 */

#ifndef __RISCV_H
#define __RISCV_H

#include <compiler.h>
#include <encoding.h>
#include <stdint.h>
#include <sys/cdefs.h>
#include <util.h>

#define RISCV_XLEN_BITS		(__riscv_xlen)
#define RISCV_XLEN_BYTES	(__riscv_xlen / 8)

/* Bind registers to their ABI names */
#define REG_RA	1
#define REG_SP	2
#define REG_GP	3
#define REG_TP	4
#define REG_T0	5
#define REG_T2	7
#define REG_S0	8
#define REG_S1	9
#define REG_A0	10
#define REG_A1	11
#define REG_A2	12
#define REG_A3	13
#define REG_A5	15
#define REG_A7	17
#define REG_S2	18
#define REG_S11	27
#define REG_T3	28
#define REG_T6	31

#if defined(CFG_RISCV_M_MODE)
#define CSR_MODE_OFFSET	PRV_M
#define XRET		mret
#elif defined(CFG_RISCV_S_MODE)
#define CSR_MODE_OFFSET	PRV_S
#define XRET		sret
#endif

#define CSR_MODE_BITS	SHIFT_U64(CSR_MODE_OFFSET, 8)

#define CSR_XSTATUS	(CSR_MODE_BITS | 0x000)
#define CSR_XIE		(CSR_MODE_BITS | 0x004)
#define CSR_XTVEC	(CSR_MODE_BITS | 0x005)
#define CSR_XSCRATCH	(CSR_MODE_BITS | 0x040)
#define CSR_XEPC	(CSR_MODE_BITS | 0x041)
#define CSR_XCAUSE	(CSR_MODE_BITS | 0x042)
#define CSR_XTVAL	(CSR_MODE_BITS | 0x043)
#define CSR_XIP		(CSR_MODE_BITS | 0x044)

#define IRQ_XSOFT	(CSR_MODE_OFFSET + 0)
#define IRQ_XTIMER	(CSR_MODE_OFFSET + 4)
#define IRQ_XEXT	(CSR_MODE_OFFSET + 8)

#define CSR_XIE_SIE	BIT64(IRQ_XSOFT)
#define CSR_XIE_TIE	BIT64(IRQ_XTIMER)
#define CSR_XIE_EIE	BIT64(IRQ_XEXT)

#define CSR_XSTATUS_IE	BIT(CSR_MODE_OFFSET + 0)
#define CSR_XSTATUS_PIE	BIT(CSR_MODE_OFFSET + 4)
#define CSR_XSTATUS_SPP	BIT(8)
#define CSR_XSTATUS_SUM	BIT(18)
#define CSR_XSTATUS_MXR	BIT(19)

#ifndef __ASSEMBLER__

#define read_csr(csr)							\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrr %0, %1" : "=r"(__tmp) : "i"(csr));	\
		__tmp;							\
	})

#define write_csr(csr, val)						\
	({								\
		asm volatile ("csrw %0, %1" : : "i"(csr), "rK"(val));	\
	})

#define swap_csr(csr, val)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrw %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(val));	\
		__tmp;							\
	})

#define set_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrs %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

#define clear_csr(csr, bit)						\
	({								\
		unsigned long __tmp;					\
		asm volatile ("csrrc %0, %1, %2"			\
			      : "=r"(__tmp) : "i"(csr), "rK"(bit));	\
		__tmp;							\
	})

#define rdtime() read_csr(CSR_TIME)
#define rdcycle() read_csr(CSR_CYCLE)
#define rdinstret() read_csr(CSR_INSTRET)
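/*
 * Illustrative sketch, not part of the original file: the CSR_X*
 * aliases resolve to the M-mode or S-mode CSR of the privilege level
 * selected at build time, so mode-agnostic code can mask and restore
 * the timer interrupt with the accessors above, e.g.:
 *
 *	unsigned long old_xie = clear_csr(CSR_XIE, CSR_XIE_TIE);
 *
 *	... critical section ...
 *
 *	if (old_xie & CSR_XIE_TIE)
 *		set_csr(CSR_XIE, CSR_XIE_TIE);
 *
 * csrrs/csrrc return the pre-modification value, which is what makes
 * the save/restore pattern above work without a separate read.
 */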
static inline __noprof void mb(void)
{
	asm volatile ("fence" : : : "memory");
}

static inline __noprof unsigned long read_gp(void)
{
	unsigned long gp = 0;

	asm volatile("mv %0, gp" : "=&r"(gp));
	return gp;
}

static inline __noprof unsigned long read_tp(void)
{
	unsigned long tp = 0;

	asm volatile("mv %0, tp" : "=&r"(tp));
	return tp;
}

static inline __noprof unsigned long read_fp(void)
{
	unsigned long fp = 0;

	asm volatile ("mv %0, s0" : "=r" (fp));

	return fp;
}

static inline __noprof unsigned long read_pc(void)
{
	unsigned long pc = 0;

	asm volatile ("auipc %0, 0" : "=r" (pc));

	return pc;
}

static inline __noprof void wfi(void)
{
	asm volatile ("wfi");
}

static inline __noprof void flush_tlb(void)
{
	/*
	 * The "memory" clobber (also used in flush_tlb_entry() below)
	 * keeps the compiler from reordering page-table updates past
	 * the fence.
	 */
	asm volatile("sfence.vma zero, zero" : : : "memory");
}

static inline __noprof void flush_tlb_entry(unsigned long va)
{
	asm volatile ("sfence.vma %0" : : "r" (va) : "memory");
}

/* supervisor address translation and protection */
static inline __noprof unsigned long read_satp(void)
{
	unsigned long satp;

	asm volatile("csrr %0, satp" : "=r" (satp));

	return satp;
}

static inline __noprof void write_satp(unsigned long satp)
{
	asm volatile("csrw satp, %0" : : "r" (satp));
}
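/*
 * Minimal sketch, assuming RV64 with Sv39 translation: satp packs
 * MODE[63:60] (value 8 selects Sv39), ASID[59:44] and the root
 * page-table PPN[43:0]. The names asid and root_pa below are
 * hypothetical. Switching address spaces might look like:
 *
 *	write_satp(SHIFT_U64(8, 60) | SHIFT_U64(asid, 44) |
 *		   (root_pa >> 12));
 *	flush_tlb();	// drop translations cached under the old satp
 */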
/* machine trap-vector base-address register */
static inline __noprof unsigned long read_mtvec(void)
{
	unsigned long mtvec;

	asm volatile("csrr %0, mtvec" : "=r" (mtvec));

	return mtvec;
}

static inline __noprof void write_mtvec(unsigned long mtvec)
{
	asm volatile("csrw mtvec, %0" : : "r" (mtvec));
}

/* supervisor trap-vector base-address register */
static inline __noprof unsigned long read_stvec(void)
{
	unsigned long stvec;

	asm volatile("csrr %0, stvec" : "=r" (stvec));

	return stvec;
}

static inline __noprof void write_stvec(unsigned long stvec)
{
	asm volatile("csrw stvec, %0" : : "r" (stvec));
}

/* machine status register */
static inline __noprof unsigned long read_mstatus(void)
{
	unsigned long mstatus;

	asm volatile("csrr %0, mstatus" : "=r" (mstatus));

	return mstatus;
}

static inline __noprof void write_mstatus(unsigned long mstatus)
{
	asm volatile("csrw mstatus, %0" : : "r" (mstatus));
}

/* supervisor status register */
static inline __noprof unsigned long read_sstatus(void)
{
	unsigned long sstatus;

	asm volatile("csrr %0, sstatus" : "=r" (sstatus));

	return sstatus;
}

static inline __noprof void write_sstatus(unsigned long sstatus)
{
	asm volatile("csrw sstatus, %0" : : "r" (sstatus));
}

static inline __noprof void set_sstatus(unsigned long sstatus)
{
	unsigned long x;

	asm volatile ("csrrs %0, sstatus, %1" : "=r"(x) : "rK"(sstatus));
}

/* machine exception delegation */
static inline __noprof unsigned long read_medeleg(void)
{
	unsigned long medeleg;

	asm volatile("csrr %0, medeleg" : "=r" (medeleg));

	return medeleg;
}

static inline __noprof void write_medeleg(unsigned long medeleg)
{
	asm volatile("csrw medeleg, %0" : : "r" (medeleg));
}

/* machine interrupt delegation */
static inline __noprof unsigned long read_mideleg(void)
{
	unsigned long mideleg;

	asm volatile("csrr %0, mideleg" : "=r" (mideleg));

	return mideleg;
}

static inline __noprof void write_mideleg(unsigned long mideleg)
{
	asm volatile("csrw mideleg, %0" : : "r" (mideleg));
}

/* machine interrupt-enable register */
static inline __noprof unsigned long read_mie(void)
{
	unsigned long mie;

	asm volatile("csrr %0, mie" : "=r" (mie));

	return mie;
}

static inline __noprof void write_mie(unsigned long mie)
{
	asm volatile("csrw mie, %0" : : "r" (mie));
}

/* supervisor interrupt-enable register */
static inline __noprof unsigned long read_sie(void)
{
	unsigned long sie;

	asm volatile("csrr %0, sie" : "=r" (sie));

	return sie;
}

static inline __noprof void write_sie(unsigned long sie)
{
	asm volatile("csrw sie, %0" : : "r" (sie));
}

/* machine exception program counter */
static inline __noprof unsigned long read_mepc(void)
{
	unsigned long mepc;

	asm volatile("csrr %0, mepc" : "=r" (mepc));

	return mepc;
}

static inline __noprof void write_mepc(unsigned long mepc)
{
	asm volatile("csrw mepc, %0" : : "r" (mepc));
}

/* supervisor exception program counter */
static inline __noprof unsigned long read_sepc(void)
{
	unsigned long sepc;

	asm volatile("csrr %0, sepc" : "=r" (sepc));

	return sepc;
}

static inline __noprof void write_sepc(unsigned long sepc)
{
	asm volatile("csrw sepc, %0" : : "r" (sepc));
}

/* machine scratch register */
static inline __noprof unsigned long read_mscratch(void)
{
	unsigned long mscratch;

	asm volatile("csrr %0, mscratch" : "=r" (mscratch));

	return mscratch;
}

static inline __noprof void write_mscratch(unsigned long mscratch)
{
	asm volatile("csrw mscratch, %0" : : "r" (mscratch));
}

/* supervisor scratch register */
static inline __noprof unsigned long read_sscratch(void)
{
	unsigned long sscratch;

	asm volatile("csrr %0, sscratch" : "=r" (sscratch));

	return sscratch;
}

static inline __noprof void write_sscratch(unsigned long sscratch)
{
	asm volatile("csrw sscratch, %0" : : "r" (sscratch));
}

/* trap-return instructions */
static inline __noprof void mret(void)
{
	asm volatile("mret");
}

static inline __noprof void sret(void)
{
	asm volatile("sret");
}

static inline __noprof void uret(void)
{
	asm volatile("uret");
}

__noprof uint64_t read_time(void);

static inline __noprof uint64_t barrier_read_counter_timer(void)
{
	mb();	/* Get timer value after pending operations have completed */
	return read_time();
}

static inline __noprof uint32_t read_cntfrq(void)
{
	return CFG_RISCV_MTIME_RATE;
}

#endif /*__ASSEMBLER__*/

#endif /*__RISCV_H*/