/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 * Copyright 2024 Andes Technology Corporation
 */

#include <asm.S>
#include <generated/asm-defines.h>
#include <keep.h>
#include <kernel/thread.h>
#include <kernel/thread_private.h>
#include <mm/core_mmu.h>
#include <riscv.h>
#include <riscv_macros.S>

.macro get_thread_ctx res, tmp0
	lw	\tmp0, THREAD_CORE_LOCAL_CURR_THREAD(tp)
	la	\res, threads
1:
	beqz	\tmp0, 2f
	addi	\res, \res, THREAD_CTX_SIZE
	addi	\tmp0, \tmp0, -1
	bnez	\tmp0, 1b
2:
.endm

.macro b_if_prev_priv_is_u reg, label
	andi	\reg, \reg, CSR_XSTATUS_SPP
	beqz	\reg, \label
.endm

/* size_t __get_core_pos(void); */
FUNC __get_core_pos , : , .identity_map
	lw	a0, THREAD_CORE_LOCAL_HART_ID(tp)
	ret
END_FUNC __get_core_pos

FUNC thread_trap_vect , :
	csrrw	tp, CSR_XSCRATCH, tp
	bnez	tp, 0f
	/* Read tp back */
	csrrw	tp, CSR_XSCRATCH, tp
	j	trap_from_kernel
0:
	/* Now tp is thread_core_local */
	j	trap_from_user
thread_trap_vect_end:
END_FUNC thread_trap_vect

LOCAL_FUNC trap_from_kernel, :
	/* Save sp, a0, a1 into the temporary slots of thread_core_local */
	store_xregs	tp, THREAD_CORE_LOCAL_X0, REG_SP
	store_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	csrr	a0, CSR_XCAUSE
	/* MSB of cause differentiates between interrupts and exceptions */
	bge	a0, zero, exception_from_kernel

interrupt_from_kernel:
	/* Get thread context as sp */
	get_thread_ctx sp, a0

	/* Load and save kernel sp */
	load_xregs	tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs	sp, THREAD_CTX_REG_SP, REG_A0

	/* Restore a0, a1 so that they can be saved below with the other GPRs */
	load_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save all other GPRs */
	store_xregs	sp, THREAD_CTX_REG_RA, REG_RA
	store_xregs	sp, THREAD_CTX_REG_GP, REG_GP
	store_xregs	sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	store_xregs	sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs	sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	store_xregs	sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs	sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs	sp, THREAD_CTX_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs	sp, THREAD_CTX_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs	sp, THREAD_CTX_REG_EPC, REG_T0

	/*
	 * a0 = cause
	 * a1 = sp
	 * Call thread_interrupt_handler(cause, regs)
	 */
	csrr	a0, CSR_XCAUSE
	mv	a1, sp
	/* Load tmp_stack_va_end as current sp. */
	load_xregs	tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP
	call	thread_interrupt_handler

	/* Get thread context as sp */
	get_thread_ctx sp, t0
	/* Restore XEPC */
	load_xregs	sp, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs	sp, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs	sp, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore all GPRs */
	load_xregs	sp, THREAD_CTX_REG_RA, REG_RA
	load_xregs	sp, THREAD_CTX_REG_GP, REG_GP
	load_xregs	sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs	sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs	sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	load_xregs	sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs	sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs	sp, THREAD_CTX_REG_SP, REG_SP
	XRET

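/*
 * Kernel abort path below. C-like sketch of the stack selection
 * (illustrative only, mirrors the assembly that follows):
 *
 *   flags = (flags << THREAD_CLF_SAVED_SHIFT) | THREAD_CLF_ABORT;
 *   if (flags & (THREAD_CLF_ABORT << THREAD_CLF_SAVED_SHIFT)) {
 *           sp = tmp_stack_va_end;      nested abort, abort stack in use
 *           flags |= THREAD_CLF_TMP;
 *   } else {
 *           sp = abt_stack_va_end;
 *   }
 */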
exception_from_kernel:
	/*
	 * Update core local flags.
	 * flags = (flags << THREAD_CLF_SAVED_SHIFT) | THREAD_CLF_ABORT;
	 */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	slli	a0, a0, THREAD_CLF_SAVED_SHIFT
	ori	a0, a0, THREAD_CLF_ABORT
	li	a1, (THREAD_CLF_ABORT << THREAD_CLF_SAVED_SHIFT)
	and	a1, a0, a1
	bnez	a1, sel_tmp_sp

	/* Select abort stack */
	load_xregs	tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_A1
	j	set_sp

sel_tmp_sp:
	/* We have an abort while using the abort stack, select tmp stack */
	load_xregs	tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_A1
	ori	a0, a0, THREAD_CLF_TMP	/* flags |= THREAD_CLF_TMP; */

set_sp:
	mv	sp, a1
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/*
	 * Save state on stack
	 */
	addi	sp, sp, -THREAD_ABT_REGS_SIZE

	/* Save kernel sp */
	load_xregs	tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs	sp, THREAD_ABT_REG_SP, REG_A0

	/* Restore kernel a0, a1 so that they can be saved below */
	load_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save all other GPRs */
	store_xregs	sp, THREAD_ABT_REG_RA, REG_RA
	store_xregs	sp, THREAD_ABT_REG_GP, REG_GP
	store_xregs	sp, THREAD_ABT_REG_TP, REG_TP
	store_xregs	sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	store_xregs	sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	store_xregs	sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	store_xregs	sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	store_xregs	sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs	sp, THREAD_ABT_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs	sp, THREAD_ABT_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs	sp, THREAD_ABT_REG_EPC, REG_T0
	/* Save XTVAL */
	csrr	t0, CSR_XTVAL
	store_xregs	sp, THREAD_ABT_REG_TVAL, REG_T0
	/* Save XCAUSE */
	csrr	a0, CSR_XCAUSE
	store_xregs	sp, THREAD_ABT_REG_CAUSE, REG_A0

	/*
	 * a0 = cause
	 * a1 = sp (struct thread_abort_regs *regs)
	 * Call abort_handler(cause, regs)
	 */
	mv	a1, sp
	call	abort_handler

	/*
	 * Restore state from stack
	 */

	/* Restore XEPC */
	load_xregs	sp, THREAD_ABT_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs	sp, THREAD_ABT_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs	sp, THREAD_ABT_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp

	/* Update core local flags */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	srli	a0, a0, THREAD_CLF_SAVED_SHIFT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/* Restore all GPRs */
	load_xregs	sp, THREAD_ABT_REG_RA, REG_RA
	load_xregs	sp, THREAD_ABT_REG_GP, REG_GP
	load_xregs	sp, THREAD_ABT_REG_TP, REG_TP
	load_xregs	sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	load_xregs	sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	load_xregs	sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	load_xregs	sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	load_xregs	sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	load_xregs	sp, THREAD_ABT_REG_SP, REG_SP
	XRET
END_FUNC trap_from_kernel

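/*
 * trap_from_user() is entered from thread_trap_vect() with:
 * - tp: the per-core struct thread_core_local (swapped in from xSCRATCH)
 * - xSCRATCH: the user tp, saved into the trapped context further below
 * The user sp, a0 and a1 are still live on entry and are parked in the
 * temporary slots of thread_core_local before anything else is touched.
 */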
LOCAL_FUNC trap_from_user, :
	/* Save user sp, a0, a1 into the temporary slots of thread_core_local */
	store_xregs	tp, THREAD_CORE_LOCAL_X0, REG_SP
	store_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	csrr	a0, CSR_XCAUSE
	/* MSB of cause differentiates between interrupts and exceptions */
	bge	a0, zero, exception_from_user

interrupt_from_user:
	/* Get thread context as sp */
	get_thread_ctx sp, a0

	/* Save user sp */
	load_xregs	tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs	sp, THREAD_CTX_REG_SP, REG_A0

	/* Restore user a0, a1 so that they can be saved below */
	load_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs	sp, THREAD_CTX_REG_GP, REG_GP

	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows that the trap was taken from
	 * the kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs	sp, THREAD_CTX_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop
	/* Save all other GPRs */
	store_xregs	sp, THREAD_CTX_REG_RA, REG_RA
	store_xregs	sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	store_xregs	sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs	sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	store_xregs	sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs	sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs	sp, THREAD_CTX_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs	sp, THREAD_CTX_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs	sp, THREAD_CTX_REG_EPC, REG_T0

	/*
	 * a0 = cause
	 * a1 = sp
	 * Call thread_interrupt_handler(cause, regs)
	 */
	csrr	a0, CSR_XCAUSE
	mv	a1, sp
	/* Load tmp_stack_va_end as current sp. */
	load_xregs	tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP
	call	thread_interrupt_handler

	/* Get thread context as sp */
	get_thread_ctx sp, t0
	/* Restore XEPC */
	load_xregs	sp, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs	sp, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs	sp, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore all GPRs */
	load_xregs	sp, THREAD_CTX_REG_RA, REG_RA
	load_xregs	sp, THREAD_CTX_REG_GP, REG_GP
	load_xregs	sp, THREAD_CTX_REG_TP, REG_TP
	load_xregs	sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs	sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs	sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	load_xregs	sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs	sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs	sp, THREAD_CTX_REG_SP, REG_SP
	XRET

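/*
 * Exceptions from user mode are either syscalls (ecall) or aborts.
 * Dispatch sketch (mirrors the code below):
 *
 *   if (cause == CAUSE_USER_ECALL)
 *           handle the syscall via thread_scall_handler()
 *   else
 *           handle the fault via abort_handler()
 */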
exception_from_user:
	/* a0 is CSR_XCAUSE */
	li	a1, CAUSE_USER_ECALL
	bne	a0, a1, abort_from_user
ecall_from_user:
	/* Load and set kernel sp from thread context */
	get_thread_ctx a0, a1
	load_xregs	a0, THREAD_CTX_KERN_SP, REG_SP

	/* Now sp is kernel sp, create stack for struct thread_scall_regs */
	addi	sp, sp, -THREAD_SCALL_REGS_SIZE
	/* Save user sp */
	load_xregs	tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs	sp, THREAD_SCALL_REG_SP, REG_A0

	/* Restore user a0, a1 so that they can be saved below */
	load_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs	sp, THREAD_SCALL_REG_GP, REG_GP
	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows that the trap was taken from
	 * the kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs	sp, THREAD_SCALL_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop

	/* Save other caller-saved registers */
	store_xregs	sp, THREAD_SCALL_REG_RA, REG_RA
	store_xregs	sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2
	store_xregs	sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7
	store_xregs	sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	a0, CSR_XIE
	store_xregs	sp, THREAD_SCALL_REG_IE, REG_A0
	/* Mask all interrupts */
	csrw	CSR_XIE, zero
	/* Save XSTATUS */
	csrr	a0, CSR_XSTATUS
	store_xregs	sp, THREAD_SCALL_REG_STATUS, REG_A0
	/* Save XEPC */
	csrr	a0, CSR_XEPC
	store_xregs	sp, THREAD_SCALL_REG_EPC, REG_A0

	/*
	 * a0 = struct thread_scall_regs *regs
	 * Call thread_scall_handler(regs)
	 */
	mv	a0, sp
	call	thread_scall_handler

	/*
	 * Save the kernel sp we had at the beginning of this function.
	 * This is needed when this TA has called another TA, because
	 * __thread_enter_user_mode() also saves the stack pointer in this
	 * field.
	 */
	get_thread_ctx a0, a1
	addi	t0, sp, THREAD_SCALL_REGS_SIZE
	store_xregs	a0, THREAD_CTX_KERN_SP, REG_T0

	/*
	 * We are returning to U-mode. On return the program counter is set
	 * from xepc (pc = xepc), so we add 4 (the size of the ecall
	 * instruction) to continue at the next instruction.
	 */
	load_xregs	sp, THREAD_SCALL_REG_EPC, REG_T0
	addi	t0, t0, 4
	csrw	CSR_XEPC, t0

	/* Restore XIE */
	load_xregs	sp, THREAD_SCALL_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs	sp, THREAD_SCALL_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore caller-saved registers */
	load_xregs	sp, THREAD_SCALL_REG_RA, REG_RA
	load_xregs	sp, THREAD_SCALL_REG_GP, REG_GP
	load_xregs	sp, THREAD_SCALL_REG_TP, REG_TP
	load_xregs	sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2
	load_xregs	sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7
	load_xregs	sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6
	load_xregs	sp, THREAD_SCALL_REG_SP, REG_SP
	XRET

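/*
 * User abort path. Core-local flags are pushed on entry and popped on
 * the way out (sketch, mirrors the code below):
 *
 *   entry: flags = (flags << THREAD_CLF_SAVED_SHIFT) | THREAD_CLF_ABORT;
 *   exit:  flags >>= THREAD_CLF_SAVED_SHIFT;
 */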
abort_from_user:
	/*
	 * Update core local flags
	 */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	slli	a0, a0, THREAD_CLF_SAVED_SHIFT
	ori	a0, a0, THREAD_CLF_ABORT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/*
	 * Save state on stack
	 */

	/* Load abt_stack_va_end and set it as sp */
	load_xregs	tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_SP

	/* Now sp is abort sp, create stack for struct thread_abort_regs */
	addi	sp, sp, -THREAD_ABT_REGS_SIZE

	/* Save user sp */
	load_xregs	tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs	sp, THREAD_ABT_REG_SP, REG_A0

	/* Restore user a0, a1 so that they can be saved below */
	load_xregs	tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs	sp, THREAD_ABT_REG_GP, REG_GP

	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows that the trap was taken from
	 * the kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs	sp, THREAD_ABT_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop
	/* Save all other GPRs */
	store_xregs	sp, THREAD_ABT_REG_RA, REG_RA
	store_xregs	sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	store_xregs	sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	store_xregs	sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	store_xregs	sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	store_xregs	sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs	sp, THREAD_ABT_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs	sp, THREAD_ABT_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs	sp, THREAD_ABT_REG_EPC, REG_T0
	/* Save XTVAL */
	csrr	t0, CSR_XTVAL
	store_xregs	sp, THREAD_ABT_REG_TVAL, REG_T0
	/* Save XCAUSE */
	csrr	a0, CSR_XCAUSE
	store_xregs	sp, THREAD_ABT_REG_CAUSE, REG_A0

	/*
	 * a0 = cause
	 * a1 = sp (struct thread_abort_regs *regs)
	 * Call abort_handler(cause, regs)
	 */
	mv	a1, sp
	call	abort_handler

	/*
	 * Restore state from stack
	 */

	/* Restore XEPC */
	load_xregs	sp, THREAD_ABT_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs	sp, THREAD_ABT_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs	sp, THREAD_ABT_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp

	/* Update core local flags */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	srli	a0, a0, THREAD_CLF_SAVED_SHIFT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/* Restore all GPRs */
	load_xregs	sp, THREAD_ABT_REG_RA, REG_RA
	load_xregs	sp, THREAD_ABT_REG_GP, REG_GP
	load_xregs	sp, THREAD_ABT_REG_TP, REG_TP
	load_xregs	sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	load_xregs	sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	load_xregs	sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	load_xregs	sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	load_xregs	sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	load_xregs	sp, THREAD_ABT_REG_SP, REG_SP
	XRET
END_FUNC trap_from_user

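/*
 * Note on the user-mode entry/exit pair below:
 * __thread_enter_user_mode() stores a0..a2 (the regs pointer and the two
 * exit status pointers) together with the kernel callee-saved registers
 * in a struct thread_user_mode_rec on the kernel stack, and
 * thread_unwind_user_mode() reloads them (into a3..a5) to report the
 * exit status and restore the kernel context.
 */
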
/*
 * void thread_unwind_user_mode(uint32_t ret, uint32_t exit_status0,
 *				uint32_t exit_status1);
 * See description in thread.h
 */
FUNC thread_unwind_user_mode , :

	/* Store the exit status */
	load_xregs	sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A3, REG_A5
	sw	a1, (a4)
	sw	a2, (a5)

	/* Save user callee regs */
	store_xregs	a3, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs	a3, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs	a3, THREAD_CTX_REG_SP, REG_SP, REG_TP

	/* Restore kernel callee regs */
	mv	a1, sp

	load_xregs	a1, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP
	load_xregs	a1, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1
	load_xregs	a1, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11

	add	sp, sp, THREAD_USER_MODE_REC_SIZE

	/* Return from the call of thread_enter_user_mode() */
	ret
END_FUNC thread_unwind_user_mode

/*
 * void thread_exit_user_mode(unsigned long a0, unsigned long a1,
 *			      unsigned long a2, unsigned long a3,
 *			      unsigned long sp, unsigned long pc,
 *			      unsigned long status);
 */
FUNC thread_exit_user_mode , :
	/* Set kernel stack pointer */
	mv	sp, a4

	/* Set xSTATUS */
	csrw	CSR_XSTATUS, a6

	/*
	 * Zeroize xSCRATCH to indicate to thread_trap_vect()
	 * that we are executing in the kernel.
	 */
	csrw	CSR_XSCRATCH, zero

	/*
	 * Mask all interrupts first. Interrupts will be unmasked after
	 * returning from __thread_enter_user_mode().
	 */
	csrw	CSR_XIE, zero

	/* Set epc as thread_unwind_user_mode() */
	csrw	CSR_XEPC, a5

	XRET
END_FUNC thread_exit_user_mode

/*
 * uint32_t __thread_enter_user_mode(struct thread_ctx_regs *regs,
 *				     uint32_t *exit_status0,
 *				     uint32_t *exit_status1);
 */
FUNC __thread_enter_user_mode , :
	/* Disable global interrupts first */
	csrc	CSR_XSTATUS, CSR_XSTATUS_IE

	/*
	 * Create and fill in the struct thread_user_mode_rec
	 */
	addi	sp, sp, -THREAD_USER_MODE_REC_SIZE
	store_xregs	sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A0, REG_A2
	store_xregs	sp, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP
	store_xregs	sp, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1
	store_xregs	sp, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11

	/*
	 * Save the kernel stack pointer in the thread context
	 */

	/* Get pointer to current thread context */
	get_thread_ctx s0, s1

	/*
	 * Save the kernel stack pointer to ensure that
	 * thread_exit_user_mode() uses the correct stack pointer.
	 */
	store_xregs	s0, THREAD_CTX_KERN_SP, REG_SP
	/*
	 * Save thread_core_local in xSCRATCH to ensure that thread_trap_vect()
	 * uses the correct core-local structure.
	 */
	csrw	CSR_XSCRATCH, tp

	/* Set user ie */
	load_xregs	a0, THREAD_CTX_REG_IE, REG_S0
	csrw	CSR_XIE, s0

	/* Set user status */
	load_xregs	a0, THREAD_CTX_REG_STATUS, REG_S0
	csrw	CSR_XSTATUS, s0

	/* Load the rest of the general purpose registers */
	load_xregs	a0, THREAD_CTX_REG_RA, REG_RA, REG_TP
	load_xregs	a0, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs	a0, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs	a0, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs	a0, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs	a0, THREAD_CTX_REG_A0, REG_A0, REG_A7

	/* Set exception program counter */
	csrw	CSR_XEPC, ra

	/* Jump into user mode */
	XRET
END_FUNC __thread_enter_user_mode

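/*
 * Sketch of the xSCRATCH handling in thread_resume() below (mirrors the
 * SPP check done with b_if_prev_priv_is_u):
 *
 *   if (regs->status & CSR_XSTATUS_SPP)
 *           xscratch = 0;    resuming in kernel mode
 *   else
 *           xscratch = tp;   resuming in U-mode, tp is thread_core_local
 */
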
/* void thread_resume(struct thread_ctx_regs *regs) */
FUNC thread_resume , :
	/* Disable global interrupts first */
	csrc	CSR_XSTATUS, CSR_XSTATUS_IE

	/* Restore epc */
	load_xregs	a0, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0

	/* Restore ie */
	load_xregs	a0, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0

	/* Restore status */
	load_xregs	a0, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0

	/* Check the previous privilege mode via status.SPP */
	b_if_prev_priv_is_u t0, 1f
	/* Set scratch as zero to indicate that we are in kernel mode */
	csrw	CSR_XSCRATCH, zero
	j	2f
1:
	/* Resume to U-mode, set scratch as tp to be used in the trap handler */
	csrw	CSR_XSCRATCH, tp
2:
	/* Restore all general-purpose registers */
	load_xregs	a0, THREAD_CTX_REG_RA, REG_RA, REG_TP
	load_xregs	a0, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs	a0, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs	a0, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs	a0, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs	a0, THREAD_CTX_REG_A0, REG_A0, REG_A7

	XRET
END_FUNC thread_resume