1/* SPDX-License-Identifier: BSD-2-Clause */ 2/* 3 * Copyright 2022-2023 NXP 4 * Copyright 2024 Andes Technology Corporation 5 */ 6 7#include <asm.S> 8#include <generated/asm-defines.h> 9#include <keep.h> 10#include <kernel/thread.h> 11#include <kernel/thread_private.h> 12#include <mm/core_mmu.h> 13#include <riscv.h> 14#include <riscv_macros.S> 15#include <tee/optee_abi.h> 16#include <tee/teeabi_opteed.h> 17#include <tee/teeabi_opteed_macros.h> 18 19.macro get_thread_ctx res, tmp0 20 lw \tmp0, THREAD_CORE_LOCAL_CURR_THREAD(tp) 21 la \res, threads 221: 23 beqz \tmp0, 2f 24 addi \res, \res, THREAD_CTX_SIZE 25 addi \tmp0, \tmp0, -1 26 bnez \tmp0, 1b 272: 28.endm 29 30.macro b_if_prev_priv_is_u reg, label 31 andi \reg, \reg, CSR_XSTATUS_SPP 32 beqz \reg, \label 33.endm 34 35/* size_t __get_core_pos(void); */ 36FUNC __get_core_pos , : , .identity_map 37 lw a0, THREAD_CORE_LOCAL_HART_ID(tp) 38 ret 39END_FUNC __get_core_pos 40 41FUNC thread_trap_vect , : 42 csrrw tp, CSR_XSCRATCH, tp 43 bnez tp, 0f 44 /* Read tp back */ 45 csrrw tp, CSR_XSCRATCH, tp 46 j trap_from_kernel 470: 48 /* Now tp is thread_core_local */ 49 j trap_from_user 50thread_trap_vect_end: 51END_FUNC thread_trap_vect 52 53LOCAL_FUNC trap_from_kernel, : 54 /* Save sp, a0, a1 into temporary spaces of thread_core_local */ 55 store_xregs tp, THREAD_CORE_LOCAL_X0, REG_SP 56 store_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 57 58 csrr a0, CSR_XCAUSE 59 /* MSB of cause differentiates between interrupts and exceptions */ 60 bge a0, zero, exception_from_kernel 61 62interrupt_from_kernel: 63 /* Get thread context as sp */ 64 get_thread_ctx sp, a0 65 66 /* Load and save kernel sp */ 67 load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0 68 store_xregs sp, THREAD_CTX_REG_SP, REG_A0 69 70 /* Restore user a0, a1 which can be saved later */ 71 load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 72 73 /* Save all other GPRs */ 74 store_xregs sp, THREAD_CTX_REG_RA, REG_RA 75 store_xregs sp, THREAD_CTX_REG_GP, REG_GP 76 store_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2 77 store_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1 78 store_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7 79 store_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11 80 store_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6 81 /* Save XIE */ 82 csrr t0, CSR_XIE 83 store_xregs sp, THREAD_CTX_REG_IE, REG_T0 84 /* Mask all interrupts */ 85 csrw CSR_XIE, x0 86 /* Save XSTATUS */ 87 csrr t0, CSR_XSTATUS 88 store_xregs sp, THREAD_CTX_REG_STATUS, REG_T0 89 /* Save XEPC */ 90 csrr t0, CSR_XEPC 91 store_xregs sp, THREAD_CTX_REG_EPC, REG_T0 92 93 /* 94 * a0 = struct thread_ctx_regs *regs 95 * a1 = cause 96 */ 97 mv a0, sp 98 csrr a1, CSR_XCAUSE 99 /* Load tmp_stack_va_end as current sp. */ 100 load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP 101 call thread_interrupt_handler 102 103 /* Get thread context as sp */ 104 get_thread_ctx sp, t0 105 /* Restore XEPC */ 106 load_xregs sp, THREAD_CTX_REG_EPC, REG_T0 107 csrw CSR_XEPC, t0 108 /* Restore XIE */ 109 load_xregs sp, THREAD_CTX_REG_IE, REG_T0 110 csrw CSR_XIE, t0 111 /* Restore XSTATUS */ 112 load_xregs sp, THREAD_CTX_REG_STATUS, REG_T0 113 csrw CSR_XSTATUS, t0 114 /* Set scratch as thread_core_local */ 115 csrw CSR_XSCRATCH, tp 116 /* Restore all GPRs */ 117 load_xregs sp, THREAD_CTX_REG_RA, REG_RA 118 load_xregs sp, THREAD_CTX_REG_GP, REG_GP 119 load_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2 120 load_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1 121 load_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7 122 load_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11 123 load_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6 124 load_xregs sp, THREAD_CTX_REG_SP, REG_SP 125 XRET 126 127exception_from_kernel: 128 /* 129 * Update core local flags. 130 * flags = (flags << THREAD_CLF_SAVED_SHIFT) | THREAD_CLF_ABORT; 131 */ 132 lw a0, THREAD_CORE_LOCAL_FLAGS(tp) 133 slli a0, a0, THREAD_CLF_SAVED_SHIFT 134 ori a0, a0, THREAD_CLF_ABORT 135 li a1, (THREAD_CLF_ABORT << THREAD_CLF_SAVED_SHIFT) 136 and a1, a0, a1 137 bnez a1, sel_tmp_sp 138 139 /* Select abort stack */ 140 load_xregs tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_A1 141 j set_sp 142 143sel_tmp_sp: 144 /* We have an abort while using the abort stack, select tmp stack */ 145 load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_A1 146 ori a0, a0, THREAD_CLF_TMP /* flags |= THREAD_CLF_TMP; */ 147 148set_sp: 149 mv sp, a1 150 sw a0, THREAD_CORE_LOCAL_FLAGS(tp) 151 152 /* 153 * Save state on stack 154 */ 155 addi sp, sp, -THREAD_ABT_REGS_SIZE 156 157 /* Save kernel sp */ 158 load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0 159 store_xregs sp, THREAD_ABT_REG_SP, REG_A0 160 161 /* Restore kernel a0, a1 which can be saved later */ 162 load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 163 164 /* Save all other GPRs */ 165 store_xregs sp, THREAD_ABT_REG_RA, REG_RA 166 store_xregs sp, THREAD_ABT_REG_GP, REG_GP 167 store_xregs sp, THREAD_ABT_REG_TP, REG_TP 168 store_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2 169 store_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1 170 store_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7 171 store_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11 172 store_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6 173 /* Save XIE */ 174 csrr t0, CSR_XIE 175 store_xregs sp, THREAD_ABT_REG_IE, REG_T0 176 /* Mask all interrupts */ 177 csrw CSR_XIE, x0 178 /* Save XSTATUS */ 179 csrr t0, CSR_XSTATUS 180 store_xregs sp, THREAD_ABT_REG_STATUS, REG_T0 181 /* Save XEPC */ 182 csrr t0, CSR_XEPC 183 store_xregs sp, THREAD_ABT_REG_EPC, REG_T0 184 /* Save XTVAL */ 185 csrr t0, CSR_XTVAL 186 store_xregs sp, THREAD_ABT_REG_TVAL, REG_T0 187 /* Save XCAUSE */ 188 csrr a0, CSR_XCAUSE 189 store_xregs sp, THREAD_ABT_REG_CAUSE, REG_A0 190 191 /* 192 * a0 = cause 193 * a1 = sp (struct thread_abort_regs *regs) 194 * Call abort_handler(cause, regs) 195 */ 196 mv a1, sp 197 call abort_handler 198 199 /* 200 * Restore state from stack 201 */ 202 203 /* Restore XEPC */ 204 load_xregs sp, THREAD_ABT_REG_EPC, REG_T0 205 csrw CSR_XEPC, t0 206 /* Restore XIE */ 207 load_xregs sp, THREAD_ABT_REG_IE, REG_T0 208 csrw CSR_XIE, t0 209 /* Restore XSTATUS */ 210 load_xregs sp, THREAD_ABT_REG_STATUS, REG_T0 211 csrw CSR_XSTATUS, t0 212 /* Set scratch as thread_core_local */ 213 csrw CSR_XSCRATCH, tp 214 215 /* Update core local flags */ 216 lw a0, THREAD_CORE_LOCAL_FLAGS(tp) 217 srli a0, a0, THREAD_CLF_SAVED_SHIFT 218 sw a0, THREAD_CORE_LOCAL_FLAGS(tp) 219 220 /* Restore all GPRs */ 221 load_xregs sp, THREAD_ABT_REG_RA, REG_RA 222 load_xregs sp, THREAD_ABT_REG_GP, REG_GP 223 load_xregs sp, THREAD_ABT_REG_TP, REG_TP 224 load_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2 225 load_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1 226 load_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7 227 load_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11 228 load_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6 229 load_xregs sp, THREAD_ABT_REG_SP, REG_SP 230 XRET 231END_FUNC trap_from_kernel 232 233LOCAL_FUNC trap_from_user, : 234 /* Save user sp, a0, a1 into temporary spaces of thread_core_local */ 235 store_xregs tp, THREAD_CORE_LOCAL_X0, REG_SP 236 store_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 237 238 csrr a0, CSR_XCAUSE 239 /* MSB of cause differentiates between interrupts and exceptions */ 240 bge a0, zero, exception_from_user 241 242interrupt_from_user: 243 /* Get thread context as sp */ 244 get_thread_ctx sp, a0 245 246 /* Save user sp */ 247 load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0 248 store_xregs sp, THREAD_CTX_REG_SP, REG_A0 249 250 /* Restore user a0, a1 which can be saved later */ 251 load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 252 253 /* Save user gp */ 254 store_xregs sp, THREAD_CTX_REG_GP, REG_GP 255 256 /* 257 * Set the scratch register to 0 such in case of a recursive 258 * exception thread_trap_vect() knows that it is emitted from kernel. 259 */ 260 csrrw gp, CSR_XSCRATCH, zero 261 /* Save user tp we previously swapped into CSR_XSCRATCH */ 262 store_xregs sp, THREAD_CTX_REG_TP, REG_GP 263 /* Set kernel gp */ 264.option push 265.option norelax 266 la gp, __global_pointer$ 267.option pop 268 /* Save all other GPRs */ 269 store_xregs sp, THREAD_CTX_REG_RA, REG_RA 270 store_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2 271 store_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1 272 store_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7 273 store_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11 274 store_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6 275 /* Save XIE */ 276 csrr t0, CSR_XIE 277 store_xregs sp, THREAD_CTX_REG_IE, REG_T0 278 /* Mask all interrupts */ 279 csrw CSR_XIE, x0 280 /* Save XSTATUS */ 281 csrr t0, CSR_XSTATUS 282 store_xregs sp, THREAD_CTX_REG_STATUS, REG_T0 283 /* Save XEPC */ 284 csrr t0, CSR_XEPC 285 store_xregs sp, THREAD_CTX_REG_EPC, REG_T0 286 287 /* 288 * a0 = struct thread_ctx_regs *regs 289 * a1 = cause 290 */ 291 mv a0, sp 292 csrr a1, CSR_XCAUSE 293 /* Load tmp_stack_va_end as current sp. */ 294 load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP 295 call thread_interrupt_handler 296 297 /* Get thread context as sp */ 298 get_thread_ctx sp, t0 299 /* Restore XEPC */ 300 load_xregs sp, THREAD_CTX_REG_EPC, REG_T0 301 csrw CSR_XEPC, t0 302 /* Restore XIE */ 303 load_xregs sp, THREAD_CTX_REG_IE, REG_T0 304 csrw CSR_XIE, t0 305 /* Restore XSTATUS */ 306 load_xregs sp, THREAD_CTX_REG_STATUS, REG_T0 307 csrw CSR_XSTATUS, t0 308 /* Set scratch as thread_core_local */ 309 csrw CSR_XSCRATCH, tp 310 /* Restore all GPRs */ 311 load_xregs sp, THREAD_CTX_REG_RA, REG_RA 312 load_xregs sp, THREAD_CTX_REG_GP, REG_GP 313 load_xregs sp, THREAD_CTX_REG_TP, REG_TP 314 load_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2 315 load_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1 316 load_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7 317 load_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11 318 load_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6 319 load_xregs sp, THREAD_CTX_REG_SP, REG_SP 320 XRET 321 322exception_from_user: 323 /* a0 is CSR_XCAUSE */ 324 li a1, CAUSE_USER_ECALL 325 bne a0, a1, abort_from_user 326ecall_from_user: 327 /* Load and set kernel sp from thread context */ 328 get_thread_ctx a0, a1 329 load_xregs a0, THREAD_CTX_KERN_SP, REG_SP 330 331 /* Now sp is kernel sp, create stack for struct thread_scall_regs */ 332 addi sp, sp, -THREAD_SCALL_REGS_SIZE 333 /* Save user sp */ 334 load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0 335 store_xregs sp, THREAD_SCALL_REG_SP, REG_A0 336 337 /* Restore user a0, a1 which can be saved later */ 338 load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 339 340 /* Save user gp */ 341 store_xregs sp, THREAD_SCALL_REG_GP, REG_GP 342 /* 343 * Set the scratch register to 0 such in case of a recursive 344 * exception thread_trap_vect() knows that it is emitted from kernel. 345 */ 346 csrrw gp, CSR_XSCRATCH, zero 347 /* Save user tp we previously swapped into CSR_XSCRATCH */ 348 store_xregs sp, THREAD_SCALL_REG_TP, REG_GP 349 /* Set kernel gp */ 350.option push 351.option norelax 352 la gp, __global_pointer$ 353.option pop 354 355 /* Save other caller-saved registers */ 356 store_xregs sp, THREAD_SCALL_REG_RA, REG_RA 357 store_xregs sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2 358 store_xregs sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7 359 store_xregs sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6 360 /* Save XIE */ 361 csrr a0, CSR_XIE 362 store_xregs sp, THREAD_SCALL_REG_IE, REG_A0 363 /* Mask all interrupts */ 364 csrw CSR_XIE, zero 365 /* Save XSTATUS */ 366 csrr a0, CSR_XSTATUS 367 store_xregs sp, THREAD_SCALL_REG_STATUS, REG_A0 368 /* Save XEPC */ 369 csrr a0, CSR_XEPC 370 store_xregs sp, THREAD_SCALL_REG_EPC, REG_A0 371 372 /* 373 * a0 = struct thread_scall_regs *regs 374 * Call thread_scall_handler(regs) 375 */ 376 mv a0, sp 377 call thread_scall_handler 378 379 /* 380 * Save kernel sp we'll had at the beginning of this function. 381 * This is when this TA has called another TA because 382 * __thread_enter_user_mode() also saves the stack pointer in this 383 * field. 384 */ 385 get_thread_ctx a0, a1 386 addi t0, sp, THREAD_SCALL_REGS_SIZE 387 store_xregs a0, THREAD_CTX_KERN_SP, REG_T0 388 389 /* 390 * We are returning to U-Mode, on return, the program counter 391 * is set to xsepc (pc=xepc), we add 4 (size of an instruction) 392 * to continue to next instruction. 393 */ 394 load_xregs sp, THREAD_SCALL_REG_EPC, REG_T0 395 addi t0, t0, 4 396 csrw CSR_XEPC, t0 397 398 /* Restore XIE */ 399 load_xregs sp, THREAD_SCALL_REG_IE, REG_T0 400 csrw CSR_XIE, t0 401 /* Restore XSTATUS */ 402 load_xregs sp, THREAD_SCALL_REG_STATUS, REG_T0 403 csrw CSR_XSTATUS, t0 404 /* Set scratch as thread_core_local */ 405 csrw CSR_XSCRATCH, tp 406 /* Restore caller-saved registers */ 407 load_xregs sp, THREAD_SCALL_REG_RA, REG_RA 408 load_xregs sp, THREAD_SCALL_REG_GP, REG_GP 409 load_xregs sp, THREAD_SCALL_REG_TP, REG_TP 410 load_xregs sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2 411 load_xregs sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7 412 load_xregs sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6 413 load_xregs sp, THREAD_SCALL_REG_SP, REG_SP 414 XRET 415 416abort_from_user: 417 /* 418 * Update core local flags 419 */ 420 lw a0, THREAD_CORE_LOCAL_FLAGS(tp) 421 slli a0, a0, THREAD_CLF_SAVED_SHIFT 422 ori a0, a0, THREAD_CLF_ABORT 423 sw a0, THREAD_CORE_LOCAL_FLAGS(tp) 424 425 /* 426 * Save state on stack 427 */ 428 429 /* Load abt_stack_va_end and set it as sp */ 430 load_xregs tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_SP 431 432 /* Now sp is abort sp, create stack for struct thread_abort_regs */ 433 addi sp, sp, -THREAD_ABT_REGS_SIZE 434 435 /* Save user sp */ 436 load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0 437 store_xregs sp, THREAD_ABT_REG_SP, REG_A0 438 439 /* Restore user a0, a1 which can be saved later */ 440 load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1 441 442 /* Save user gp */ 443 store_xregs sp, THREAD_ABT_REG_GP, REG_GP 444 445 /* 446 * Set the scratch register to 0 such in case of a recursive 447 * exception thread_trap_vect() knows that it is emitted from kernel. 448 */ 449 csrrw gp, CSR_XSCRATCH, zero 450 /* Save user tp we previously swapped into CSR_XSCRATCH */ 451 store_xregs sp, THREAD_ABT_REG_TP, REG_GP 452 /* Set kernel gp */ 453.option push 454.option norelax 455 la gp, __global_pointer$ 456.option pop 457 /* Save all other GPRs */ 458 store_xregs sp, THREAD_ABT_REG_RA, REG_RA 459 store_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2 460 store_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1 461 store_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7 462 store_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11 463 store_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6 464 /* Save XIE */ 465 csrr t0, CSR_XIE 466 store_xregs sp, THREAD_ABT_REG_IE, REG_T0 467 /* Mask all interrupts */ 468 csrw CSR_XIE, x0 469 /* Save XSTATUS */ 470 csrr t0, CSR_XSTATUS 471 store_xregs sp, THREAD_ABT_REG_STATUS, REG_T0 472 /* Save XEPC */ 473 csrr t0, CSR_XEPC 474 store_xregs sp, THREAD_ABT_REG_EPC, REG_T0 475 /* Save XTVAL */ 476 csrr t0, CSR_XTVAL 477 store_xregs sp, THREAD_ABT_REG_TVAL, REG_T0 478 /* Save XCAUSE */ 479 csrr a0, CSR_XCAUSE 480 store_xregs sp, THREAD_ABT_REG_CAUSE, REG_A0 481 482 /* 483 * a0 = cause 484 * a1 = sp (struct thread_abort_regs *regs) 485 * Call abort_handler(cause, regs) 486 */ 487 mv a1, sp 488 call abort_handler 489 490 /* 491 * Restore state from stack 492 */ 493 494 /* Restore XEPC */ 495 load_xregs sp, THREAD_ABT_REG_EPC, REG_T0 496 csrw CSR_XEPC, t0 497 /* Restore XIE */ 498 load_xregs sp, THREAD_ABT_REG_IE, REG_T0 499 csrw CSR_XIE, t0 500 /* Restore XSTATUS */ 501 load_xregs sp, THREAD_ABT_REG_STATUS, REG_T0 502 csrw CSR_XSTATUS, t0 503 /* Set scratch as thread_core_local */ 504 csrw CSR_XSCRATCH, tp 505 506 /* Update core local flags */ 507 lw a0, THREAD_CORE_LOCAL_FLAGS(tp) 508 srli a0, a0, THREAD_CLF_SAVED_SHIFT 509 sw a0, THREAD_CORE_LOCAL_FLAGS(tp) 510 511 /* Restore all GPRs */ 512 load_xregs sp, THREAD_ABT_REG_RA, REG_RA 513 load_xregs sp, THREAD_ABT_REG_GP, REG_GP 514 load_xregs sp, THREAD_ABT_REG_TP, REG_TP 515 load_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2 516 load_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1 517 load_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7 518 load_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11 519 load_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6 520 load_xregs sp, THREAD_ABT_REG_SP, REG_SP 521 XRET 522END_FUNC trap_from_user 523 524/* 525 * void thread_unwind_user_mode(uint32_t ret, uint32_t exit_status0, 526 * uint32_t exit_status1); 527 * See description in thread.h 528 */ 529FUNC thread_unwind_user_mode , : 530 531 /* Store the exit status */ 532 load_xregs sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A3, REG_A5 533 sw a1, (a4) 534 sw a2, (a5) 535 536 /* Save user callee regs */ 537 store_xregs a3, THREAD_CTX_REG_S0, REG_S0, REG_S1 538 store_xregs a3, THREAD_CTX_REG_S2, REG_S2, REG_S11 539 store_xregs a3, THREAD_CTX_REG_SP, REG_SP, REG_TP 540 541 /* Restore kernel callee regs */ 542 mv a1, sp 543 544 load_xregs a1, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP 545 load_xregs a1, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1 546 load_xregs a1, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11 547 548 add sp, sp, THREAD_USER_MODE_REC_SIZE 549 550 /* Return from the call of thread_enter_user_mode() */ 551 ret 552END_FUNC thread_unwind_user_mode 553 554/* 555 * void thread_exit_user_mode(unsigned long a0, unsigned long a1, 556 * unsigned long a2, unsigned long a3, 557 * unsigned long sp, unsigned long pc, 558 * unsigned long status); 559 */ 560FUNC thread_exit_user_mode , : 561 /* Set kernel stack pointer */ 562 mv sp, a4 563 564 /* Set xSTATUS */ 565 csrw CSR_XSTATUS, a6 566 567 /* 568 * Zeroize xSCRATCH to indicate to thread_trap_vect() 569 * that we are executing in kernel. 570 */ 571 csrw CSR_XSCRATCH, zero 572 573 /* 574 * Mask all interrupts first. Interrupts will be unmasked after 575 * returning from __thread_enter_user_mode(). 576 */ 577 csrw CSR_XIE, zero 578 579 /* Set epc as thread_unwind_user_mode() */ 580 csrw CSR_XEPC, a5 581 582 XRET 583END_FUNC thread_exit_user_mode 584 585/* 586 * uint32_t __thread_enter_user_mode(struct thread_ctx_regs *regs, 587 * uint32_t *exit_status0, 588 * uint32_t *exit_status1); 589 */ 590FUNC __thread_enter_user_mode , : 591 /* Disable kernel mode exceptions first */ 592 csrc CSR_XSTATUS, CSR_XSTATUS_IE 593 594 /* 595 * Create and fill in the struct thread_user_mode_rec 596 */ 597 addi sp, sp, -THREAD_USER_MODE_REC_SIZE 598 store_xregs sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A0, REG_A2 599 store_xregs sp, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP 600 store_xregs sp, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1 601 store_xregs sp, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11 602 603 /* 604 * Save the kernel stack pointer in the thread context 605 */ 606 607 /* Get pointer to current thread context */ 608 get_thread_ctx s0, s1 609 610 /* 611 * Save kernel stack pointer to ensure that 612 * thread_exit_user_mode() uses correct stack pointer. 613 */ 614 615 store_xregs s0, THREAD_CTX_KERN_SP, REG_SP 616 /* 617 * Save thread_core_local in xSCRATCH to ensure that thread_trap_vect() 618 * uses correct core local structure. 619 */ 620 csrw CSR_XSCRATCH, tp 621 622 /* Set user ie */ 623 load_xregs a0, THREAD_CTX_REG_IE, REG_S0 624 csrw CSR_XIE, s0 625 626 /* Set user status */ 627 load_xregs a0, THREAD_CTX_REG_STATUS, REG_S0 628 csrw CSR_XSTATUS, s0 629 630 /* Load the rest of the general purpose registers */ 631 load_xregs a0, THREAD_CTX_REG_RA, REG_RA, REG_TP 632 load_xregs a0, THREAD_CTX_REG_T0, REG_T0, REG_T2 633 load_xregs a0, THREAD_CTX_REG_S0, REG_S0, REG_S1 634 load_xregs a0, THREAD_CTX_REG_S2, REG_S2, REG_S11 635 load_xregs a0, THREAD_CTX_REG_T3, REG_T3, REG_T6 636 load_xregs a0, THREAD_CTX_REG_A0, REG_A0, REG_A7 637 638 /* Set exception program counter */ 639 csrw CSR_XEPC, ra 640 641 /* Jump into user mode */ 642 XRET 643END_FUNC __thread_enter_user_mode 644 645/* void thread_resume(struct thread_ctx_regs *regs) */ 646FUNC thread_resume , : 647 /* Disable global interrupts first */ 648 csrc CSR_XSTATUS, CSR_XSTATUS_IE 649 650 /* Restore epc */ 651 load_xregs a0, THREAD_CTX_REG_EPC, REG_T0 652 csrw CSR_XEPC, t0 653 654 /* Restore ie */ 655 load_xregs a0, THREAD_CTX_REG_IE, REG_T0 656 csrw CSR_XIE, t0 657 658 /* Restore status */ 659 load_xregs a0, THREAD_CTX_REG_STATUS, REG_T0 660 csrw CSR_XSTATUS, t0 661 662 /* Check if previous privilege mode by status.SPP */ 663 b_if_prev_priv_is_u t0, 1f 664 /* Set scratch as zero to indicate that we are in kernel mode */ 665 csrw CSR_XSCRATCH, zero 666 j 2f 6671: 668 /* Resume to U-mode, set scratch as tp to be used in the trap handler */ 669 csrw CSR_XSCRATCH, tp 6702: 671 /* Restore all general-purpose registers */ 672 load_xregs a0, THREAD_CTX_REG_RA, REG_RA, REG_TP 673 load_xregs a0, THREAD_CTX_REG_T0, REG_T0, REG_T2 674 load_xregs a0, THREAD_CTX_REG_S0, REG_S0, REG_S1 675 load_xregs a0, THREAD_CTX_REG_S2, REG_S2, REG_S11 676 load_xregs a0, THREAD_CTX_REG_T3, REG_T3, REG_T6 677 load_xregs a0, THREAD_CTX_REG_A0, REG_A0, REG_A7 678 679 XRET 680END_FUNC thread_resume 681 682/* void thread_foreign_interrupt_handler(struct thread_ctx_regs *regs) */ 683FUNC thread_foreign_interrupt_handler , : 684 /* Update 32-bit core local flags */ 685 lw s1, THREAD_CORE_LOCAL_FLAGS(tp) 686 slli s1, s1, THREAD_CLF_SAVED_SHIFT 687 ori s1, s1, (THREAD_CLF_TMP | THREAD_CLF_FIQ) 688 sw s1, THREAD_CORE_LOCAL_FLAGS(tp) 689 690 /* 691 * Mark current thread as suspended. 692 * a0 = THREAD_FLAGS_EXIT_ON_FOREIGN_INTR 693 * a1 = status 694 * a2 = epc 695 * thread_state_suspend(flags, status, pc) 696 */ 697 LDR a1, THREAD_CTX_REG_STATUS(a0) 698 LDR a2, THREAD_CTX_REG_EPC(a0) 699 li a0, THREAD_FLAGS_EXIT_ON_FOREIGN_INTR 700 call thread_state_suspend 701 /* Now return value a0 contains suspended thread ID. */ 702 703 /* Update core local flags */ 704 lw s1, THREAD_CORE_LOCAL_FLAGS(tp) 705 srli s1, s1, THREAD_CLF_SAVED_SHIFT 706 ori s1, s1, THREAD_CLF_TMP 707 sw s1, THREAD_CORE_LOCAL_FLAGS(tp) 708 709 /* Passing thread index in a0, and return to untrusted domain. */ 710 mv a4, a0 711 li a0, TEEABI_OPTEED_RETURN_CALL_DONE 712 li a1, OPTEE_ABI_RETURN_RPC_FOREIGN_INTR 713 li a2, 0 714 li a3, 0 715 li a5, 0 716 j thread_return_to_udomain 717END_FUNC thread_foreign_interrupt_handler 718