xref: /optee_os/core/arch/riscv/kernel/thread_rv.S (revision ef00a9232c4876b70c6aec88be33e6977303796b)
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright 2022-2023 NXP
 * Copyright 2024 Andes Technology Corporation
 */

#include <asm.S>
#include <generated/asm-defines.h>
#include <keep.h>
#include <kernel/thread.h>
#include <kernel/thread_private.h>
#include <mm/core_mmu.h>
#include <riscv.h>
#include <riscv_macros.S>

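/*
 * get_thread_ctx - compute a pointer to the struct thread_ctx of the
 * thread currently running on this hart, stepping THREAD_CTX_SIZE bytes
 * per index. Roughly equivalent C (an illustrative sketch, assuming
 * "threads" is the global thread context array and curr_thread the index
 * kept in struct thread_core_local):
 *
 *	res = &threads[thread_core_local->curr_thread];
 *
 * Clobbers \tmp0.
 */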
.macro get_thread_ctx res, tmp0
	lw	\tmp0, THREAD_CORE_LOCAL_CURR_THREAD(tp)
	la	\res, threads
1:
	beqz	\tmp0, 2f
	addi	\res, \res, THREAD_CTX_SIZE
	addi	\tmp0, \tmp0, -1
	bnez	\tmp0, 1b
2:
.endm

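/*
 * b_if_prev_priv_is_u - branch to \label if the privilege level we
 * trapped from, as recorded in the SPP field of the xstatus value held
 * in \reg, is U-mode. Clobbers \reg.
 */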
.macro b_if_prev_priv_is_u reg, label
	andi	\reg, \reg, CSR_XSTATUS_SPP
	beqz	\reg, \label
.endm

/* size_t __get_core_pos(void); */
FUNC __get_core_pos , : , .identity_map
	lw	a0, THREAD_CORE_LOCAL_HART_ID(tp)
	ret
END_FUNC __get_core_pos

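/*
 * Trap vector entry point.
 *
 * By convention in this file, CSR_XSCRATCH holds the address of this
 * hart's struct thread_core_local while executing in user mode and is
 * normally zero while executing in the kernel. Swapping it with tp
 * therefore tells where the trap came from: a zero result means the trap
 * was taken from the kernel, a non-zero result is the thread_core_local
 * pointer needed by the user trap path.
 */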
FUNC thread_trap_vect , :
	csrrw	tp, CSR_XSCRATCH, tp
	bnez	tp, 0f
	/* Read tp back */
	csrrw	tp, CSR_XSCRATCH, tp
	j	trap_from_kernel
0:
	/* Now tp is thread_core_local */
	j	trap_from_user
thread_trap_vect_end:
END_FUNC thread_trap_vect

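/*
 * Trap taken while executing in the kernel. Interrupts are saved into
 * the current thread's register save area and handed to
 * thread_interrupt_handler(), while exceptions are treated as aborts
 * and handled on the per-hart abort stack by abort_handler().
 */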
LOCAL_FUNC trap_from_kernel, :
	/* Save sp, a0, a1 into temporary spaces of thread_core_local */
	store_xregs tp, THREAD_CORE_LOCAL_X0, REG_SP
	store_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	csrr	a0, CSR_XCAUSE
	/* MSB of cause differentiates between interrupts and exceptions */
	bge	a0, zero, exception_from_kernel

interrupt_from_kernel:
	/* Get thread context as sp */
	get_thread_ctx sp, a0

	/* Load and save kernel sp */
	load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs sp, THREAD_CTX_REG_SP, REG_A0

	/* Restore kernel a0, a1 so they can be saved below */
	load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save all other GPRs */
	store_xregs sp, THREAD_CTX_REG_RA, REG_RA
	store_xregs sp, THREAD_CTX_REG_GP, REG_GP
	store_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	store_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	store_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs sp, THREAD_CTX_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs sp, THREAD_CTX_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs sp, THREAD_CTX_REG_EPC, REG_T0

	/*
	 * a0 = struct thread_ctx_regs *regs
	 * a1 = cause
	 */
	mv	a0, sp
	csrr	a1, CSR_XCAUSE
	/* Load tmp_stack_va_end as current sp. */
	load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP
	call	thread_interrupt_handler

	/* Get thread context as sp */
	get_thread_ctx sp, t0
	/* Restore XEPC */
	load_xregs sp, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs sp, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs sp, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore all GPRs */
	load_xregs sp, THREAD_CTX_REG_RA, REG_RA
	load_xregs sp, THREAD_CTX_REG_GP, REG_GP
	load_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	load_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs sp, THREAD_CTX_REG_SP, REG_SP
	XRET

exception_from_kernel:
	/*
	 * Update core local flags.
	 * flags = (flags << THREAD_CLF_SAVED_SHIFT) | THREAD_CLF_ABORT;
	 */
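	/*
	 * The previous flags are kept in the upper bits so they can be
	 * restored when the abort returns. If the saved bits already
	 * contain THREAD_CLF_ABORT we were already running on the abort
	 * stack, so the temporary stack is used instead.
	 */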
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	slli	a0, a0, THREAD_CLF_SAVED_SHIFT
	ori	a0, a0, THREAD_CLF_ABORT
	li	a1, (THREAD_CLF_ABORT << THREAD_CLF_SAVED_SHIFT)
	and	a1, a0, a1
	bnez	a1, sel_tmp_sp

	/* Select abort stack */
	load_xregs tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_A1
	j	set_sp

sel_tmp_sp:
	/* We have an abort while using the abort stack, select tmp stack */
	load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_A1
	ori	a0, a0, THREAD_CLF_TMP	/* flags |= THREAD_CLF_TMP; */

set_sp:
	mv	sp, a1
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/*
	 * Save state on stack
	 */
	addi	sp, sp, -THREAD_ABT_REGS_SIZE

	/* Save kernel sp */
	load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs sp, THREAD_ABT_REG_SP, REG_A0

	/* Restore kernel a0, a1 so they can be saved below */
	load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save all other GPRs */
	store_xregs sp, THREAD_ABT_REG_RA, REG_RA
	store_xregs sp, THREAD_ABT_REG_GP, REG_GP
	store_xregs sp, THREAD_ABT_REG_TP, REG_TP
	store_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	store_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	store_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	store_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	store_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs sp, THREAD_ABT_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs sp, THREAD_ABT_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs sp, THREAD_ABT_REG_EPC, REG_T0
	/* Save XTVAL */
	csrr	t0, CSR_XTVAL
	store_xregs sp, THREAD_ABT_REG_TVAL, REG_T0
	/* Save XCAUSE */
	csrr	a0, CSR_XCAUSE
	store_xregs sp, THREAD_ABT_REG_CAUSE, REG_A0

	/*
	 * a0 = cause
	 * a1 = sp (struct thread_abort_regs *regs)
	 * Call abort_handler(cause, regs)
	 */
	mv	a1, sp
	call	abort_handler

	/*
	 * Restore state from stack
	 */

	/* Restore XEPC */
	load_xregs sp, THREAD_ABT_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs sp, THREAD_ABT_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs sp, THREAD_ABT_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp

	/* Update core local flags */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	srli	a0, a0, THREAD_CLF_SAVED_SHIFT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/* Restore all GPRs */
	load_xregs sp, THREAD_ABT_REG_RA, REG_RA
	load_xregs sp, THREAD_ABT_REG_GP, REG_GP
	load_xregs sp, THREAD_ABT_REG_TP, REG_TP
	load_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	load_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	load_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	load_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	load_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	load_xregs sp, THREAD_ABT_REG_SP, REG_SP
	XRET
END_FUNC trap_from_kernel

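/*
 * Trap taken while executing in user mode (CSR_XSCRATCH held the
 * thread_core_local pointer). Interrupts and ecalls are handled on
 * behalf of the current thread; any other exception is treated as a
 * user abort.
 */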
LOCAL_FUNC trap_from_user, :
	/* Save user sp, a0, a1 into temporary spaces of thread_core_local */
	store_xregs tp, THREAD_CORE_LOCAL_X0, REG_SP
	store_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	csrr	a0, CSR_XCAUSE
	/* MSB of cause differentiates between interrupts and exceptions */
	bge	a0, zero, exception_from_user

interrupt_from_user:
	/* Get thread context as sp */
	get_thread_ctx sp, a0

	/* Save user sp */
	load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs sp, THREAD_CTX_REG_SP, REG_A0

	/* Restore user a0, a1 so they can be saved below */
	load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs sp, THREAD_CTX_REG_GP, REG_GP

	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows the trap was taken from the
	 * kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs sp, THREAD_CTX_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop
	/* Save all other GPRs */
	store_xregs sp, THREAD_CTX_REG_RA, REG_RA
	store_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	store_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	store_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs sp, THREAD_CTX_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs sp, THREAD_CTX_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs sp, THREAD_CTX_REG_EPC, REG_T0

	/*
	 * a0 = struct thread_ctx_regs *regs
	 * a1 = cause
	 */
	mv	a0, sp
	csrr	a1, CSR_XCAUSE
	/* Load tmp_stack_va_end as current sp. */
	load_xregs tp, THREAD_CORE_LOCAL_TMP_STACK_VA_END, REG_SP
	call	thread_interrupt_handler

	/* Get thread context as sp */
	get_thread_ctx sp, t0
	/* Restore XEPC */
	load_xregs sp, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs sp, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs sp, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore all GPRs */
	load_xregs sp, THREAD_CTX_REG_RA, REG_RA
	load_xregs sp, THREAD_CTX_REG_GP, REG_GP
	load_xregs sp, THREAD_CTX_REG_TP, REG_TP
	load_xregs sp, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs sp, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs sp, THREAD_CTX_REG_A0, REG_A0, REG_A7
	load_xregs sp, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs sp, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs sp, THREAD_CTX_REG_SP, REG_SP
	XRET

exception_from_user:
	/* a0 is CSR_XCAUSE */
	li	a1, CAUSE_USER_ECALL
	bne	a0, a1, abort_from_user
ecall_from_user:
	/* Load and set kernel sp from thread context */
	get_thread_ctx a0, a1
	load_xregs a0, THREAD_CTX_KERN_SP, REG_SP

	/* Now sp is kernel sp, create stack for struct thread_scall_regs */
	addi	sp, sp, -THREAD_SCALL_REGS_SIZE
	/* Save user sp */
	load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs sp, THREAD_SCALL_REG_SP, REG_A0

	/* Restore user a0, a1 so they can be saved below */
	load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs sp, THREAD_SCALL_REG_GP, REG_GP
	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows the trap was taken from the
	 * kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs sp, THREAD_SCALL_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop

	/* Save other caller-saved registers */
	store_xregs sp, THREAD_SCALL_REG_RA, REG_RA
	store_xregs sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2
	store_xregs sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7
	store_xregs sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	a0, CSR_XIE
	store_xregs sp, THREAD_SCALL_REG_IE, REG_A0
	/* Mask all interrupts */
	csrw	CSR_XIE, zero
	/* Save XSTATUS */
	csrr	a0, CSR_XSTATUS
	store_xregs sp, THREAD_SCALL_REG_STATUS, REG_A0
	/* Save XEPC */
	csrr	a0, CSR_XEPC
	store_xregs sp, THREAD_SCALL_REG_EPC, REG_A0

	/*
	 * a0 = struct thread_scall_regs *regs
	 * Call thread_scall_handler(regs)
	 */
	mv	a0, sp
	call	thread_scall_handler

	/*
	 * Save the kernel sp we had at the beginning of this function.
	 * This matters when this TA has called another TA, because
	 * __thread_enter_user_mode() also saves the stack pointer in this
	 * field.
	 */
	get_thread_ctx a0, a1
	addi	t0, sp, THREAD_SCALL_REGS_SIZE
	store_xregs a0, THREAD_CTX_KERN_SP, REG_T0

	/*
	 * We are returning to U-mode. On return the program counter is
	 * set to xepc (pc = xepc), so add 4 (the size of the ecall
	 * instruction) to continue at the instruction following the ecall.
	 */
	load_xregs sp, THREAD_SCALL_REG_EPC, REG_T0
	addi	t0, t0, 4
	csrw	CSR_XEPC, t0

	/* Restore XIE */
	load_xregs sp, THREAD_SCALL_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs sp, THREAD_SCALL_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp
	/* Restore caller-saved registers */
	load_xregs sp, THREAD_SCALL_REG_RA, REG_RA
	load_xregs sp, THREAD_SCALL_REG_GP, REG_GP
	load_xregs sp, THREAD_SCALL_REG_TP, REG_TP
	load_xregs sp, THREAD_SCALL_REG_T0, REG_T0, REG_T2
	load_xregs sp, THREAD_SCALL_REG_A0, REG_A0, REG_A7
	load_xregs sp, THREAD_SCALL_REG_T3, REG_T3, REG_T6
	load_xregs sp, THREAD_SCALL_REG_SP, REG_SP
	XRET

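/*
 * Exception from user mode other than ecall: save the full user register
 * state on the per-hart abort stack and let abort_handler() decide how
 * to proceed.
 */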
abort_from_user:
	/*
	 * Update core local flags
	 */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	slli	a0, a0, THREAD_CLF_SAVED_SHIFT
	ori	a0, a0, THREAD_CLF_ABORT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/*
	 * Save state on stack
	 */

	/* Load abt_stack_va_end and set it as sp */
	load_xregs tp, THREAD_CORE_LOCAL_ABT_STACK_VA_END, REG_SP

	/* Now sp is abort sp, create stack for struct thread_abort_regs */
	addi	sp, sp, -THREAD_ABT_REGS_SIZE

	/* Save user sp */
	load_xregs tp, THREAD_CORE_LOCAL_X0, REG_A0
	store_xregs sp, THREAD_ABT_REG_SP, REG_A0

	/* Restore user a0, a1 so they can be saved below */
	load_xregs tp, THREAD_CORE_LOCAL_X1, REG_A0, REG_A1

	/* Save user gp */
	store_xregs sp, THREAD_ABT_REG_GP, REG_GP

	/*
	 * Set the scratch register to 0 so that, in case of a recursive
	 * exception, thread_trap_vect() knows the trap was taken from the
	 * kernel.
	 */
	csrrw	gp, CSR_XSCRATCH, zero
	/* Save user tp we previously swapped into CSR_XSCRATCH */
	store_xregs sp, THREAD_ABT_REG_TP, REG_GP
	/* Set kernel gp */
.option push
.option norelax
	la	gp, __global_pointer$
.option pop
	/* Save all other GPRs */
	store_xregs sp, THREAD_ABT_REG_RA, REG_RA
	store_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	store_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	store_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	store_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	store_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	/* Save XIE */
	csrr	t0, CSR_XIE
	store_xregs sp, THREAD_ABT_REG_IE, REG_T0
	/* Mask all interrupts */
	csrw	CSR_XIE, x0
	/* Save XSTATUS */
	csrr	t0, CSR_XSTATUS
	store_xregs sp, THREAD_ABT_REG_STATUS, REG_T0
	/* Save XEPC */
	csrr	t0, CSR_XEPC
	store_xregs sp, THREAD_ABT_REG_EPC, REG_T0
	/* Save XTVAL */
	csrr	t0, CSR_XTVAL
	store_xregs sp, THREAD_ABT_REG_TVAL, REG_T0
	/* Save XCAUSE */
	csrr	a0, CSR_XCAUSE
	store_xregs sp, THREAD_ABT_REG_CAUSE, REG_A0

	/*
	 * a0 = cause
	 * a1 = sp (struct thread_abort_regs *regs)
	 * Call abort_handler(cause, regs)
	 */
	mv	a1, sp
	call	abort_handler

	/*
	 * Restore state from stack
	 */

	/* Restore XEPC */
	load_xregs sp, THREAD_ABT_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0
	/* Restore XIE */
	load_xregs sp, THREAD_ABT_REG_IE, REG_T0
	csrw	CSR_XIE, t0
	/* Restore XSTATUS */
	load_xregs sp, THREAD_ABT_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0
	/* Set scratch as thread_core_local */
	csrw	CSR_XSCRATCH, tp

	/* Update core local flags */
	lw	a0, THREAD_CORE_LOCAL_FLAGS(tp)
	srli	a0, a0, THREAD_CLF_SAVED_SHIFT
	sw	a0, THREAD_CORE_LOCAL_FLAGS(tp)

	/* Restore all GPRs */
	load_xregs sp, THREAD_ABT_REG_RA, REG_RA
	load_xregs sp, THREAD_ABT_REG_GP, REG_GP
	load_xregs sp, THREAD_ABT_REG_TP, REG_TP
	load_xregs sp, THREAD_ABT_REG_T0, REG_T0, REG_T2
	load_xregs sp, THREAD_ABT_REG_S0, REG_S0, REG_S1
	load_xregs sp, THREAD_ABT_REG_A0, REG_A0, REG_A7
	load_xregs sp, THREAD_ABT_REG_S2, REG_S2, REG_S11
	load_xregs sp, THREAD_ABT_REG_T3, REG_T3, REG_T6
	load_xregs sp, THREAD_ABT_REG_SP, REG_SP
	XRET
END_FUNC trap_from_user

/*
 * void thread_unwind_user_mode(uint32_t ret, uint32_t exit_status0,
 * 		uint32_t exit_status1);
 * See description in thread.h
 */
FUNC thread_unwind_user_mode , :

	/* Store the exit status */
	load_xregs sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A3, REG_A5
	sw	a1, (a4)
	sw	a2, (a5)

	/* Save user callee regs */
	store_xregs a3, THREAD_CTX_REG_S0, REG_S0, REG_S1
	store_xregs a3, THREAD_CTX_REG_S2, REG_S2, REG_S11
	store_xregs a3, THREAD_CTX_REG_SP, REG_SP, REG_TP

	/* Restore kernel callee regs */
	mv	a1, sp

	load_xregs a1, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP
	load_xregs a1, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1
	load_xregs a1, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11

	add	sp, sp, THREAD_USER_MODE_REC_SIZE

	/* Return from the call of thread_enter_user_mode() */
	ret
END_FUNC thread_unwind_user_mode

/*
 * void thread_exit_user_mode(unsigned long a0, unsigned long a1,
 *			       unsigned long a2, unsigned long a3,
 *			       unsigned long sp, unsigned long pc,
 *			       unsigned long status);
 */
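/*
 * Leaves user mode for good: installs the kernel stack pointer and
 * xstatus supplied by the caller, marks kernel execution in xSCRATCH,
 * masks interrupts and XRETs into the kernel at pc with a0-a3 preserved.
 */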
FUNC thread_exit_user_mode , :
	/* Set kernel stack pointer */
	mv	sp, a4

	/* Set xSTATUS */
	csrw	CSR_XSTATUS, a6

	/*
	 * Zeroize xSCRATCH to indicate to thread_trap_vect()
	 * that we are executing in the kernel.
	 */
	csrw	CSR_XSCRATCH, zero

	/*
	 * Mask all interrupts first. Interrupts will be unmasked after
	 * returning from __thread_enter_user_mode().
	 */
	csrw	CSR_XIE, zero

	/* Set epc as thread_unwind_user_mode() */
	csrw	CSR_XEPC, a5

	XRET
END_FUNC thread_exit_user_mode

/*
 * uint32_t __thread_enter_user_mode(struct thread_ctx_regs *regs,
 *				     uint32_t *exit_status0,
 *				     uint32_t *exit_status1);
 */
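/*
 * Enters user mode with the register state found in regs. The kernel
 * callee-saved registers and the regs/exit_status pointers are stashed
 * in a struct thread_user_mode_rec on the kernel stack, and the kernel
 * sp is recorded in the thread context so traps taken in user mode can
 * find their way back. Control eventually returns here through
 * thread_unwind_user_mode().
 */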
FUNC __thread_enter_user_mode , :
	/* Disable global interrupts first */
	csrc	CSR_XSTATUS, CSR_XSTATUS_IE

	/*
	 * Create and fill in the struct thread_user_mode_rec
	 */
	addi	sp, sp, -THREAD_USER_MODE_REC_SIZE
	store_xregs sp, THREAD_USER_MODE_REC_CTX_REGS_PTR, REG_A0, REG_A2
	store_xregs sp, THREAD_USER_MODE_REC_X1, REG_RA, REG_GP
	store_xregs sp, THREAD_USER_MODE_REC_X8, REG_S0, REG_S1
	store_xregs sp, THREAD_USER_MODE_REC_X18, REG_S2, REG_S11

	/*
	 * Save the kernel stack pointer in the thread context
	 */

	/* Get pointer to current thread context */
	get_thread_ctx s0, s1

	/*
	 * Save kernel stack pointer to ensure that
	 * thread_exit_user_mode() uses correct stack pointer.
	 */

	store_xregs s0, THREAD_CTX_KERN_SP, REG_SP
	/*
	 * Save thread_core_local in xSCRATCH to ensure that thread_trap_vect()
	 * uses correct core local structure.
	 */
	csrw	CSR_XSCRATCH, tp

	/* Set user ie */
	load_xregs a0, THREAD_CTX_REG_IE, REG_S0
	csrw	CSR_XIE, s0

	/* Set user status */
	load_xregs a0, THREAD_CTX_REG_STATUS, REG_S0
	csrw	CSR_XSTATUS, s0

	/* Load the rest of the general purpose registers */
	load_xregs a0, THREAD_CTX_REG_RA, REG_RA, REG_TP
	load_xregs a0, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs a0, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs a0, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs a0, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs a0, THREAD_CTX_REG_A0, REG_A0, REG_A7

	/* Set exception program counter */
	csrw	CSR_XEPC, ra

	/* Jump into user mode */
	XRET
END_FUNC __thread_enter_user_mode

/* void thread_resume(struct thread_ctx_regs *regs) */
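/*
 * Restores the register state in regs and resumes execution there,
 * either in the kernel or in user mode depending on the privilege level
 * recorded in the saved status (SPP).
 */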
FUNC thread_resume , :
	/* Disable global interrupts first */
	csrc	CSR_XSTATUS, CSR_XSTATUS_IE

	/* Restore epc */
	load_xregs a0, THREAD_CTX_REG_EPC, REG_T0
	csrw	CSR_XEPC, t0

	/* Restore ie */
	load_xregs a0, THREAD_CTX_REG_IE, REG_T0
	csrw	CSR_XIE, t0

	/* Restore status */
	load_xregs a0, THREAD_CTX_REG_STATUS, REG_T0
	csrw	CSR_XSTATUS, t0

	/* Check the previous privilege mode via status.SPP */
	b_if_prev_priv_is_u t0, 1f
	/* Set scratch as zero to indicate that we are in kernel mode */
	csrw	CSR_XSCRATCH, zero
	j	2f
1:
	/* Resume to U-mode, set scratch as tp to be used in the trap handler */
	csrw	CSR_XSCRATCH, tp
2:
	/* Restore all general-purpose registers */
	load_xregs a0, THREAD_CTX_REG_RA, REG_RA, REG_TP
	load_xregs a0, THREAD_CTX_REG_T0, REG_T0, REG_T2
	load_xregs a0, THREAD_CTX_REG_S0, REG_S0, REG_S1
	load_xregs a0, THREAD_CTX_REG_S2, REG_S2, REG_S11
	load_xregs a0, THREAD_CTX_REG_T3, REG_T3, REG_T6
	load_xregs a0, THREAD_CTX_REG_A0, REG_A0, REG_A7

	XRET
END_FUNC thread_resume