xref: /rk3399_ARM-atf/bl1/aarch32/bl1_exceptions.S (revision f1cbbd6332bb85672dc72cbcc4ac7023323c6936)
/*
 * Copyright (c) 2016-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl1/bl1.h>
#include <common/bl_common.h>
#include <context.h>
#include <lib/xlat_tables/xlat_tables.h>
#include <smccc_helpers.h>
#include <smccc_macros.S>

	.globl	bl1_aarch32_smc_handler
17f3b4914bSYatharth Kochar
18f3b4914bSYatharth Kochar
func bl1_aarch32_smc_handler
	/* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */
	str	lr, [sp, #SMC_CTX_LR_MON]

	/* ------------------------------------------------
	 * SMC in BL1 is handled assuming that the MMU is
	 * turned off by BL2.
	 * ------------------------------------------------
	 */

	/* ----------------------------------------------
	 * Detect if this is a RUN_IMAGE or other SMC.
	 * Any other function ID is routed to the generic
	 * smc_handler below. `lr` is free for use as a
	 * scratch register here since it was saved above.
	 * ----------------------------------------------
	 */
	mov	lr, #BL1_SMC_RUN_IMAGE
	cmp	lr, r0
	bne	smc_handler

	/* ------------------------------------------------
	 * Make sure only Secure world reaches here.
	 * SCR.NS set means the caller was Non-secure, in
	 * which case RUN_IMAGE is reported as an exception.
	 * ------------------------------------------------
	 */
	ldcopr  r8, SCR
	tst	r8, #SCR_NS_BIT
	blne	report_exception

	/* ---------------------------------------------------------------------
	 * Pass control to next secure image.
	 * Here it expects r1 to contain the address of a entry_point_info_t
	 * structure describing the BL entrypoint. Keep that pointer in r8
	 * (callee-saved per the AAPCS) so it survives the calls below.
	 * ---------------------------------------------------------------------
	 */
	mov	r8, r1
	mov	r0, r1
	bl	bl1_print_next_bl_ep_info

#if SPIN_ON_BL1_EXIT
	/* Debug aid: announce and then spin forever instead of exiting BL1. */
	bl	print_debug_loop_message
debug_loop:
	b	debug_loop
#endif

	/* Allow the platform to perform any last-minute actions before exit. */
	mov	r0, r8
	bl	bl1_plat_prepare_exit

	/* Invalidate the entire TLB (the register value written is ignored). */
	stcopr	r0, TLBIALL
	dsb	sy
	isb

	/*
	 * Extract PC and SPSR based on struct `entry_point_info_t`
	 * and load it in LR and SPSR registers respectively.
	 * Only the extension and control fields of SPSR are written.
	 */
	ldr	lr, [r8, #ENTRY_POINT_INFO_PC_OFFSET]
	ldr	r1, [r8, #(ENTRY_POINT_INFO_PC_OFFSET + 4)]
	msr	spsr_xc, r1

	/* Some BL32 stages expect lr_svc to provide the BL33 entry address */
	cps	#MODE32_svc
	ldr	lr, [r8, #ENTRY_POINT_INFO_LR_SVC_OFFSET]
	cps	#MODE32_mon

	/* Load the next image's arguments into r0-r3 and leave Monitor mode. */
	add	r8, r8, #ENTRY_POINT_INFO_ARGS_OFFSET
	ldm	r8, {r0, r1, r2, r3}
	exception_return
endfunc bl1_aarch32_smc_handler

	/* -----------------------------------------------------
	 * Save Secure/Normal world context and jump to
	 * BL1 SMC handler.
	 * -----------------------------------------------------
	 */
func smc_handler
	/* -----------------------------------------------------
	 * Generic SMC path: save the caller's context, run the
	 * C-level BL1 SMC wrapper, then transition to whichever
	 * context it selects next.
	 *
	 * Save the GP registers.
	 * -----------------------------------------------------
	 */
	smccc_save_gp_mode_regs

	/*
	 * `sp` still points to `smc_ctx_t`. Save it to a register
	 * and restore the C runtime stack pointer to `sp`.
	 */
	mov	r6, sp
	ldr	sp, [r6, #SMC_CTX_SP_MON]

	/* Extract the caller's security state (NS bit) from the saved SCR. */
	ldr	r0, [r6, #SMC_CTX_SCR]
	and	r7, r0, #SCR_NS_BIT		/* flags */

	/* Switch to Secure Mode */
	bic	r0, #SCR_NS_BIT
	stcopr	r0, SCR
	isb

	/* If caller is from Secure world then turn on the MMU */
	tst	r7, #SCR_NS_BIT
	bne	skip_mmu_on

	/* Turn on the MMU, with the data cache left disabled for now. */
	mov	r0, #DISABLE_DCACHE
	bl	enable_mmu_svc_mon

	/*
	 * Invalidate `smc_ctx_t` in data cache to prevent dirty data being
	 * used.
	 */
	mov	r0, r6
	mov	r1, #SMC_CTX_SIZE
	bl	inv_dcache_range

	/* Enable the data cache. */
	ldcopr	r9, SCTLR
	orr	r9, r9, #SCTLR_C_BIT
	stcopr	r9, SCTLR
	isb

skip_mmu_on:
	/* Prepare arguments for BL1 SMC wrapper. */
	ldr	r0, [r6, #SMC_CTX_GPREG_R0]	/* smc_fid */
	mov	r1, #0				/* cookie */
	mov	r2, r6				/* handle */
	mov	r3, r7				/* flags */
	bl	bl1_smc_wrapper

	/* Get the smc_context for next BL image */
	bl	smc_get_next_ctx
	mov	r4, r0

	/* Only turn-off MMU if going to secure world */
	ldr	r5, [r4, #SMC_CTX_SCR]
	tst	r5, #SCR_NS_BIT
	bne	skip_mmu_off

	/* Disable the MMU, then invalidate the whole TLB (value ignored). */
	bl	disable_mmu_icache_secure
	stcopr	r0, TLBIALL
	dsb	sy
	isb

skip_mmu_off:
	/* -----------------------------------------------------
	 * Do the transition to next BL image.
	 * -----------------------------------------------------
	 */
	mov	r0, r4
	monitor_exit
endfunc smc_handler
165a4409008Sdp-armendfunc smc_handler
166