/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	.globl	smc
	.globl	zeromem
	.globl	disable_mmu_icache_secure
	.globl	disable_mmu_secure

func smc
	/*
	 * Under the AArch32 procedure call standard, only the first four
	 * arguments are passed in registers r0-r3; the remaining arguments
	 * (r4-r6) are passed on the stack, so they must be loaded into
	 * r4-r6 explicitly before issuing the SMC.
	 * Clobbers: r4-r6
	 */
	ldm	sp, {r4, r5, r6}
	smc	#0
endfunc smc
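
/*
 * Illustrative C-side view of the smc trampoline above. The
 * seven-argument prototype below is an assumption inferred from the
 * r0-r6 usage documented in the function, not a declaration taken from
 * this file:
 *
 *   void smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
 *            uint32_t r4, uint32_t r5, uint32_t r6);
 *
 *   // AAPCS: r0-r3 travel in registers; the rest are spilled to the
 *   // caller's stack, which the ldm above reloads into r4-r6.
 *   smc(func_id, 0, 0, 0, 0, 0, 0);   // func_id is a placeholder
 */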

/* -----------------------------------------------------------------------
 * void zeromem(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 * The memory address and length must be 4-byte aligned.
 * -----------------------------------------------------------------------
 */
func zeromem
#if ASM_ASSERTION
	tst	r0, #0x3
	ASM_ASSERT(eq)
	tst	r1, #0x3
	ASM_ASSERT(eq)
#endif
	add	r2, r0, r1		// r2 = first address past the region
	mov	r1, #0
z_loop:
	cmp	r2, r0
	beq	z_end
	str	r1, [r0], #4		// store a zero word, post-increment r0
	b	z_loop
z_end:
	bx	lr
endfunc zeromem
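
/*
 * Example use of zeromem (a sketch; buffer is a hypothetical caller-side
 * object, not something defined here). Both arguments must be 4-byte
 * aligned, or the ASM_ASSERTs above will trip when ASM_ASSERTION is
 * enabled:
 *
 *   static uint32_t buffer[16];          // word array: base address and
 *   zeromem(buffer, sizeof(buffer));     // size are both multiples of 4
 */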

/* ---------------------------------------------------------------------------
 * Disable the MMU in Secure State
 * ---------------------------------------------------------------------------
 */

func disable_mmu_secure
	mov	r1, #(SCTLR_M_BIT | SCTLR_C_BIT)
do_disable_mmu:
	ldcopr	r0, SCTLR
	bic	r0, r0, r1
	stcopr	r0, SCTLR
	isb				// ensure MMU is off
	dsb	sy			// wait for outstanding memory accesses
	bx	lr
endfunc disable_mmu_secure


func disable_mmu_icache_secure
	ldr	r1, =(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache_secure
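
/*
 * Both entry points funnel into the shared do_disable_mmu sequence:
 * disable_mmu_secure clears SCTLR.M and SCTLR.C, while
 * disable_mmu_icache_secure loads a wider mask that also clears SCTLR.I,
 * then tail-branches into the same read-modify-write of SCTLR. A
 * hypothetical C-side view (the real declarations live in the C headers,
 * not in this file):
 *
 *   void disable_mmu_secure(void);         // MMU and D-cache off
 *   void disable_mmu_icache_secure(void);  // MMU, D-cache, I-cache off
 */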