/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	.globl	smc
	.globl	zeromem
	.globl	memcpy4
	.globl	disable_mmu_icache_secure
	.globl	disable_mmu_secure

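/* -----------------------------------------------------------------------
 * Issue an SMC from AArch32.
 *
 * The first four arguments arrive in r0-r3 per the AAPCS; the next three
 * are loaded from the caller's stack into r4-r6 before the SMC is issued.
 * A plausible C-side declaration, shown only as a sketch (this prototype
 * is an assumption and is not defined in this file):
 *
 *   void smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
 *            uint32_t r4, uint32_t r5, uint32_t r6);
 * -----------------------------------------------------------------------
 */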
func smc
	/*
	 * Per the AAPCS, only the first four arguments (r0-r3) are passed
	 * in registers on AArch32; the remaining arguments are placed on
	 * the caller's stack. Load the next three stacked arguments into
	 * r4-r6 explicitly before issuing the SMC.
	 * Clobbers: r4-r6
	 */
	ldm	sp, {r4, r5, r6}
	smc	#0
endfunc smc

/* -----------------------------------------------------------------------
 * void zeromem(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 * The memory address and length must be 4-byte aligned.
 * -----------------------------------------------------------------------
 */
func zeromem
#if ASM_ASSERTION
	/* Both the base address and the length must be 4-byte aligned */
	tst	r0, #0x3
	ASM_ASSERT(eq)
	tst	r1, #0x3
	ASM_ASSERT(eq)
#endif
	add	r2, r0, r1		/* r2 = end of the region */
	mov	r1, #0
z_loop:
	cmp	r2, r0
	beq	z_end
	str	r1, [r0], #4		/* store a zero word and advance */
	b	z_loop
z_end:
	bx	lr
endfunc zeromem
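/*
 * Illustrative call site only (the buffer name below is hypothetical and
 * not defined in this file): zeroing a 4-byte aligned region from C.
 *
 *   extern void zeromem(void *mem, unsigned int length);
 *
 *   static uint32_t scratch[64];		// naturally 4-byte aligned
 *   zeromem(scratch, sizeof(scratch));	// length is a multiple of 4
 */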

/* --------------------------------------------------------------------------
 * void memcpy4(void *dest, const void *src, unsigned int length)
 *
 * Copy length bytes from memory area src to memory area dest.
 * The memory areas should not overlap.
 * Destination and source addresses must be 4-byte aligned.
 * --------------------------------------------------------------------------
 */
func memcpy4
#if ASM_ASSERTION
	/* Both the source and the destination must be 4-byte aligned */
	orr	r3, r0, r1
	tst	r3, #0x3
	ASM_ASSERT(eq)
#endif
/* Copy 4 bytes at a time */
m_loop4:
	cmp	r2, #4
	blt	m_loop1
	ldr	r3, [r1], #4
	str	r3, [r0], #4
	sub	r2, r2, #4
	b	m_loop4
/* Copy the remaining bytes one at a time */
m_loop1:
	cmp	r2, #0
	beq	m_end
	ldrb	r3, [r1], #1
	strb	r3, [r0], #1
	subs	r2, r2, #1
	bne	m_loop1
m_end:
	bx	lr
endfunc memcpy4
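/*
 * Illustrative call site only (the symbol names below are hypothetical):
 * because the trailing bytes are copied one at a time, length need not be
 * a multiple of 4, but both pointers must be 4-byte aligned.
 *
 *   extern void memcpy4(void *dest, const void *src, unsigned int length);
 *
 *   memcpy4(image_dst, image_src, image_size);	// any byte count
 */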

/* ---------------------------------------------------------------------------
 * Disable the MMU in Secure State
 * ---------------------------------------------------------------------------
 */

func disable_mmu_secure
	mov	r1, #(SCTLR_M_BIT | SCTLR_C_BIT)
do_disable_mmu:
	ldcopr	r0, SCTLR
	bic	r0, r0, r1
	stcopr	r0, SCTLR
	isb				// ensure MMU is off
	dsb	sy
	bx	lr
endfunc disable_mmu_secure


func disable_mmu_icache_secure
	/* Also clear the I-cache enable bit, then reuse the path above */
	ldr	r1, =(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache_secure
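/*
 * Both routines consume no arguments and return nothing, so from C they
 * would be called as plain void functions (prototypes shown only as a
 * sketch inferred from the assembly above):
 *
 *   void disable_mmu_secure(void);
 *   void disable_mmu_icache_secure(void);
 */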