xref: /rk3399_ARM-atf/lib/aarch64/misc_helpers.S (revision 5c3272a717f357872973c78007b659dca0e5c673)
14ecca339SDan Handley/*
24ecca339SDan Handley * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
34ecca339SDan Handley *
44ecca339SDan Handley * Redistribution and use in source and binary forms, with or without
54ecca339SDan Handley * modification, are permitted provided that the following conditions are met:
64ecca339SDan Handley *
74ecca339SDan Handley * Redistributions of source code must retain the above copyright notice, this
84ecca339SDan Handley * list of conditions and the following disclaimer.
94ecca339SDan Handley *
104ecca339SDan Handley * Redistributions in binary form must reproduce the above copyright notice,
114ecca339SDan Handley * this list of conditions and the following disclaimer in the documentation
124ecca339SDan Handley * and/or other materials provided with the distribution.
134ecca339SDan Handley *
144ecca339SDan Handley * Neither the name of ARM nor the names of its contributors may be used
154ecca339SDan Handley * to endorse or promote products derived from this software without specific
164ecca339SDan Handley * prior written permission.
174ecca339SDan Handley *
184ecca339SDan Handley * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
194ecca339SDan Handley * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
204ecca339SDan Handley * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
214ecca339SDan Handley * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
224ecca339SDan Handley * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
234ecca339SDan Handley * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
244ecca339SDan Handley * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
254ecca339SDan Handley * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
264ecca339SDan Handley * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
274ecca339SDan Handley * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
284ecca339SDan Handley * POSSIBILITY OF SUCH DAMAGE.
294ecca339SDan Handley */
304ecca339SDan Handley
3197043ac9SDan Handley#include <arch.h>
324ecca339SDan Handley#include <asm_macros.S>
334ecca339SDan Handley
344ecca339SDan Handley	.globl	get_afflvl_shift
354ecca339SDan Handley	.globl	mpidr_mask_lower_afflvls
364ecca339SDan Handley	.globl	eret
374ecca339SDan Handley	.globl	smc
384ecca339SDan Handley
394ecca339SDan Handley	.globl	zeromem16
404ecca339SDan Handley	.globl	memcpy16
414ecca339SDan Handley
422f5dcfefSAndrew Thoelke	.globl	disable_mmu_el3
432f5dcfefSAndrew Thoelke	.globl	disable_mmu_icache_el3
442f5dcfefSAndrew Thoelke
45*5c3272a7SAndrew Thoelke#if SUPPORT_VFP
46*5c3272a7SAndrew Thoelke	.globl	enable_vfp
47*5c3272a7SAndrew Thoelke#endif
48*5c3272a7SAndrew Thoelke
494ecca339SDan Handley
/* -----------------------------------------------------------------------
 * Convert the affinity level in x0 (0 - 3) into the left-shift needed to
 * reach that affinity field in an MPIDR value; result returned in x0.
 * Affinity level 3 lives above the hole at MPIDR[31:24], so it is bumped
 * to 4 before scaling.
 * Clobbers: x0 only.
 * -----------------------------------------------------------------------
 */
func get_afflvl_shift
	cmp	x0, #3
	csinc	x0, x0, x0, ne		/* x0++ only when afflvl == 3 */
	lsl	x0, x0, #MPIDR_AFFLVL_SHIFT
	ret
564ecca339SDan Handley
/* -----------------------------------------------------------------------
 * Mask off every affinity field below the affinity level given in x1
 * from the MPIDR value in x0; masked MPIDR returned in x0.
 * Level 3 is bumped to 4 first to skip the hole at MPIDR[31:24].
 * Clobbers: x0 - x2.
 * -----------------------------------------------------------------------
 */
func mpidr_mask_lower_afflvls
	cmp	x1, #3
	csinc	x1, x1, x1, ne		/* x1++ only when afflvl == 3 */
	lsl	x2, x1, #MPIDR_AFFLVL_SHIFT
	lsr	x0, x0, x2		/* shift the lower fields out... */
	lsl	x0, x0, x2		/* ...and back in as zeroes */
	ret
654ecca339SDan Handley
664ecca339SDan Handley
/* -----------------------------------------------------------------------
 * Execute an ERET: exception return to the state held in the current
 * EL's SPSR_ELx/ELR_ELx. Control never comes back to the caller of
 * this function.
 * -----------------------------------------------------------------------
 */
func eret
	eret
694ecca339SDan Handley
704ecca339SDan Handley
/* -----------------------------------------------------------------------
 * Issue an SMC with immediate 0, trapping to the EL3 secure monitor.
 * General purpose registers are passed through untouched, so x0-x7 act
 * as the SMC arguments/results (presumably per the SMC calling
 * convention — behaviour of the handler is defined elsewhere).
 * -----------------------------------------------------------------------
 */
func smc
	smc	#0
734ecca339SDan Handley
/* -----------------------------------------------------------------------
 * void zeromem16(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 * The memory address must be 16-byte aligned.
 * In:      x0 = mem (16-byte aligned), w1 = length in bytes
 * Clobbers: x0 - x3
 * -----------------------------------------------------------------------
 */
func zeromem16
	/*
	 * length is a 32-bit argument: AAPCS64 leaves the upper 32 bits of
	 * x1 unspecified, so zero-extend explicitly before 64-bit address
	 * arithmetic.
	 */
	uxtw	x1, w1
	add	x2, x0, x1		/* x2 = first address past the region */
/* zero 16 bytes at a time */
z_loop16:
	sub	x3, x2, x0		/* x3 = bytes remaining */
	cmp	x3, #16
	b.lt	z_loop1
	stp	xzr, xzr, [x0], #16
	b	z_loop16
/* zero byte per byte */
z_loop1:
	cmp	x0, x2
	b.eq	z_end
	strb	wzr, [x0], #1
	b	z_loop1
z_end:	ret
974ecca339SDan Handley
984ecca339SDan Handley
/* --------------------------------------------------------------------------
 * void memcpy16(void *dest, const void *src, unsigned int length)
 *
 * Copy length bytes from memory area src to memory area dest.
 * The memory areas should not overlap.
 * Destination and source addresses must be 16-byte aligned.
 * In:      x0 = dest, x1 = src, w2 = length in bytes
 * Clobbers: x0 - x4
 * --------------------------------------------------------------------------
 */
func memcpy16
	/*
	 * length is a 32-bit argument: AAPCS64 leaves the upper 32 bits of
	 * x2 unspecified, so zero-extend explicitly before using x2 as a
	 * 64-bit count.
	 */
	uxtw	x2, w2
/* copy 16 bytes at a time */
m_loop16:
	cmp	x2, #16
	b.lt	m_loop1
	ldp	x3, x4, [x1], #16
	stp	x3, x4, [x0], #16
	sub	x2, x2, #16
	b	m_loop16
/* copy byte per byte */
m_loop1:
	cbz	x2, m_end
	ldrb	w3, [x1], #1
	strb	w3, [x0], #1
	subs	x2, x2, #1
	b.ne	m_loop1
m_end:	ret
1242f5dcfefSAndrew Thoelke
1252f5dcfefSAndrew Thoelke/* ---------------------------------------------------------------------------
1262f5dcfefSAndrew Thoelke * Disable the MMU at EL3
1272f5dcfefSAndrew Thoelke * This is implemented in assembler to ensure that the data cache is cleaned
1282f5dcfefSAndrew Thoelke * and invalidated after the MMU is disabled without any intervening cacheable
1292f5dcfefSAndrew Thoelke * data accesses
1302f5dcfefSAndrew Thoelke * ---------------------------------------------------------------------------
1312f5dcfefSAndrew Thoelke */
1322f5dcfefSAndrew Thoelke
func disable_mmu_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT)	/* clear MMU enable + data cache enable */
do_disable_mmu:		/* shared entry: x1 = SCTLR_EL3 bits to clear (also used by disable_mmu_icache_el3) */
	mrs	x0, sctlr_el3
	bic	x0, x0, x1
	msr	sctlr_el3, x0
	isb				// ensure MMU is off
	mov	x0, #DCCISW		// DCache clean and invalidate
	b	dcsw_op_all		/* tail call: dcsw_op_all returns directly to our caller via lr */
1422f5dcfefSAndrew Thoelke
1432f5dcfefSAndrew Thoelke
/* ---------------------------------------------------------------------------
 * Disable the MMU, the data cache and the instruction cache at EL3, then
 * clean and invalidate the data cache. Tail-merges into the shared
 * do_disable_mmu path of disable_mmu_el3 with the extra SCTLR_I_BIT set
 * in the clear mask.
 * ---------------------------------------------------------------------------
 */
func disable_mmu_icache_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
1472f5dcfefSAndrew Thoelke
148*5c3272a7SAndrew Thoelke
149*5c3272a7SAndrew Thoelke/* ---------------------------------------------------------------------------
150*5c3272a7SAndrew Thoelke * Enable the use of VFP at EL3
151*5c3272a7SAndrew Thoelke * ---------------------------------------------------------------------------
152*5c3272a7SAndrew Thoelke */
#if SUPPORT_VFP
func enable_vfp
	/* Permit FP/SIMD accesses below EL3: set the VFP bits in CPACR_EL1 */
	mrs	x1, cpacr_el1
	orr	x1, x1, #CPACR_VFP_BITS
	msr	cpacr_el1, x1
	/* Stop EL3 trapping FP/SIMD use: clear CPTR_EL3.TFP */
	mov	x0, #AARCH64_CPTR_TFP
	mrs	x1, cptr_el3
	bic	x1, x1, x0
	msr	cptr_el3, x1
	isb				/* make the new controls visible before any FP use */
	ret
#endif
165