/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	.globl	get_afflvl_shift
	.globl	mpidr_mask_lower_afflvls
	.globl	eret
	.globl	smc

	.globl	zeromem16
	.globl	memcpy16

	.globl	disable_mmu_el3
	.globl	disable_mmu_icache_el3

#if SUPPORT_VFP
	.globl	enable_vfp
#endif

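/* -----------------------------------------------------------------------
 * Return the bit position of the requested affinity level field within an
 * MPIDR value: levels 0-2 start at bits 0, 8 and 16, while level 3 lives
 * at bit 32, hence the extra increment when the level is 3.
 *
 * Roughly equivalent C (a sketch; the prototype shown is an assumption,
 * not taken from this file):
 *
 *	unsigned int get_afflvl_shift(unsigned int afflvl)
 *	{
 *		return (afflvl == 3 ? afflvl + 1 : afflvl)
 *				<< MPIDR_AFFLVL_SHIFT;
 *	}
 * -----------------------------------------------------------------------
 */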
func get_afflvl_shift
	cmp	x0, #3
	cinc	x0, x0, eq
	mov	x1, #MPIDR_AFFLVL_SHIFT
	lsl	x0, x0, x1
	ret
endfunc get_afflvl_shift

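/* -----------------------------------------------------------------------
 * Clear all affinity level fields of the MPIDR value in x0 below the
 * affinity level given in x1, leaving that level and above untouched.
 *
 * Roughly equivalent C (a sketch; the prototype shown is an assumption,
 * not taken from this file):
 *
 *	unsigned long mpidr_mask_lower_afflvls(unsigned long mpidr,
 *					       unsigned int afflvl)
 *	{
 *		unsigned int shift = (afflvl == 3 ? afflvl + 1 : afflvl)
 *					<< MPIDR_AFFLVL_SHIFT;
 *		return (mpidr >> shift) << shift;
 *	}
 * -----------------------------------------------------------------------
 */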
func mpidr_mask_lower_afflvls
	cmp	x1, #3
	cinc	x1, x1, eq
	mov	x2, #MPIDR_AFFLVL_SHIFT
	lsl	x2, x1, x2
	lsr	x0, x0, x2
	lsl	x0, x0, x2
	ret
endfunc mpidr_mask_lower_afflvls


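/* -----------------------------------------------------------------------
 * Perform an exception return: control transfers to the address held in
 * the ELR of the current exception level, with the state held in its
 * SPSR, both of which must have been set up by the caller.
 * -----------------------------------------------------------------------
 */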
func eret
	eret
endfunc eret


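/* -----------------------------------------------------------------------
 * Issue SMC #0. The function identifier and any arguments are taken from
 * the general purpose registers exactly as set up by the caller (x0 holds
 * the function identifier per the SMC calling convention).
 * -----------------------------------------------------------------------
 */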
func smc
	smc	#0
endfunc smc

/* -----------------------------------------------------------------------
 * void zeromem16(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 * The memory address must be 16-byte aligned.
 * -----------------------------------------------------------------------
 */
func zeromem16
#if ASM_ASSERTION
	tst	x0, #0xf		// check that mem is 16-byte aligned
	ASM_ASSERT(eq)
#endif
	add	x2, x0, x1		// x2 = one past the end of the region
/* zero 16 bytes at a time */
z_loop16:
	sub	x3, x2, x0
	cmp	x3, #16
	b.lt	z_loop1
	stp	xzr, xzr, [x0], #16
	b	z_loop16
/* zero the remaining bytes one at a time */
z_loop1:
	cmp	x0, x2
	b.eq	z_end
	strb	wzr, [x0], #1
	b	z_loop1
z_end:
	ret
endfunc zeromem16
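
/* Illustrative call from C (a sketch, not part of this file; 'scratch_buf'
 * is a hypothetical 16-byte aligned buffer):
 *
 *	void zeromem16(void *mem, unsigned int length);
 *
 *	static unsigned char scratch_buf[256] __attribute__((aligned(16)));
 *
 *	zeromem16(scratch_buf, sizeof(scratch_buf));
 */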


/* --------------------------------------------------------------------------
 * void memcpy16(void *dest, const void *src, unsigned int length)
 *
 * Copy length bytes from memory area src to memory area dest.
 * The memory areas must not overlap.
 * Destination and source addresses must be 16-byte aligned.
 * --------------------------------------------------------------------------
 */
func memcpy16
#if ASM_ASSERTION
	orr	x3, x0, x1
	tst	x3, #0xf		// check that dest and src are 16-byte aligned
	ASM_ASSERT(eq)
#endif
/* copy 16 bytes at a time */
m_loop16:
	cmp	x2, #16
	b.lt	m_loop1
	ldp	x3, x4, [x1], #16
	stp	x3, x4, [x0], #16
	sub	x2, x2, #16
	b	m_loop16
/* copy the remaining bytes one at a time */
m_loop1:
	cbz	x2, m_end
	ldrb	w3, [x1], #1
	strb	w3, [x0], #1
	subs	x2, x2, #1
	b.ne	m_loop1
m_end:
	ret
endfunc memcpy16
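
/* Illustrative call from C (a sketch, not part of this file; 'dst_buf' and
 * 'src_buf' are hypothetical, non-overlapping, 16-byte aligned buffers):
 *
 *	void memcpy16(void *dest, const void *src, unsigned int length);
 *
 *	memcpy16(dst_buf, src_buf, sizeof(src_buf));
 */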

/* ---------------------------------------------------------------------------
 * Disable the MMU at EL3
 *
 * disable_mmu_el3 clears the M and C bits in SCTLR_EL3; disable_mmu_icache_el3
 * additionally clears the I bit. disable_mmu_el3 falls through into the common
 * do_disable_mmu sequence, while disable_mmu_icache_el3 branches to it with
 * the wider clear mask.
 * ---------------------------------------------------------------------------
 */

func disable_mmu_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT)
do_disable_mmu:
	mrs	x0, sctlr_el3
	bic	x0, x0, x1		// clear the requested SCTLR_EL3 bits
	msr	sctlr_el3, x0
	isb				// ensure MMU is off
	dsb	sy
	ret
endfunc disable_mmu_el3


func disable_mmu_icache_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache_el3
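
/* Illustrative call from C (a sketch; the prototypes are assumed to be
 * declared elsewhere, e.g. in arch_helpers.h):
 *
 *	void disable_mmu_el3(void);
 *	void disable_mmu_icache_el3(void);
 *
 *	disable_mmu_icache_el3();	// MMU, D-cache and I-cache off at EL3
 */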

/* ---------------------------------------------------------------------------
 * Enable the use of VFP at EL3
 * ---------------------------------------------------------------------------
 */
#if SUPPORT_VFP
func enable_vfp
	mrs	x0, cpacr_el1
	orr	x0, x0, #CPACR_VFP_BITS	// set CPACR_EL1.FPEN: no FP/SIMD traps at EL0/EL1
	msr	cpacr_el1, x0
	mrs	x0, cptr_el3
	mov	x1, #AARCH64_CPTR_TFP
	bic	x0, x0, x1		// clear CPTR_EL3.TFP: no FP/SIMD traps to EL3
	msr	cptr_el3, x0
	isb
	ret
endfunc enable_vfp
#endif
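
/* Illustrative call from C (a sketch; the prototype is an assumption and the
 * function only exists when the image is built with SUPPORT_VFP):
 *
 *	void enable_vfp(void);
 *
 *	enable_vfp();	// call before executing any FP/SIMD code at EL3
 */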