xref: /rk3399_ARM-atf/lib/aarch64/misc_helpers.S (revision bc9201289c9ae4ccfc6b11048431d47eba547a44)
/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
304ecca339SDan Handley
3197043ac9SDan Handley#include <arch.h>
324ecca339SDan Handley#include <asm_macros.S>
33*bc920128SSoby Mathew#include <assert_macros.S>
344ecca339SDan Handley
354ecca339SDan Handley	.globl	get_afflvl_shift
364ecca339SDan Handley	.globl	mpidr_mask_lower_afflvls
374ecca339SDan Handley	.globl	eret
384ecca339SDan Handley	.globl	smc
394ecca339SDan Handley
404ecca339SDan Handley	.globl	zeromem16
414ecca339SDan Handley	.globl	memcpy16
424ecca339SDan Handley
432f5dcfefSAndrew Thoelke	.globl	disable_mmu_el3
442f5dcfefSAndrew Thoelke	.globl	disable_mmu_icache_el3
452f5dcfefSAndrew Thoelke
465c3272a7SAndrew Thoelke#if SUPPORT_VFP
475c3272a7SAndrew Thoelke	.globl	enable_vfp
485c3272a7SAndrew Thoelke#endif
495c3272a7SAndrew Thoelke
/* -----------------------------------------------------------------------
 * unsigned int get_afflvl_shift(unsigned int aff_lvl);  [x0 in, x0 out]
 *
 * Convert an affinity level into the bit position of the corresponding
 * affinity field in an MPIDR_EL1 value.  Affinity level 3 lives one
 * 8-bit field higher than level 2 (Aff3 is bits [39:32], not [31:24]),
 * hence the conditional increment before scaling.
 * Clobbers: x1, flags.
 * -----------------------------------------------------------------------
 */
func get_afflvl_shift
	cmp	x0, #3
	cinc	x0, x0, eq		/* level 3 -> field index 4 */
	mov	x1, #MPIDR_AFFLVL_SHIFT	/* bits per affinity field step (from arch.h) */
	lsl	x0, x0, x1		/* x0 = field index << MPIDR_AFFLVL_SHIFT */
	ret
564ecca339SDan Handley
/* -----------------------------------------------------------------------
 * unsigned long mpidr_mask_lower_afflvls(unsigned long mpidr,   [x0]
 *                                        unsigned int aff_lvl); [x1]
 *
 * Clear all affinity fields of 'mpidr' below 'aff_lvl' and return the
 * result in x0.  The shift amount is derived exactly as in
 * get_afflvl_shift (level 3 occupies the field above level 2).
 * Clobbers: x1, x2, flags.
 * -----------------------------------------------------------------------
 */
func mpidr_mask_lower_afflvls
	cmp	x1, #3
	cinc	x1, x1, eq		/* level 3 -> field index 4 */
	mov	x2, #MPIDR_AFFLVL_SHIFT
	lsl	x2, x1, x2		/* x2 = bit position of level's field */
	lsr	x0, x0, x2		/* shift right then left to zero the */
	lsl	x0, x0, x2		/* fields below 'aff_lvl' */
	ret
654ecca339SDan Handley
664ecca339SDan Handley
/* -----------------------------------------------------------------------
 * void eret(void);
 *
 * Perform an exception return from the current EL using the values
 * already programmed into the current ELR_ELx/SPSR_ELx.  Never returns
 * to the caller, so no trailing 'ret' is needed.
 * -----------------------------------------------------------------------
 */
func eret
	eret
694ecca339SDan Handley
704ecca339SDan Handley
/* -----------------------------------------------------------------------
 * void smc(...);
 *
 * Issue SMC #0; arguments and results are passed in x0-x7 per the SMC
 * calling convention.
 * NOTE(review): there is deliberately no 'ret' after the smc — if the
 * secure monitor ever returned here, execution would fall through into
 * zeromem16 below.  Presumably callers only use this for calls that do
 * not return (e.g. handing off to the next boot stage) — confirm.
 * -----------------------------------------------------------------------
 */
func smc
	smc	#0
734ecca339SDan Handley
/* -----------------------------------------------------------------------
 * void zeromem16(void *mem, unsigned int length);
 *
 * Zero-fill 'length' bytes starting at 'mem'.
 * 'mem' must be 16-byte aligned (checked when ASM_ASSERTION is set).
 * Fills 16 bytes per iteration while possible, then finishes the tail
 * one byte at a time.
 * Clobbers: x0, x2, x3, flags.
 * -----------------------------------------------------------------------
 */
func zeromem16
#if ASM_ASSERTION
	tst	x0, #0xf
	ASM_ASSERT(eq)
#endif
	add	x2, x0, x1		/* x2 = first byte past the region */
zeromem16_stp:				/* bulk loop: 16 bytes per pass */
	sub	x3, x2, x0		/* x3 = bytes still to clear */
	cmp	x3, #16
	b.lt	zeromem16_strb
	stp	xzr, xzr, [x0], #16
	b	zeromem16_stp
zeromem16_strb:				/* tail loop: one byte per pass */
	cmp	x0, x2
	b.eq	zeromem16_exit
	strb	wzr, [x0], #1
	b	zeromem16_strb
zeromem16_exit:
	ret
1014ecca339SDan Handley
1024ecca339SDan Handley
/* --------------------------------------------------------------------------
 * void memcpy16(void *dest, const void *src, unsigned int length)
 *
 * Copy 'length' bytes from 'src' to 'dest'.  The regions must not
 * overlap, and both addresses must be 16-byte aligned (checked when
 * ASM_ASSERTION is set).  Copies 16 bytes per iteration while possible,
 * then finishes the tail one byte at a time.
 * Clobbers: x0, x1, x2, x3, x4, flags.
 * --------------------------------------------------------------------------
 */
func memcpy16
#if ASM_ASSERTION
	orr	x3, x0, x1		/* any misaligned bit in either address? */
	tst	x3, #0xf
	ASM_ASSERT(eq)
#endif
memcpy16_bulk:				/* bulk loop: 16 bytes per pass */
	cmp	x2, #16
	b.lt	memcpy16_tail
	ldp	x3, x4, [x1], #16
	stp	x3, x4, [x0], #16
	sub	x2, x2, #16
	b	memcpy16_bulk
memcpy16_tail:				/* tail loop: one byte per pass */
	cbz	x2, memcpy16_exit
	ldrb	w3, [x1], #1
	strb	w3, [x0], #1
	subs	x2, x2, #1
	b.ne	memcpy16_tail
memcpy16_exit:
	ret
1332f5dcfefSAndrew Thoelke
/* ---------------------------------------------------------------------------
 * void disable_mmu_el3(void);
 *
 * Disable the MMU at EL3
 * This is implemented in assembler to ensure that the data cache is cleaned
 * and invalidated after the MMU is disabled without any intervening cacheable
 * data accesses
 * Clobbers: x0, x1 (plus whatever dcsw_op_all clobbers — defined elsewhere).
 * ---------------------------------------------------------------------------
 */

func disable_mmu_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT)	/* bits to clear: MMU + D-cache enable */
do_disable_mmu:			/* shared tail; expects x1 = SCTLR_EL3 bits to clear */
	mrs	x0, sctlr_el3
	bic	x0, x0, x1
	msr	sctlr_el3, x0
	isb				// ensure MMU is off
	mov	x0, #DCCISW		// DCache clean and invalidate
	b	dcsw_op_all		/* tail-call; presumably returns to our caller via x30 */
1512f5dcfefSAndrew Thoelke
1522f5dcfefSAndrew Thoelke
/* ---------------------------------------------------------------------------
 * void disable_mmu_icache_el3(void);
 *
 * As disable_mmu_el3, but additionally clears the I-cache enable bit
 * before running the shared disable/clean sequence.
 * ---------------------------------------------------------------------------
 */
func disable_mmu_icache_el3
	mov	x1, #(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu		/* shared tail in disable_mmu_el3 above */
1562f5dcfefSAndrew Thoelke
/* ---------------------------------------------------------------------------
 * void enable_vfp(void);
 *
 * Enable the use of VFP at EL3:
 *  - set CPACR_VFP_BITS in CPACR_EL1 (presumably the FPEN field, so FP/SIMD
 *    is not trapped at lower ELs — see arch.h)
 *  - clear CPTR_EL3.TFP so FP/SIMD accesses are not trapped to EL3
 * Clobbers: x0, x1.
 * ---------------------------------------------------------------------------
 */
#if SUPPORT_VFP
func enable_vfp
	mrs	x0, cpacr_el1
	orr	x0, x0, #CPACR_VFP_BITS
	msr	cpacr_el1, x0
	mrs	x0, cptr_el3
	mov	x1, #AARCH64_CPTR_TFP
	bic	x0, x0, x1		/* clear TFP: stop trapping FP to EL3 */
	msr	cptr_el3, x0
	isb				/* make the new trap configuration visible */
	ret
#endif
173