/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57 or erratum 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif
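
/*
 * Example usage (illustrative): invalidate all EL3 TLB entries. The erratum
 * workaround, when enabled, is applied transparently by the macro:
 *
 *	TLB_INVALIDATE(alle3)
 *	dsb	ish
 *	isb
 */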


	/*
	 * Create a stack frame at the start of an assembly function. Will also
	 * add all necessary call frame information (cfi) directives for a
	 * pretty stack trace. This is necessary as there is quite a bit of
	 * flexibility within a stack frame and the stack pointer can move
	 * around throughout the function. If the debugger isn't told where to
	 * find things, it gets lost, gives up and displays nothing. So inform
	 * the debugger of what's where. Anchor the Canonical Frame Address
	 * (CFA; the thing used to track what's where) to the frame pointer as
	 * that's not expected to change in the function body and no extra
	 * bookkeeping will be necessary, allowing free movement of the sp.
	 *
	 *   _frame_size: requested space for the caller to use. Must be a
	 *     multiple of 16 for stack pointer alignment.
	 */
	.macro	func_prologue _frame_size=0
	.if \_frame_size & 0xf
	.error "frame_size must have stack pointer alignment (multiple of 16)"
	.endif

	/* put frame record at top of frame */
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.if \_frame_size
	sub	sp, sp, #\_frame_size
	.endif

	/* point CFA to start of frame record, i.e. x29 + 0x10 */
	.cfi_def_cfa	x29, 0x10
	/* inform it about x29, x30 locations */
	.cfi_offset	x30, -0x8
	.cfi_offset	x29, -0x10
	.endm

	/*
	 * Clear the stack frame at the end of an assembly function.
	 *
	 *   _frame_size: the value passed to func_prologue
	 */
	.macro	func_epilogue _frame_size=0
	/* remove requested space */
	.if \_frame_size
	add	sp, sp, #\_frame_size
	.endif
	ldp	x29, x30, [sp], #0x10
	.endm
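
	/*
	 * Example usage (illustrative; 'my_func' is a hypothetical function
	 * and 0x20 an arbitrary multiple of 16):
	 *
	 *   func my_func
	 *   func_prologue 0x20		// frame record plus 32 bytes of scratch
	 *   ...			// scratch space is [sp] to [sp, #0x18]
	 *   func_epilogue 0x20
	 *   ret
	 *   endfunc my_func
	 */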


	/*
	 * Compute the data cache line size in bytes. CTR_EL0.DminLine (bits
	 * [19:16]) holds log2 of the number of 4-byte words in the smallest
	 * D-cache line.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Compute the instruction cache line size in bytes. CTR_EL0.IminLine
	 * (bits [3:0]) holds log2 of the number of 4-byte words in the
	 * smallest I-cache line.
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
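
	/*
	 * Example usage (illustrative sketch): walk a buffer one D-cache line
	 * at a time, cleaning each line (x0 = current address):
	 *
	 *   dcache_line_size x1, x2	// x1 = line size in bytes
	 *   1:	dc	cvac, x0	// clean line to point of coherency
	 *   add	x0, x0, x1
	 *   ...			// branch back to 1b until buffer end
	 */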


	/*
	 * Branch to 'label' unless the exception that was taken is an SMC
	 * from AArch64 state, as reported by ESR_EL3.EC. Clobbers x0.
	 */
	.macro	smc_check  label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
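
	/*
	 * Example usage (illustrative; 'unexpected_exception' is a
	 * hypothetical label in the surrounding handler):
	 *
	 *   smc_check unexpected_exception
	 */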

	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Add padding bytes to fill up to the size of a full exception vector
	 * entry, which is always 32 instructions. If the entry contains more
	 * than 32 instructions, an error is emitted.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
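
	/*
	 * Example usage (illustrative sketch; 'my_vectors' and the handler
	 * label are hypothetical):
	 *
	 *   vector_base my_vectors
	 *
	 *   vector_entry my_sync_sp_el0
	 *   b	.			// handler body, up to 32 instructions
	 *   end_vector_entry my_sync_sp_el0
	 */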

	/*
	 * This macro calculates the base address of the current CPU's MP stack
	 * using the plat_my_core_pos() index, the name of the stack storage
	 * and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm
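
	/*
	 * Example usage (illustrative; 'platform_stack' is a hypothetical
	 * symbol whose storage is reserved elsewhere):
	 *
	 *   get_up_stack platform_stack, 0x1000
	 *   mov	sp, x0
	 */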

	/*
	 * Helper macro to generate the best mov/movk combinations according
	 * to the value to be moved. The 16 bits from '_shift' are tested and,
	 * if not zero, they are moved into '_reg' without affecting
	 * other bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
		.if (\_val >> \_shift) & 0xffff
			.if (\_val & ((1 << \_shift) - 1))
				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
			.else
				mov	\_reg, \_val & (0xffff << \_shift)
			.endif
		.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers,
	 * generating the best mov/movk combinations. Many base addresses are
	 * 64KB aligned; in that case the macro eliminates updating bits 15:0.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm
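
	/*
	 * Example usage (illustrative): a 64KB-aligned constant needs only a
	 * single instruction, as bits 15:0 are zero:
	 *
	 *   mov_imm	x0, 0x80010000	// expands to a single mov
	 */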

	/*
	 * Macro to mark instances where we're jumping to a function and don't
	 * expect a return. To provide the function being jumped to with
	 * additional information, we use 'bl' instruction to jump rather than
	 * 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro expansion
	 * happens to be the last location in a function, that'll cause the LR
	 * to point to a location beyond the function, thereby misleading the
	 * debugger's back trace. We therefore insert a 'nop' after the function
	 * call for debug builds, unless the 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
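
	/*
	 * Example usage (illustrative): jump to a function that never
	 * returns, e.g. a panic handler:
	 *
	 *   no_ret	plat_panic_handler
	 */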

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
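
	/*
	 * Example usage (illustrative; 'my_lock' is a hypothetical name; the
	 * lock would then be taken by loading its address into x0 and
	 * calling the spin_lock function):
	 *
	 *   .data
	 *   define_asm_spinlock my_lock
	 */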

	/*
	 * Execute the esb (Error Synchronization Barrier) instruction if the
	 * RAS extension is implemented, else a NOP. The raw hint encoding is
	 * used so that the macro assembles with toolchains unaware of RAS.
	 */
	.macro esb
	.inst	0xd503221f
	.endm

	/*
	 * Helper macro to read system register value into x0
	 */
	.macro	read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write value from x1 to system register
	 */
	.macro	write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm

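	/*
	 * Example usage (illustrative): emit a jump-table entry that returns
	 * MIDR_EL1 in x0; 'bti j' makes it a valid target of an indirect
	 * 'br' branch when BTI is enabled:
	 *
	 *   read	midr_el1
	 */
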
	/*
	 * The "sb" instruction was introduced later into the architecture,
	 * so not all toolchains understand it. Some deny its usage unless
	 * a supported processor is specified on the build command line.
	 * Use sb's system register encoding to work around this; we already
	 * guard the sb execution with a feature flag.
	 */

	.macro sb_barrier_insn
	msr	SYSREG_SB, xzr
	.endm

	/*
	 * Macro for using the speculation barrier instruction introduced by
	 * FEAT_SB, if it's enabled.
	 */
	.macro speculation_barrier
#if ENABLE_FEAT_SB
	sb_barrier_insn
#else
	dsb	sy
	isb
#endif
	.endm

	/*
	 * Macro for mitigating against speculative execution beyond ERET. Uses
	 * the speculation barrier instruction introduced by FEAT_SB, if it's
	 * enabled.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb_barrier_insn
#else
	dsb	nsh
	isb
#endif
	.endm

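	/*
	 * Example usage (illustrative): use in place of a bare 'eret' at the
	 * end of an exception handler so that speculation cannot run past
	 * the return:
	 *
	 *   exception_return
	 */
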
	/*
	 * Macro to unmask External Aborts by clearing the PSTATE.A bit.
	 * An explicit synchronization event (isb) ensures that a pending
	 * abort is taken immediately once unmasked.
	 */
	.macro	unmask_async_ea
	msr	daifclr, #DAIF_ABT_BIT
	isb
	.endm
#endif /* ASM_MACROS_S */