xref: /rk3399_ARM-atf/include/arch/aarch64/asm_macros.S (revision 665e71b8ea28162ec7737c1411bca3ea89e5957e)
/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

#if ENABLE_BTI && !ARM_ARCH_AT_LEAST(8, 5)
#error Branch Target Identification requires ARM_ARCH_MINOR >= 5
#endif

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57 or erratum 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif
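
	/*
	 * Illustrative use (a sketch, not part of this file): invalidating
	 * all EL3 TLB entries with the erratum-safe sequence. When either
	 * erratum workaround is enabled, this expands to a TLBI, a DSB and
	 * a second TLBI; otherwise to a single TLBI.
	 *
	 *	TLB_INVALIDATE(alle3)
	 *	dsb	ish
	 *	isb
	 */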


	/*
	 * Save the frame record (x29, x30) and set up the frame pointer on
	 * function entry.
	 */
	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	/*
	 * Restore the frame record saved by func_prologue on function exit.
	 */
	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
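
	/*
	 * Typical use (a hypothetical function, not from this file): pair
	 * the two macros around the body of an assembly function, declared
	 * with the func/endfunc macros from asm_macros_common.S, so that
	 * debuggers can unwind through it.
	 *
	 *	func my_helper
	 *		func_prologue
	 *		...function body...
	 *		func_epilogue
	 *		ret
	 *	endfunc my_helper
	 */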


	/*
	 * Compute the data cache line size in bytes. CTR_EL0.DminLine
	 * (bits [19:16]) holds the log2 of the number of words in the
	 * smallest data cache line, so the line size is 4 << DminLine.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Compute the instruction cache line size in bytes. CTR_EL0.IminLine
	 * (bits [3:0]) holds the log2 of the number of words in the smallest
	 * instruction cache line, so the line size is 4 << IminLine.
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
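
	/*
	 * Illustrative use (a sketch with hypothetical labels): walk a
	 * buffer one data cache line at a time, e.g. in a clean-and-
	 * invalidate loop. x0 holds the current address, x1 the end.
	 *
	 *	dcache_line_size x2, x3
	 *	loop:
	 *		dc	civac, x0
	 *		add	x0, x0, x2
	 *		cmp	x0, x1
	 *		b.lo	loop
	 *		dsb	sy
	 */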


	/*
	 * Extract the exception class from ESR_EL3 and branch to \label if
	 * the exception was not generated by an AArch64 SMC instruction.
	 */
	.macro	smc_check  label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
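
	/*
	 * Typical use (hypothetical handler, not from this file): at the
	 * top of a synchronous exception handler, divert anything that is
	 * not an SMC to an error path. Note that the macro clobbers x0.
	 *
	 *	smc_check unexpected_sync_exception
	 *	...handle the SMC...
	 */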

	/*
	 * Declare the exception vector table, enforcing that it is aligned
	 * to a 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value in the padding so that the
	 * padding decodes as illegal AArch64 instructions. This increases
	 * security and robustness, and potentially facilitates debugging.
	 */
	.macro vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing that it
	 * is aligned to a 128-byte boundary, as required by the ARMv8
	 * architecture.
	 * Use zero bytes as the fill value in the padding so that the
	 * padding decodes as illegal AArch64 instructions. This increases
	 * security and robustness, and potentially facilitates debugging.
	 */
	.macro vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Pad the current exception vector entry out to its full size of
	 * 32 instructions. If the entry contains more than 32 instructions,
	 * an assembler error is emitted.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
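
	/*
	 * Illustrative use (hypothetical labels, not from this file): the
	 * table declaration, entry and terminator macros combine as
	 * follows, one vector_entry/end_vector_entry pair per vector:
	 *
	 *	vector_base my_exception_vectors
	 *
	 *	vector_entry my_sync_sp_el0
	 *		b	my_sync_handler
	 *	end_vector_entry my_sync_sp_el0
	 *
	 * ...and so on for the remaining fifteen vectors.
	 */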

	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm
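
	/*
	 * Illustrative use (hypothetical storage and size, not from this
	 * file): given per-CPU stack storage reserved elsewhere, e.g.
	 *
	 *	my_stacks:
	 *	.space	MY_STACK_SIZE * MY_CORE_COUNT
	 *
	 * 'get_my_mp_stack my_stacks, MY_STACK_SIZE' leaves the calling
	 * CPU's stack base in x0, while on a uniprocessor image
	 * 'get_up_stack my_stacks, MY_STACK_SIZE' does the same without
	 * calling plat_my_core_pos(). Both return the highest address of
	 * the stack region, since stacks grow downwards.
	 */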

	/*
	 * Helper macro to generate the best mov/movk combinations
	 * according to the value to be moved. The 16-bit field of '_val'
	 * starting at bit '_shift' is tested and, if not zero, moved into
	 * '_reg' without affecting the other bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
		.if (\_val >> \_shift) & 0xffff
			/*
			 * If any bits below '_shift' are set, an earlier
			 * expansion has already written them, so use movk
			 * to preserve them; otherwise a plain mov suffices.
			 */
			.if (\_val & ((1 << \_shift) - 1))
				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
			.else
				mov	\_reg, \_val & (0xffff << \_shift)
			.endif
		.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit
	 * registers, generating the best mov/movk combinations. Many base
	 * addresses are 64KB aligned; in that case the macro eliminates
	 * the update of bits 15:0 altogether.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm
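
	/*
	 * Illustrative expansion (a worked example, not from this file):
	 * 'mov_imm x0, 0x80001000' emits
	 *
	 *	mov	x0, #0x1000
	 *	movk	x0, #0x8000, LSL #16
	 *
	 * while the 64KB-aligned 'mov_imm x0, 0x80000000' collapses to the
	 * single instruction 'mov x0, #0x80000000'.
	 */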

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use the 'bl' instruction to jump
	 * rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that'll
	 * cause the LR to point to a location beyond the function, thereby
	 * misleading the debugger's back trace. We therefore insert a 'nop'
	 * after the function call for debug builds, unless the 'skip_nop'
	 * parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
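
	/*
	 * Typical use (a sketch, not from this file): the tail of a fatal
	 * error path, where the callee never returns.
	 *
	 *	no_ret	plat_panic_handler
	 */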

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
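
	/*
	 * Illustrative use (hypothetical lock name): reserve the lock in a
	 * data section, then take and release it from C or assembly via
	 * the spin_lock()/spin_unlock() helpers declared in lib/spinlock.h.
	 *
	 *	.section .data.my_lock
	 *	define_asm_spinlock my_lock
	 */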

#if RAS_EXTENSION
	/*
	 * Emit an Error Synchronization Barrier. 0xd503221f is the
	 * encoding of the ESB instruction, expressed with .inst so that it
	 * assembles even when the toolchain does not know the mnemonic.
	 */
	.macro esb
	.inst	0xd503221f
	.endm
#endif

	/*
	 * Helper macro to read a system register value into x0.
	 */
	.macro	read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write the value in x1 to a system register.
	 */
	.macro	write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
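
	/*
	 * Illustrative use (hypothetical labels, not from this file): each
	 * macro expands to a tiny accessor ending in 'ret', so placing one
	 * after a label yields a complete helper. The 'bti j' landing pad
	 * allows the helper to be reached through an indirect 'br' jump,
	 * e.g. from a dispatch table, when BTI is enabled.
	 *
	 *	read_scr_el3:
	 *		read	scr_el3
	 *	write_scr_el3:
	 *		write	scr_el3
	 */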

	/*
	 * Macro to mitigate speculative execution beyond ERET. The
	 * 'dsb nsh' and 'isb' are never executed architecturally, but they
	 * prevent the CPU from speculating past the exception return.
	 */
	.macro exception_return
	eret
	dsb	nsh
	isb
	.endm
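
	/*
	 * Typical use (a sketch, not from this file): terminate an
	 * exception handler with the macro instead of a bare 'eret', so
	 * that the speculation barrier is always present.
	 *
	 *	...restore the saved context...
	 *	exception_return
	 */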

#endif /* ASM_MACROS_S */
