/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

/*
 * Branch Target Identification (FEAT_BTI) is an ARMv8.5 extension: refuse
 * to build with it enabled when targeting an earlier architecture revision.
 */
#if ENABLE_BTI && !ARM_ARCH_AT_LEAST(8, 5)
#error Branch Target Identification requires ARM_ARCH_MINOR >= 5
#endif

/*
 * TLBI instruction with type specifier that implements the workaround for
 * errata 813419 of Cortex-A57 or errata 1286807 of Cortex-A76.
 * The workaround issues the invalidation twice with a DSB ISH in between,
 * per the published errata workaround sequence.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif


	/*
	 * Standard AAPCS64 function prologue: save the frame pointer (x29)
	 * and link register (x30) with a pre-indexed store that keeps SP
	 * 16-byte aligned, then establish the new frame pointer.
	 */
	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	/*
	 * Matching epilogue: restore x29/x30 and release the frame record.
	 */
	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm


	/*
	 * Compute the data cache line size in bytes.
	 * Out:     \reg = line size, derived as 4 << CTR_EL0.DminLine
	 *          (DminLine is the log2 of the line size in words)
	 * Clobber: \tmp
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Compute the instruction cache line size in bytes.
	 * Out:     \reg = line size, derived as 4 << CTR_EL0.IminLine
	 * Clobber: \tmp
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Branch to \label unless the exception taken to EL3 was an SMC
	 * executed in AArch64 state, as reported by ESR_EL3.EC.
	 * Clobber: x0, condition flags
	 *
	 * Fix: the branch previously read 'b.ne $label'. GNU as substitutes
	 * macro arguments with a '\' prefix ('$' has no substitution
	 * meaning), so '$label' assembled as a reference to a literal
	 * symbol named '$label' instead of the macro argument.
	 */
	.macro	smc_check  label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro	vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align	11, 0			/* 2^11 = 2KB alignment, zero-filled */
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8
	 * architecture. Use zero bytes as the fill value to be stored in
	 * the padding bytes so that it inserts illegal AArch64
	 * instructions. This increases security, robustness and potentially
	 * facilitates debugging.
	 */
	.macro	vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame		/* emit unwind info into .debug_frame only */
	.section \section_name, "ax"
	.align	7, 0			/* 2^7 = 128-byte alignment, zero-filled */
	.type	\label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Add the bytes until fill the full exception vector, whose size is
	 * always 32 instructions. If there are more than 32 instructions in
	 * the exception vector then an error is emitted: the .fill repeat
	 * count (\label + 128 - current location) becomes negative, which
	 * the assembler rejects.
	 */
	.macro	end_vector_entry  label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm

	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out:     X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro	get_my_mp_stack  _name, _size
	bl	plat_my_core_pos	/* X0 = this core's linear index */
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)	/* X2 = end of stack 0 */
	mov	x1, #\_size
	madd	x0, x0, x1, x2		/* X0 = X2 + (index * stack size) */
	.endm

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro	get_up_stack  _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm
	/*
	 * Helper macro to generate the best mov/movk combinations according
	 * to the value to be moved. The 16 bits from '_shift' are tested
	 * and, if not zero, they are moved into '_reg' without affecting
	 * other bits.
	 */
	.macro	_mov_imm16  _reg, _val, _shift
	.if (\_val >> \_shift) & 0xffff
	/*
	 * NOTE: in GAS expressions '<<' binds tighter than '-', so
	 * '1 << \_shift - 1' parses as '(1 << \_shift) - 1', i.e. a mask of
	 * the bits below \_shift. If any of those lower bits are set, a
	 * 'mov' was already emitted for them, so merge with 'movk';
	 * otherwise a plain 'mov' (which zeroes the other bits) is enough.
	 */
	.if (\_val & (1 << \_shift - 1))
	movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
	.else
	mov	\_reg, \_val & (0xffff << \_shift)
	.endif
	.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers
	 * which generates the best mov/movk combinations. Many base
	 * addresses are 64KB aligned; the macro will eliminate updating
	 * bits 15:0 in that case.
	 */
	.macro	mov_imm  _reg, _val
	.if (\_val) == 0
	mov	\_reg, #0
	.else
	/* Emit at most one mov/movk per 16-bit chunk that is non-zero */
	_mov_imm16	\_reg, (\_val), 0
	_mov_imm16	\_reg, (\_val), 16
	_mov_imm16	\_reg, (\_val), 32
	_mov_imm16	\_reg, (\_val), 48
	.endif
	.endm

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use 'bl' instruction to jump
	 * rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that'll
	 * cause the LR to point a location beyond the function, thereby
	 * misleading debugger back trace. We therefore insert a 'nop' after
	 * the function call for debug builds, unless 'skip_nop' parameter
	 * is non-zero.
	 */
	.macro	no_ret  _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop		/* .ifeq is true when the expression is 0 */
	nop
	.endif
#endif
	.endm
	/*
	 * Reserve space for a spin lock in assembly file.
	 * Alignment and size come from lib/spinlock.h so that the storage
	 * matches the C spinlock implementation.
	 */
	.macro	define_asm_spinlock  _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm

#if RAS_EXTENSION
	/*
	 * Error Synchronization Barrier, emitted as a raw encoding —
	 * presumably so the file assembles with toolchains that lack the
	 * RAS 'esb' mnemonic (TODO confirm).
	 */
	.macro	esb
	.inst	0xd503221f
	.endm
#endif

	/*
	 * Helper macro to read system register value into x0.
	 * The 'bti j' makes the expansion a valid BTI landing pad, implying
	 * these stubs are reached via indirect branches — verify against
	 * callers.
	 */
	.macro	read  reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write value from x1 to system register.
	 */
	.macro	write  reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm

	/*
	 * Macro for mitigating against speculative execution beyond ERET.
	 * Uses the speculation barrier instruction introduced by FEAT_SB,
	 * if it's enabled; otherwise falls back to a DSB NSH + ISB sequence
	 * placed after the ERET so speculation past it is constrained.
	 */
	.macro	exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm

#endif /* ASM_MACROS_S */