/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57 or erratum 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif


	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm


	.macro	dcache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	.macro	icache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	.macro	smc_check label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	$label
	.endm

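	/*
	 * Illustrative usage sketch of the macros above, kept inside a
	 * comment so that nothing extra is assembled when this header is
	 * included. The function name is hypothetical; 'func'/'endfunc'
	 * come from asm_macros_common.S. The routine returns the data
	 * cache line size in bytes in x0:
	 *
	 *	func example_dcache_line_size
	 *		func_prologue
	 *		dcache_line_size x0, x1
	 *		func_epilogue
	 *		ret
	 *	endfunc example_dcache_line_size
	 */
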
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro	vector_base label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro	vector_entry label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Pad the current exception vector entry up to its full size, which
	 * is always 32 instructions. If the entry contains more than 32
	 * instructions, an assembler error is emitted.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm

	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm

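	/*
	 * Illustrative usage sketch of the vector table macros, kept inside
	 * a comment so that nothing extra is assembled when this header is
	 * included. The 'my_vectors' and 'sync_sp_el0' names are
	 * hypothetical:
	 *
	 *	vector_base my_vectors
	 *
	 *	vector_entry sync_sp_el0
	 *		b	sync_sp_el0
	 *	end_vector_entry sync_sp_el0
	 */
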
	/*
	 * Helper macro to generate the best mov/movk combination according
	 * to the value to be moved. The 16 bits starting at bit '_shift'
	 * are tested and, if not zero, they are moved into '_reg' without
	 * affecting the other bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
		.if (\_val >> \_shift) & 0xffff
			.if (\_val & (1 << \_shift - 1))
				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
			.else
				mov	\_reg, \_val & (0xffff << \_shift)
			.endif
		.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers,
	 * generating the best mov/movk combinations. Many base addresses are
	 * 64KB aligned, in which case the macro eliminates the update of
	 * bits 15:0.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use a 'bl' instruction to jump
	 * rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that
	 * will cause the LR to point to a location beyond the function,
	 * thereby misleading the debugger back trace. We therefore insert a
	 * 'nop' after the function call for debug builds, unless the
	 * 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm

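	/*
	 * Illustrative usage sketch, kept inside a comment so that nothing
	 * extra is assembled when this header is included. Loading a
	 * 64KB-aligned constant with mov_imm expands to a single mov of the
	 * non-zero halfword, as the zero halfwords are skipped:
	 *
	 *	mov_imm	x0, 0x80000000
	 *
	 * Jumping to a function that is not expected to return (the
	 * function name is only an example):
	 *
	 *	no_ret	plat_panic_handler
	 */
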
	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm

#if RAS_EXTENSION
	.macro esb
	.inst	0xd503221f
	.endm
#endif

	/*
	 * Helper macro to read a system register value into x0.
	 */
	.macro	read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write the value in x1 to a system register.
	 */
	.macro	write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm

	/*
	 * Macro to issue the speculation barrier instruction introduced by
	 * FEAT_SB, if it is enabled; otherwise fall back to the DSB/ISB
	 * sequence.
	 */
	.macro speculation_barrier
#if ENABLE_FEAT_SB
	sb
#else
	dsb	sy
	isb
#endif
	.endm

	/*
	 * Macro for mitigating against speculative execution beyond ERET.
	 * It uses the speculation barrier instruction introduced by FEAT_SB,
	 * if it is enabled.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm

#endif /* ASM_MACROS_S */
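
	/*
	 * Illustrative usage sketches, kept inside a comment so that nothing
	 * extra is assembled. 'example_lock' is a hypothetical name and
	 * spin_lock is assumed to be the routine declared in lib/spinlock.h.
	 *
	 * Reserving a lock and acquiring it from assembly:
	 *
	 *	define_asm_spinlock example_lock
	 *
	 *	adrp	x0, example_lock
	 *	add	x0, x0, :lo12:example_lock
	 *	bl	spin_lock
	 *
	 * Returning from an exception handler with the speculation
	 * mitigation applied (instead of a bare 'eret'):
	 *
	 *	exception_return
	 */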