/*
 * Copyright 2018-2020 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#include <asm_macros.S>
#include <bl31_data.h>

.global el2_2_aarch32
.global prefetch_disable

/* SPSR_EL3.M[4] is set when the exception was taken from AArch32 */
#define SPSR_EL3_M4		0x10
/* SPSR_EL3.M[3:2] holds the exception level of the caller */
#define SPSR_EL_MASK		0xC
#define SPSR_EL2		0x8
/* SCR_EL3: NS=1, RES1[5:4], HCE=1, RW=0 (lower ELs are AArch32) */
#define SCR_EL3_4_EL2_AARCH32	0x131
/* SPSR for AArch32 Hyp mode, little-endian, SError/IRQ/FIQ masked */
#define SPSR32_EL2_LE		0x1DA

#define MIDR_PARTNUM_START	4
#define MIDR_PARTNUM_WIDTH	12
#define MIDR_PARTNUM_A53	0xD03
#define MIDR_PARTNUM_A57	0xD07
#define MIDR_PARTNUM_A72	0xD08

/*
 * uint64_t el2_2_aarch32(u_register_t smc_id,
 *			  u_register_t start_addr,
 *			  u_register_t parm1,
 *			  u_register_t parm2)
 * This function switches the execution state of EL2 from AArch64 to
 * AArch32.
 * Note: MUST be called from EL2 @ AArch64.
 * in:  x0 = smc function id
 *      x1 = start address for EL2 @ AArch32
 *      x2 = first parameter to pass to EL2 @ AArch32
 *      x3 = second parameter to pass to EL2 @ AArch32
 * out: x0 = 0, on success
 *      x0 = -1, on failure
 * uses x0, x1, x2, x3
 */
func el2_2_aarch32

	/* check that the caller is EL2 @ AArch64 - error return if not */
	mrs	x0, spsr_el3
	/* see if we were called from AArch32 */
	tst	x0, #SPSR_EL3_M4
	b.ne	2f

	/* see if we were called from EL2 */
	and	x0, x0, #SPSR_EL_MASK
	cmp	x0, #SPSR_EL2
	b.ne	2f

	/* set elr_el3 to the AArch32 entry point */
	msr	elr_el3, x1

	/* set scr_el3 */
	mov	x0, #SCR_EL3_4_EL2_AARCH32
	msr	scr_el3, x0

	/* set sctlr_el2 */
	ldr	x1, =SCTLR_EL2_RES1
	msr	sctlr_el2, x1

	/* set spsr_el3 */
	ldr	x0, =SPSR32_EL2_LE
	msr	spsr_el3, x0

	/* x2 = parm1
	 * x3 = parm2
	 */

	/* set the parameters to be passed through to EL2 @ AArch32 */
	mov	x1, x2
	mov	x2, x3

	/* x1 = parm1
	 * x2 = parm2
	 */

	mov	x0, xzr
	/* invalidate the icache */
	ic	iallu
	dsb	sy
	isb
	b	1f
2:
	/* error return */
	mvn	x0, xzr
	ret
1:
	eret
endfunc el2_2_aarch32
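
/*
 * Illustrative sketch (not part of the original file): roughly how a
 * hypervisor running at EL2 @ AArch64 might invoke el2_2_aarch32 through
 * the SiP SMC interface. SIP_SVC_EL2_2_AARCH32 and aarch32_entry are
 * hypothetical names used only for this example; the real SMC function id
 * and entry point are defined by the platform's SiP service and the
 * AArch32 hypervisor image, not by this file.
 *
 *	ldr	x0, =SIP_SVC_EL2_2_AARCH32	// hypothetical SMC function id
 *	ldr	x1, =aarch32_entry		// EL2 @ AArch32 entry point
 *	mov	x2, #0				// parm1, seen as r1 in AArch32
 *	mov	x3, #0				// parm2, seen as r2 in AArch32
 *	smc	#0
 *	// Per the contract above, x0 == -1 if the caller was not EL2 @
 *	// AArch64. On success the call does not return here: execution
 *	// resumes at aarch32_entry with EL2 in AArch32 state.
 */
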
/*
 * int prefetch_disable(u_register_t smc_id, u_register_t mask)
 * This function marks the cores that need prefetch disabled.
 * Secondary cores have prefetch disabled when they are released from
 * reset; the boot core has prefetch disabled when this call is made.
 * in:  x0 = function id
 *      x1 = core mask, where bit[0]=core0, bit[1]=core1, etc.
 *           If a bit in the mask is set, prefetch is disabled for that
 *           core.
 * out: x0 = SMC_SUCCESS
 */
func prefetch_disable
	stp	x4, x30, [sp, #-16]!

	mov	x3, x1

	/* x1 = core prefetch disable mask */
	/* x3 = core prefetch disable mask */

	/* store the mask */
	mov	x0, #PREFETCH_DIS_OFFSET
	bl	_set_global_data

	/* x3 = core prefetch disable mask */

	/* see if we need to disable prefetch on THIS core */
	bl	plat_my_core_mask

	/* x0 = core mask lsb */
	/* x3 = core prefetch disable mask */

	tst	x3, x0
	b.eq	1f

	/* read midr_el1 */
	mrs	x1, midr_el1

	/* x1 = midr_el1 */

	mov	x0, xzr
	bfxil	x0, x1, #MIDR_PARTNUM_START, #MIDR_PARTNUM_WIDTH

	/* x0 = part number (a53, a57, a72, etc.) */

	/* branch on cpu-specific handling */
	cmp	x0, #MIDR_PARTNUM_A57
	b.eq	1f
	cmp	x0, #MIDR_PARTNUM_A72
	b.ne	1f

	bl	_disable_ldstr_pfetch_A72
	b	1f
1:
	ldp	x4, x30, [sp], #16
	mov	x0, xzr
	ret
endfunc prefetch_disable
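
/*
 * Illustrative sketch (not part of the original file): roughly how a
 * caller might ask for prefetch to be disabled on cores 1 and 2 through
 * the SiP SMC interface. SIP_SVC_PREFETCH_DIS is a hypothetical function
 * id used only for this example; the real id is defined by the platform's
 * SiP service.
 *
 *	ldr	x0, =SIP_SVC_PREFETCH_DIS	// hypothetical SMC function id
 *	mov	x1, #0x6			// bit[1]=core1, bit[2]=core2
 *	smc	#0
 *	// Returns with x0 == SMC_SUCCESS (0). The mask is stored for the
 *	// secondary cores to honour when they are released from reset; if
 *	// the calling core's bit is set and it is an A72, prefetch is
 *	// disabled immediately via _disable_ldstr_pfetch_A72.
 */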