/*
 * Copyright (c) 2016-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cpu_macros.S>
#include <common/bl_common.h>
#include <lib/cpus/cpu_ops.h>
#include <lib/el3_runtime/cpu_data.h>

	/*
	 * The below macro returns the cpu_ops structure matching the
	 * midr of the core. It reads the MIDR and finds the matching
	 * entry in cpu_ops entries. Only the implementation and part number
	 * are used to match the entries.
	 *
	 * Expanded inline (rather than called) so it is usable from the
	 * reset path before a stack is available; an AAPCS-compliant
	 * wrapper is provided separately as get_cpu_ops_ptr.
	 *
	 * Return :
	 *	r0 - The matching cpu_ops pointer on Success
	 *	r0 - 0 on failure.
	 * Clobbers: r0 - r5
	 */
.macro get_cpu_ops_ptr_impl
	/* Get the cpu_ops start and end locations */
	/* Pre-bias both bounds by CPU_MIDR so r4 points straight at each
	 * entry's midr field while iterating. */
	ldr	r4, =(__CPU_OPS_START__ + CPU_MIDR)
	ldr	r5, =(__CPU_OPS_END__ + CPU_MIDR)

	/* Initialize the return parameter (0 = not found) */
	mov	r0, #0

	/* Read the MIDR (AArch32 coprocessor register via ldcopr) */
	ldcopr	r2, MIDR
	ldr	r3, =CPU_IMPL_PN_MASK

	/* Retain only the implementation and part number using mask */
	and	r2, r2, r3
1:
	/* Check if we have reached end of list */
	cmp	r4, r5
	bhs	error_exit\@

	/* load the midr from the cpu_ops, post-incrementing to the next entry */
	ldr	r1, [r4], #CPU_OPS_SIZE
	and	r1, r1, r3

	/* Check if midr matches to midr of this core */
	cmp	r1, r2
	bne	1b

	/* Subtract the increment and offset to get the cpu-ops pointer */
	sub	r0, r4, #(CPU_OPS_SIZE + CPU_MIDR)
#if ENABLE_ASSERTIONS
	/* The computed pointer must never be NULL when a match was found */
	cmp	r0, #0
	ASM_ASSERT(ne)
#endif
/* \@ gives each expansion a unique exit label so the macro can be used twice */
error_exit\@:
.endm

#if defined(IMAGE_BL1) || defined(IMAGE_BL32) || \
	(defined(IMAGE_BL2) && RESET_TO_BL2)

	/*
	 * The reset handler common to all platforms. After a matching
	 * cpu_ops structure entry is found, the correponding reset_handler
	 * in the cpu_ops is invoked. The reset handler is invoked very early
	 * in the boot sequence and it is assumed that we can clobber r0 - r10
	 * without the need to follow AAPCS.
	 * Clobbers: r0 - r10
	 */
	.globl	reset_handler
func reset_handler
	/* No stack yet: preserve the return address in a high register */
	mov	r8, lr

	/* The plat_reset_handler can clobber r0 - r7 */
	bl	plat_reset_handler

	/* Get the matching cpu_ops pointer (clobbers: r0 - r5) */
	get_cpu_ops_ptr_impl

#if ENABLE_ASSERTIONS
	/* A core with no cpu_ops entry cannot be brought up safely */
	cmp	r0, #0
	ASM_ASSERT(ne)
#endif

	/* Get the cpu_ops reset handler */
	ldr	r1, [r0, #CPU_RESET_FUNC]
	cmp	r1, #0
	/* Restore lr first so the cpu reset handler returns to our caller */
	mov	lr, r8
	/* Tail-call the cpu-specific handler if present, else plain return */
	bxne	r1
	bx	lr
endfunc reset_handler

#endif

	.globl	get_cpu_ops_ptr
/* performs an AAPCS compliant call to get_cpu_ops_ptr_impl */
/* (saves/restores r4-r5 so only r0-r3 are clobbered, per AAPCS) */
func get_cpu_ops_ptr
	push	{r4 - r5, lr}
	get_cpu_ops_ptr_impl
	pop	{r4 - r5, pc}
endfunc get_cpu_ops_ptr

/*
 * Extract CPU revision and variant, and combine them into a single numeric for
 * easier comparison.
 * Return: r0 = (variant << 4) | revision, packed into r0[7:0].
 * Clobbers: r1
 */
	.globl	cpu_get_rev_var
func cpu_get_rev_var
	ldcopr	r1, MIDR

	/*
	 * Extract the variant[23:20] and revision[3:0] from r1 and pack it in
	 * r0[0:7] as variant[7:4] and revision[3:0]:
	 *
	 * First extract r1[23:16] to r0[7:0] and zero fill the rest. Then
	 * extract r1[3:0] into r0[3:0] retaining other bits.
	 */
	ubfx	r0, r1, #(MIDR_VAR_SHIFT - MIDR_REV_BITS), #(MIDR_REV_BITS + MIDR_VAR_BITS)
	bfi	r0, r1, #MIDR_REV_SHIFT, #MIDR_REV_BITS
	bx	lr
endfunc cpu_get_rev_var

/*
 * Compare the CPU's revision-variant (r0) with a given value (r1), for errata
 * application purposes. If the revision-variant is less than or same as a given
 * value, indicates that errata applies; otherwise not.
12810bcd761SJeenu Viswambharan */ 12910bcd761SJeenu Viswambharan .globl cpu_rev_var_ls 13010bcd761SJeenu Viswambharanfunc cpu_rev_var_ls 13110bcd761SJeenu Viswambharan cmp r0, r1 13210bcd761SJeenu Viswambharan movls r0, #ERRATA_APPLIES 13310bcd761SJeenu Viswambharan movhi r0, #ERRATA_NOT_APPLIES 13410bcd761SJeenu Viswambharan bx lr 13510bcd761SJeenu Viswambharanendfunc cpu_rev_var_ls 13610bcd761SJeenu Viswambharan 13756e04999SDimitris Papastamos/* 13856e04999SDimitris Papastamos * Compare the CPU's revision-variant (r0) with a given value (r1), for errata 13956e04999SDimitris Papastamos * application purposes. If the revision-variant is higher than or same as a 14056e04999SDimitris Papastamos * given value, indicates that errata applies; otherwise not. 14156e04999SDimitris Papastamos */ 14256e04999SDimitris Papastamos .globl cpu_rev_var_hs 14356e04999SDimitris Papastamosfunc cpu_rev_var_hs 14456e04999SDimitris Papastamos cmp r0, r1 14556e04999SDimitris Papastamos movge r0, #ERRATA_APPLIES 14656e04999SDimitris Papastamos movlt r0, #ERRATA_NOT_APPLIES 14756e04999SDimitris Papastamos bx lr 14856e04999SDimitris Papastamosendfunc cpu_rev_var_hs 149