/*
 * Copyright (c) 2016-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cpu_macros.S>
#include <common/bl_common.h>
#include <lib/cpus/cpu_ops.h>
#include <lib/el3_runtime/cpu_data.h>

	/*
	 * The below macro returns a pointer to the cpu_ops structure matching
	 * the MIDR of the core. It reads the MIDR and finds the matching
	 * entry in the cpu_ops entries. Only the implementer and part number
	 * are used to match the entries.
	 * Return :
	 *	r0 - The matching cpu_ops pointer on success
	 *	r0 - 0 on failure
	 * Clobbers: r0 - r5
	 */
.macro get_cpu_ops_ptr_impl
	/* Get the cpu_ops start and end locations */
	ldr	r4, =(__CPU_OPS_START__ + CPU_MIDR)
	ldr	r5, =(__CPU_OPS_END__ + CPU_MIDR)

	/* Initialize the return parameter */
	mov	r0, #0

	/* Read the MIDR */
	ldcopr	r2, MIDR
	ldr	r3, =CPU_IMPL_PN_MASK

	/* Retain only the implementer and part number using the mask */
	and	r2, r2, r3
1:
	/* Check if we have reached the end of the list */
	cmp	r4, r5
	bhs	error_exit\@

	/* Load the midr from the cpu_ops and advance to the next entry */
	ldr	r1, [r4], #CPU_OPS_SIZE
	and	r1, r1, r3

	/* Check if the midr matches the midr of this core */
	cmp	r1, r2
	bne	1b

	/* Subtract the increment and offset to get the cpu_ops pointer */
	sub	r0, r4, #(CPU_OPS_SIZE + CPU_MIDR)
#if ENABLE_ASSERTIONS
	cmp	r0, #0
	ASM_ASSERT(ne)
#endif
error_exit\@:
.endm

#if defined(IMAGE_BL1) || defined(IMAGE_BL32) || \
	(defined(IMAGE_BL2) && RESET_TO_BL2)
	/*
	 * The reset handler common to all platforms. After a matching
	 * cpu_ops structure entry is found, the corresponding reset_handler
	 * in the cpu_ops is invoked. The reset handler is invoked very early
	 * in the boot sequence and it is assumed that we can clobber r0 - r10
	 * without the need to follow AAPCS.
	 * Clobbers: r0 - r10
	 */
	.globl	reset_handler
func reset_handler
	mov	r8, lr

	/* The plat_reset_handler can clobber r0 - r7 */
	bl	plat_reset_handler

	/* Get the matching cpu_ops pointer (clobbers: r0 - r5) */
	get_cpu_ops_ptr_impl

#if ENABLE_ASSERTIONS
	cmp	r0, #0
	ASM_ASSERT(ne)
#endif

	/* Get the cpu_ops reset handler and invoke it, if present */
	ldr	r1, [r0, #CPU_RESET_FUNC]
	cmp	r1, #0
	mov	lr, r8
	bxne	r1
	bx	lr
endfunc reset_handler

#endif

	.globl	get_cpu_ops_ptr
/* AAPCS-compliant wrapper around get_cpu_ops_ptr_impl; preserves r4 and r5 */
func get_cpu_ops_ptr
	push	{r4 - r5, lr}
	get_cpu_ops_ptr_impl
	pop	{r4 - r5, pc}
endfunc get_cpu_ops_ptr

/*
 * Extract the CPU revision and variant, and combine them into a single number
 * for easier comparison.
 */
	.globl	cpu_get_rev_var
func cpu_get_rev_var
	ldcopr	r1, MIDR

	/*
	 * Extract the variant[23:20] and revision[3:0] from r1 and pack them
	 * into r0[7:0] as variant[7:4] and revision[3:0]:
	 *
	 * First extract r1[23:16] to r0[7:0] and zero fill the rest. Then
	 * insert r1[3:0] into r0[3:0], retaining the other bits.
	 */
	ubfx	r0, r1, #(MIDR_VAR_SHIFT - MIDR_REV_BITS), #(MIDR_REV_BITS + MIDR_VAR_BITS)
	bfi	r0, r1, #MIDR_REV_SHIFT, #MIDR_REV_BITS
	bx	lr
endfunc cpu_get_rev_var
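
/*
 * For example, a core reporting variant 2 and revision 1 in its MIDR (an
 * "r2p1" part) yields 0x21 from cpu_get_rev_var: variant in r0[7:4],
 * revision in r0[3:0]. This packing lets the cpu_rev_var_* helpers below
 * compare revision-variant values as plain integers.
 */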

/*
 * Compare the CPU's revision-variant (r0) with a given value (r1), for errata
 * application purposes. If the revision-variant is less than or the same as
 * the given value, the errata applies; otherwise it does not.
 * Return:
 *	r0 - ERRATA_APPLIES or ERRATA_NOT_APPLIES
 */
	.globl	cpu_rev_var_ls
func cpu_rev_var_ls
	cmp	r0, r1
	movls	r0, #ERRATA_APPLIES
	movhi	r0, #ERRATA_NOT_APPLIES
	bx	lr
endfunc cpu_rev_var_ls

/*
 * Compare the CPU's revision-variant (r0) with a given value (r1), for errata
 * application purposes. If the revision-variant is higher than or the same as
 * the given value, the errata applies; otherwise it does not.
 * Return:
 *	r0 - ERRATA_APPLIES or ERRATA_NOT_APPLIES
 */
	.globl	cpu_rev_var_hs
func cpu_rev_var_hs
	cmp	r0, r1
	movge	r0, #ERRATA_APPLIES
	movlt	r0, #ERRATA_NOT_APPLIES
	bx	lr
endfunc cpu_rev_var_hs
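
/*
 * Usage sketch (hypothetical errata number, mirroring how the per-CPU files
 * call these helpers): a check for an erratum fixed in r1p2 passes the last
 * affected revision-variant, 0x11 (r1p1), in r1 and tail-calls the helper:
 *
 * func check_errata_12345
 *	mov	r1, #0x11
 *	b	cpu_rev_var_ls
 * endfunc check_errata_12345
 *
 * where r0 holds the value previously returned by cpu_get_rev_var.
 */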