/*
 * Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <common/bl_common.h>
#include <common/debug.h>
#include <cpu_macros.S>
#include <lib/cpus/cpu_ops.h>
#include <lib/cpus/errata.h>
#include <lib/el3_runtime/cpu_data.h>

	/*
	 * Initializes the cpu_ops_ptr if not already initialized
	 * in cpu_data. This can be called without a runtime stack, but may
	 * only be called after the MMU is enabled.
	 * clobbers: x0 - x6, x10
	 */
	.globl	init_cpu_ops
func init_cpu_ops
	mrs	x6, tpidr_el3
	ldr	x0, [x6, #CPU_DATA_CPU_OPS_PTR]
	cbnz	x0, 1f
	mov	x10, x30
	bl	get_cpu_ops_ptr
	str	x0, [x6, #CPU_DATA_CPU_OPS_PTR]!
	mov	x30, x10
1:
	ret
endfunc init_cpu_ops

#if defined(IMAGE_BL31) && CRASH_REPORTING
	/*
	 * The cpu specific registers which need to be reported in a crash
	 * are reported via the cpu_ops cpu_reg_dump function. After a matching
	 * cpu_ops structure entry is found, the corresponding cpu_reg_dump
	 * in the cpu_ops is invoked.
	 */
	.globl	do_cpu_reg_dump
func do_cpu_reg_dump
	mov	x16, x30

	/* Get the matching cpu_ops pointer */
	bl	get_cpu_ops_ptr
	cbz	x0, 1f

	/* Get the cpu_ops cpu_reg_dump */
	ldr	x2, [x0, #CPU_REG_DUMP]
	cbz	x2, 1f
	blr	x2
1:
	mov	x30, x16
	ret
endfunc do_cpu_reg_dump
#endif

	/*
	 * The below function returns the cpu_ops structure matching the
	 * midr of the core. It reads the MIDR_EL1 and finds the matching
	 * entry in the cpu_ops entries. Only the implementation and part
	 * number are used to match the entries.
	 *
	 * If cpu_ops for the MIDR_EL1 cannot be found and
	 * SUPPORT_UNKNOWN_MPID is enabled, it will try to look for a
	 * default cpu_ops with an MIDR value of 0.
	 * (Implementation number 0x0 should be reserved for software use
	 * and therefore no clashes should happen with that default value).
	 *
	 * Return :
	 *	x0 - The matching cpu_ops pointer on Success
	 *	x0 - 0 on failure.
	 * Clobbers : x0 - x5
	 */
	.globl	get_cpu_ops_ptr
func get_cpu_ops_ptr
	/* Read the MIDR_EL1 */
	mrs	x2, midr_el1
	mov_imm	x3, CPU_IMPL_PN_MASK

	/* Retain only the implementation and part number using mask */
	and	w2, w2, w3

	/* Get the cpu_ops end location */
	adr_l	x5, (__CPU_OPS_END__ + CPU_MIDR)

	/* Initialize the return parameter */
	mov	x0, #0
1:
	/* Get the cpu_ops start location */
	adr_l	x4, (__CPU_OPS_START__ + CPU_MIDR)

2:
	/* Check if we have reached the end of the list */
	cmp	x4, x5
	b.eq	search_def_ptr

	/* Load the midr from the cpu_ops */
	ldr	x1, [x4], #CPU_OPS_SIZE
	and	w1, w1, w3

	/* Check if the midr matches the midr of this core */
	cmp	w1, w2
	b.ne	2b

	/* Subtract the increment and offset to get the cpu-ops pointer */
	sub	x0, x4, #(CPU_OPS_SIZE + CPU_MIDR)
#if ENABLE_ASSERTIONS
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
#ifdef SUPPORT_UNKNOWN_MPID
	cbnz	x2, exit_mpid_found
	/* Mark the unsupported MPID flag */
	adrp	x1, unsupported_mpid_flag
	add	x1, x1, :lo12:unsupported_mpid_flag
	str	w2, [x1]
exit_mpid_found:
#endif
	ret

	/*
	 * Search again for a default pointer (MIDR = 0x0)
	 * or return error if already searched.
	 */
search_def_ptr:
#ifdef SUPPORT_UNKNOWN_MPID
	cbz	x2, error_exit
	mov	x2, #0
	b	1b
error_exit:
#endif
#if ENABLE_ASSERTIONS
	/*
	 * Assert if invalid cpu_ops obtained. If this is not valid, it may
	 * suggest that the proper CPU file hasn't been included.
	 */
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
	ret
endfunc get_cpu_ops_ptr

	.globl	cpu_get_rev_var
func cpu_get_rev_var
	get_rev_var	x0, x1
	ret
endfunc cpu_get_rev_var
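
	/*
	 * Illustrative only, not part of the build: a minimal C sketch of
	 * the linear search that get_cpu_ops_ptr performs above. It assumes
	 * a cpu_ops entry whose first field holds the MIDR and linker
	 * symbols bounding the cpu_ops array; the struct and field names
	 * below are hypothetical and need not match cpu_ops.h exactly.
	 *
	 *	// Implementer [31:24] and part number [15:4] of MIDR_EL1.
	 *	#define IMPL_PN_MASK	0xff00fff0UL
	 *
	 *	struct cpu_ops_sketch {
	 *		unsigned long midr;	// matched against MIDR_EL1
	 *		// ... reset/power-down/register-dump handlers ...
	 *	};
	 *
	 *	extern struct cpu_ops_sketch __CPU_OPS_START__[];
	 *	extern struct cpu_ops_sketch __CPU_OPS_END__[];
	 *
	 *	static struct cpu_ops_sketch *find_cpu_ops(unsigned long midr)
	 *	{
	 *		unsigned long key = midr & IMPL_PN_MASK;
	 *		struct cpu_ops_sketch *ops;
	 *
	 *		for (ops = __CPU_OPS_START__; ops < __CPU_OPS_END__; ops++) {
	 *			if ((ops->midr & IMPL_PN_MASK) == key)
	 *				return ops;
	 *		}
	 *		// With SUPPORT_UNKNOWN_MPID the assembly retries once
	 *		// with key == 0 before giving up and returning 0.
	 *		return 0;
	 *	}
	 */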