xref: /rk3399_ARM-atf/lib/cpus/aarch64/cpu_helpers.S (revision 05d22c3045e2e972c2262b9ccd6c82cb7545bf83)
1/*
2 * Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7#include <arch.h>
8#include <asm_macros.S>
9#include <assert_macros.S>
10#include <common/bl_common.h>
11#include <common/debug.h>
12#include <cpu_macros.S>
13#include <lib/cpus/cpu_ops.h>
14#include <lib/cpus/errata.h>
15#include <lib/el3_runtime/cpu_data.h>
16
17#if defined(IMAGE_BL31) && CRASH_REPORTING
18	/*
19	 * The cpu specific registers which need to be reported in a crash
20	 * are reported via cpu_ops cpu_reg_dump function. After a matching
	 * cpu_ops structure entry is found, the corresponding cpu_reg_dump
22	 * in the cpu_ops is invoked.
23	 */
24	.globl	do_cpu_reg_dump
25func do_cpu_reg_dump
	/*
	 * Dump the CPU-specific registers of the calling core by invoking
	 * the cpu_reg_dump hook of its matching cpu_ops entry. If no
	 * cpu_ops entry matches, or the entry's hook is NULL, the function
	 * is a no-op.
	 *
	 * Clobbers: x16 (LR save), plus whatever get_cpu_ops_ptr and the
	 * invoked cpu_reg_dump handler clobber. x16 is used instead of the
	 * stack because this runs on the crash-reporting path.
	 */
26	mov	x16, x30	/* Preserve LR across the nested bl/blr */
27
28	/* Get the matching cpu_ops pointer */
29	bl	get_cpu_ops_ptr
30	cbz	x0, 1f		/* No matching cpu_ops entry: skip the dump */
31
32	/* Get the cpu_ops cpu_reg_dump */
33	ldr	x2, [x0, #CPU_REG_DUMP]
34	cbz	x2, 1f		/* Hook is optional: may be NULL */
35	blr	x2
361:
37	mov	x30, x16	/* Restore the saved return address */
38	ret
39endfunc do_cpu_reg_dump
40#endif
41
42	/*
43	 * The below function returns the cpu_ops structure matching the
44	 * midr of the core. It reads the MIDR_EL1 and finds the matching
45	 * entry in cpu_ops entries. Only the implementation and part number
46	 * are used to match the entries.
47	 *
48	 * If cpu_ops for the MIDR_EL1 cannot be found and
49	 * SUPPORT_UNKNOWN_MPID is enabled, it will try to look for a
50	 * default cpu_ops with an MIDR value of 0.
51	 * (Implementation number 0x0 should be reserved for software use
52	 * and therefore no clashes should happen with that default value).
53	 *
54	 * Return :
55	 *     x0 - The matching cpu_ops pointer on Success
56	 *     x0 - 0 on failure.
57	 * Clobbers : x0 - x5
58	 */
59	.globl	get_cpu_ops_ptr
60func get_cpu_ops_ptr
	/*
	 * Register roles within this function:
	 *   x0 - result: matching cpu_ops pointer, 0 until a match is found
	 *   x2 - this core's MIDR masked to implementer + part number
	 *        (forced to 0 on the default-entry retry pass)
	 *   x3 - CPU_IMPL_PN_MASK
	 *   x4 - cursor over the cpu_ops array; points at the CPU_MIDR
	 *        field of the current entry and is post-incremented
	 *   x5 - end-of-array sentinel (also offset by CPU_MIDR so it
	 *        compares directly against x4)
	 */
61	/* Read the MIDR_EL1 */
62	mrs	x2, midr_el1
63	mov_imm	x3, CPU_IMPL_PN_MASK
64
65	/* Retain only the implementation and part number using mask */
66	and	w2, w2, w3
67
68	/* Get the cpu_ops end location */
69	adr_l	x5, (__CPU_OPS_END__ + CPU_MIDR)
70
71	/* Initialize the return parameter */
72	mov	x0, #0
731:
74	/* Get the cpu_ops start location */
75	adr_l	x4, (__CPU_OPS_START__ + CPU_MIDR)
76
772:
78	/* Check if we have reached end of list */
79	cmp	x4, x5
80	b.eq	search_def_ptr
81
82	/* load the midr from the cpu_ops */
83	ldr	x1, [x4], #CPU_OPS_SIZE	/* read MIDR, advance to next entry */
84	and	w1, w1, w3
85
86	/* Check if midr matches to midr of this core */
87	cmp	w1, w2
88	b.ne	2b
89
90	/* Subtract the increment and offset to get the cpu-ops pointer */
91	sub	x0, x4, #(CPU_OPS_SIZE + CPU_MIDR)
92#if ENABLE_ASSERTIONS
93	cmp	x0, #0
94	ASM_ASSERT(ne)
95#endif
96#ifdef SUPPORT_UNKNOWN_MPID
	/* x2 == 0 only on the retry pass, i.e. the default entry matched */
97	cbnz	x2, exit_mpid_found
98	/* Mark the unsupported MPID flag */
99	adrp	x1, unsupported_mpid_flag
100	add	x1, x1, :lo12:unsupported_mpid_flag
	/*
	 * NOTE(review): w2 is 0 on this path, so 0 is stored. The flag's
	 * set/clear polarity is defined at its declaration (not visible
	 * here) - confirm this store actually "marks" the flag.
	 */
101	str	w2, [x1]
102exit_mpid_found:
103#endif
104	ret
105
106	/*
107	 * Search again for a default pointer (MIDR = 0x0)
108	 * or return error if already searched.
109	 */
110search_def_ptr:
111#ifdef SUPPORT_UNKNOWN_MPID
	/* If x2 is already 0 the default pass also failed: give up */
112	cbz	x2, error_exit
	/* Zero x2 and rerun the loop so it matches the MIDR==0 entry */
113	mov	x2, #0
114	b	1b
115error_exit:
116#endif
117#if ENABLE_ASSERTIONS
118	/*
119	 * Assert if invalid cpu_ops obtained. If this is not valid, it may
120	 * suggest that the proper CPU file hasn't been included.
121	 */
122	cmp	x0, #0
123	ASM_ASSERT(ne)
124#endif
	/* x0 is still 0 here: caller sees the failure return value */
125	ret
126endfunc get_cpu_ops_ptr
127
128	.globl	cpu_get_rev_var
129func cpu_get_rev_var
	/*
	 * Return this core's revision/variant value in x0, as produced by
	 * the get_rev_var macro (cpu_macros.S). x1 is handed to the macro
	 * as a second register - presumably scratch; confirm against the
	 * macro definition.
	 */
130	get_rev_var x0, x1
131	ret
132endfunc cpu_get_rev_var
133