/*
 * Copyright (c) 2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>

#define CPU_IMPL_PN_MASK	((MIDR_IMPL_MASK << MIDR_IMPL_SHIFT) | \
				 (MIDR_PN_MASK << MIDR_PN_SHIFT))
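
/*
 * Illustrative sketch (not part of the original header): callers are expected
 * to AND this mask with MIDR_EL1 so that only the implementer and part number
 * fields remain for comparison against the midr stored in a cpu_ops entry.
 * Register choices below are arbitrary for the example.
 *
 *	mrs	x0, midr_el1			// read the running CPU's MIDR
 *	ldr	x1, =CPU_IMPL_PN_MASK		// load the mask via a literal pool
 *	and	x0, x0, x1			// keep implementer + part number
 */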

	/*
	 * Define the offsets of the fields in the cpu_ops structure.
	 */
	.struct 0
CPU_MIDR: /* cpu_ops midr */
	.space  8
/* The reset function is needed in the BL images that execute from the reset vector */
#if IMAGE_BL1 || IMAGE_BL31
CPU_RESET_FUNC: /* cpu_ops reset_func */
	.space  8
#endif
#if IMAGE_BL31 /* The core and cluster power down functions are needed only in BL3-1 */
CPU_PWR_DWN_CORE: /* cpu_ops core_pwr_dwn */
	.space  8
CPU_PWR_DWN_CLUSTER: /* cpu_ops cluster_pwr_dwn */
	.space  8
#endif
#if (IMAGE_BL31 && CRASH_REPORTING)
CPU_REG_DUMP: /* cpu specific register dump for crash reporting */
	.space  8
#endif
CPU_OPS_SIZE = .
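
/*
 * Illustrative sketch (not part of the original header): once a matching
 * cpu_ops entry has been located, the offsets above are used to fetch the
 * individual handlers from it. Register choices are arbitrary; x0 is assumed
 * to hold the cpu_ops pointer for the example.
 *
 *	ldr	x1, [x0, #CPU_RESET_FUNC]	// fetch the reset handler
 *	blr	x1				// and invoke it
 */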

	/*
	 * Convenience macro to declare a cpu_ops structure.
	 * The fields must be emitted in the same order as the
	 * offsets defined above. A usage sketch follows the
	 * macro definition.
	 */
	.macro declare_cpu_ops _name:req, _midr:req, _noresetfunc = 0
	.section cpu_ops, "a"; .align 3
	.type cpu_ops_\_name, %object
	.quad \_midr
#if IMAGE_BL1 || IMAGE_BL31
	.if \_noresetfunc
	.quad 0
	.else
	.quad \_name\()_reset_func
	.endif
#endif
#if IMAGE_BL31
	.quad \_name\()_core_pwr_dwn
	.quad \_name\()_cluster_pwr_dwn
#endif
#if (IMAGE_BL31 && CRASH_REPORTING)
	.quad \_name\()_cpu_reg_dump
#endif
	.endm
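
/*
 * Usage sketch (illustrative; the CPU and MIDR names are assumptions based on
 * the usual CPU support files, not definitions made by this header): a file
 * such as cortex_a57.S implements <name>_reset_func, <name>_core_pwr_dwn,
 * <name>_cluster_pwr_dwn and <name>_cpu_reg_dump for the images that need
 * them, then declares its cpu_ops entry with:
 *
 *	declare_cpu_ops cortex_a57, CORTEX_A57_MIDR
 *
 * A CPU that does not need a reset function passes 1 for _noresetfunc:
 *
 *	declare_cpu_ops aem_generic, BASE_AEM_MIDR, 1
 */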