xref: /rk3399_ARM-atf/include/lib/cpus/aarch64/cpu_macros.S (revision add403514d0f792b9df3c81006cd9a9395b213f6)
1*add40351SSoby Mathew/*
2*add40351SSoby Mathew * Copyright (c) 2014, ARM Limited and Contributors. All rights reserved.
3*add40351SSoby Mathew *
4*add40351SSoby Mathew * Redistribution and use in source and binary forms, with or without
5*add40351SSoby Mathew * modification, are permitted provided that the following conditions are met:
6*add40351SSoby Mathew *
7*add40351SSoby Mathew * Redistributions of source code must retain the above copyright notice, this
8*add40351SSoby Mathew * list of conditions and the following disclaimer.
9*add40351SSoby Mathew *
10*add40351SSoby Mathew * Redistributions in binary form must reproduce the above copyright notice,
11*add40351SSoby Mathew * this list of conditions and the following disclaimer in the documentation
12*add40351SSoby Mathew * and/or other materials provided with the distribution.
13*add40351SSoby Mathew *
14*add40351SSoby Mathew * Neither the name of ARM nor the names of its contributors may be used
15*add40351SSoby Mathew * to endorse or promote products derived from this software without specific
16*add40351SSoby Mathew * prior written permission.
17*add40351SSoby Mathew *
18*add40351SSoby Mathew * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19*add40351SSoby Mathew * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20*add40351SSoby Mathew * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21*add40351SSoby Mathew * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22*add40351SSoby Mathew * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23*add40351SSoby Mathew * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24*add40351SSoby Mathew * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25*add40351SSoby Mathew * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26*add40351SSoby Mathew * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27*add40351SSoby Mathew * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28*add40351SSoby Mathew * POSSIBILITY OF SUCH DAMAGE.
29*add40351SSoby Mathew */
30*add40351SSoby Mathew
31*add40351SSoby Mathew#include <arch.h>
32*add40351SSoby Mathew
/*
 * Mask covering the Implementer and Primary Part Number fields of
 * MIDR_EL1; used to match a running core against a cpu_ops entry.
 * The whole expression is parenthesised so the macro expands safely
 * inside larger expressions (e.g. "x & CPU_IMPL_PN_MASK"): without
 * the outer parentheses, '&' would bind tighter than '|' and the
 * expansion would evaluate as "(x & IMPL) | PN".
 */
#define CPU_IMPL_PN_MASK	((MIDR_IMPL_MASK << MIDR_IMPL_SHIFT) | \
				 (MIDR_PN_MASK << MIDR_PN_SHIFT))
35*add40351SSoby Mathew
	/*
	 * Define the offsets to the fields in the cpu_ops structure.
	 * The layout is built incrementally with .struct/.space so that
	 * fields compiled out by the #if guards consume no space, keeping
	 * each image (BL1/BL31) as small as possible.
	 * NOTE(review): the field order and the #if conditions here must
	 * stay in exact lockstep with the .quad entries emitted by the
	 * declare_cpu_ops macro, otherwise these offsets index the wrong
	 * fields at runtime.
	 */
	.struct 0
CPU_MIDR: /* cpu_ops midr: MIDR value this entry matches (impl + part no.) */
	.space  8
/* Reset fn is needed in BL at reset vector */
#if IMAGE_BL1 || (IMAGE_BL31 && RESET_TO_BL31)
CPU_RESET_FUNC: /* cpu_ops reset_func: per-CPU reset-time init handler */
	.space  8
#endif
#if IMAGE_BL31 /* The power down core and cluster is needed only in BL3-1 */
CPU_PWR_DWN_CORE: /* cpu_ops core_pwr_dwn: core power-down handler */
	.space  8
CPU_PWR_DWN_CLUSTER: /* cpu_ops cluster_pwr_dwn: cluster power-down handler */
	.space  8
#endif
CPU_OPS_SIZE = .	/* total size in bytes of one cpu_ops entry */
54*add40351SSoby Mathew
	/*
	 * Convenience macro to declare cpu_ops structure.
	 * Make sure the structure fields are as per the offsets
	 * defined above.
	 *
	 * _name:        CPU name; used to derive the object symbol
	 *               (cpu_ops_<name>) and the handler symbols the entry
	 *               points at (<name>_reset_func, <name>_core_pwr_dwn,
	 *               <name>_cluster_pwr_dwn), which the including file
	 *               must define.
	 * _midr:        MIDR value stored in the CPU_MIDR field for
	 *               runtime matching against the core's MIDR_EL1.
	 * _noresetfunc: when non-zero, a NULL reset handler is stored
	 *               instead of <name>_reset_func (for CPUs needing no
	 *               reset-time initialisation).
	 *
	 * NOTE(review): cpu_ops_\_name is given a type but never defined
	 * as a label here — entries are presumably located by scanning the
	 * "cpu_ops" section; confirm against the lookup code elsewhere.
	 */
	.macro declare_cpu_ops _name:req, _midr:req, _noresetfunc = 0
	/* "a" = allocatable (read-only data); .align 3 = 8-byte alignment */
	.section cpu_ops, "a"; .align 3
	.type cpu_ops_\_name, %object
	.quad \_midr			/* CPU_MIDR */
#if IMAGE_BL1 || (IMAGE_BL31 && RESET_TO_BL31)
	.if \_noresetfunc
	.quad 0				/* CPU_RESET_FUNC: no reset handler */
	.else
	.quad \_name\()_reset_func	/* CPU_RESET_FUNC */
	.endif
#endif
#if IMAGE_BL31
	.quad \_name\()_core_pwr_dwn	/* CPU_PWR_DWN_CORE */
	.quad \_name\()_cluster_pwr_dwn	/* CPU_PWR_DWN_CLUSTER */
#endif
	.endm
76