/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <cortex_a75.h>

	.globl	cortex_a75_amu_cnt_read
	.globl	cortex_a75_amu_cnt_write
	.globl	cortex_a75_amu_read_cpuamcntenset_el0
	.globl	cortex_a75_amu_read_cpuamcntenclr_el0
	.globl	cortex_a75_amu_write_cpuamcntenset_el0
	.globl	cortex_a75_amu_write_cpuamcntenclr_el0

/*
 * uint64_t cortex_a75_amu_cnt_read(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func cortex_a75_amu_cnt_read
	adr	x1, 1f
	lsl	x0, x0, #3
	add	x1, x1, x0
	br	x1

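	/*
	 * Each entry in the table below is an mrs + ret pair (two 4-byte
	 * instructions), which is why the counter index is scaled by 8
	 * before being added to the table base address.
	 */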
1:
	mrs	x0, CPUAMEVCNTR0_EL0
	ret
	mrs	x0, CPUAMEVCNTR1_EL0
	ret
	mrs	x0, CPUAMEVCNTR2_EL0
	ret
	mrs	x0, CPUAMEVCNTR3_EL0
	ret
	mrs	x0, CPUAMEVCNTR4_EL0
	ret
endfunc cortex_a75_amu_cnt_read

/*
 * void cortex_a75_amu_cnt_write(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func cortex_a75_amu_cnt_write
	adr	x2, 1f
	lsl	x0, x0, #3
	add	x2, x2, x0
	br	x2

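	/*
	 * Same jump-table dispatch as cortex_a75_amu_cnt_read. The scaled
	 * index lives in x0, while the value to be written arrives in x1,
	 * so each entry below writes x1.
	 */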
1:
	msr	CPUAMEVCNTR0_EL0, x1
	ret
	msr	CPUAMEVCNTR1_EL0, x1
	ret
	msr	CPUAMEVCNTR2_EL0, x1
	ret
	msr	CPUAMEVCNTR3_EL0, x1
	ret
	msr	CPUAMEVCNTR4_EL0, x1
	ret
endfunc cortex_a75_amu_cnt_write

/*
 * unsigned int cortex_a75_amu_read_cpuamcntenset_el0(void);
 *
 * Read the `CPUAMCNTENSET_EL0` CPU register and return
 * it in `x0`.
 */
func cortex_a75_amu_read_cpuamcntenset_el0
	mrs	x0, CPUAMCNTENSET_EL0
	ret
endfunc cortex_a75_amu_read_cpuamcntenset_el0

/*
 * unsigned int cortex_a75_amu_read_cpuamcntenclr_el0(void);
 *
 * Read the `CPUAMCNTENCLR_EL0` CPU register and return
 * it in `x0`.
 */
func cortex_a75_amu_read_cpuamcntenclr_el0
	mrs	x0, CPUAMCNTENCLR_EL0
	ret
endfunc cortex_a75_amu_read_cpuamcntenclr_el0

/*
 * void cortex_a75_amu_write_cpuamcntenset_el0(unsigned int mask);
 *
 * Write `mask` to the `CPUAMCNTENSET_EL0` CPU register.
 */
func cortex_a75_amu_write_cpuamcntenset_el0
	msr	CPUAMCNTENSET_EL0, x0
	ret
endfunc cortex_a75_amu_write_cpuamcntenset_el0

/*
 * void cortex_a75_amu_write_cpuamcntenclr_el0(unsigned int mask);
 *
 * Write `mask` to the `CPUAMCNTENCLR_EL0` CPU register.
 */
func cortex_a75_amu_write_cpuamcntenclr_el0
	msr	CPUAMCNTENCLR_EL0, x0
	ret
endfunc cortex_a75_amu_write_cpuamcntenclr_el0
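
/*
 * Illustrative C usage of the helpers above, from a hypothetical caller
 * (not one present in this file), assuming the prototypes documented in
 * the comments above are visible to it, e.g. via cortex_a75.h:
 *
 *   cortex_a75_amu_write_cpuamcntenset_el0(CORTEX_A75_AMU_GROUP0_MASK);
 *   uint64_t cnt = cortex_a75_amu_cnt_read(0);
 */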

func cortex_a75_reset_func
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
	mrs	x0, id_aa64pfr0_el1
	ubfx	x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
	/*
	 * If the field equals 1 then branch targets trained in one
	 * context cannot affect speculative execution in a different context.
	 */
	cmp	x0, #1
	beq	1f

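	/*
	 * CSV2 is 0: install the vector table that implements the
	 * CVE-2017-5715 (BPIALL-based) workaround.
	 */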
	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
	msr	vbar_el3, x0
1:
#endif

#if ENABLE_AMU
	/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
	mrs	x0, actlr_el3
	orr	x0, x0, #CORTEX_A75_ACTLR_AMEN_BIT
	msr	actlr_el3, x0
	isb

	/* Make sure accesses from EL0/EL1 are not trapped to EL2 */
	mrs	x0, actlr_el2
	orr	x0, x0, #CORTEX_A75_ACTLR_AMEN_BIT
	msr	actlr_el2, x0
	isb

	/* Enable group0 counters */
	mov	x0, #CORTEX_A75_AMU_GROUP0_MASK
	msr	CPUAMCNTENSET_EL0, x0
	isb

	/* Enable group1 counters */
	mov	x0, #CORTEX_A75_AMU_GROUP1_MASK
	msr	CPUAMCNTENSET_EL0, x0
	isb
#endif
	ret
endfunc cortex_a75_reset_func

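/*
 * Errata framework return values used below: ERRATA_APPLIES means the core
 * is affected (CSV2 == 0) and the workaround is compiled in, ERRATA_MISSING
 * means the core is affected but the workaround was not built, and
 * ERRATA_NOT_APPLIES means the core reports CSV2 == 1 and is not affected.
 */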
func check_errata_cve_2017_5715
	mrs	x0, id_aa64pfr0_el1
	ubfx	x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
	/*
	 * If the field equals 1 then branch targets trained in one
	 * context cannot affect speculative execution in a different context.
	 */
	cmp	x0, #1
	beq	1f

#if WORKAROUND_CVE_2017_5715
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES
	ret
endfunc check_errata_cve_2017_5715

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a75_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
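	/*
	 * Once this bit is set, the core is expected to be powered down on
	 * its next WFI entry.
	 */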
	mrs	x0, CORTEX_A75_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A75_CORE_PWRDN_EN_MASK
	msr	CORTEX_A75_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a75_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A75. Must follow AAPCS.
 */
func cortex_a75_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking functions of each erratum.
	 */
	report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a75_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a75 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a75_regs, "aS"
cortex_a75_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a75_cpu_reg_dump
	adr	x6, cortex_a75_regs
	mrs	x8, CORTEX_A75_CPUECTLR_EL1
	ret
endfunc cortex_a75_cpu_reg_dump

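/*
 * Register the Cortex-A75 MIDR together with the reset and power-down
 * handlers defined above with the generic CPU operations framework.
 */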
declare_cpu_ops cortex_a75, CORTEX_A75_MIDR, \
	cortex_a75_reset_func, \
	cortex_a75_core_pwr_dwn