xref: /rk3399_ARM-atf/lib/cpus/aarch32/cortex_a15.S (revision cc4f3838633e8faab00323228140c025d173ae00)
/*
 * Copyright (c) 2016-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * The Cortex-A15 supports LPAE and the Virtualization Extensions,
 * regardless of whether the build configuration uses them.
 * Therefore ARCH_IS_ARMV7_WITH_LPAE/VE is not checked here.
 */
1810922e7aSEtienne Carriere
1910922e7aSEtienne Carriere	.macro assert_cache_enabled
2010922e7aSEtienne Carriere#if ENABLE_ASSERTIONS
2110922e7aSEtienne Carriere		ldcopr	r0, SCTLR
2210922e7aSEtienne Carriere		tst	r0, #SCTLR_C_BIT
2310922e7aSEtienne Carriere		ASM_ASSERT(eq)
2410922e7aSEtienne Carriere#endif
2510922e7aSEtienne Carriere	.endm
2610922e7aSEtienne Carriere
2710922e7aSEtienne Carrierefunc cortex_a15_disable_smp
2810922e7aSEtienne Carriere	ldcopr	r0, ACTLR
2910922e7aSEtienne Carriere	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
3010922e7aSEtienne Carriere	stcopr	r0, ACTLR
3110922e7aSEtienne Carriere	isb
3275a1ada9SAmbroise Vincent#if ERRATA_A15_816470
3375a1ada9SAmbroise Vincent	/*
3475a1ada9SAmbroise Vincent	 * Invalidate any TLB address
3575a1ada9SAmbroise Vincent	 */
3675a1ada9SAmbroise Vincent	mov	r0, #0
3775a1ada9SAmbroise Vincent	stcopr	r0, TLBIMVA
3875a1ada9SAmbroise Vincent#endif
3910922e7aSEtienne Carriere	dsb	sy
4010922e7aSEtienne Carriere	bx	lr
4110922e7aSEtienne Carriereendfunc cortex_a15_disable_smp
4210922e7aSEtienne Carriere
4310922e7aSEtienne Carrierefunc cortex_a15_enable_smp
4410922e7aSEtienne Carriere	ldcopr	r0, ACTLR
4510922e7aSEtienne Carriere	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
4610922e7aSEtienne Carriere	stcopr	r0, ACTLR
4710922e7aSEtienne Carriere	isb
4810922e7aSEtienne Carriere	bx	lr
4910922e7aSEtienne Carriereendfunc cortex_a15_enable_smp
5010922e7aSEtienne Carriere
5175a1ada9SAmbroise Vincent	/* ----------------------------------------------------
5275a1ada9SAmbroise Vincent	 * Errata Workaround for Cortex A15 Errata #816470.
5375a1ada9SAmbroise Vincent	 * This applies only to revision >= r3p0 of Cortex A15.
5475a1ada9SAmbroise Vincent	 * ----------------------------------------------------
5575a1ada9SAmbroise Vincent	 */
5675a1ada9SAmbroise Vincentfunc check_errata_816470
5775a1ada9SAmbroise Vincent	/*
5875a1ada9SAmbroise Vincent	 * Even though this is only needed for revision >= r3p0, it is always
5975a1ada9SAmbroise Vincent	 * applied because of the low cost of the workaround.
6075a1ada9SAmbroise Vincent	 */
6175a1ada9SAmbroise Vincent	mov	r0, #ERRATA_APPLIES
6275a1ada9SAmbroise Vincent	bx	lr
6375a1ada9SAmbroise Vincentendfunc check_errata_816470
6475a1ada9SAmbroise Vincent
65cbc8cae7SSona Mathewadd_erratum_entry cortex_a15, ERRATUM(816470), ERRATA_A15_816470
665f2c690dSAmbroise Vincent	/* ----------------------------------------------------
675f2c690dSAmbroise Vincent	 * Errata Workaround for Cortex A15 Errata #827671.
685f2c690dSAmbroise Vincent	 * This applies only to revision >= r3p0 of Cortex A15.
695f2c690dSAmbroise Vincent	 * Inputs:
705f2c690dSAmbroise Vincent	 * r0: variant[4:7] and revision[0:3] of current cpu.
715f2c690dSAmbroise Vincent	 * Shall clobber: r0-r3
725f2c690dSAmbroise Vincent	 * ----------------------------------------------------
735f2c690dSAmbroise Vincent	 */
745f2c690dSAmbroise Vincentfunc errata_a15_827671_wa
755f2c690dSAmbroise Vincent	/*
765f2c690dSAmbroise Vincent	 * Compare r0 against revision r3p0
775f2c690dSAmbroise Vincent	 */
785f2c690dSAmbroise Vincent	mov	r2, lr
795f2c690dSAmbroise Vincent	bl	check_errata_827671
805f2c690dSAmbroise Vincent	cmp	r0, #ERRATA_NOT_APPLIES
815f2c690dSAmbroise Vincent	beq	1f
825f2c690dSAmbroise Vincent	ldcopr	r0, CORTEX_A15_ACTLR2
835f2c690dSAmbroise Vincent	orr	r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
845f2c690dSAmbroise Vincent	stcopr	r0, CORTEX_A15_ACTLR2
855f2c690dSAmbroise Vincent	isb
865f2c690dSAmbroise Vincent1:
875f2c690dSAmbroise Vincent	bx	r2
885f2c690dSAmbroise Vincentendfunc errata_a15_827671_wa
895f2c690dSAmbroise Vincent
905f2c690dSAmbroise Vincentfunc check_errata_827671
915f2c690dSAmbroise Vincent	mov	r1, #0x30
925f2c690dSAmbroise Vincent	b	cpu_rev_var_hs
935f2c690dSAmbroise Vincentendfunc check_errata_827671
945f2c690dSAmbroise Vincent
95cbc8cae7SSona Mathewadd_erratum_entry cortex_a15, ERRATUM(827671), ERRATA_A15_827671
96cbc8cae7SSona Mathew
97e4b34efaSDimitris Papastamosfunc check_errata_cve_2017_5715
98e4b34efaSDimitris Papastamos#if WORKAROUND_CVE_2017_5715
99e4b34efaSDimitris Papastamos	mov	r0, #ERRATA_APPLIES
100e4b34efaSDimitris Papastamos#else
101e4b34efaSDimitris Papastamos	mov	r0, #ERRATA_MISSING
102e4b34efaSDimitris Papastamos#endif
103e4b34efaSDimitris Papastamos	bx	lr
104e4b34efaSDimitris Papastamosendfunc check_errata_cve_2017_5715
105e4b34efaSDimitris Papastamos
106cbc8cae7SSona Mathewadd_erratum_entry cortex_a15, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
107cbc8cae7SSona Mathew
108187a6176SJohn Powellfunc check_errata_cve_2022_23960
109187a6176SJohn Powell#if WORKAROUND_CVE_2022_23960
110187a6176SJohn Powell	mov	r0, #ERRATA_APPLIES
111187a6176SJohn Powell#else
112187a6176SJohn Powell	mov	r0, #ERRATA_MISSING
113187a6176SJohn Powell#endif
114187a6176SJohn Powell	bx	lr
115187a6176SJohn Powellendfunc check_errata_cve_2022_23960
116187a6176SJohn Powell
117cbc8cae7SSona Mathewadd_erratum_entry cortex_a15, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
118e4b34efaSDimitris Papastamos
11910922e7aSEtienne Carrierefunc cortex_a15_reset_func
1205f2c690dSAmbroise Vincent	mov	r5, lr
1215f2c690dSAmbroise Vincent	bl	cpu_get_rev_var
1225f2c690dSAmbroise Vincent
1235f2c690dSAmbroise Vincent#if ERRATA_A15_827671
1245f2c690dSAmbroise Vincent	bl	errata_a15_827671_wa
1255f2c690dSAmbroise Vincent#endif
1265f2c690dSAmbroise Vincent
127187a6176SJohn Powell#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
128e4b34efaSDimitris Papastamos	ldcopr	r0, ACTLR
129e4b34efaSDimitris Papastamos	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
130e4b34efaSDimitris Papastamos	stcopr	r0, ACTLR
131187a6176SJohn Powell	ldr	r0, =wa_cve_2017_5715_icache_inv_vbar
132e4b34efaSDimitris Papastamos	stcopr	r0, VBAR
133e4b34efaSDimitris Papastamos	stcopr	r0, MVBAR
134e4b34efaSDimitris Papastamos	/* isb will be applied in the course of the reset func */
135e4b34efaSDimitris Papastamos#endif
1365f2c690dSAmbroise Vincent
1375f2c690dSAmbroise Vincent	mov	lr, r5
13810922e7aSEtienne Carriere	b	cortex_a15_enable_smp
13910922e7aSEtienne Carriereendfunc cortex_a15_reset_func
14010922e7aSEtienne Carriere
14110922e7aSEtienne Carrierefunc cortex_a15_core_pwr_dwn
14210922e7aSEtienne Carriere	push	{r12, lr}
14310922e7aSEtienne Carriere
14410922e7aSEtienne Carriere	assert_cache_enabled
14510922e7aSEtienne Carriere
14610922e7aSEtienne Carriere	/* Flush L1 cache */
14710922e7aSEtienne Carriere	mov	r0, #DC_OP_CISW
14810922e7aSEtienne Carriere	bl	dcsw_op_level1
14910922e7aSEtienne Carriere
15010922e7aSEtienne Carriere	/* Exit cluster coherency */
15110922e7aSEtienne Carriere	pop	{r12, lr}
15210922e7aSEtienne Carriere	b	cortex_a15_disable_smp
15310922e7aSEtienne Carriereendfunc cortex_a15_core_pwr_dwn
15410922e7aSEtienne Carriere
15510922e7aSEtienne Carrierefunc cortex_a15_cluster_pwr_dwn
15610922e7aSEtienne Carriere	push	{r12, lr}
15710922e7aSEtienne Carriere
15810922e7aSEtienne Carriere	assert_cache_enabled
15910922e7aSEtienne Carriere
16010922e7aSEtienne Carriere	/* Flush L1 caches */
16110922e7aSEtienne Carriere	mov	r0, #DC_OP_CISW
16210922e7aSEtienne Carriere	bl	dcsw_op_level1
16310922e7aSEtienne Carriere
16410922e7aSEtienne Carriere	bl	plat_disable_acp
16510922e7aSEtienne Carriere
166c5c160cdSStephan Gerhold	/* Flush L2 caches */
167c5c160cdSStephan Gerhold	mov	r0, #DC_OP_CISW
168c5c160cdSStephan Gerhold	bl	dcsw_op_level2
169c5c160cdSStephan Gerhold
17010922e7aSEtienne Carriere	/* Exit cluster coherency */
17110922e7aSEtienne Carriere	pop	{r12, lr}
17210922e7aSEtienne Carriere	b	cortex_a15_disable_smp
17310922e7aSEtienne Carriereendfunc cortex_a15_cluster_pwr_dwn
17410922e7aSEtienne Carriere
17510922e7aSEtienne Carrieredeclare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
17610922e7aSEtienne Carriere	cortex_a15_reset_func, \
17710922e7aSEtienne Carriere	cortex_a15_core_pwr_dwn, \
17810922e7aSEtienne Carriere	cortex_a15_cluster_pwr_dwn
179