/*
 * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * Cortex-A15 supports the Large Physical Address Extension (LPAE) and the
 * Virtualization Extensions (VE). The routines below behave the same whether
 * or not the build configuration enables LPAE/VE, so we deliberately do not
 * check ARCH_IS_ARMV7_WITH_LPAE/VE here.
 */

	/*
	 * Assert that the data cache is enabled (SCTLR.C set).
	 * Used by the power-down handlers below, which flush L1 by set/way
	 * and therefore expect the cache to still be on at entry.
	 * Clobbers: r0, flags. Compiled out unless ENABLE_ASSERTIONS.
	 */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

/*
 * Take this core out of coherency: clear ACTLR.SMP so the core stops
 * participating in cluster coherency.
 * Clobbers: r0.
 */
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb				/* Ensure the SMP-bit change takes effect */
#if ERRATA_A15_816470
	/*
	 * Erratum 816470 workaround: invalidate any one TLB entry by MVA
	 * (address 0 here) after clearing the SMP bit.
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy			/* Complete all prior ops before returning */
	bx	lr
endfunc cortex_a15_disable_smp

/*
 * Enter coherency: set ACTLR.SMP so this core takes part in cluster
 * coherency. Tail-called from the reset function below.
 * Clobbers: r0.
 */
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A15 Errata #816470.
	 * This applies only to revision >= r3p0 of Cortex A15.
	 * ----------------------------------------------------
	 */
func check_errata_816470
	/*
	 * Even though this is only needed for revision >= r3p0, it is always
	 * applied because of the low cost of the workaround.
	 */
	mov	r0, #ERRATA_APPLIES
	bx	lr
endfunc check_errata_816470

/*
 * Report whether the CVE-2017-5715 (branch predictor hardening)
 * workaround is compiled in. Returns ERRATA_APPLIES/ERRATA_MISSING in r0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A15. Must follow AAPCS.
 *
 * NOTE(review): r4 receives the revision-variant value and is clobbered
 * without being saved; this appears to follow the project-wide
 * report_errata convention (the macro presumably reads r4) — confirm
 * against cpu_macros.S.
 */
func cortex_a15_errata_report
	push	{r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A15_816470, cortex_a15, 816470
	report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715

	pop	{r12, lr}
	bx	lr
endfunc cortex_a15_errata_report
#endif

/*
 * Reset handler. When the CVE-2017-5715 workaround is enabled for BL32,
 * set ACTLR's BTB-invalidate bit and point VBAR/MVBAR at the
 * icache-invalidating exception vectors, then enable SMP coherency.
 * Clobbers: r0.
 */
func cortex_a15_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	ldr	r0, =workaround_icache_inv_runtime_exceptions
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif
	b	cortex_a15_enable_smp		/* Tail call; returns to caller via lr */
endfunc cortex_a15_reset_func

/*
 * Per-core power-down: flush the L1 data cache, then exit coherency.
 * Requires the data cache to still be enabled on entry (asserted).
 */
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp		/* Tail call; returns via restored lr */
endfunc cortex_a15_core_pwr_dwn

/*
 * Cluster power-down: flush the L1 data cache, disable the ACP via the
 * platform hook, then exit coherency.
 * Requires the data cache to still be enabled on entry (asserted).
 */
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp		/* Tail call; returns via restored lr */
endfunc cortex_a15_cluster_pwr_dwn

/* Register the Cortex-A15 handlers with the CPU operations framework */
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn