/*
 * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
	case SMCCC_ARCH_SOC_ID:
		return SMC_OK;
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround
		 * if the PE implements the architectural Speculative Store
		 * Bypass Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Return the SoC revision or the SoC version, depending on the argument, on
 * success; otherwise return SMC_ARCH_CALL_INVAL_PARAM.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}
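
/*
 * Aside (illustration only): per the SMC Calling Convention (Arm DEN 0028),
 * the value reported for SMCCC_VERSION keeps bit [31] clear so it can never
 * alias an error code, packs the major version in bits [30:16] and the minor
 * version in bits [15:0]. A minimal sketch of that encoding, assuming that
 * field layout (the real MAKE_SMCCC_VERSION macro in TF-A additionally masks
 * each field):
 *
 *	#define SMCCC_VERSION_MAJOR_SHIFT	16
 *	#define MAKE_SMCCC_VERSION(major, minor) \
 *		((int32_t)((((uint32_t)(major)) << SMCCC_VERSION_MAJOR_SHIFT) | \
 *			(uint32_t)(minor)))
 *
 * For example, SMCCC v1.1 is reported as 0x10001.
 */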

/* Register Arm Architecture Service Calls as runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);
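
/*
 * Usage note (hedged sketch, caller side, not part of this service): a rich
 * OS is expected to probe a function through SMCCC_ARCH_FEATURES before
 * invoking it. Using Linux's arm_smccc_1_1_smc() helper and Linux's constant
 * names purely as an illustration, probing and enabling the CVE-2018-3639
 * dynamic mitigation looks roughly like:
 *
 *	struct arm_smccc_res res;
 *
 *	arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
 *			  ARM_SMCCC_ARCH_WORKAROUND_2, &res);
 *	if ((int)res.a0 >= 0)
 *		arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, 1, NULL);
 *
 * A non-negative probe result covers both "required" (0) and "not required
 * but safe to invoke" (1), matching the returns in smccc_arch_features()
 * above.
 */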