/*
 * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround if
		 * the PE implements the architectural Speculation Store Bypass
		 * Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires dynamic
		 * mitigation but others are either unaffected or permanently
		 * mitigated, report the latter as not needing dynamic
		 * mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;

		/*
		 * If we get here, this CPU requires dynamic mitigation, so
		 * report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Return the SoC revision or the SoC version on success; otherwise return
 * SMC_ARCH_CALL_INVAL_PARAM.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architectural Service Calls as runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);