/*
 * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

/* Return the implemented SMCCC version: major in bits [30:16], minor in [15:0] */
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

static int32_t smccc_arch_features(u_register_t arg1, u_register_t arg2)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_OK;
	case SMCCC_ARCH_SOC_ID:
		if (arg2 == SMCCC_GET_SOC_REVISION) {
			return plat_get_soc_revision();
		}
		if (arg2 == SMCCC_GET_SOC_VERSION) {
			return plat_get_soc_version();
		}
		return SMC_ARCH_CALL_INVAL_PARAM;
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround if
		 * the PE implements the architectural Speculative Store Bypass
		 * Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	default:
		return SMC_UNK;
	}
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
					  u_register_t x1,
					  u_register_t x2,
					  u_register_t x3,
					  u_register_t x4,
					  void *cookie,
					  void *handle,
					  u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1, x2));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}
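/*
 * For reference, plat_get_soc_version() and plat_get_soc_revision() are
 * platform hooks (declared in plat/common/platform.h) backing the
 * SMCCC_ARCH_SOC_ID case above. The sketch below is a minimal,
 * hypothetical implementation, assuming the SMCCC SoC ID encoding:
 * bit[31] must be zero, bits[30:24] carry the implementer's JEP-106
 * bank index, bits[23:16] the JEP-106 identification code, and
 * bits[15:0] an implementation-defined SoC identifier. The bank index,
 * ID code and SoC ID values shown are placeholders, not those of any
 * real platform:
 *
 *	int32_t plat_get_soc_version(void)
 *	{
 *		return (int32_t)(((0x04U & 0x7FU) << 24) |
 *				 ((0x3BU & 0xFFU) << 16) |
 *				 (0x0001U & 0xFFFFU));
 *	}
 *
 *	int32_t plat_get_soc_revision(void)
 *	{
 *		return 0x0100;
 *	}
 *
 * The revision value is likewise implementation defined; only bit[31]
 * must stay zero so the result cannot be mistaken for an error code
 * such as SMC_ARCH_CALL_INVAL_PARAM.
 */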
/* Register Arm Architectural Service Calls as runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);
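/*
 * For context, the sketch below shows how a hypothetical lower-EL
 * caller is expected to discover and use these services; smc_call() and
 * its argument layout are assumptions for illustration only, not part
 * of this file. The caller first confirms SMCCC v1.1 or later (so that
 * SMCCC_ARCH_FEATURES itself exists), then queries the workaround's
 * function ID, and only invokes SMCCC_ARCH_WORKAROUND_2 (passing 1 to
 * enable the mitigation) when the query returns 0, i.e. when
 * smccc_arch_features() above reported that the PE needs dynamic
 * mitigation:
 *
 *	if (smc_call(SMCCC_VERSION, 0, 0) >= MAKE_SMCCC_VERSION(1, 1) &&
 *	    smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_2, 0) == 0)
 *		smc_call(SMCCC_ARCH_WORKAROUND_2, 1, 0);
 */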