/*
 * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/cpus/wa_cve_2022_23960.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

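/*
 * Return the implemented SMCCC version. Per the SMC Calling Convention,
 * the major version is encoded in bits [30:16] and the minor version in
 * bits [15:0], so e.g. v1.2 is reported as 0x10002.
 */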
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

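/*
 * SMCCC_ARCH_FEATURES discovery: the caller passes the function ID it
 * intends to use in arg1 and treats a non-negative result as "call
 * implemented". A hypothetical normal-world caller might probe like
 * this (sketch only; the actual SMC plumbing is caller-specific):
 *
 *	if (smc(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) >= 0)
 *		smc(SMCCC_ARCH_WORKAROUND_1);	-- safe to invoke
 */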
static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
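	/*
	 * SoC ID support is platform-specific, so defer to the platform
	 * port, which is expected to return SMC_ARCH_CALL_SUCCESS when
	 * the feature is implemented.
	 */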
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#ifdef __aarch64__
	/* Workaround checks are currently only implemented for aarch64 */
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
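	/*
	 * For the workaround queries here and below, the SMCCC return
	 * convention is: 0 when the workaround is required and safe to
	 * invoke, 1 when it is not required on this PE, and a negative
	 * value when it is not supported.
	 */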

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround
		 * if the PE implements the architectural Speculation Store
		 * Bypass Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
		 * CVE-2017-5715 since this SMC can be used instead of
		 * SMCCC_ARCH_WORKAROUND_1.
		 */
		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
			return 1;
		}
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
#endif /* __aarch64__ */

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Return the SoC revision or SoC version on success; otherwise return
 * SMC_ARCH_CALL_INVAL_PARAM.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}
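
/*
 * Note: per the SMC Calling Convention, the SoC version returned above
 * has bit [31] clear and encodes the implementer's JEP-106 identity in
 * its upper bits with an implementation-defined SoC ID in bits [15:0],
 * so a negative return value unambiguously signals an error. The SoC
 * revision format is implementation defined.
 */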

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#ifdef __aarch64__
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#endif /* __aarch64__ */
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architectural Service Calls as runtime service */
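/*
 * The EL3 runtime framework routes any fast SMC whose owning entity
 * number falls within [OEN_ARM_START, OEN_ARM_END] to the handler
 * registered below; the NULL slot indicates that this service needs
 * no initialisation routine.
 */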
DECLARE_RT_SVC(
		arm_arch_svc,
		OEN_ARM_START,
		OEN_ARM_END,
		SMC_TYPE_FAST,
		NULL,
		arm_arch_svc_smc_handler
);