/*
 * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <services/rmi_svc.h>
#include <services/rmmd_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

#if ENABLE_RME
/* Set up the Arm Architecture Services, including the RMM dispatcher (RMMD) */
static int32_t arm_arch_svc_setup(void)
{
	return rmmd_setup();
}
#endif

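/*
 * SMCCC_VERSION returns the implemented SMC Calling Convention version.
 * Per the SMCCC specification, bits[30:16] carry the major version,
 * bits[15:0] the minor version, and bit[31] is zero; MAKE_SMCCC_VERSION
 * packs the two fields into that format.
 */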
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround
		 * if the PE implements the architectural Speculation Store
		 * Bypass Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;
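
		/*
		 * Per the Arm architecture, ID_AA64PFR1_EL1.SSBS reads as
		 * 0b0000 (SSBS_UNAVAILABLE) when the extension is not
		 * implemented; any non-zero encoding means the PE provides
		 * architectural controls for Speculative Store Bypass.
		 */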

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}
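
/*
 * Discovery note: per the SMC Calling Convention, a lower-EL caller is
 * expected to probe SMCCC_VERSION and SMCCC_ARCH_FEATURES before
 * invoking a workaround. A minimal sketch, assuming a hypothetical
 * invoke_smc() helper that issues an SMC with the given function ID
 * and arguments and returns x0:
 *
 *	if (invoke_smc(SMCCC_VERSION) >= MAKE_SMCCC_VERSION(1, 1) &&
 *	    invoke_smc(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) == 0) {
 *		invoke_smc(SMCCC_ARCH_WORKAROUND_1);
 *	}
 *
 * A return of 0 from SMCCC_ARCH_FEATURES means the mitigation is
 * required and safe to invoke; 1, as returned above, means this PE
 * does not need it.
 */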

/*
 * Return the SoC revision or the SoC version on success; otherwise
 * return SMC_ARCH_CALL_INVAL_PARAM for an unrecognised argument.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}
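
/*
 * Usage note: the SoC ID argument selects the returned identifier. A
 * minimal sketch with the same hypothetical invoke_smc() helper:
 *
 *	int32_t soc_version  = invoke_smc(SMCCC_ARCH_SOC_ID,
 *					  SMCCC_GET_SOC_VERSION);
 *	int32_t soc_revision = invoke_smc(SMCCC_ARCH_SOC_ID,
 *					  SMCCC_GET_SOC_REVISION);
 */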

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
#if ENABLE_RME
		/*
		 * RMI functions are allocated from the Arch service range.
		 * Call the RMM dispatcher to handle RMI calls.
		 */
		if (is_rmi_fid(smc_fid)) {
			return rmmd_rmi_handler(smc_fid, x1, x2, x3, x4, cookie,
						handle, flags);
		}
#endif
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

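/*
 * DECLARE_RT_SVC places this descriptor in the runtime services array,
 * from which the generic SMC handler routes every fast call whose
 * owning entity number lies in [OEN_ARM_START, OEN_ARM_END] (the Arm
 * Architecture Service range) to arm_arch_svc_smc_handler().
 */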
/* Register the Arm Architecture Service Calls as a runtime service */
DECLARE_RT_SVC(
		arm_arch_svc,
		OEN_ARM_START,
		OEN_ARM_END,
		SMC_TYPE_FAST,
#if ENABLE_RME
		arm_arch_svc_setup,
#else
		NULL,
#endif
		arm_arch_svc_smc_handler
);