/*
 * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

7
8
9
10
11
12
13
#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
Antonio Nino Diaz's avatar
Antonio Nino Diaz committed
14
#include <smccc_helpers.h>
15
#include <plat/common/platform.h>
16
17
18
19
20
21

/* Report the SMCCC version implemented by this firmware (major.minor packed). */
static int32_t smccc_version(void)
{
	int32_t ver;

	ver = MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);

	return ver;
}

22
static int32_t smccc_arch_features(u_register_t arg1, u_register_t arg2)
23
{
24
	switch (arg1) {
25
26
27
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_OK;
28
29
30
31
32
33
34
35
	case SMCCC_ARCH_SOC_ID:
		if (arg2 == SMCCC_GET_SOC_REVISION) {
			return plat_get_soc_revision();
		}
		if (arg2 == SMCCC_GET_SOC_VERSION) {
			return plat_get_soc_version();
		}
		return SMC_ARCH_CALL_INVAL_PARAM;
36
#if WORKAROUND_CVE_2017_5715
37
	case SMCCC_ARCH_WORKAROUND_1:
38
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
39
			return 1;
40
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
41
#endif
42

43
#if WORKAROUND_CVE_2018_3639
44
	case SMCCC_ARCH_WORKAROUND_2: {
45
#if DYNAMIC_WORKAROUND_CVE_2018_3639
46
47
48
49
50
51
52
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
53
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
54
55
56
57
58
59
60
61
62
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
78
		return SMC_ARCH_CALL_NOT_REQUIRED;
79
#endif
80
	}
81
#endif
82
83
84

	/* Fallthrough */

85
86
87
88
89
90
91
92
	default:
		return SMC_UNK;
	}
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
93
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
94
95
96
97
98
99
100
101
102
103
104
105
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
106
		SMC_RET1(handle, smccc_arch_features(x1, x2));
107
108
109
110
111
112
113
114
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3.  On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
115
116
117
118
119
120
121
122
123
124
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/*
 * Register the Arm Architectural Service calls (OEN_ARM range, fast calls)
 * as a runtime service, routed to arm_arch_svc_smc_handler.
 */
DECLARE_RT_SVC(
		arm_arch_svc,
		OEN_ARM_START,
		OEN_ARM_END,
		SMC_TYPE_FAST,
		NULL, /* no setup/init function needed for this service */
		arm_arch_svc_smc_handler
);