arm_arch_svc_setup.c
/*
 * Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>

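/*
 * SMCCC_VERSION handler. Per the SMC Calling Convention, the major
 * version is packed into bits [30:16] and the minor version into bits
 * [15:0] of the returned value.
 */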
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

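/*
 * SMCCC_ARCH_FEATURES handler: report whether the Arch Service function
 * identified by 'arg' is implemented and, for the CVE workarounds,
 * whether the calling PE actually needs the mitigation.
 */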
static int32_t smccc_arch_features(u_register_t arg)
{
	switch (arg) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_OK;
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out the dynamic workaround
		 * if the PE implements the architectural Speculation Store
		 * Bypass Safe (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3.  On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architectural Service Calls as a runtime service */
DECLARE_RT_SVC(
		arm_arch_svc,
		OEN_ARM_START,
		OEN_ARM_END,
		SMC_TYPE_FAST,
		NULL,
		arm_arch_svc_smc_handler
);
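
For illustration, here is a minimal sketch of how a lower exception level
might probe this service before invoking a workaround. The __smc() wrapper,
the FID_* macros and wa1_required() are hypothetical names introduced here,
not part of this file; the function IDs and return values follow the SMC
Calling Convention.

#include <stdint.h>

/* Hypothetical wrapper that issues an SMC with one argument and returns x0. */
extern uintptr_t __smc(uint32_t fid, uintptr_t arg1);

#define FID_SMCCC_VERSION		0x80000000U
#define FID_SMCCC_ARCH_FEATURES		0x80000001U
#define FID_SMCCC_ARCH_WORKAROUND_1	0x80008000U

/*
 * Returns 1 if SMCCC_ARCH_WORKAROUND_1 is implemented and required on the
 * calling PE, 0 otherwise. SMCCC_VERSION is probed first because firmware
 * predating SMCCC v1.1 treats both calls as unknown and returns SMC_UNK (-1).
 */
static int wa1_required(void)
{
	if ((int32_t)__smc(FID_SMCCC_VERSION, 0) < 0)
		return 0;
	return (int32_t)__smc(FID_SMCCC_ARCH_FEATURES,
			      FID_SMCCC_ARCH_WORKAROUND_1) == 0;
}

Production code (Linux, for example) typically confirms SMCCC v1.1 support
via PSCI_FEATURES before issuing SMCCC_VERSION directly, since very old
firmware may not handle unknown function IDs gracefully.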