Commit b8a25bbb authored by Dimitris Papastamos

Implement static workaround for CVE-2018-3639

For affected CPUs, this approach enables the mitigation during EL3
initialization, following every PE reset. No mechanism is provided to
disable the mitigation at runtime.

This approach permanently mitigates the entire software stack and no
additional mitigation code is required in other software components.

TF-A implements this approach for the following affected CPUs (the register
update sequence common to all three is sketched after the list):

*   Cortex-A57 and Cortex-A72, by setting bit 55 (Disable load pass store) of
    `CPUACTLR_EL1` (`S3_1_C15_C2_0`).

*   Cortex-A73, by setting bit 3 of `S3_0_C15_C0_0` (not documented in the
    Technical Reference Manual (TRM)).

*   Cortex-A75, by setting bit 35 (reserved in TRM) of `CPUACTLR_EL1`
    (`S3_0_C15_C1_0`).

Additionally, a new SMC interface is implemented to allow software
executing in lower ELs to discover whether the system is mitigated
against CVE-2018-3639.

Refer to "Firmware interfaces for mitigating cache speculation
vulnerabilities, System Software on Arm Systems"[0] for more
information.

[0] https://developer.arm.com/cache-speculation-vulnerability-firmware-specification
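
As a rough illustration of how software at a lower EL might use the new
interface, the sketch below queries `SMCCC_ARCH_FEATURES` for
`SMCCC_ARCH_WORKAROUND_2`. With this static workaround TF-A returns
`SMCCC_ARCH_NOT_REQUIRED` (-2), indicating the PE is either unaffected or
already permanently mitigated, so the caller does not need to invoke the
workaround SMC. The `smc_call` conduit is a placeholder for whatever SMC/HVC
invocation mechanism the caller provides.

```c
#include <stdint.h>

#define SMCCC_ARCH_FEATURES       0x80000001U
#define SMCCC_ARCH_WORKAROUND_2   0x80007FFFU
#define SMCCC_ARCH_NOT_REQUIRED   (-2)

/* Placeholder: issue an SMC with one argument and return x0 (conduit-specific). */
extern int64_t smc_call(uint32_t fid, uint64_t arg0);

/* Returns 1 if no further CVE-2018-3639 handling is needed on this PE. */
static int cve_2018_3639_mitigated(void)
{
	int64_t ret = smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_2);

	if (ret == SMCCC_ARCH_NOT_REQUIRED)
		return 1;	/* unaffected, or statically mitigated by firmware */
	if (ret >= 0)
		return 0;	/* dynamic mitigation: caller must drive WORKAROUND_2 */
	return 0;		/* negative error (e.g. not supported): assume unmitigated */
}
```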



Change-Id: I084aa7c3bc7c26bf2df2248301270f77bed22ceb
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
parent 2c3a1078
@@ -24,6 +24,12 @@ vulnerability workarounds should be applied at runtime.
with the recommendation in the spec regarding workaround discovery.
Defaults to 1.
- ``WORKAROUND_CVE_2018_3639``: Enables the security workaround for
`CVE-2018-3639`_. Defaults to 1. The TF-A project recommends keeping
the default value of 1 even on platforms that are unaffected by
CVE-2018-3639, in order to comply with the recommendation in the spec
regarding workaround discovery.
CPU Errata Workarounds
----------------------
@@ -44,6 +44,7 @@
#define CORTEX_A57_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_DMB (ULL(1) << 59)
#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A57_CPUACTLR_EL1_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
#define CORTEX_A57_CPUACTLR_EL1_DIS_OVERREAD (ULL(1) << 52)
#define CORTEX_A57_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
@@ -32,6 +32,7 @@
#define CORTEX_A72_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
#define CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A72_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
#define CORTEX_A72_CPUACTLR_EL1_DCC_AS_DCCI (ULL(1) << 44)
#define CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH (ULL(1) << 32)
@@ -22,4 +22,11 @@
******************************************************************************/
#define CORTEX_A73_L2MERRSR_EL1 S3_1_C15_C2_3 /* Instruction def. */
/*******************************************************************************
* CPU implementation defined register specific definitions.
******************************************************************************/
#define CORTEX_A73_IMP_DEF_REG1 S3_0_C15_C0_0
#define CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE (1 << 3)
#endif /* __CORTEX_A73_H__ */
@@ -16,6 +16,13 @@
#define CORTEX_A75_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define CORTEX_A75_CPUECTLR_EL1 S3_0_C15_C1_4
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CORTEX_A75_CPUACTLR_EL1 S3_0_C15_C1_0
#define CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE (1 << 35)
/* Definitions of register field mask in CORTEX_A75_CPUPWRCTLR_EL1 */
#define CORTEX_A75_CORE_PWRDN_EN_MASK 0x1
@@ -10,5 +10,8 @@
#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_NOT_REQUIRED -2
#endif /* __ARM_ARCH_SVC_H__ */
@@ -337,6 +337,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: x0-x19
@@ -397,6 +406,14 @@ func cortex_a57_reset_func
msr vbar_el3, x0
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A57_CPUACTLR_EL1
orr x0, x0, #CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
msr CORTEX_A57_CPUACTLR_EL1, x0
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
@@ -528,6 +545,7 @@ func cortex_a57_errata_report
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
ldp x8, x30, [sp], #16
ret
@@ -110,6 +110,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
@@ -131,6 +140,14 @@ func cortex_a72_reset_func
1:
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A72_CPUACTLR_EL1
orr x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
msr CORTEX_A72_CPUACTLR_EL1, x0
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
@@ -265,6 +282,7 @@ func cortex_a72_errata_report
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
ldp x8, x30, [sp], #16
ret
@@ -43,6 +43,13 @@ func cortex_a73_reset_func
1:
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A73_IMP_DEF_REG1
orr x0, x0, #CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE
msr CORTEX_A73_IMP_DEF_REG1, x0
isb
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* Clobbers : x0
@@ -129,6 +136,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
#if REPORT_ERRATA
/*
* Errata printing function for Cortex A75. Must follow AAPCS.
@@ -144,6 +160,7 @@ func cortex_a73_errata_report
* checking functions of each errata.
*/
report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639
ldp x8, x30, [sp], #16
ret
@@ -18,6 +18,13 @@ func cortex_a75_reset_func
1:
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A75_CPUACTLR_EL1
orr x0, x0, #CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE
msr CORTEX_A75_CPUACTLR_EL1, x0
isb
#endif
#if ENABLE_AMU
/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
mrs x0, actlr_el3
@@ -57,6 +64,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
/* ---------------------------------------------
* HW will do the cache maintenance while powering down
* ---------------------------------------------
@@ -88,6 +104,7 @@ func cortex_a75_errata_report
* checking functions of each errata.
*/
report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639
ldp x8, x30, [sp], #16
ret
@@ -17,6 +17,7 @@ A53_DISABLE_NON_TEMPORAL_HINT ?=1
A57_DISABLE_NON_TEMPORAL_HINT ?=1
WORKAROUND_CVE_2017_5715 ?=1
WORKAROUND_CVE_2018_3639 ?=1
# Process SKIP_A57_L1_FLUSH_PWR_DWN flag
$(eval $(call assert_boolean,SKIP_A57_L1_FLUSH_PWR_DWN))
@@ -34,6 +35,10 @@ $(eval $(call add_define,A57_DISABLE_NON_TEMPORAL_HINT))
$(eval $(call assert_boolean,WORKAROUND_CVE_2017_5715))
$(eval $(call add_define,WORKAROUND_CVE_2017_5715))
# Process WORKAROUND_CVE_2018_3639 flag
$(eval $(call assert_boolean,WORKAROUND_CVE_2018_3639))
$(eval $(call add_define,WORKAROUND_CVE_2018_3639))
# CPU Errata Build flags.
# These should be enabled by the platform if the erratum workaround needs to be
# applied.
@@ -28,6 +28,10 @@ static int32_t smccc_arch_features(u_register_t arg)
if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
return 1;
return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
#if WORKAROUND_CVE_2018_3639
case SMCCC_ARCH_WORKAROUND_2:
return SMCCC_ARCH_NOT_REQUIRED;
#endif
default:
return SMC_UNK;
@@ -59,6 +63,16 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
* has no effect.
*/
SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
case SMCCC_ARCH_WORKAROUND_2:
/*
* The workaround has already been applied on affected PEs
* requiring dynamic mitigation during entry to EL3.
* On unaffected or statically mitigated PEs, this function
* has no effect.
*/
SMC_RET0(handle);
#endif
default:
WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",