Commit f62ad322 authored by Dimitris Papastamos

Workaround for CVE-2017-5715 on Cortex A57 and A72



Invalidate the Branch Target Buffer (BTB) on entry to EL3 by disabling
and enabling the MMU.  To achieve this without performing any branch
instruction, a per-cpu vbar is installed which executes the workaround
and then branches off to the corresponding vector entry in the main
vector table.  A side effect of this change is that the main vbar is
configured before any reset handling.  This is to allow the per-cpu
reset function to override the vbar setting.

This workaround is enabled by default on the affected CPUs.
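
For illustration, each lower-EL entry in the new per-cpu vector table boils down to the sequence below (condensed from the workaround_cve_2017_5715_mmu.S file added by this patch; the synchronous-exception handler is shown as an example branch target):

	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x0, sctlr_el3
	/* Disable MMU */
	bic	x1, x0, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Restore MMU config */
	msr	sctlr_el3, x0
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	/* Continue in the main vector table */
	b	sync_exception_aarch64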

Change-Id: I97788d38463a5840a410e3cea85ed297a1678265
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
parent 08e06be8
@@ -14,6 +14,26 @@
.globl runtime_exceptions
.globl sync_exception_sp_el0
.globl irq_sp_el0
.globl fiq_sp_el0
.globl serror_sp_el0
.globl sync_exception_sp_elx
.globl irq_sp_elx
.globl fiq_sp_elx
.globl serror_sp_elx
.globl sync_exception_aarch64
.globl irq_aarch64
.globl fiq_aarch64
.globl serror_aarch64
.globl sync_exception_aarch32
.globl irq_aarch32
.globl fiq_aarch32
.globl serror_aarch32
/* ---------------------------------------------------------------------
* This macro handles Synchronous exceptions.
* Only SMC exceptions are supported.
@@ -58,6 +58,10 @@ ifeq (${ENABLE_SVE_FOR_NS},1)
BL31_SOURCES += lib/extensions/sve/sve.c
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
BL31_SOURCES += lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
endif
BL31_LINKERFILE := bl31/bl31.ld.S
# Flag used to indicate if Crash reporting via console should be included
@@ -11,6 +11,15 @@ This document describes the various build options present in the CPU specific
operations framework to enable errata workarounds and to enable optimizations
for a specific CPU on a platform.
Security Vulnerability Workarounds
----------------------------------
ARM Trusted Firmware exports a series of build flags which control which
security vulnerability workarounds should be applied at runtime.
- ``WORKAROUND_CVE_2017_5715``: Enables the security workaround for
`CVE-2017-5715`_. Defaults to 1.
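
When the flag is enabled, BL31 additionally compiles in the per-cpu vector table, and the reset handlers of the affected CPUs point VBAR_EL3 at it, along the lines of the Cortex-A57 reset-function hunk later in this patch:

#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
	adr	x0, workaround_mmu_runtime_exceptions
	msr	vbar_el3, x0
#endif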
CPU Errata Workarounds
----------------------
@@ -142,6 +151,7 @@ architecture that can be enabled by the platform as desired.
*Copyright (c) 2014-2016, ARM Limited and Contributors. All rights reserved.*
.. _CVE-2017-5715: http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=2017-5715
.. _Cortex-A53 MPCore Software Developers Errata Notice: http://infocenter.arm.com/help/topic/com.arm.doc.epm048406/Cortex_A53_MPCore_Software_Developers_Errata_Notice.pdf
.. _Cortex-A57 MPCore Software Developers Errata Notice: http://infocenter.arm.com/help/topic/com.arm.doc.epm049219/cortex_a57_mpcore_software_developers_errata_notice.pdf
.. _Cortex-A72 MPCore Software Developers Errata Notice: http://infocenter.arm.com/help/topic/com.arm.doc.epm012079/index.html
@@ -13,7 +13,7 @@
/*
* Helper macro to initialise EL3 registers we care about.
*/
.macro el3_arch_init_common _exception_vectors
.macro el3_arch_init_common
/* ---------------------------------------------------------------------
* SCTLR_EL3 has already been initialised - read current value before
* modifying.
@@ -49,14 +49,6 @@
bl init_cpu_data_ptr
#endif /* IMAGE_BL31 */
/* ---------------------------------------------------------------------
* Set the exception vectors.
* ---------------------------------------------------------------------
*/
adr x0, \_exception_vectors
msr vbar_el3, x0
isb
/* ---------------------------------------------------------------------
* Initialise SCR_EL3, setting all fields rather than relying on hw.
* All fields are architecturally UNKNOWN on reset. The following fields
@@ -220,6 +212,14 @@
do_cold_boot:
.endif /* _warm_boot_mailbox */
/* ---------------------------------------------------------------------
* Set the exception vectors.
* ---------------------------------------------------------------------
*/
adr x0, \_exception_vectors
msr vbar_el3, x0
isb
/* ---------------------------------------------------------------------
* It is a cold boot.
* Perform any processor specific actions upon reset e.g. cache, TLB
@@ -228,7 +228,7 @@
*/
bl reset_handler
el3_arch_init_common \_exception_vectors
el3_arch_init_common
.if \_secondary_cold_boot
/* -------------------------------------------------------------
@@ -383,6 +383,11 @@ func cortex_a57_reset_func
bl errata_a57_859972_wa
#endif
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
adr x0, workaround_mmu_runtime_exceptions
msr vbar_el3, x0
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
@@ -110,6 +110,12 @@ func cortex_a72_reset_func
mov x0, x18
bl errata_a72_859971_wa
#endif
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
adr x0, workaround_mmu_runtime_exceptions
msr vbar_el3, x0
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <arch.h>
#include <asm_macros.S>
#include <context.h>
.globl workaround_mmu_runtime_exceptions
vector_base workaround_mmu_runtime_exceptions
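	/*
	 * Invalidate the BTB on entry to EL3 by disabling and re-enabling the
	 * MMU.  Used only by the lower-EL vector entries below; no branch
	 * instruction is executed before the invalidation.
	 */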
.macro apply_workaround
stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
mrs x0, sctlr_el3
/* Disable MMU */
bic x1, x0, #SCTLR_M_BIT
msr sctlr_el3, x1
isb
/* Restore MMU config */
msr sctlr_el3, x0
isb
ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
.endm
/* ---------------------------------------------------------------------
* Current EL with SP_EL0 : 0x0 - 0x200
* ---------------------------------------------------------------------
*/
vector_entry workaround_mmu_sync_exception_sp_el0
b sync_exception_sp_el0
check_vector_size workaround_mmu_sync_exception_sp_el0
vector_entry workaround_mmu_irq_sp_el0
b irq_sp_el0
check_vector_size workaround_mmu_irq_sp_el0
vector_entry workaround_mmu_fiq_sp_el0
b fiq_sp_el0
check_vector_size workaround_mmu_fiq_sp_el0
vector_entry workaround_mmu_serror_sp_el0
b serror_sp_el0
check_vector_size workaround_mmu_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
* ---------------------------------------------------------------------
*/
vector_entry workaround_mmu_sync_exception_sp_elx
b sync_exception_sp_elx
check_vector_size workaround_mmu_sync_exception_sp_elx
vector_entry workaround_mmu_irq_sp_elx
b irq_sp_elx
check_vector_size workaround_mmu_irq_sp_elx
vector_entry workaround_mmu_fiq_sp_elx
b fiq_sp_elx
check_vector_size workaround_mmu_fiq_sp_elx
vector_entry workaround_mmu_serror_sp_elx
b serror_sp_elx
check_vector_size workaround_mmu_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
* ---------------------------------------------------------------------
*/
vector_entry workaround_mmu_sync_exception_aarch64
apply_workaround
b sync_exception_aarch64
check_vector_size workaround_mmu_sync_exception_aarch64
vector_entry workaround_mmu_irq_aarch64
apply_workaround
b irq_aarch64
check_vector_size workaround_mmu_irq_aarch64
vector_entry workaround_mmu_fiq_aarch64
apply_workaround
b fiq_aarch64
check_vector_size workaround_mmu_fiq_aarch64
vector_entry workaround_mmu_serror_aarch64
apply_workaround
b serror_aarch64
check_vector_size workaround_mmu_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
* ---------------------------------------------------------------------
*/
vector_entry workaround_mmu_sync_exception_aarch32
apply_workaround
b sync_exception_aarch32
check_vector_size workaround_mmu_sync_exception_aarch32
vector_entry workaround_mmu_irq_aarch32
apply_workaround
b irq_aarch32
check_vector_size workaround_mmu_irq_aarch32
vector_entry workaround_mmu_fiq_aarch32
apply_workaround
b fiq_aarch32
check_vector_size workaround_mmu_fiq_aarch32
vector_entry workaround_mmu_serror_aarch32
apply_workaround
b serror_aarch32
check_vector_size workaround_mmu_serror_aarch32
@@ -16,6 +16,8 @@ A53_DISABLE_NON_TEMPORAL_HINT ?=1
# It is enabled by default.
A57_DISABLE_NON_TEMPORAL_HINT ?=1
WORKAROUND_CVE_2017_5715 ?=1
# Process SKIP_A57_L1_FLUSH_PWR_DWN flag
$(eval $(call assert_boolean,SKIP_A57_L1_FLUSH_PWR_DWN))
$(eval $(call add_define,SKIP_A57_L1_FLUSH_PWR_DWN))
@@ -28,6 +30,9 @@ $(eval $(call add_define,A53_DISABLE_NON_TEMPORAL_HINT))
$(eval $(call assert_boolean,A57_DISABLE_NON_TEMPORAL_HINT))
$(eval $(call add_define,A57_DISABLE_NON_TEMPORAL_HINT))
# Process WORKAROUND_CVE_2017_5715 flag
$(eval $(call assert_boolean,WORKAROUND_CVE_2017_5715))
$(eval $(call add_define,WORKAROUND_CVE_2017_5715))
# CPU Errata Build flags.
# These should be enabled by the platform if the erratum workaround needs to be