Unverified Commit d003b190 authored by Dimitris Papastamos, committed by GitHub

Merge pull request #1392 from dp-arm/dp/cve_2018_3639

Implement workaround for CVE-2018-3639 on Cortex A57/A72/A73 and A75
parents edcd266e fe007b2e
...@@ -61,8 +61,8 @@ BL31_SOURCES += lib/extensions/sve/sve.c
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL31_SOURCES += lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S \
+BL31_SOURCES += lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S \
-lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
+lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
endif
BL31_LINKERFILE := bl31/bl31.ld.S
...
...@@ -29,8 +29,8 @@ BL32_SOURCES += lib/extensions/amu/aarch32/amu.c\
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
+BL32_SOURCES += bl32/sp_min/wa_cve_2017_5715_bpiall.S \
-bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+bl32/sp_min/wa_cve_2017_5715_icache_inv.S
endif
BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S
...
...@@ -6,9 +6,9 @@
#include <asm_macros.S>
-.globl workaround_bpiall_runtime_exceptions
+.globl wa_cve_2017_5715_bpiall_vbar
-vector_base workaround_bpiall_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar
/* We encode the exception entry in the bottom 3 bits of SP */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
...
...@@ -6,9 +6,9 @@
#include <asm_macros.S>
-.globl workaround_icache_inv_runtime_exceptions
+.globl wa_cve_2017_5715_icache_inv_vbar
-vector_base workaround_icache_inv_runtime_exceptions
+vector_base wa_cve_2017_5715_icache_inv_vbar
/* We encode the exception entry in the bottom 3 bits of SP */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
...
...@@ -24,6 +24,17 @@ vulnerability workarounds should be applied at runtime.
  with the recommendation in the spec regarding workaround discovery.
  Defaults to 1.
- ``WORKAROUND_CVE_2018_3639``: Enables the security workaround for
  `CVE-2018-3639`_. Defaults to 1. The TF-A project recommends keeping
  the default value of 1 even on platforms that are unaffected by
  CVE-2018-3639, in order to comply with the recommendation in the spec
  regarding workaround discovery.
- ``DYNAMIC_WORKAROUND_CVE_2018_3639``: Enables dynamic mitigation for
  `CVE-2018-3639`_. This build option should be set to 1 if the target
  platform contains at least one CPU that requires dynamic mitigation.
  Defaults to 0. An example build invocation is shown below.
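As an illustration only (the platform name is an arbitrary example and not part of this patch), a platform containing such CPUs could be built with:

    make PLAT=fvp DYNAMIC_WORKAROUND_CVE_2018_3639=1 all fip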
CPU Errata Workarounds
----------------------
...
...@@ -44,6 +44,7 @@
#define CORTEX_A57_CPUACTLR p15, 0, c15
#define CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_DMB (ULL(1) << 59)
#define CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A57_CPUACTLR_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
#define CORTEX_A57_CPUACTLR_DIS_OVERREAD (ULL(1) << 52)
#define CORTEX_A57_CPUACTLR_NO_ALLOC_WBWA (ULL(1) << 49)
...
...@@ -32,6 +32,7 @@
#define CORTEX_A72_CPUACTLR p15, 0, c15
#define CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
#define CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A72_CPUACTLR_NO_ALLOC_WBWA (ULL(1) << 49)
#define CORTEX_A72_CPUACTLR_DCC_AS_DCCI (ULL(1) << 44)
#define CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH (ULL(1) << 32)
...
...@@ -44,6 +44,7 @@
#define CORTEX_A57_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_DMB (ULL(1) << 59)
#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A57_CPUACTLR_EL1_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
#define CORTEX_A57_CPUACTLR_EL1_DIS_OVERREAD (ULL(1) << 52)
#define CORTEX_A57_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
...
...@@ -32,6 +32,7 @@
#define CORTEX_A72_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
#define CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A72_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
#define CORTEX_A72_CPUACTLR_EL1_DCC_AS_DCCI (ULL(1) << 44)
#define CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH (ULL(1) << 32)
...
...@@ -22,4 +22,11 @@
******************************************************************************/
#define CORTEX_A73_L2MERRSR_EL1 S3_1_C15_C2_3 /* Instruction def. */
/*******************************************************************************
* CPU implementation defined register specific definitions.
******************************************************************************/
#define CORTEX_A73_IMP_DEF_REG1 S3_0_C15_C0_0
#define CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE (1 << 3)
#endif /* __CORTEX_A73_H__ */
...@@ -16,6 +16,13 @@
#define CORTEX_A75_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define CORTEX_A75_CPUECTLR_EL1 S3_0_C15_C1_4
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CORTEX_A75_CPUACTLR_EL1 S3_0_C15_C1_0
#define CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE (1 << 35)
/* Definitions of register field mask in CORTEX_A75_CPUPWRCTLR_EL1 */
#define CORTEX_A75_CORE_PWRDN_EN_MASK 0x1
...
...@@ -18,6 +18,9 @@
/* Special constant to specify that CPU has no reset function */
#define CPU_NO_RESET_FUNC 0
#define CPU_NO_EXTRA1_FUNC 0
#define CPU_NO_EXTRA2_FUNC 0
/* Word size for 64-bit CPUs */
#define CPU_WORD_SIZE 8
...@@ -48,6 +51,8 @@ CPU_RESET_FUNC: /* cpu_ops reset_func */
#endif
CPU_EXTRA1_FUNC:
.space 8
CPU_EXTRA2_FUNC:
.space 8
#ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
.space (8 * CPU_MAX_PWR_DWN_OPS)
...@@ -119,6 +124,10 @@ CPU_OPS_SIZE = .
* This is a placeholder for future per CPU operations. Currently,
* some CPUs use this entry to set a test function to determine if
* the workaround for CVE-2017-5715 needs to be applied or not.
* _extra2:
* This is a placeholder for future per CPU operations. Currently
* some CPUs use this entry to set a function to disable the
* workaround for CVE-2018-3639.
* _power_down_ops:
* Comma-separated list of functions to perform power-down
* operatios on the CPU. At least one, and up to
...@@ -129,7 +138,7 @@ CPU_OPS_SIZE = .
* used to handle power down at subsequent levels
*/
.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
-_extra1:req, _power_down_ops:vararg
+_extra1:req, _extra2:req, _power_down_ops:vararg
.section cpu_ops, "a"
.align 3
.type cpu_ops_\_name, %object
...@@ -138,6 +147,7 @@ CPU_OPS_SIZE = .
.quad \_resetfunc
#endif
.quad \_extra1
.quad \_extra2
#ifdef IMAGE_BL31
1:
/* Insert list of functions */
...@@ -196,14 +206,15 @@ CPU_OPS_SIZE = .
.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
_power_down_ops:vararg
-declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, \
+declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, \
\_power_down_ops
.endm
-.macro declare_cpu_ops_workaround_cve_2017_5715 _name:req, _midr:req, \
+.macro declare_cpu_ops_wa _name:req, _midr:req, \
-_resetfunc:req, _extra1:req, _power_down_ops:vararg
+_resetfunc:req, _extra1:req, _extra2:req, \
_power_down_ops:vararg
declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-\_extra1, \_power_down_ops
+\_extra1, \_extra2, \_power_down_ops
.endm
#if REPORT_ERRATA
...
...@@ -4,9 +4,9 @@
* SPDX-License-Identifier: BSD-3-Clause
*/
-#ifndef __WORKAROUND_CVE_2017_5715_H__
+#ifndef __WA_CVE_2017_5715_H__
-#define __WORKAROUND_CVE_2017_5715_H__
+#define __WA_CVE_2017_5715_H__
-int check_workaround_cve_2017_5715(void);
+int check_wa_cve_2017_5715(void);
-#endif /* __WORKAROUND_CVE_2017_5715_H__ */
+#endif /* __WA_CVE_2017_5715_H__ */
/*
* Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef __WA_CVE_2018_3639_H__
#define __WA_CVE_2018_3639_H__
void *wa_cve_2018_3639_get_disable_ptr(void);
#endif /* __WA_CVE_2018_3639_H__ */
...@@ -128,8 +128,8 @@
* Constants that allow assembler code to access members of and the 'fp_regs'
* structure at their correct offsets.
******************************************************************************/
-#if CTX_INCLUDE_FPREGS
#define CTX_FPREGS_OFFSET (CTX_SYSREGS_OFFSET + CTX_SYSREGS_END)
+#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0 U(0x0) #define CTX_FP_Q0 U(0x0)
#define CTX_FP_Q1 U(0x10) #define CTX_FP_Q1 U(0x10)
#define CTX_FP_Q2 U(0x20) #define CTX_FP_Q2 U(0x20)
...@@ -170,8 +170,14 @@
#else
#define CTX_FPREGS_END U(0x210) /* Align to the next 16 byte boundary */
#endif
#else
#define CTX_FPREGS_END U(0)
#endif
#define CTX_CVE_2018_3639_OFFSET (CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE U(0)
#define CTX_CVE_2018_3639_END U(0x10) /* Align to the next 16 byte boundary */
#ifndef __ASSEMBLY__
#include <cassert.h>
...@@ -195,6 +201,7 @@
#define CTX_FPREG_ALL (CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL (CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL (CTX_CVE_2018_3639_END >> DWORD_SHIFT)
/*
* AArch64 general purpose register context structure. Usually x0-x18,
...@@ -227,6 +234,9 @@ DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
*/
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);
/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);
/*
* Macros to access members of any of the above structures using their
* offsets
...@@ -251,6 +261,7 @@ typedef struct cpu_context {
#if CTX_INCLUDE_FPREGS
fp_regs_t fpregs_ctx;
#endif
cve_2018_3639_t cve_2018_3639_ctx;
} cpu_context_t;
/* Macros to access members of the 'cpu_context_t' structure */
...@@ -276,6 +287,8 @@ CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
#endif
CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx), \
assert_core_context_el3state_offset_mismatch);
CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \
assert_core_context_cve_2018_3639_offset_mismatch);
/*
* Helper macro to set the general purpose registers that correspond to
...
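For context, the new cve_2018_3639_ctx member reserves a single 16-byte-aligned slot (CTX_CVE_2018_3639_DISABLE) in which EL3 can keep a pointer to the CPU-specific routine that disables the mitigation. Below is a minimal C sketch of how that slot might be populated for the non-secure world, assuming the existing read_ctx_reg()/write_ctx_reg() and cm_get_context() helpers; the accessor macro and function name are illustrative and not necessarily the exact code added by this patch.

#include <stdint.h>
#include <context.h>
#include <context_mgmt.h>
#include <wa_cve_2018_3639.h>

/* Illustrative accessor for the new sub-structure (assumed, not shown in this hunk). */
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) (h))->cve_2018_3639_ctx)

/*
 * Record the current CPU's "disable workaround" routine in the non-secure
 * context (NON_SECURE is the TF-A security-state identifier), so the EL3
 * runtime can locate it when SMCCC_ARCH_WORKAROUND_2 is serviced.
 * The function name here is hypothetical.
 */
static void setup_cve_2018_3639_disable_ptr(void)
{
	void *ctx = cm_get_context(NON_SECURE);
	void *disable_fn = wa_cve_2018_3639_get_disable_ptr();

	if (disable_fn != NULL)
		write_ctx_reg(get_cve_2018_3639_ctx(ctx),
			      CTX_CVE_2018_3639_DISABLE,
			      (uint64_t) disable_fn);
}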
...@@ -10,5 +10,8 @@
#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_NOT_REQUIRED -2
#endif /* __ARM_ARCH_SVC_H__ */
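On the discovery side, normal-world software is expected to probe the new function ID through SMCCC_ARCH_FEATURES before invoking it. A minimal C sketch of that probe follows, assuming a caller-provided SMC invocation primitive; smc_call() is a placeholder, and the precise return-value semantics come from the SMCCC/firmware-interface specification rather than from this patch.

#include <stdint.h>

#define SMCCC_ARCH_FEATURES		0x80000001U
#define SMCCC_ARCH_WORKAROUND_2		0x80007FFFU
#define SMCCC_ARCH_NOT_REQUIRED		(-2)

/* Placeholder: issue an SMC with one argument and return the value in x0/r0. */
extern int64_t smc_call(uint32_t fid, uint64_t arg0);

/*
 * Returns 1 if SMCCC_ARCH_WORKAROUND_2 may be invoked on this PE, 0 if the
 * firmware reports the PE does not require the workaround, -1 otherwise.
 */
static int discover_workaround_2(void)
{
	int64_t ret = smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_2);

	if (ret >= 0)
		return 1;	/* Call implemented; dynamic mitigation available. */
	if (ret == SMCCC_ARCH_NOT_REQUIRED)
		return 0;	/* PE reported as unaffected. */
	return -1;		/* NOT_SUPPORTED or unknown. */
}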
...@@ -337,6 +337,15 @@ func check_errata_cve_2017_5715
bx lr
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: r0-r6
...@@ -392,6 +401,14 @@ func cortex_a57_reset_func
bl errata_a57_859972_wa
#endif
#if WORKAROUND_CVE_2018_3639
ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_STORE
stcopr16 r0, r1, CORTEX_A57_CPUACTLR
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
...@@ -525,6 +542,7 @@ func cortex_a57_errata_report
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
pop {r12, lr}
bx lr
...
...@@ -92,6 +92,15 @@ func check_errata_cve_2017_5715
bx lr
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
...@@ -105,6 +114,15 @@ func cortex_a72_reset_func
mov r0, r4
bl errata_a72_859971_wa
#endif
#if WORKAROUND_CVE_2018_3639
ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
stcopr16 r0, r1, CORTEX_A72_CPUACTLR
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
...@@ -241,6 +259,7 @@ func cortex_a72_errata_report
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
pop {r12, lr}
bx lr
...
...@@ -337,6 +337,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: x0-x19
...@@ -393,10 +402,18 @@ func cortex_a57_reset_func
#endif
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
-adr x0, workaround_mmu_runtime_exceptions
+adr x0, wa_cve_2017_5715_mmu_vbar
msr vbar_el3, x0
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A57_CPUACTLR_EL1
orr x0, x0, #CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
msr CORTEX_A57_CPUACTLR_EL1, x0
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
...@@ -528,6 +545,7 @@ func cortex_a57_errata_report
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
ldp x8, x30, [sp], #16
ret
...@@ -555,8 +573,9 @@ func cortex_a57_cpu_reg_dump
ret
endfunc cortex_a57_cpu_reg_dump
-declare_cpu_ops_workaround_cve_2017_5715 cortex_a57, CORTEX_A57_MIDR, \
+declare_cpu_ops_wa cortex_a57, CORTEX_A57_MIDR, \
cortex_a57_reset_func, \
check_errata_cve_2017_5715, \
CPU_NO_EXTRA2_FUNC, \
cortex_a57_core_pwr_dwn, \
cortex_a57_cluster_pwr_dwn
...@@ -110,6 +110,15 @@ func check_errata_cve_2017_5715
ret
endfunc check_errata_cve_2017_5715
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2018_3639
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
...@@ -126,11 +135,19 @@ func cortex_a72_reset_func
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
cpu_check_csv2 x0, 1f
-adr x0, workaround_mmu_runtime_exceptions
+adr x0, wa_cve_2017_5715_mmu_vbar
msr vbar_el3, x0
1:
#endif
#if WORKAROUND_CVE_2018_3639
mrs x0, CORTEX_A72_CPUACTLR_EL1
orr x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
msr CORTEX_A72_CPUACTLR_EL1, x0
isb
dsb sy
#endif
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
...@@ -265,6 +282,7 @@ func cortex_a72_errata_report
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
ldp x8, x30, [sp], #16
ret
...@@ -292,8 +310,9 @@ func cortex_a72_cpu_reg_dump
ret
endfunc cortex_a72_cpu_reg_dump
-declare_cpu_ops_workaround_cve_2017_5715 cortex_a72, CORTEX_A72_MIDR, \
+declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
cortex_a72_reset_func, \
check_errata_cve_2017_5715, \
CPU_NO_EXTRA2_FUNC, \
cortex_a72_core_pwr_dwn, \
cortex_a72_cluster_pwr_dwn