Unverified Commit d003b190 authored by Dimitris Papastamos, committed by GitHub

Merge pull request #1392 from dp-arm/dp/cve_2018_3639

Implement workaround for CVE-2018-3639 on Cortex A57/A72/A73 and A75
parents edcd266e fe007b2e
@@ -38,11 +38,18 @@ endfunc cortex_a73_disable_smp
 func cortex_a73_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
 	cpu_check_csv2	x0, 1f
-	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
 	msr	vbar_el3, x0
 1:
 #endif
 
+#if WORKAROUND_CVE_2018_3639
+	mrs	x0, CORTEX_A73_IMP_DEF_REG1
+	orr	x0, x0, #CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE
+	msr	CORTEX_A73_IMP_DEF_REG1, x0
+	isb
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * Clobbers : x0
@@ -129,6 +136,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715
 
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov	x0, #ERRATA_APPLIES
+#else
+	mov	x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 #if REPORT_ERRATA
 /*
  * Errata printing function for Cortex A73. Must follow AAPCS.
@@ -144,6 +160,7 @@ func cortex_a73_errata_report
 	 * checking functions of each errata.
 	 */
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -170,8 +187,9 @@ func cortex_a73_cpu_reg_dump
 	ret
 endfunc cortex_a73_cpu_reg_dump
 
-declare_cpu_ops_workaround_cve_2017_5715 cortex_a73, CORTEX_A73_MIDR, \
+declare_cpu_ops_wa cortex_a73, CORTEX_A73_MIDR, \
 	cortex_a73_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a73_core_pwr_dwn, \
 	cortex_a73_cluster_pwr_dwn
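The reset-path hunk above is the static CVE-2018-3639 mitigation: a read-modify-write of an IMPLEMENTATION DEFINED control register to set the Disable-Load-Pass-Store bit, followed by an ISB. A minimal C-style sketch of the same pattern, assuming hypothetical accessors in place of the mrs/msr pair and a placeholder bit position:

    #include <stdint.h>

    /* Hypothetical stand-ins for the mrs/msr instructions above. */
    extern uint64_t read_impdef_reg(void);
    extern void write_impdef_reg(uint64_t val);
    extern void isb(void);

    #define DISABLE_LOAD_PASS_STORE (1ULL << 59)   /* placeholder bit */

    static void apply_static_ssbd_mitigation(void)
    {
            uint64_t reg = read_impdef_reg();      /* mrs x0, <impdef reg> */
            reg |= DISABLE_LOAD_PASS_STORE;        /* orr x0, x0, #<bit>   */
            write_impdef_reg(reg);                 /* msr <impdef reg>, x0 */
            isb();                                 /* synchronize context  */
    }

Because the bit is set unconditionally at reset and never cleared, this is the static flavour of the mitigation, which is why check_errata_cve_2018_3639 reports ERRATA_APPLIES whenever the build flag is set.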
@@ -13,11 +13,18 @@
 func cortex_a75_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
 	cpu_check_csv2	x0, 1f
-	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
 	msr	vbar_el3, x0
 1:
 #endif
 
+#if WORKAROUND_CVE_2018_3639
+	mrs	x0, CORTEX_A75_CPUACTLR_EL1
+	orr	x0, x0, #CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE
+	msr	CORTEX_A75_CPUACTLR_EL1, x0
+	isb
+#endif
+
 #if ENABLE_AMU
 	/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
 	mrs	x0, actlr_el3
@@ -57,6 +64,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715
 
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov	x0, #ERRATA_APPLIES
+#else
+	mov	x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 	/* ---------------------------------------------
 	 * HW will do the cache maintenance while powering down
 	 * ---------------------------------------------
@@ -88,6 +104,7 @@ func cortex_a75_errata_report
 	 * checking functions of each errata.
 	 */
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639
 
 	ldp	x8, x30, [sp], #16
 	ret
@@ -113,7 +130,8 @@ func cortex_a75_cpu_reg_dump
 	ret
 endfunc cortex_a75_cpu_reg_dump
 
-declare_cpu_ops_workaround_cve_2017_5715 cortex_a75, CORTEX_A75_MIDR, \
+declare_cpu_ops_wa cortex_a75, CORTEX_A75_MIDR, \
 	cortex_a75_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a75_core_pwr_dwn
@@ -285,7 +285,7 @@ endfunc print_errata_status
 #endif
 
 /*
- * int check_workaround_cve_2017_5715(void);
+ * int check_wa_cve_2017_5715(void);
  *
  * This function returns:
  * - ERRATA_APPLIES when firmware mitigation is required.
@@ -296,8 +296,8 @@ endfunc print_errata_status
  * NOTE: Must be called only after cpu_ops have been initialized
  * in per-CPU data.
  */
-	.globl	check_workaround_cve_2017_5715
-func check_workaround_cve_2017_5715
+	.globl	check_wa_cve_2017_5715
+func check_wa_cve_2017_5715
 	mrs	x0, tpidr_el3
 #if ENABLE_ASSERTIONS
 	cmp	x0, #0
@@ -315,4 +315,28 @@ func check_workaround_cve_2017_5715
 1:
 	mov	x0, #ERRATA_NOT_APPLIES
 	ret
-endfunc check_workaround_cve_2017_5715
+endfunc check_wa_cve_2017_5715
+
+/*
+ * void *wa_cve_2018_3639_get_disable_ptr(void);
+ *
+ * Returns a function pointer which is used to disable mitigation
+ * for CVE-2018-3639.
+ * The function pointer is only returned on cores that employ
+ * dynamic mitigation.  If the core uses static mitigation or is
+ * unaffected by CVE-2018-3639 this function returns NULL.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ * in per-CPU data.
+ */
+	.globl	wa_cve_2018_3639_get_disable_ptr
+func wa_cve_2018_3639_get_disable_ptr
+	mrs	x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+	ldr	x0, [x0, #CPU_EXTRA2_FUNC]
+	ret
+endfunc wa_cve_2018_3639_get_disable_ptr
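wa_cve_2018_3639_get_disable_ptr simply follows the per-CPU cpu_ops pointer and returns its extra2 slot. A hedged C view of that lookup, assuming a hypothetical struct layout (only the existence of the CPU_DATA_CPU_OPS_PTR and CPU_EXTRA2_FUNC offsets comes from the patch):

    #include <stddef.h>

    typedef void (*wa_disable_fn_t)(void);

    /* Hypothetical C view of the cpu_ops layout. */
    struct cpu_ops_view {
            /* ... reset/power-down handlers elided ... */
            wa_disable_fn_t extra2;  /* CPU_EXTRA2_FUNC slot: routine or NULL */
    };

    static wa_disable_fn_t get_disable_ptr(const struct cpu_ops_view *ops)
    {
            return ops->extra2;      /* mirrors: ldr x0, [x0, #CPU_EXTRA2_FUNC] */
    }

A NULL return therefore means "no dynamic mitigation on this core" (unaffected or statically mitigated); the SMCCC_ARCH_WORKAROUND_2 feature probe later in this patch relies on exactly that.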
@@ -9,13 +9,13 @@
 #include <asm_macros.S>
 #include <context.h>
 
-	.globl	workaround_bpiall_vbar0_runtime_exceptions
+	.globl	wa_cve_2017_5715_bpiall_vbar
 
 #define EMIT_BPIALL		0xee070fd5
 #define EMIT_SMC		0xe1600070
 #define ESR_EL3_A64_SMC0	0x5e000000
 
-.macro	enter_workaround _from_vector
+.macro	apply_cve_2017_5715_wa _from_vector
 	/*
 	 * Save register state to enable a call to AArch32 S-EL1 and return
 	 * Identify the original calling vector in w2 (==_from_vector)
@@ -66,7 +66,7 @@
 	movz	w8, SPSR_MODE32(MODE32_svc, SPSR_T_ARM, SPSR_E_LITTLE, SPSR_AIF_MASK)
 
 	/* Switch EL3 exception vectors while the workaround is executing. */
-	adr	x9, workaround_bpiall_vbar1_runtime_exceptions
+	adr	x9, wa_cve_2017_5715_bpiall_ret_vbar
 
 	/* Setup SCTLR_EL1 with MMU off and I$ on */
 	ldr	x10, stub_sel1_sctlr
@@ -93,13 +93,13 @@
  * is not enabled, the existing runtime exception vector table is used.
  * ---------------------------------------------------------------------
  */
-vector_base workaround_bpiall_vbar0_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar
 
 /* ---------------------------------------------------------------------
  * Current EL with SP_EL0 : 0x0 - 0x200
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_el0
+vector_entry bpiall_sync_exception_sp_el0
 	b	sync_exception_sp_el0
 	nop	/* to force 8 byte alignment for the following stub */
 
@@ -114,79 +114,79 @@ aarch32_stub:
 	.word	EMIT_BPIALL
 	.word	EMIT_SMC
 
-	check_vector_size workaround_bpiall_vbar0_sync_exception_sp_el0
+	check_vector_size bpiall_sync_exception_sp_el0
 
-vector_entry workaround_bpiall_vbar0_irq_sp_el0
+vector_entry bpiall_irq_sp_el0
 	b	irq_sp_el0
-	check_vector_size workaround_bpiall_vbar0_irq_sp_el0
+	check_vector_size bpiall_irq_sp_el0
 
-vector_entry workaround_bpiall_vbar0_fiq_sp_el0
+vector_entry bpiall_fiq_sp_el0
 	b	fiq_sp_el0
-	check_vector_size workaround_bpiall_vbar0_fiq_sp_el0
+	check_vector_size bpiall_fiq_sp_el0
 
-vector_entry workaround_bpiall_vbar0_serror_sp_el0
+vector_entry bpiall_serror_sp_el0
 	b	serror_sp_el0
-	check_vector_size workaround_bpiall_vbar0_serror_sp_el0
+	check_vector_size bpiall_serror_sp_el0
 
 /* ---------------------------------------------------------------------
  * Current EL with SP_ELx: 0x200 - 0x400
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_elx
+vector_entry bpiall_sync_exception_sp_elx
 	b	sync_exception_sp_elx
-	check_vector_size workaround_bpiall_vbar0_sync_exception_sp_elx
+	check_vector_size bpiall_sync_exception_sp_elx
 
-vector_entry workaround_bpiall_vbar0_irq_sp_elx
+vector_entry bpiall_irq_sp_elx
 	b	irq_sp_elx
-	check_vector_size workaround_bpiall_vbar0_irq_sp_elx
+	check_vector_size bpiall_irq_sp_elx
 
-vector_entry workaround_bpiall_vbar0_fiq_sp_elx
+vector_entry bpiall_fiq_sp_elx
 	b	fiq_sp_elx
-	check_vector_size workaround_bpiall_vbar0_fiq_sp_elx
+	check_vector_size bpiall_fiq_sp_elx
 
-vector_entry workaround_bpiall_vbar0_serror_sp_elx
+vector_entry bpiall_serror_sp_elx
 	b	serror_sp_elx
-	check_vector_size workaround_bpiall_vbar0_serror_sp_elx
+	check_vector_size bpiall_serror_sp_elx
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch64
-	enter_workaround 1
-	check_vector_size workaround_bpiall_vbar0_sync_exception_aarch64
+vector_entry bpiall_sync_exception_aarch64
+	apply_cve_2017_5715_wa 1
+	check_vector_size bpiall_sync_exception_aarch64
 
-vector_entry workaround_bpiall_vbar0_irq_aarch64
-	enter_workaround 2
-	check_vector_size workaround_bpiall_vbar0_irq_aarch64
+vector_entry bpiall_irq_aarch64
+	apply_cve_2017_5715_wa 2
+	check_vector_size bpiall_irq_aarch64
 
-vector_entry workaround_bpiall_vbar0_fiq_aarch64
-	enter_workaround 4
-	check_vector_size workaround_bpiall_vbar0_fiq_aarch64
+vector_entry bpiall_fiq_aarch64
+	apply_cve_2017_5715_wa 4
+	check_vector_size bpiall_fiq_aarch64
 
-vector_entry workaround_bpiall_vbar0_serror_aarch64
-	enter_workaround 8
-	check_vector_size workaround_bpiall_vbar0_serror_aarch64
+vector_entry bpiall_serror_aarch64
+	apply_cve_2017_5715_wa 8
+	check_vector_size bpiall_serror_aarch64
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch32
-	enter_workaround 1
-	check_vector_size workaround_bpiall_vbar0_sync_exception_aarch32
+vector_entry bpiall_sync_exception_aarch32
+	apply_cve_2017_5715_wa 1
+	check_vector_size bpiall_sync_exception_aarch32
 
-vector_entry workaround_bpiall_vbar0_irq_aarch32
-	enter_workaround 2
-	check_vector_size workaround_bpiall_vbar0_irq_aarch32
+vector_entry bpiall_irq_aarch32
+	apply_cve_2017_5715_wa 2
+	check_vector_size bpiall_irq_aarch32
 
-vector_entry workaround_bpiall_vbar0_fiq_aarch32
-	enter_workaround 4
-	check_vector_size workaround_bpiall_vbar0_fiq_aarch32
+vector_entry bpiall_fiq_aarch32
+	apply_cve_2017_5715_wa 4
+	check_vector_size bpiall_fiq_aarch32
 
-vector_entry workaround_bpiall_vbar0_serror_aarch32
-	enter_workaround 8
-	check_vector_size workaround_bpiall_vbar0_serror_aarch32
+vector_entry bpiall_serror_aarch32
+	apply_cve_2017_5715_wa 8
+	check_vector_size bpiall_serror_aarch32
 
 /* ---------------------------------------------------------------------
  * This vector table is used while the workaround is executing.  It
@@ -195,73 +195,73 @@ vector_entry workaround_bpiall_vbar0_serror_aarch32
  * EL3 state before proceeding with the normal runtime exception vector.
  * ---------------------------------------------------------------------
  */
-vector_base workaround_bpiall_vbar1_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_ret_vbar
 
 /* ---------------------------------------------------------------------
  * Current EL with SP_EL0 : 0x0 - 0x200 (UNUSED)
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_el0
+vector_entry bpiall_ret_sync_exception_sp_el0
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_sp_el0
+	check_vector_size bpiall_ret_sync_exception_sp_el0
 
-vector_entry workaround_bpiall_vbar1_irq_sp_el0
+vector_entry bpiall_ret_irq_sp_el0
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_sp_el0
+	check_vector_size bpiall_ret_irq_sp_el0
 
-vector_entry workaround_bpiall_vbar1_fiq_sp_el0
+vector_entry bpiall_ret_fiq_sp_el0
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_sp_el0
+	check_vector_size bpiall_ret_fiq_sp_el0
 
-vector_entry workaround_bpiall_vbar1_serror_sp_el0
+vector_entry bpiall_ret_serror_sp_el0
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_sp_el0
+	check_vector_size bpiall_ret_serror_sp_el0
 
 /* ---------------------------------------------------------------------
  * Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_elx
+vector_entry bpiall_ret_sync_exception_sp_elx
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_sp_elx
+	check_vector_size bpiall_ret_sync_exception_sp_elx
 
-vector_entry workaround_bpiall_vbar1_irq_sp_elx
+vector_entry bpiall_ret_irq_sp_elx
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_sp_elx
+	check_vector_size bpiall_ret_irq_sp_elx
 
-vector_entry workaround_bpiall_vbar1_fiq_sp_elx
+vector_entry bpiall_ret_fiq_sp_elx
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_sp_elx
+	check_vector_size bpiall_ret_fiq_sp_elx
 
-vector_entry workaround_bpiall_vbar1_serror_sp_elx
+vector_entry bpiall_ret_serror_sp_elx
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_sp_elx
+	check_vector_size bpiall_ret_serror_sp_elx
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch64
+vector_entry bpiall_ret_sync_exception_aarch64
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_aarch64
+	check_vector_size bpiall_ret_sync_exception_aarch64
 
-vector_entry workaround_bpiall_vbar1_irq_aarch64
+vector_entry bpiall_ret_irq_aarch64
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_aarch64
+	check_vector_size bpiall_ret_irq_aarch64
 
-vector_entry workaround_bpiall_vbar1_fiq_aarch64
+vector_entry bpiall_ret_fiq_aarch64
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_aarch64
+	check_vector_size bpiall_ret_fiq_aarch64
 
-vector_entry workaround_bpiall_vbar1_serror_aarch64
+vector_entry bpiall_ret_serror_aarch64
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_aarch64
+	check_vector_size bpiall_ret_serror_aarch64
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
 * ---------------------------------------------------------------------
  */
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
+vector_entry bpiall_ret_sync_exception_aarch32
 	/*
 	 * w2 indicates which SEL1 stub was run and thus which original vector was used
 	 * w3-w6 contain saved system register state (esr_el3 in w3)
@@ -281,7 +281,7 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
 	 * to workaround entry table in preparation for subsequent
 	 * Sync/IRQ/FIQ/SError exceptions.
 	 */
-	adr	x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr	x0, wa_cve_2017_5715_bpiall_vbar
 	msr	vbar_el3, x0
 
 	/*
@@ -324,34 +324,34 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
 1:
 	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b	sync_exception_aarch64
-	check_vector_size workaround_bpiall_vbar1_sync_exception_aarch32
+	check_vector_size bpiall_ret_sync_exception_aarch32
 
-vector_entry workaround_bpiall_vbar1_irq_aarch32
+vector_entry bpiall_ret_irq_aarch32
 	b	report_unhandled_interrupt
 
 	/*
 	 * Post-workaround fan-out for non-sync exceptions
 	 */
 workaround_not_sync:
-	tbnz	w2, #3, workaround_bpiall_vbar1_serror
-	tbnz	w2, #2, workaround_bpiall_vbar1_fiq
+	tbnz	w2, #3, bpiall_ret_serror
+	tbnz	w2, #2, bpiall_ret_fiq
 	/* IRQ */
 	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b	irq_aarch64
 
-workaround_bpiall_vbar1_fiq:
+bpiall_ret_fiq:
 	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b	fiq_aarch64
 
-workaround_bpiall_vbar1_serror:
+bpiall_ret_serror:
 	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b	serror_aarch64
-	check_vector_size workaround_bpiall_vbar1_irq_aarch32
+	check_vector_size bpiall_ret_irq_aarch32
 
-vector_entry workaround_bpiall_vbar1_fiq_aarch32
+vector_entry bpiall_ret_fiq_aarch32
 	b	report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_aarch32
+	check_vector_size bpiall_ret_fiq_aarch32
 
-vector_entry workaround_bpiall_vbar1_serror_aarch32
+vector_entry bpiall_ret_serror_aarch32
 	b	report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_aarch32
+	check_vector_size bpiall_ret_serror_aarch32
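The _from_vector values passed to apply_cve_2017_5715_wa (1, 2, 4, 8) are a one-hot encoding of the originating vector, which workaround_not_sync later decodes with tbnz. A C sketch of that dispatch (the three branch targets are the real vector handlers from this file; the wrapper itself is illustrative only):

    #include <stdint.h>

    /* Declarations only so the sketch is self-contained. */
    extern void irq_aarch64(void);
    extern void fiq_aarch64(void);
    extern void serror_aarch64(void);

    static void workaround_not_sync_sketch(uint32_t w2)
    {
            if (w2 & 0x8U)
                    serror_aarch64();   /* tbnz w2, #3, bpiall_ret_serror */
            else if (w2 & 0x4U)
                    fiq_aarch64();      /* tbnz w2, #2, bpiall_ret_fiq */
            else
                    irq_aarch64();      /* remaining case: w2 == 2 (IRQ) */
    }

The synchronous case (w2 == 1) never reaches this fan-out; it is handled directly in bpiall_ret_sync_exception_aarch32 above.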
@@ -9,13 +9,13 @@
 #include <asm_macros.S>
 #include <context.h>
 
-	.globl	workaround_mmu_runtime_exceptions
+	.globl	wa_cve_2017_5715_mmu_vbar
 
 #define ESR_EL3_A64_SMC0	0x5e000000
 
-vector_base workaround_mmu_runtime_exceptions
+vector_base wa_cve_2017_5715_mmu_vbar
 
-.macro	apply_workaround _is_sync_exception
+.macro	apply_cve_2017_5715_wa _is_sync_exception
 	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 	mrs	x1, sctlr_el3
 	/* Disable MMU */
@@ -63,86 +63,86 @@ vector_base workaround_mmu_runtime_exceptions
  * Current EL with SP_EL0 : 0x0 - 0x200
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_mmu_sync_exception_sp_el0
+vector_entry mmu_sync_exception_sp_el0
 	b	sync_exception_sp_el0
-	check_vector_size workaround_mmu_sync_exception_sp_el0
+	check_vector_size mmu_sync_exception_sp_el0
 
-vector_entry workaround_mmu_irq_sp_el0
+vector_entry mmu_irq_sp_el0
 	b	irq_sp_el0
-	check_vector_size workaround_mmu_irq_sp_el0
+	check_vector_size mmu_irq_sp_el0
 
-vector_entry workaround_mmu_fiq_sp_el0
+vector_entry mmu_fiq_sp_el0
 	b	fiq_sp_el0
-	check_vector_size workaround_mmu_fiq_sp_el0
+	check_vector_size mmu_fiq_sp_el0
 
-vector_entry workaround_mmu_serror_sp_el0
+vector_entry mmu_serror_sp_el0
 	b	serror_sp_el0
-	check_vector_size workaround_mmu_serror_sp_el0
+	check_vector_size mmu_serror_sp_el0
 
 /* ---------------------------------------------------------------------
  * Current EL with SP_ELx: 0x200 - 0x400
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_mmu_sync_exception_sp_elx
+vector_entry mmu_sync_exception_sp_elx
 	b	sync_exception_sp_elx
-	check_vector_size workaround_mmu_sync_exception_sp_elx
+	check_vector_size mmu_sync_exception_sp_elx
 
-vector_entry workaround_mmu_irq_sp_elx
+vector_entry mmu_irq_sp_elx
 	b	irq_sp_elx
-	check_vector_size workaround_mmu_irq_sp_elx
+	check_vector_size mmu_irq_sp_elx
 
-vector_entry workaround_mmu_fiq_sp_elx
+vector_entry mmu_fiq_sp_elx
 	b	fiq_sp_elx
-	check_vector_size workaround_mmu_fiq_sp_elx
+	check_vector_size mmu_fiq_sp_elx
 
-vector_entry workaround_mmu_serror_sp_elx
+vector_entry mmu_serror_sp_elx
 	b	serror_sp_elx
-	check_vector_size workaround_mmu_serror_sp_elx
+	check_vector_size mmu_serror_sp_elx
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch64 : 0x400 - 0x600
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_mmu_sync_exception_aarch64
-	apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=1
 	b	sync_exception_aarch64
-	check_vector_size workaround_mmu_sync_exception_aarch64
+	check_vector_size mmu_sync_exception_aarch64
 
-vector_entry workaround_mmu_irq_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	irq_aarch64
-	check_vector_size workaround_mmu_irq_aarch64
+	check_vector_size mmu_irq_aarch64
 
-vector_entry workaround_mmu_fiq_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	fiq_aarch64
-	check_vector_size workaround_mmu_fiq_aarch64
+	check_vector_size mmu_fiq_aarch64
 
-vector_entry workaround_mmu_serror_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	serror_aarch64
-	check_vector_size workaround_mmu_serror_aarch64
+	check_vector_size mmu_serror_aarch64
 
 /* ---------------------------------------------------------------------
  * Lower EL using AArch32 : 0x600 - 0x800
  * ---------------------------------------------------------------------
  */
-vector_entry workaround_mmu_sync_exception_aarch32
-	apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=1
 	b	sync_exception_aarch32
-	check_vector_size workaround_mmu_sync_exception_aarch32
+	check_vector_size mmu_sync_exception_aarch32
 
-vector_entry workaround_mmu_irq_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	irq_aarch32
-	check_vector_size workaround_mmu_irq_aarch32
+	check_vector_size mmu_irq_aarch32
 
-vector_entry workaround_mmu_fiq_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	fiq_aarch32
-	check_vector_size workaround_mmu_fiq_aarch32
+	check_vector_size mmu_fiq_aarch32
 
-vector_entry workaround_mmu_serror_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b	serror_aarch32
-	check_vector_size workaround_mmu_serror_aarch32
+	check_vector_size mmu_serror_aarch32
@@ -17,6 +17,8 @@ A53_DISABLE_NON_TEMPORAL_HINT	?=1
 A57_DISABLE_NON_TEMPORAL_HINT	?=1
 
 WORKAROUND_CVE_2017_5715	?=1
+WORKAROUND_CVE_2018_3639	?=1
+DYNAMIC_WORKAROUND_CVE_2018_3639	?=0
 
 # Process SKIP_A57_L1_FLUSH_PWR_DWN flag
 $(eval $(call assert_boolean,SKIP_A57_L1_FLUSH_PWR_DWN))
@@ -34,6 +36,19 @@ $(eval $(call add_define,A57_DISABLE_NON_TEMPORAL_HINT))
 $(eval $(call assert_boolean,WORKAROUND_CVE_2017_5715))
 $(eval $(call add_define,WORKAROUND_CVE_2017_5715))
 
+# Process WORKAROUND_CVE_2018_3639 flag
+$(eval $(call assert_boolean,WORKAROUND_CVE_2018_3639))
+$(eval $(call add_define,WORKAROUND_CVE_2018_3639))
+
+$(eval $(call assert_boolean,DYNAMIC_WORKAROUND_CVE_2018_3639))
+$(eval $(call add_define,DYNAMIC_WORKAROUND_CVE_2018_3639))
+
+ifneq (${DYNAMIC_WORKAROUND_CVE_2018_3639},0)
+ifeq (${WORKAROUND_CVE_2018_3639},0)
+$(error "Error: WORKAROUND_CVE_2018_3639 must be 1 if DYNAMIC_WORKAROUND_CVE_2018_3639 is 1")
+endif
+endif
+
 # CPU Errata Build flags.
 # These should be enabled by the platform if the erratum workaround needs to be
 # applied.
...
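The defaults enable the static mitigation and leave the dynamic variant off, and the ifneq/ifeq guard turns an inconsistent combination into a hard build error. A platform that wants the runtime-toggleable mitigation would build with something like make PLAT=<platform> WORKAROUND_CVE_2018_3639=1 DYNAMIC_WORKAROUND_CVE_2018_3639=1 (the PLAT value is a placeholder); passing DYNAMIC_WORKAROUND_CVE_2018_3639=1 together with WORKAROUND_CVE_2018_3639=0 stops the build at the $(error ...) above, since dynamic mitigation only makes sense when the workaround is compiled in at all.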
@@ -404,6 +404,15 @@ func el3_exit
 	msr	spsr_el3, x16
 	msr	elr_el3, x17
 
+#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
+	/* Restore mitigation state as it was on entry to EL3 */
+	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
+	cmp	x17, xzr
+	beq	1f
+	blr	x17
+1:
+#endif
+
 	/* Restore saved general purpose registers and return */
 	b	restore_gp_registers_eret
 endfunc el3_exit
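This el3_exit hunk restores the mitigation state captured on entry to EL3: the context slot holds either zero or the address of the CPU's disable routine, and a non-zero value is invoked through blr so the lower EL returns with the setting it had selected. A conceptual C equivalent, assuming a hypothetical context struct (only the CTX_CVE_2018_3639_OFFSET/CTX_CVE_2018_3639_DISABLE offsets come from the patch):

    #include <stddef.h>

    /* Hypothetical view of the per-world CPU context. */
    struct cpu_context_view {
            void (*cve_2018_3639_disable)(void);  /* CTX_CVE_2018_3639_DISABLE */
    };

    static void restore_cve_2018_3639_state(struct cpu_context_view *ctx)
    {
            if (ctx->cve_2018_3639_disable != NULL)   /* cmp x17, xzr / beq 1f */
                    ctx->cve_2018_3639_disable();     /* blr x17               */
    }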
@@ -10,7 +10,8 @@
 #include <runtime_svc.h>
 #include <smccc.h>
 #include <smccc_helpers.h>
-#include <workaround_cve_2017_5715.h>
+#include <wa_cve_2017_5715.h>
+#include <wa_cve_2018_3639.h>
 
 static int32_t smccc_version(void)
 {
@@ -25,9 +26,30 @@ static int32_t smccc_arch_features(u_register_t arg)
 		return SMC_OK;
 #if WORKAROUND_CVE_2017_5715
 	case SMCCC_ARCH_WORKAROUND_1:
-		if (check_workaround_cve_2017_5715() == ERRATA_NOT_APPLIES)
+		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
 			return 1;
 		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
+#endif
+#if WORKAROUND_CVE_2018_3639
+	case SMCCC_ARCH_WORKAROUND_2:
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
+		/*
+		 * On a platform where at least one CPU requires
+		 * dynamic mitigation but others are either unaffected
+		 * or permanently mitigated, report the latter as not
+		 * needing dynamic mitigation.
+		 */
+		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
+			return 1;
+		/*
+		 * If we get here, this CPU requires dynamic mitigation
+		 * so report it as such.
+		 */
+		return 0;
+#else
+		/* Either the CPUs are unaffected or permanently mitigated */
+		return SMCCC_ARCH_NOT_REQUIRED;
+#endif
 #endif
 	default:
 		return SMC_UNK;
@@ -59,6 +81,16 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
 	 * has no effect.
 	 */
 	SMC_RET0(handle);
+#endif
+#if WORKAROUND_CVE_2018_3639
+	case SMCCC_ARCH_WORKAROUND_2:
+		/*
+		 * The workaround has already been applied on affected PEs
+		 * requiring dynamic mitigation during entry to EL3.
+		 * On unaffected or statically mitigated PEs, this function
+		 * has no effect.
+		 */
+		SMC_RET0(handle);
 #endif
 	default:
 		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
...
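Taken together, the feature probe and the handler give the normal world a discovery-then-invoke protocol. A hedged sketch of a caller, which is not part of this patch: smc32() is a hypothetical wrapper that issues the SMC and returns w0, and the function IDs shown are my reading of the SMC Calling Convention values (SMCCC_ARCH_FEATURES = 0x80000001, SMCCC_ARCH_WORKAROUND_2 = 0x80007fff):

    #include <stdint.h>

    #define SMCCC_ARCH_FEATURES     0x80000001u
    #define SMCCC_ARCH_WORKAROUND_2 0x80007fffu

    extern int32_t smc32(uint32_t fid, uint32_t arg);  /* hypothetical */

    static void ssbd_enable_mitigation(void)
    {
            int32_t ret = smc32(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_2);

            if (ret == 0)
                    /* Dynamic mitigation: arg 1 turns the workaround on. */
                    (void)smc32(SMCCC_ARCH_WORKAROUND_2, 1);
            /*
             * ret == 1 (per the feature handler above) or a negative
             * NOT_SUPPORTED/NOT_REQUIRED value: no call is needed.
             */
    }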