Commit 96edbe03 authored by Madhukar Pappireddy, committed by TrustedFirmware Code Review

Merge "Fix exception handlers in BL31: Use DSB to synchronize pending EA" into integration

parents 477e28de c2d32a5f
 /*
- * Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -15,6 +15,7 @@
 #include <context.h>

 	.globl	handle_lower_el_ea_esb
+	.globl	handle_lower_el_async_ea
 	.globl	enter_lower_el_sync_ea
 	.globl	enter_lower_el_async_ea
@@ -133,6 +134,7 @@ func enter_lower_el_async_ea
 	 */
 	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

+handle_lower_el_async_ea:
 	/*
 	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
 	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
......
 /*
- * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -45,8 +45,9 @@
 	 * instruction. When an error is thus synchronized, the handling is
 	 * delegated to platform EA handler.
 	 *
-	 * Without RAS_EXTENSION, this macro just saves x30, and unmasks
-	 * Asynchronous External Aborts.
+	 * Without RAS_EXTENSION, this macro synchronizes pending errors using
+	 * a DSB, unmasks Asynchronous External Aborts and saves X30 before
+	 * setting the flag CTX_IS_IN_EL3.
 	 */
 	.macro check_and_unmask_ea
 #if RAS_EXTENSION
@@ -79,13 +80,89 @@
 	bl	restore_gp_pmcr_pauth_regs
 1:
 #else
+	/*
+	 * For SoCs which do not implement RAS, use DSB as a barrier to
+	 * synchronize pending external aborts.
+	 */
+	dsb	sy
+
 	/* Unmask the SError interrupt */
 	msr	daifclr, #DAIF_ABT_BIT
+
+	/* Use ISB for the above unmask operation to take effect immediately */
+	isb
+
+	/*
+	 * Refer to Note 1. No need to restore X30, as both the
+	 * handle_sync_exception and handle_interrupt_exception macros which
+	 * follow this macro modify X30 anyway.
+	 */
 	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	mov	x30, #1
+	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
+	dmb	sy
 #endif
 	.endm
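
For readers less familiar with the barrier choreography, here is a minimal C sketch of the non-RAS entry sequence above. It is illustrative only: the struct stands in for the cpu_context words addressed via SP_EL3, and the inline assembly mirrors the dsb/msr/isb/dmb instructions in the macro.

#include <stdint.h>

/* Hypothetical stand-in for the cpu_context words the macro touches. */
struct ea_entry_state {
	uint64_t saved_lr;	/* CTX_GPREGS_OFFSET + CTX_GPREG_LR */
	uint64_t is_in_el3;	/* CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3 */
};

static void check_and_unmask_ea_sketch(struct ea_entry_state *ctx, uint64_t lr)
{
	/* Synchronize pending external aborts from lower ELs. */
	__asm__ volatile("dsb sy" ::: "memory");

	/* Unmask SError; #4 is DAIF_ABT_BIT (bit 2 of DAIF). */
	__asm__ volatile("msr daifclr, #4");

	/* Make the unmask take effect before the flag is updated. */
	__asm__ volatile("isb");

	ctx->saved_lr = lr;	/* the lower EL's X30 */
	ctx->is_in_el3 = 1U;	/* "we are now executing in EL3", see Note 1 */

	/* Order the flag update against later memory accesses. */
	__asm__ volatile("dmb sy" ::: "memory");
}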
+#if !RAS_EXTENSION
+	/*
+	 * Note 1: The explicit DSB at the entry of various exception vectors
+	 * for handling exceptions from lower ELs can inadvertently trigger an
+	 * SError exception in EL3 due to pending asynchronous aborts in lower
+	 * ELs. This will end up being handled by serror_sp_elx, which will
+	 * ultimately panic and die.
+	 * The way to work around this is to update a flag to indicate if the
+	 * exception truly came from EL3. This flag is allocated in the
+	 * cpu_context structure and is located at offset
+	 * "CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3".
+	 * This is not a bullet-proof solution to the problem at hand because
+	 * we assume the instructions following "isb" that help to update the
+	 * flag execute without causing further exceptions.
+	 */
+
+	/* ---------------------------------------------------------------------
+	 * This macro handles Asynchronous External Aborts.
+	 * ---------------------------------------------------------------------
+	 */
+	.macro	handle_async_ea
+	/*
+	 * Use a barrier to synchronize pending external aborts.
+	 */
+	dsb	sy
+
+	/* Unmask the SError interrupt */
+	msr	daifclr, #DAIF_ABT_BIT
+
+	/* Use ISB for the above unmask operation to take effect immediately */
+	isb
+
+	/* Refer to Note 1 */
+	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	mov	x30, #1
+	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
+	dmb	sy
+	b	handle_lower_el_async_ea
+	.endm
+
+	/*
+	 * This macro checks if the exception was taken due to an SError in EL3
+	 * or because of pending asynchronous external aborts from lower ELs
+	 * that got triggered due to explicit synchronization in EL3. Refer to
+	 * Note 1.
+	 */
+	.macro check_if_serror_from_EL3
+	/* Assumes SP_EL3 on entry */
+	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
+	cbnz	x30, exp_from_EL3
+
+	/* Handle asynchronous external abort from lower EL */
+	b	handle_lower_el_async_ea
+
+exp_from_EL3:
+	/* Jump to plat_handle_el3_ea which does not return */
+	.endm
+#endif
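
In C-like terms, the dispatch that check_if_serror_from_EL3 implements for serror_sp_elx boils down to the sketch below. plat_handle_el3_ea and handle_lower_el_async_ea are the real symbols from this patch; the C declarations and the flag pointer are sketch-only (the real code is assembly).

#include <stdint.h>

void plat_handle_el3_ea(void);		/* does not return */
void handle_lower_el_async_ea(void);

static void serror_sp_elx_sketch(const uint64_t *is_in_el3)
{
	if (*is_in_el3 != 0U) {
		/* Genuine EL3 SError: delegate to the platform handler. */
		plat_handle_el3_ea();
	} else {
		/*
		 * The abort came from a lower EL and only surfaced now
		 * because the DSB on the EL3 entry path synchronized it
		 * (see Note 1).
		 */
		handle_lower_el_async_ea();
	}
}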
 /* ---------------------------------------------------------------------
  * This macro handles Synchronous exceptions.
  * Only SMC exceptions are supported.
@@ -272,6 +349,9 @@ vector_entry fiq_sp_elx
 end_vector_entry fiq_sp_elx

 vector_entry serror_sp_elx
+#if !RAS_EXTENSION
+	check_if_serror_from_EL3
+#endif
 	no_ret	plat_handle_el3_ea
 end_vector_entry serror_sp_elx
@@ -305,8 +385,12 @@ end_vector_entry fiq_aarch64

 vector_entry serror_aarch64
 	apply_at_speculative_wa
+#if RAS_EXTENSION
 	msr	daifclr, #DAIF_ABT_BIT
 	b	enter_lower_el_async_ea
+#else
+	handle_async_ea
+#endif
 end_vector_entry serror_aarch64

 /* ---------------------------------------------------------------------
@@ -339,8 +423,12 @@ end_vector_entry fiq_aarch32

 vector_entry serror_aarch32
 	apply_at_speculative_wa
+#if RAS_EXTENSION
 	msr	daifclr, #DAIF_ABT_BIT
 	b	enter_lower_el_async_ea
+#else
+	handle_async_ea
+#endif
 end_vector_entry serror_aarch32

 #ifdef MONITOR_TRAPS
......
 /*
- * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -60,7 +60,8 @@
 #define CTX_SPSR_EL3		U(0x18)
 #define CTX_ELR_EL3		U(0x20)
 #define CTX_PMCR_EL0		U(0x28)
-#define CTX_EL3STATE_END	U(0x30)
+#define CTX_IS_IN_EL3		U(0x30)
+#define CTX_EL3STATE_END	U(0x40) /* Align to the next 16 byte boundary */

 /*******************************************************************************
  * Constants that allow assembler code to access members of and the
......
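
The new layout keeps the EL3-state area 16-byte aligned: CTX_IS_IN_EL3 takes the 8-byte slot after CTX_PMCR_EL0 (which ends at 0x30), and CTX_EL3STATE_END is rounded up from 0x38 to 0x40. A small, self-contained C check of that arithmetic (the defines mirror the constants above rather than including the real header):

#define CTX_PMCR_EL0		0x28U
#define CTX_IS_IN_EL3		0x30U
#define CTX_EL3STATE_END	0x40U

/* The flag occupies the 8-byte slot immediately after CTX_PMCR_EL0. */
_Static_assert(CTX_IS_IN_EL3 == CTX_PMCR_EL0 + 0x8U, "flag slot");

/* 0x38 rounded up to the next 16-byte boundary is 0x40. */
_Static_assert(CTX_EL3STATE_END ==
	       ((CTX_IS_IN_EL3 + 0x8U + 0xFU) & ~0xFU), "16-byte alignment");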
 /*
- * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -995,6 +995,11 @@ func el3_exit
 	 * ----------------------------------------------------------
 	 */
 	esb
+#else
+	dsb	sy
+#endif
+#ifdef IMAGE_BL31
+	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
 #endif
 	exception_return
......
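
The exit path mirrors the entry path: without RAS_EXTENSION a DSB stands in for ESB to synchronize any still-pending abort, and BL31 clears CTX_IS_IN_EL3 so that an SError taken after the ERET is again attributed to the lower EL. A minimal sketch of that pairing (the flag pointer and function name are illustrative; the real code is the assembly tail of el3_exit):

#include <stdint.h>

static void el3_exit_tail_sketch(uint64_t *is_in_el3)
{
	/* Synchronize pending external aborts before leaving EL3 (dsb sy). */
	__asm__ volatile("dsb sy" ::: "memory");

	/*
	 * BL31 only: clear the flag set on entry, so the next SError
	 * synchronized at an exception vector is treated as coming from
	 * a lower EL (str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]).
	 */
	*is_in_el3 = 0U;

	/* exception_return (ERET) follows in the real code. */
}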