Commit f461fe34 authored by Anthony Steinhauser

Prevent speculative execution past ERET

Even though ERET always causes a jump to another address, AArch64 CPUs
speculatively execute the following instructions as if the ERET
instruction were not a jump instruction.
The speculative execution does not cross privilege levels (to the jump
target, as one would expect), but continues at the kernel privilege
level as if the ERET instruction did not change the control flow -
thus executing anything that is accidentally linked after the ERET
instruction. The results of this speculative execution are always
architecturally discarded later; however, they can leak data through
microarchitectural side channels. This speculative execution is very
reliable (it appears to be unconditional) and it manages to complete
even relatively performance-heavy operations (e.g. multiple dependent
fetches from uncached memory).
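
As a rough illustration only (this snippet is not part of the patch;
the accidental_gadget label and register usage are hypothetical), the
problematic pattern and the barrier-based mitigation look like this
in AArch64 assembly:

	/* Before the fix: whatever follows the ERET may run speculatively. */
	eret
accidental_gadget:		/* never reached architecturally */
	ldr	x2, [x1]	/* dependent loads still complete speculatively */
	ldr	x3, [x2]	/* ... and leave cache side-channel traces */

	/* After the fix: barriers stop speculation past the ERET. */
	eret
	dsb	nsh
	isb

The dsb nsh + isb pair placed directly after ERET is the sequence this
patch wraps into an exception_return macro and applies at the ERET
sites changed below.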

This was fixed in Linux, FreeBSD, OpenBSD and OP-TEE OS:
https://github.com/torvalds/linux/commit/679db70801da9fda91d26caf13bf5b5ccc74e8e8
https://github.com/freebsd/freebsd/commit/29fb48ace4186a41c409fde52bcf4216e9e50b61
https://github.com/openbsd/src/commit/3a08873ece1cb28ace89fd65e8f3c1375cc98de2
https://github.com/OP-TEE/optee_os/commit/abfd092aa19f9c0251e3d5551e2d68a9ebcfec8a

It is demonstrated in a SafeSide example:
https://github.com/google/safeside/blob/master/demos/eret_hvc_smc_wrapper.cc
https://github.com/google/safeside/blob/master/kernel_modules/kmod_eret_hvc_smc/eret_hvc_smc_module.c

Signed-off-by: Anthony Steinhauser <asteinhauser@google.com>
Change-Id: Iead39b0b9fb4b8d8b5609daaa8be81497ba63a0f
parent d81e38f6
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -202,7 +202,7 @@ debug_loop:
 	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
 	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
 	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
-	eret
+	exception_return
 endfunc smc_handler64
 unexpected_sync_exception:
...
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -95,5 +95,5 @@ func bl2_run_next_image
 	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
 	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
 	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
-	eret
+	exception_return
 endfunc bl2_run_next_image
@@ -456,7 +456,7 @@ smc_unknown:
 smc_prohibited:
 	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	mov	x0, #SMC_UNK
-	eret
+	exception_return
 #if DEBUG
 rt_svc_fw_critical_error:
...
 /*
- * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -64,7 +64,7 @@
 	smc	#0
 interrupt_exit_\label:
 	restore_caller_regs_and_lr
-	eret
+	exception_return
 	.endm
 	.globl	tsp_exceptions
...
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -218,4 +218,13 @@
 	ret
 	.endm
 
+	/*
+	 * Macro for mitigating against speculative execution beyond ERET.
+	 */
+	.macro exception_return
+	eret
+	dsb	nsh
+	isb
+	.endm
+
 #endif /* ASM_MACROS_S */
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -85,7 +85,7 @@
 	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
 	csel	x3, x3, x1, eq
 	msr	CORTEX_A76_CPUACTLR2_EL1, x3
-	eret	/* ERET implies ISB */
+	exception_return /* exception_return contains ISB */
 	.endif
 1:
 	/*
...
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -553,7 +553,7 @@ func neoverse_n1_errata_ic_trap_handler
 	 */
 	esb
 #endif
-	eret
+	exception_return
 1:
 	ret
 endfunc neoverse_n1_errata_ic_trap_handler
...
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -48,7 +48,7 @@ vector_base wa_cve_2017_5715_mmu_vbar
 	ccmp	w0, w1, #0, eq
 	/* Static predictor will predict a fall through */
 	bne	1f
-	eret
+	exception_return
 1:
 	.endif
...
@@ -534,6 +534,6 @@ func el3_exit
 	 */
 	esb
 #endif
-	eret
+	exception_return
 endfunc el3_exit
 /*
- * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  * Copyright (c) 2015-2019, Renesas Electronics Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -189,7 +189,7 @@ func bl2_enter_bl31
 	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]
 	msr	elr_el3, x0
 	msr	spsr_el3, x1
-	eret
+	exception_return
 endfunc bl2_enter_bl31
 /* -----------------------------------------------------
...
 /*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -87,7 +87,7 @@ vector_entry SynchronousExceptionA64, .spm_shim_exceptions
 do_smc:
 	mrs	x30, tpidr_el1
 	smc	#0
-	eret
+	exception_return
 	/* AArch64 system instructions trap are handled as a panic for now */
 handle_sys_trap:
...