diff --git a/bl32/sp_min/aarch32/entrypoint.S b/bl32/sp_min/aarch32/entrypoint.S index b2b7953f829b6ed6e19e0f75d04b185a3d4d5f04..e7528d38eac79747b2623265c8244b091a3aa4ca 100644 --- a/bl32/sp_min/aarch32/entrypoint.S +++ b/bl32/sp_min/aarch32/entrypoint.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -17,6 +17,8 @@ .globl sp_min_vector_table .globl sp_min_entrypoint .globl sp_min_warm_entrypoint + .globl sp_min_handle_smc + .globl sp_min_handle_fiq .macro route_fiq_to_sp_min reg /* ----------------------------------------------------- @@ -43,12 +45,12 @@ vector_base sp_min_vector_table b sp_min_entrypoint b plat_panic_handler /* Undef */ - b handle_smc /* Syscall */ + b sp_min_handle_smc /* Syscall */ b plat_panic_handler /* Prefetch abort */ b plat_panic_handler /* Data abort */ b plat_panic_handler /* Reserved */ b plat_panic_handler /* IRQ */ - b handle_fiq /* FIQ */ + b sp_min_handle_fiq /* FIQ */ /* @@ -151,7 +153,7 @@ endfunc sp_min_entrypoint /* * SMC handling function for SP_MIN. */ -func handle_smc +func sp_min_handle_smc /* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */ str lr, [sp, #SMC_CTX_LR_MON] @@ -199,12 +201,12 @@ func handle_smc /* `r0` points to `smc_ctx_t` */ b sp_min_exit -endfunc handle_smc +endfunc sp_min_handle_smc /* * Secure Interrupts handling function for SP_MIN. */ -func handle_fiq +func sp_min_handle_fiq #if !SP_MIN_WITH_SECURE_FIQ b plat_panic_handler #else @@ -242,7 +244,7 @@ func handle_fiq b sp_min_exit #endif -endfunc handle_fiq +endfunc sp_min_handle_fiq /* * The Warm boot entrypoint for SP_MIN. 
diff --git a/bl32/sp_min/sp_min.mk b/bl32/sp_min/sp_min.mk index 56489a3c5ac4997e30f5e0446537ef60d6510df3..67a1981e08422df100526bbe7cdbbd81f779a7d1 100644 --- a/bl32/sp_min/sp_min.mk +++ b/bl32/sp_min/sp_min.mk @@ -1,5 +1,5 @@ # -# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. +# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. # # SPDX-License-Identifier: BSD-3-Clause # @@ -26,6 +26,11 @@ ifeq (${ENABLE_AMU}, 1) BL32_SOURCES += lib/extensions/amu/aarch32/amu.c endif +ifeq (${WORKAROUND_CVE_2017_5715},1) +BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \ + bl32/sp_min/workaround_cve_2017_5715_icache_inv.S +endif + BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S # Include the platform-specific SP_MIN Makefile diff --git a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S new file mode 100644 index 0000000000000000000000000000000000000000..5387cefc95ab9427772394fc9ffda38da2ba2abd --- /dev/null +++ b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include <asm_macros.S> + + .globl workaround_bpiall_runtime_exceptions + +vector_base workaround_bpiall_runtime_exceptions + /* We encode the exception entry in the bottom 3 bits of SP */ + add sp, sp, #1 /* Reset: 0b111 */ + add sp, sp, #1 /* Undef: 0b110 */ + add sp, sp, #1 /* Syscall: 0b101 */ + add sp, sp, #1 /* Prefetch abort: 0b100 */ + add sp, sp, #1 /* Data abort: 0b011 */ + add sp, sp, #1 /* Reserved: 0b010 */ + add sp, sp, #1 /* IRQ: 0b001 */ + nop /* FIQ: 0b000 */ + + /* + * Invalidate the branch predictor, `r0` is a dummy register + * and is unused. + */ + stcopr r0, BPIALL + isb + + /* + * As we cannot use any temporary registers and cannot + * clobber SP, we can decode the exception entry using + * an unrolled binary search. 
+ * + * Note, if this code is re-used by other secure payloads, + * the below exception entry vectors must be changed to + * the vectors specific to that secure payload. + */ + + tst sp, #4 + bne 1f + + tst sp, #2 + bne 3f + + /* Expected encoding: 0x1 and 0x0 */ + tst sp, #1 + /* Restore original value of SP by clearing the bottom 3 bits */ + bic sp, sp, #0x7 + bne plat_panic_handler /* IRQ */ + b sp_min_handle_fiq /* FIQ */ + +1: + tst sp, #2 + bne 2f + + /* Expected encoding: 0x4 and 0x5 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_handle_smc /* Syscall */ + b plat_panic_handler /* Prefetch abort */ + +2: + /* Expected encoding: 0x7 and 0x6 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_entrypoint /* Reset */ + b plat_panic_handler /* Undef */ + +3: + /* Expected encoding: 0x2 and 0x3 */ + tst sp, #1 + bic sp, sp, #0x7 + bne plat_panic_handler /* Data abort */ + b plat_panic_handler /* Reserved */ diff --git a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S new file mode 100644 index 0000000000000000000000000000000000000000..9102b02f9826fb08d0b3f7eb0fd23f24810e551a --- /dev/null +++ b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include <asm_macros.S> + + .globl workaround_icache_inv_runtime_exceptions + +vector_base workaround_icache_inv_runtime_exceptions + /* We encode the exception entry in the bottom 3 bits of SP */ + add sp, sp, #1 /* Reset: 0b111 */ + add sp, sp, #1 /* Undef: 0b110 */ + add sp, sp, #1 /* Syscall: 0b101 */ + add sp, sp, #1 /* Prefetch abort: 0b100 */ + add sp, sp, #1 /* Data abort: 0b011 */ + add sp, sp, #1 /* Reserved: 0b010 */ + add sp, sp, #1 /* IRQ: 0b001 */ + nop /* FIQ: 0b000 */ + + /* + * Invalidate the instruction cache, which we assume also + * invalidates the branch predictor. 
This may depend on + * other CPU specific changes (e.g. an ACTLR setting). + */ + stcopr r0, ICIALLU + isb + + /* + * As we cannot use any temporary registers and cannot + * clobber SP, we can decode the exception entry using + * an unrolled binary search. + * + * Note, if this code is re-used by other secure payloads, + * the below exception entry vectors must be changed to + * the vectors specific to that secure payload. + */ + + tst sp, #4 + bne 1f + + tst sp, #2 + bne 3f + + /* Expected encoding: 0x1 and 0x0 */ + tst sp, #1 + /* Restore original value of SP by clearing the bottom 3 bits */ + bic sp, sp, #0x7 + bne plat_panic_handler /* IRQ */ + b sp_min_handle_fiq /* FIQ */ + +1: + /* Expected encoding: 0x4 and 0x5 */ + tst sp, #2 + bne 2f + + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_handle_smc /* Syscall */ + b plat_panic_handler /* Prefetch abort */ + +2: + /* Expected encoding: 0x7 and 0x6 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_entrypoint /* Reset */ + b plat_panic_handler /* Undef */ + +3: + /* Expected encoding: 0x2 and 0x3 */ + tst sp, #1 + bic sp, sp, #0x7 + bne plat_panic_handler /* Data abort */ + b plat_panic_handler /* Reserved */ diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S index d654b6523a020858ea4b9bf8262c0c1dc32e88c5..5db885448418d5e99930d0c3b355e8a8859fd2d5 100644 --- a/include/common/aarch32/el3_common_macros.S +++ b/include/common/aarch32/el3_common_macros.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -14,7 +14,7 @@ /* * Helper macro to initialise EL3 registers we care about. 
*/ - .macro el3_arch_init_common _exception_vectors + .macro el3_arch_init_common /* --------------------------------------------------------------------- * SCTLR has already been initialised - read current value before * modifying. @@ -33,15 +33,6 @@ stcopr r0, SCTLR isb - /* --------------------------------------------------------------------- - * Set the exception vectors (VBAR/MVBAR). - * --------------------------------------------------------------------- - */ - ldr r0, =\_exception_vectors - stcopr r0, VBAR - stcopr r0, MVBAR - isb - /* --------------------------------------------------------------------- * Initialise SCR, setting all fields rather than relying on the hw. * @@ -210,6 +201,15 @@ bxne r0 .endif /* _warm_boot_mailbox */ + /* --------------------------------------------------------------------- + * Set the exception vectors (VBAR/MVBAR). + * --------------------------------------------------------------------- + */ + ldr r0, =\_exception_vectors + stcopr r0, VBAR + stcopr r0, MVBAR + isb + /* --------------------------------------------------------------------- * It is a cold boot. * Perform any processor specific actions upon reset e.g. cache, TLB @@ -218,7 +218,7 @@ */ bl reset_handler - el3_arch_init_common \_exception_vectors + el3_arch_init_common .if \_secondary_cold_boot /* ------------------------------------------------------------- diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h index 4d2a5fc9f5ecd911e544a40e63992296a0ffb9bd..134d53468c3ce4cb541a2fae6ce4bcb0abb280eb 100644 --- a/include/lib/aarch32/arch.h +++ b/include/lib/aarch32/arch.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -426,6 +426,8 @@ #define TLBIMVAA p15, 0, c8, c7, 3 #define TLBIMVAAIS p15, 0, c8, c3, 3 #define BPIALLIS p15, 0, c7, c1, 6 +#define BPIALL p15, 0, c7, c5, 6 +#define ICIALLU p15, 0, c7, c5, 0 #define HSCTLR p15, 4, c1, c0, 0 #define HCR p15, 4, c1, c1, 0 #define HCPTR p15, 4, c1, c1, 2 diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h index 53f1aa4ab03f53bd305019b3ac524c310ac273de..ed3b722fe11538b55d409f98623418f66b1995e2 100644 --- a/include/lib/aarch32/smcc_helpers.h +++ b/include/lib/aarch32/smcc_helpers.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -22,7 +22,7 @@ #define SMC_CTX_LR_MON 0x80 #define SMC_CTX_SCR 0x84 #define SMC_CTX_PMCR 0x88 -#define SMC_CTX_SIZE 0x8C +#define SMC_CTX_SIZE 0x90 #ifndef __ASSEMBLY__ #include <cassert.h> @@ -75,7 +75,13 @@ typedef struct smc_ctx { u_register_t lr_mon; u_register_t scr; u_register_t pmcr; -} smc_ctx_t; + /* + * The workaround for CVE-2017-5715 requires storing information in + * the bottom 3 bits of the stack pointer. Add a padding field to + * force the size of the struct to be a multiple of 8. 
+ */ + u_register_t pad; +} smc_ctx_t __aligned(8); /* * Compile time assertions related to the 'smc_context' structure to @@ -99,6 +105,7 @@ CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \ CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \ assert_smc_ctx_spsr_mon_offset_mismatch); +CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned); CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch); /* Convenience macros to return from SMC handler */ diff --git a/include/lib/cpus/aarch32/cortex_a15.h b/include/lib/cpus/aarch32/cortex_a15.h index 905c139dab826b5e6f76923c32ac6315c909efea..0f01a4336740d2d0510614dafe945ee6aeddf5e6 100644 --- a/include/lib/cpus/aarch32/cortex_a15.h +++ b/include/lib/cpus/aarch32/cortex_a15.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -15,6 +15,7 @@ /******************************************************************************* * CPU Auxiliary Control register specific definitions. ******************************************************************************/ +#define CORTEX_A15_ACTLR_INV_BTB_BIT (1 << 0) #define CORTEX_A15_ACTLR_SMP_BIT (1 << 6) #endif /* __CORTEX_A15_H__ */ diff --git a/lib/cpus/aarch32/cortex_a15.S b/lib/cpus/aarch32/cortex_a15.S index 0d5a1165820075354186a1d25283c23465ac2010..b6c61ab7f57a4d7a5d94f6d0a8640bb8b3587d5b 100644 --- a/lib/cpus/aarch32/cortex_a15.S +++ b/lib/cpus/aarch32/cortex_a15.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -41,7 +41,46 @@ func cortex_a15_enable_smp bx lr endfunc cortex_a15_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A15. Must follow AAPCS. + */ +func cortex_a15_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a15_errata_report +#endif + func cortex_a15_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldcopr r0, ACTLR + orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT + stcopr r0, ACTLR + ldr r0, =workaround_icache_inv_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a15_enable_smp endfunc cortex_a15_reset_func diff --git a/lib/cpus/aarch32/cortex_a17.S b/lib/cpus/aarch32/cortex_a17.S index 316d4f053c2526dc815cf5fe709d4b9c7aaf8250..b84c1267198f9bdeb152df3bf6b1b37667cec0e3 100644 --- a/lib/cpus/aarch32/cortex_a17.S +++ b/lib/cpus/aarch32/cortex_a17.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -35,7 +35,43 @@ func cortex_a17_enable_smp bx lr endfunc cortex_a17_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A17. Must follow AAPCS. 
+ */ +func cortex_a17_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a17_errata_report +#endif + func cortex_a17_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldr r0, =workaround_bpiall_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a17_enable_smp endfunc cortex_a17_reset_func diff --git a/lib/cpus/aarch32/cortex_a57.S b/lib/cpus/aarch32/cortex_a57.S index 64a6d6745170a88f23fc9cfce2c516b896c3010d..f446bfffaa5459fbefcc45dc8beb70607ea57c54 100644 --- a/lib/cpus/aarch32/cortex_a57.S +++ b/lib/cpus/aarch32/cortex_a57.S @@ -332,6 +332,11 @@ func check_errata_859972 b cpu_rev_var_ls endfunc check_errata_859972 +func check_errata_cve_2017_5715 + mov r0, #ERRATA_MISSING + bx lr +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A57. 
* Shall clobber: r0-r6 @@ -519,6 +524,7 @@ func cortex_a57_errata_report report_errata ERRATA_A57_829520, cortex_a57, 829520 report_errata ERRATA_A57_833471, cortex_a57, 833471 report_errata ERRATA_A57_859972, cortex_a57, 859972 + report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715 pop {r12, lr} bx lr diff --git a/lib/cpus/aarch32/cortex_a72.S b/lib/cpus/aarch32/cortex_a72.S index 35b9bc2e6d61b515044bdc6124ba70b8cdb6a128..56e91f5c0147b551282632fe15ccd843bba8a818 100644 --- a/lib/cpus/aarch32/cortex_a72.S +++ b/lib/cpus/aarch32/cortex_a72.S @@ -87,6 +87,10 @@ func check_errata_859971 b cpu_rev_var_ls endfunc check_errata_859971 +func check_errata_cve_2017_5715 + mov r0, #ERRATA_MISSING + bx lr +endfunc check_errata_cve_2017_5715 /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. @@ -236,6 +240,7 @@ func cortex_a72_errata_report * checking functions of each errata. */ report_errata ERRATA_A72_859971, cortex_a72, 859971 + report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715 pop {r12, lr} bx lr diff --git a/lib/cpus/aarch32/cortex_a9.S b/lib/cpus/aarch32/cortex_a9.S index 4f30f84a941b91edb39a9835a187d62c8f30b0e9..1fb10b2054dceb3e3911cf2c5572c3bc43e30793 100644 --- a/lib/cpus/aarch32/cortex_a9.S +++ b/lib/cpus/aarch32/cortex_a9.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -35,7 +35,43 @@ func cortex_a9_enable_smp bx lr endfunc cortex_a9_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A9. Must follow AAPCS. + */ +func cortex_a9_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. 
The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a9_errata_report +#endif + func cortex_a9_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldr r0, =workaround_bpiall_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a9_enable_smp endfunc cortex_a9_reset_func diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S index 683be47e5203dacb439782e354ceaab12425ca09..c82ebfc95179a29f33f27b105f4ca2475447b016 100644 --- a/lib/cpus/aarch64/cortex_a57.S +++ b/lib/cpus/aarch64/cortex_a57.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -328,6 +328,15 @@ func check_errata_859972 b cpu_rev_var_ls endfunc check_errata_859972 +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A57. * Shall clobber: x0-x19 @@ -518,7 +527,7 @@ func cortex_a57_errata_report report_errata ERRATA_A57_829520, cortex_a57, 829520 report_errata ERRATA_A57_833471, cortex_a57, 833471 report_errata ERRATA_A57_859972, cortex_a57, 859972 - + report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715 ldp x8, x30, [sp], #16 ret diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S index 93821b7493f3e4b192a707e09ec3dc99e499bb39..9633aa8f54b55c52d981df5eabbd74129e2f0af7 100644 --- a/lib/cpus/aarch64/cortex_a72.S +++ b/lib/cpus/aarch64/cortex_a72.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015-2017, ARM Limited and Contributors. All rights reserved. 
+ * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -97,6 +97,15 @@ func check_errata_859971 b cpu_rev_var_ls endfunc check_errata_859971 +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. * ------------------------------------------------- @@ -249,6 +258,7 @@ func cortex_a72_errata_report * checking functions of each errata. */ report_errata ERRATA_A72_859971, cortex_a72, 859971 + report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715 ldp x8, x30, [sp], #16 ret diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S index c43f07ec154a8deeb10dce25770fe5d814f0b59c..11680a09dd09ef016613e23daa43b9c98da2e255 100644 --- a/lib/cpus/aarch64/cortex_a73.S +++ b/lib/cpus/aarch64/cortex_a73.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -114,6 +114,36 @@ func cortex_a73_cluster_pwr_dwn b cortex_a73_disable_smp endfunc cortex_a73_cluster_pwr_dwn +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A73. Must follow AAPCS. + */ +func cortex_a73_errata_report + stp x8, x30, [sp, #-16]! + + bl cpu_get_rev_var + mov x8, x0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. 
+ */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715 + + ldp x8, x30, [sp], #16 + ret +endfunc cortex_a73_errata_report +#endif + /* --------------------------------------------- * This function provides cortex_a73 specific * register information for crash reporting. diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S index e66ad06670eb65b4df676164a67e0b0b2c706f1f..946f988434a462ee91675d649fbe272154c2e846 100644 --- a/lib/cpus/aarch64/cortex_a75.S +++ b/lib/cpus/aarch64/cortex_a75.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -151,6 +151,27 @@ func cortex_a75_reset_func ret endfunc cortex_a75_reset_func +func check_errata_cve_2017_5715 + mrs x0, id_aa64pfr0_el1 + ubfx x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH + /* + * If the field equals to 1 then branch targets trained in one + * context cannot affect speculative execution in a different context. + */ + cmp x0, #1 + beq 1f + +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +1: + mov x0, #ERRATA_NOT_APPLIES + ret +endfunc check_errata_cve_2017_5715 + /* --------------------------------------------- * HW will do the cache maintenance while powering down * --------------------------------------------- @@ -167,6 +188,27 @@ func cortex_a75_core_pwr_dwn ret endfunc cortex_a75_core_pwr_dwn +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A75. Must follow AAPCS. + */ +func cortex_a75_errata_report + stp x8, x30, [sp, #-16]! + + bl cpu_get_rev_var + mov x8, x0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. 
+ */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715 + + ldp x8, x30, [sp], #16 + ret +endfunc cortex_a75_errata_report +#endif + /* --------------------------------------------- * This function provides cortex_a75 specific * register information for crash reporting. diff --git a/lib/cpus/errata_report.c b/lib/cpus/errata_report.c index 182679d137c1cdedf1aaba588534261b09c205db..c679336c1b654e0504b047152171f518eee11158 100644 --- a/lib/cpus/errata_report.c +++ b/lib/cpus/errata_report.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -27,7 +27,7 @@ #endif /* Errata format: BL stage, CPU, errata ID, message */ -#define ERRATA_FORMAT "%s: %s: errata workaround for %s was %s\n" +#define ERRATA_FORMAT "%s: %s: CPU workaround for %s was %s\n" /* * Returns whether errata needs to be reported. Passed arguments are private to