From c0ca14d605daa7636d73ba69d86191827a3e250c Mon Sep 17 00:00:00 2001 From: Dimitris Papastamos Date: Tue, 16 Jan 2018 10:42:20 +0000 Subject: [PATCH 1/4] Change the default errata format string As we are using the errata framework to handle workarounds in a more general sense, change the default string to reflect that. Change-Id: I2e266af2392c9d95e18fe4e965f9a1d46fd0e95e Signed-off-by: Dimitris Papastamos --- lib/cpus/errata_report.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/cpus/errata_report.c b/lib/cpus/errata_report.c index 8d9f704a3d..c030679113 100644 --- a/lib/cpus/errata_report.c +++ b/lib/cpus/errata_report.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -25,7 +25,7 @@ #endif /* Errata format: BL stage, CPU, errata ID, message */ -#define ERRATA_FORMAT "%s: %s: errata workaround for %s was %s\n" +#define ERRATA_FORMAT "%s: %s: CPU workaround for %s was %s\n" /* * Returns whether errata needs to be reported. Passed arguments are private to From eec9e7d1e6ebb4c7e12687a55ae27ba9e481f7d9 Mon Sep 17 00:00:00 2001 From: Dimitris Papastamos Date: Tue, 16 Jan 2018 10:32:47 +0000 Subject: [PATCH 2/4] Print erratum application report for CVE-2017-5715 Even though the workaround for CVE-2017-5715 is not a CPU erratum, the code is piggybacking on the errata framework to print whether the workaround was applied, missing or not needed. Change-Id: I821197a4b8560c73fd894cd7cd9ecf9503c72fa3 Signed-off-by: Dimitris Papastamos --- lib/cpus/aarch64/cortex_a57.S | 13 +++++++++-- lib/cpus/aarch64/cortex_a72.S | 12 +++++++++- lib/cpus/aarch64/cortex_a73.S | 32 ++++++++++++++++++++++++- lib/cpus/aarch64/cortex_a75.S | 44 ++++++++++++++++++++++++++++++++++- 4 files changed, 96 insertions(+), 5 deletions(-) diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S index 683be47e52..c82ebfc951 100644 --- a/lib/cpus/aarch64/cortex_a57.S +++ b/lib/cpus/aarch64/cortex_a57.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -328,6 +328,15 @@ func check_errata_859972 b cpu_rev_var_ls endfunc check_errata_859972 +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A57. * Shall clobber: x0-x19 @@ -518,7 +527,7 @@ func cortex_a57_errata_report report_errata ERRATA_A57_829520, cortex_a57, 829520 report_errata ERRATA_A57_833471, cortex_a57, 833471 report_errata ERRATA_A57_859972, cortex_a57, 859972 - + report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715 ldp x8, x30, [sp], #16 ret diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S index 93821b7493..9633aa8f54 100644 --- a/lib/cpus/aarch64/cortex_a72.S +++ b/lib/cpus/aarch64/cortex_a72.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved. 
* * SPDX-License-Identifier: BSD-3-Clause */ @@ -97,6 +97,15 @@ func check_errata_859971 b cpu_rev_var_ls endfunc check_errata_859971 +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. * ------------------------------------------------- @@ -249,6 +258,7 @@ func cortex_a72_errata_report * checking functions of each errata. */ report_errata ERRATA_A72_859971, cortex_a72, 859971 + report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715 ldp x8, x30, [sp], #16 ret diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S index c43f07ec15..11680a09dd 100644 --- a/lib/cpus/aarch64/cortex_a73.S +++ b/lib/cpus/aarch64/cortex_a73.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -114,6 +114,36 @@ func cortex_a73_cluster_pwr_dwn b cortex_a73_disable_smp endfunc cortex_a73_cluster_pwr_dwn +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A75. Must follow AAPCS. + */ +func cortex_a73_errata_report + stp x8, x30, [sp, #-16]! + + bl cpu_get_rev_var + mov x8, x0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715 + + ldp x8, x30, [sp], #16 + ret +endfunc cortex_a73_errata_report +#endif + /* --------------------------------------------- * This function provides cortex_a73 specific * register information for crash reporting. diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S index e66ad06670..946f988434 100644 --- a/lib/cpus/aarch64/cortex_a75.S +++ b/lib/cpus/aarch64/cortex_a75.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -151,6 +151,27 @@ func cortex_a75_reset_func ret endfunc cortex_a75_reset_func +func check_errata_cve_2017_5715 + mrs x0, id_aa64pfr0_el1 + ubfx x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH + /* + * If the field equals to 1 then branch targets trained in one + * context cannot affect speculative execution in a different context. + */ + cmp x0, #1 + beq 1f + +#if WORKAROUND_CVE_2017_5715 + mov x0, #ERRATA_APPLIES +#else + mov x0, #ERRATA_MISSING +#endif + ret +1: + mov x0, #ERRATA_NOT_APPLIES + ret +endfunc check_errata_cve_2017_5715 + /* --------------------------------------------- * HW will do the cache maintenance while powering down * --------------------------------------------- @@ -167,6 +188,27 @@ func cortex_a75_core_pwr_dwn ret endfunc cortex_a75_core_pwr_dwn +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A75. Must follow AAPCS. + */ +func cortex_a75_errata_report + stp x8, x30, [sp, #-16]! + + bl cpu_get_rev_var + mov x8, x0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. 
+ */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715 + + ldp x8, x30, [sp], #16 + ret +endfunc cortex_a75_errata_report +#endif + /* --------------------------------------------- * This function provides cortex_a75 specific * register information for crash reporting. From 7343505d9661ab6481e99fa96d60f2a8447a4565 Mon Sep 17 00:00:00 2001 From: Dimitris Papastamos Date: Tue, 2 Jan 2018 11:37:02 +0000 Subject: [PATCH 3/4] sp_min: Implement workaround for CVE-2017-5715 This patch introduces two workarounds for ARMv7 systems. The workarounds need to be applied prior to any `branch` instruction in secure world. This is achieved using a custom vector table where each entry is an `add sp, sp, #1` instruction. On entry to monitor mode, once the sequence of `ADD` instructions is executed, the branch target buffer (BTB) is invalidated. The bottom bits of `SP` are then used to decode the exception entry type. A side effect of this change is that the exception vectors are installed before the CPU specific reset function. This is now consistent with how it is done on AArch64. Note, on AArch32 systems, the exception vectors are typically tightly integrated with the secure payload (e.g. the Trusted OS). This workaround will need porting to each secure payload that requires it. The patch to modify the AArch32 per-cpu vbar to the corresponding workaround vector table according to the CPU type will be done in a later patch. Change-Id: I5786872497d359e496ebe0757e8017fa98f753fa Signed-off-by: Dimitris Papastamos --- bl32/sp_min/aarch32/entrypoint.S | 16 ++-- bl32/sp_min/sp_min.mk | 7 +- bl32/sp_min/workaround_cve_2017_5715_bpiall.S | 74 ++++++++++++++++++ .../workaround_cve_2017_5715_icache_inv.S | 75 +++++++++++++++++++ include/common/aarch32/el3_common_macros.S | 24 +++--- include/lib/aarch32/arch.h | 4 +- include/lib/aarch32/smcc_helpers.h | 13 +++- 7 files changed, 189 insertions(+), 24 deletions(-) create mode 100644 bl32/sp_min/workaround_cve_2017_5715_bpiall.S create mode 100644 bl32/sp_min/workaround_cve_2017_5715_icache_inv.S diff --git a/bl32/sp_min/aarch32/entrypoint.S b/bl32/sp_min/aarch32/entrypoint.S index b2b7953f82..e7528d38ea 100644 --- a/bl32/sp_min/aarch32/entrypoint.S +++ b/bl32/sp_min/aarch32/entrypoint.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -17,6 +17,8 @@ .globl sp_min_vector_table .globl sp_min_entrypoint .globl sp_min_warm_entrypoint + .globl sp_min_handle_smc + .globl sp_min_handle_fiq .macro route_fiq_to_sp_min reg /* ----------------------------------------------------- @@ -43,12 +45,12 @@ vector_base sp_min_vector_table b sp_min_entrypoint b plat_panic_handler /* Undef */ - b handle_smc /* Syscall */ + b sp_min_handle_smc /* Syscall */ b plat_panic_handler /* Prefetch abort */ b plat_panic_handler /* Data abort */ b plat_panic_handler /* Reserved */ b plat_panic_handler /* IRQ */ - b handle_fiq /* FIQ */ + b sp_min_handle_fiq /* FIQ */ /* @@ -151,7 +153,7 @@ endfunc sp_min_entrypoint /* * SMC handling function for SP_MIN. */ -func handle_smc +func sp_min_handle_smc /* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */ str lr, [sp, #SMC_CTX_LR_MON] @@ -199,12 +201,12 @@ func handle_smc /* `r0` points to `smc_ctx_t` */ b sp_min_exit -endfunc handle_smc +endfunc sp_min_handle_smc /* * Secure Interrupts handling function for SP_MIN. 
*/ -func handle_fiq +func sp_min_handle_fiq #if !SP_MIN_WITH_SECURE_FIQ b plat_panic_handler #else @@ -242,7 +244,7 @@ func handle_fiq b sp_min_exit #endif -endfunc handle_fiq +endfunc sp_min_handle_fiq /* * The Warm boot entrypoint for SP_MIN. diff --git a/bl32/sp_min/sp_min.mk b/bl32/sp_min/sp_min.mk index 56489a3c5a..67a1981e08 100644 --- a/bl32/sp_min/sp_min.mk +++ b/bl32/sp_min/sp_min.mk @@ -1,5 +1,5 @@ # -# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. +# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. # # SPDX-License-Identifier: BSD-3-Clause # @@ -26,6 +26,11 @@ ifeq (${ENABLE_AMU}, 1) BL32_SOURCES += lib/extensions/amu/aarch32/amu.c endif +ifeq (${WORKAROUND_CVE_2017_5715},1) +BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \ + bl32/sp_min/workaround_cve_2017_5715_icache_inv.S +endif + BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S # Include the platform-specific SP_MIN Makefile diff --git a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S new file mode 100644 index 0000000000..5387cefc95 --- /dev/null +++ b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include + + .globl workaround_bpiall_runtime_exceptions + +vector_base workaround_bpiall_runtime_exceptions + /* We encode the exception entry in the bottom 3 bits of SP */ + add sp, sp, #1 /* Reset: 0b111 */ + add sp, sp, #1 /* Undef: 0b110 */ + add sp, sp, #1 /* Syscall: 0b101 */ + add sp, sp, #1 /* Prefetch abort: 0b100 */ + add sp, sp, #1 /* Data abort: 0b011 */ + add sp, sp, #1 /* Reserved: 0b010 */ + add sp, sp, #1 /* IRQ: 0b001 */ + nop /* FIQ: 0b000 */ + + /* + * Invalidate the branch predictor, `r0` is a dummy register + * and is unused. + */ + stcopr r0, BPIALL + isb + + /* + * As we cannot use any temporary registers and cannot + * clobber SP, we can decode the exception entry using + * an unrolled binary search. + * + * Note, if this code is re-used by other secure payloads, + * the below exception entry vectors must be changed to + * the vectors specific to that secure payload. + */ + + tst sp, #4 + bne 1f + + tst sp, #2 + bne 3f + + /* Expected encoding: 0x1 and 0x0 */ + tst sp, #1 + /* Restore original value of SP by clearing the bottom 3 bits */ + bic sp, sp, #0x7 + bne plat_panic_handler /* IRQ */ + b sp_min_handle_fiq /* FIQ */ + +1: + tst sp, #2 + bne 2f + + /* Expected encoding: 0x4 and 0x5 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_handle_smc /* Syscall */ + b plat_panic_handler /* Prefetch abort */ + +2: + /* Expected encoding: 0x7 and 0x6 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_entrypoint /* Reset */ + b plat_panic_handler /* Undef */ + +3: + /* Expected encoding: 0x2 and 0x3 */ + tst sp, #1 + bic sp, sp, #0x7 + bne plat_panic_handler /* Data abort */ + b plat_panic_handler /* Reserved */ diff --git a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S new file mode 100644 index 0000000000..9102b02f98 --- /dev/null +++ b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include + + .globl workaround_icache_inv_runtime_exceptions + +vector_base workaround_icache_inv_runtime_exceptions + /* We encode the exception entry in the bottom 3 bits of SP */ + add sp, sp, #1 /* Reset: 0b111 */ + add sp, sp, #1 /* Undef: 0b110 */ + add sp, sp, #1 /* Syscall: 0b101 */ + add sp, sp, #1 /* Prefetch abort: 0b100 */ + add sp, sp, #1 /* Data abort: 0b011 */ + add sp, sp, #1 /* Reserved: 0b010 */ + add sp, sp, #1 /* IRQ: 0b001 */ + nop /* FIQ: 0b000 */ + + /* + * Invalidate the instruction cache, which we assume also + * invalidates the branch predictor. This may depend on + * other CPU specific changes (e.g. an ACTLR setting). + */ + stcopr r0, ICIALLU + isb + + /* + * As we cannot use any temporary registers and cannot + * clobber SP, we can decode the exception entry using + * an unrolled binary search. + * + * Note, if this code is re-used by other secure payloads, + * the below exception entry vectors must be changed to + * the vectors specific to that secure payload. + */ + + tst sp, #4 + bne 1f + + tst sp, #2 + bne 3f + + /* Expected encoding: 0x1 and 0x0 */ + tst sp, #1 + /* Restore original value of SP by clearing the bottom 3 bits */ + bic sp, sp, #0x7 + bne plat_panic_handler /* IRQ */ + b sp_min_handle_fiq /* FIQ */ + +1: + /* Expected encoding: 0x4 and 0x5 */ + tst sp, #2 + bne 2f + + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_handle_smc /* Syscall */ + b plat_panic_handler /* Prefetch abort */ + +2: + /* Expected encoding: 0x7 and 0x6 */ + tst sp, #1 + bic sp, sp, #0x7 + bne sp_min_entrypoint /* Reset */ + b plat_panic_handler /* Undef */ + +3: + /* Expected encoding: 0x2 and 0x3 */ + tst sp, #1 + bic sp, sp, #0x7 + bne plat_panic_handler /* Data abort */ + b plat_panic_handler /* Reserved */ diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S index 59e99f89aa..74fb58296a 100644 --- a/include/common/aarch32/el3_common_macros.S +++ b/include/common/aarch32/el3_common_macros.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -14,7 +14,7 @@ /* * Helper macro to initialise EL3 registers we care about. */ - .macro el3_arch_init_common _exception_vectors + .macro el3_arch_init_common /* --------------------------------------------------------------------- * SCTLR has already been initialised - read current value before * modifying. @@ -33,15 +33,6 @@ stcopr r0, SCTLR isb - /* --------------------------------------------------------------------- - * Set the exception vectors (VBAR/MVBAR). - * --------------------------------------------------------------------- - */ - ldr r0, =\_exception_vectors - stcopr r0, VBAR - stcopr r0, MVBAR - isb - /* --------------------------------------------------------------------- * Initialise SCR, setting all fields rather than relying on the hw. * @@ -210,6 +201,15 @@ bxne r0 .endif /* _warm_boot_mailbox */ + /* --------------------------------------------------------------------- + * Set the exception vectors (VBAR/MVBAR). + * --------------------------------------------------------------------- + */ + ldr r0, =\_exception_vectors + stcopr r0, VBAR + stcopr r0, MVBAR + isb + /* --------------------------------------------------------------------- * It is a cold boot. * Perform any processor specific actions upon reset e.g. 
cache, TLB @@ -218,7 +218,7 @@ */ bl reset_handler - el3_arch_init_common \_exception_vectors + el3_arch_init_common .if \_secondary_cold_boot /* ------------------------------------------------------------- diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h index 4d2a5fc9f5..134d53468c 100644 --- a/include/lib/aarch32/arch.h +++ b/include/lib/aarch32/arch.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -426,6 +426,8 @@ #define TLBIMVAA p15, 0, c8, c7, 3 #define TLBIMVAAIS p15, 0, c8, c3, 3 #define BPIALLIS p15, 0, c7, c1, 6 +#define BPIALL p15, 0, c7, c5, 6 +#define ICIALLU p15, 0, c7, c5, 0 #define HSCTLR p15, 4, c1, c0, 0 #define HCR p15, 4, c1, c1, 0 #define HCPTR p15, 4, c1, c1, 2 diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h index 53f1aa4ab0..ed3b722fe1 100644 --- a/include/lib/aarch32/smcc_helpers.h +++ b/include/lib/aarch32/smcc_helpers.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -22,7 +22,7 @@ #define SMC_CTX_LR_MON 0x80 #define SMC_CTX_SCR 0x84 #define SMC_CTX_PMCR 0x88 -#define SMC_CTX_SIZE 0x8C +#define SMC_CTX_SIZE 0x90 #ifndef __ASSEMBLY__ #include @@ -75,7 +75,13 @@ typedef struct smc_ctx { u_register_t lr_mon; u_register_t scr; u_register_t pmcr; -} smc_ctx_t; + /* + * The workaround for CVE-2017-5715 requires storing information in + * the bottom 3 bits of the stack pointer. Add a padding field to + * force the size of the struct to be a multiple of 8. + */ + u_register_t pad; +} smc_ctx_t __aligned(8); /* * Compile time assertions related to the 'smc_context' structure to @@ -99,6 +105,7 @@ CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \ CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \ assert_smc_ctx_spsr_mon_offset_mismatch); +CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned); CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch); /* Convenience macros to return from SMC handler */ From e4b34efa18f1cac10aa8541bc0a1dbab49886009 Mon Sep 17 00:00:00 2001 From: Dimitris Papastamos Date: Wed, 3 Jan 2018 10:48:59 +0000 Subject: [PATCH 4/4] Workaround for CVE-2017-5715 for Cortex A9, A15 and A17 A per-cpu vbar is installed that implements the workaround by invalidating the branch target buffer (BTB) directly in the case of A9 and A17 and indirectly by invalidating the icache in the case of A15. For Cortex A57 and A72 there is currently no workaround implemented when EL3 is in AArch32 mode so report it as missing. For other vulnerable CPUs (e.g. Cortex A73 and Cortex A75), there are no changes since there is currently no upstream AArch32 EL3 support for these CPUs. 
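With the report hooks in place, and together with the generalised format string from the
first patch in this series, the boot log is expected to carry one line per reported
workaround of the following shape (illustrative output only; the exact log prefix and
status words come from the errata reporting framework in lib/cpus/errata_report.c):

    BL32: cortex_a9: CPU workaround for cve_2017_5715 was applied
    BL32: cortex_a57: CPU workaround for cve_2017_5715 was missing
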
Change-Id: Ib42c6ef0b3c9ff2878a9e53839de497ff736258f Signed-off-by: Dimitris Papastamos --- include/lib/cpus/aarch32/cortex_a15.h | 3 +- lib/cpus/aarch32/cortex_a15.S | 41 ++++++++++++++++++++++++++- lib/cpus/aarch32/cortex_a17.S | 38 ++++++++++++++++++++++++- lib/cpus/aarch32/cortex_a57.S | 6 ++++ lib/cpus/aarch32/cortex_a72.S | 5 ++++ lib/cpus/aarch32/cortex_a9.S | 38 ++++++++++++++++++++++++- 6 files changed, 127 insertions(+), 4 deletions(-) diff --git a/include/lib/cpus/aarch32/cortex_a15.h b/include/lib/cpus/aarch32/cortex_a15.h index 905c139dab..0f01a43367 100644 --- a/include/lib/cpus/aarch32/cortex_a15.h +++ b/include/lib/cpus/aarch32/cortex_a15.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -15,6 +15,7 @@ /******************************************************************************* * CPU Auxiliary Control register specific definitions. ******************************************************************************/ +#define CORTEX_A15_ACTLR_INV_BTB_BIT (1 << 0) #define CORTEX_A15_ACTLR_SMP_BIT (1 << 6) #endif /* __CORTEX_A15_H__ */ diff --git a/lib/cpus/aarch32/cortex_a15.S b/lib/cpus/aarch32/cortex_a15.S index 0d5a116582..b6c61ab7f5 100644 --- a/lib/cpus/aarch32/cortex_a15.S +++ b/lib/cpus/aarch32/cortex_a15.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -41,7 +41,46 @@ func cortex_a15_enable_smp bx lr endfunc cortex_a15_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A15. Must follow AAPCS. + */ +func cortex_a15_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a15_errata_report +#endif + func cortex_a15_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldcopr r0, ACTLR + orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT + stcopr r0, ACTLR + ldr r0, =workaround_icache_inv_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a15_enable_smp endfunc cortex_a15_reset_func diff --git a/lib/cpus/aarch32/cortex_a17.S b/lib/cpus/aarch32/cortex_a17.S index 316d4f053c..b84c126719 100644 --- a/lib/cpus/aarch32/cortex_a17.S +++ b/lib/cpus/aarch32/cortex_a17.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -35,7 +35,43 @@ func cortex_a17_enable_smp bx lr endfunc cortex_a17_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A17. Must follow AAPCS. 
+ */ +func cortex_a17_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a17_errata_report +#endif + func cortex_a17_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldr r0, =workaround_bpiall_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a17_enable_smp endfunc cortex_a17_reset_func diff --git a/lib/cpus/aarch32/cortex_a57.S b/lib/cpus/aarch32/cortex_a57.S index 64a6d67451..f446bfffaa 100644 --- a/lib/cpus/aarch32/cortex_a57.S +++ b/lib/cpus/aarch32/cortex_a57.S @@ -332,6 +332,11 @@ func check_errata_859972 b cpu_rev_var_ls endfunc check_errata_859972 +func check_errata_cve_2017_5715 + mov r0, #ERRATA_MISSING + bx lr +endfunc check_errata_cve_2017_5715 + /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A57. * Shall clobber: r0-r6 @@ -519,6 +524,7 @@ func cortex_a57_errata_report report_errata ERRATA_A57_829520, cortex_a57, 829520 report_errata ERRATA_A57_833471, cortex_a57, 833471 report_errata ERRATA_A57_859972, cortex_a57, 859972 + report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715 pop {r12, lr} bx lr diff --git a/lib/cpus/aarch32/cortex_a72.S b/lib/cpus/aarch32/cortex_a72.S index 7550520643..878e6b19ec 100644 --- a/lib/cpus/aarch32/cortex_a72.S +++ b/lib/cpus/aarch32/cortex_a72.S @@ -87,6 +87,10 @@ func check_errata_859971 b cpu_rev_var_ls endfunc check_errata_859971 +func check_errata_cve_2017_5715 + mov r0, #ERRATA_MISSING + bx lr +endfunc check_errata_cve_2017_5715 /* ------------------------------------------------- * The CPU Ops reset function for Cortex-A72. @@ -236,6 +240,7 @@ func cortex_a72_errata_report * checking functions of each errata. */ report_errata ERRATA_A72_859971, cortex_a72, 859971 + report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715 pop {r12, lr} bx lr diff --git a/lib/cpus/aarch32/cortex_a9.S b/lib/cpus/aarch32/cortex_a9.S index 4f30f84a94..1fb10b2054 100644 --- a/lib/cpus/aarch32/cortex_a9.S +++ b/lib/cpus/aarch32/cortex_a9.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ @@ -35,7 +35,43 @@ func cortex_a9_enable_smp bx lr endfunc cortex_a9_enable_smp +func check_errata_cve_2017_5715 +#if WORKAROUND_CVE_2017_5715 + mov r0, #ERRATA_APPLIES +#else + mov r0, #ERRATA_MISSING +#endif + bx lr +endfunc check_errata_cve_2017_5715 + +#if REPORT_ERRATA +/* + * Errata printing function for Cortex A9. Must follow AAPCS. + */ +func cortex_a9_errata_report + push {r12, lr} + + bl cpu_get_rev_var + mov r4, r0 + + /* + * Report all errata. The revision-variant information is passed to + * checking functions of each errata. + */ + report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715 + + pop {r12, lr} + bx lr +endfunc cortex_a9_errata_report +#endif + func cortex_a9_reset_func +#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715 + ldr r0, =workaround_bpiall_runtime_exceptions + stcopr r0, VBAR + stcopr r0, MVBAR + /* isb will be applied in the course of the reset func */ +#endif b cortex_a9_enable_smp endfunc cortex_a9_reset_func