Merge pull request #1228 from dp-arm/dp/cve_2017_5715

Workarounds for CVE-2017-5715 on A9/A15 and A17 + serial console reporting
This commit is contained in:
davidcunado-arm 2018-01-25 00:06:50 +00:00 committed by GitHub
commit d95eb476d5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 414 additions and 35 deletions

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -17,6 +17,8 @@
.globl sp_min_vector_table
.globl sp_min_entrypoint
.globl sp_min_warm_entrypoint
.globl sp_min_handle_smc
.globl sp_min_handle_fiq
.macro route_fiq_to_sp_min reg
/* -----------------------------------------------------
@ -43,12 +45,12 @@
vector_base sp_min_vector_table
b sp_min_entrypoint
b plat_panic_handler /* Undef */
b handle_smc /* Syscall */
b sp_min_handle_smc /* Syscall */
b plat_panic_handler /* Prefetch abort */
b plat_panic_handler /* Data abort */
b plat_panic_handler /* Reserved */
b plat_panic_handler /* IRQ */
b handle_fiq /* FIQ */
b sp_min_handle_fiq /* FIQ */
/*
@ -151,7 +153,7 @@ endfunc sp_min_entrypoint
/*
* SMC handling function for SP_MIN.
*/
func handle_smc
func sp_min_handle_smc
/* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */
str lr, [sp, #SMC_CTX_LR_MON]
@ -199,12 +201,12 @@ func handle_smc
/* `r0` points to `smc_ctx_t` */
b sp_min_exit
endfunc handle_smc
endfunc sp_min_handle_smc
/*
* Secure Interrupts handling function for SP_MIN.
*/
func handle_fiq
func sp_min_handle_fiq
#if !SP_MIN_WITH_SECURE_FIQ
b plat_panic_handler
#else
@ -242,7 +244,7 @@ func handle_fiq
b sp_min_exit
#endif
endfunc handle_fiq
endfunc sp_min_handle_fiq
/*
* The Warm boot entrypoint for SP_MIN.

View File

@ -1,5 +1,5 @@
#
# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@ -26,6 +26,11 @@ ifeq (${ENABLE_AMU}, 1)
BL32_SOURCES += lib/extensions/amu/aarch32/amu.c
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
endif
BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S
# Include the platform-specific SP_MIN Makefile

View File

@ -0,0 +1,74 @@
/*
* Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <asm_macros.S>
.globl workaround_bpiall_runtime_exceptions
vector_base workaround_bpiall_runtime_exceptions
/* We encode the exception entry in the bottom 3 bits of SP */
/*
 * Each vector entry below is a single instruction that falls through
 * into the next, so an exception entering at vector k accumulates
 * (7 - k) increments: the resulting 3-bit value identifies which
 * exception was taken. This relies on SP having its bottom 3 bits
 * clear on entry (smc_ctx_t is padded/aligned to 8 bytes for this).
 */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
add sp, sp, #1 /* Syscall: 0b101 */
add sp, sp, #1 /* Prefetch abort: 0b100 */
add sp, sp, #1 /* Data abort: 0b011 */
add sp, sp, #1 /* Reserved: 0b010 */
add sp, sp, #1 /* IRQ: 0b001 */
nop /* FIQ: 0b000 */
/*
 * Invalidate the branch predictor, `r0` is a dummy register
 * and is unused.
 */
stcopr r0, BPIALL
isb
/*
 * As we cannot use any temporary registers and cannot
 * clobber SP, we can decode the exception entry using
 * an unrolled binary search.
 *
 * Note, if this code is re-used by other secure payloads,
 * the below exception entry vectors must be changed to
 * the vectors specific to that secure payload.
 */
tst sp, #4
bne 1f
tst sp, #2
bne 3f
/* Expected encoding: 0x1 and 0x0 */
tst sp, #1
/* Restore original value of SP by clearing the bottom 3 bits */
/*
 * `bic` (without the s-suffix) does not update the flags, so the
 * Z flag set by the `tst` above still selects the branch below.
 */
bic sp, sp, #0x7
bne plat_panic_handler /* IRQ */
b sp_min_handle_fiq /* FIQ */
1:
tst sp, #2
bne 2f
/* Expected encoding: 0x4 and 0x5 */
tst sp, #1
bic sp, sp, #0x7
bne sp_min_handle_smc /* Syscall */
b plat_panic_handler /* Prefetch abort */
2:
/* Expected encoding: 0x7 and 0x6 */
tst sp, #1
bic sp, sp, #0x7
bne sp_min_entrypoint /* Reset */
b plat_panic_handler /* Undef */
3:
/* Expected encoding: 0x2 and 0x3 */
tst sp, #1
bic sp, sp, #0x7
bne plat_panic_handler /* Data abort */
b plat_panic_handler /* Reserved */

View File

@ -0,0 +1,75 @@
/*
* Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <asm_macros.S>
.globl workaround_icache_inv_runtime_exceptions
vector_base workaround_icache_inv_runtime_exceptions
/* We encode the exception entry in the bottom 3 bits of SP */
/*
 * Each vector entry below is a single instruction that falls through
 * into the next, so an exception entering at vector k accumulates
 * (7 - k) increments: the resulting 3-bit value identifies which
 * exception was taken. This relies on SP having its bottom 3 bits
 * clear on entry (smc_ctx_t is padded/aligned to 8 bytes for this).
 */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
add sp, sp, #1 /* Syscall: 0b101 */
add sp, sp, #1 /* Prefetch abort: 0b100 */
add sp, sp, #1 /* Data abort: 0b011 */
add sp, sp, #1 /* Reserved: 0b010 */
add sp, sp, #1 /* IRQ: 0b001 */
nop /* FIQ: 0b000 */
/*
 * Invalidate the instruction cache, which we assume also
 * invalidates the branch predictor. This may depend on
 * other CPU specific changes (e.g. an ACTLR setting).
 */
stcopr r0, ICIALLU
isb
/*
 * As we cannot use any temporary registers and cannot
 * clobber SP, we can decode the exception entry using
 * an unrolled binary search.
 *
 * Note, if this code is re-used by other secure payloads,
 * the below exception entry vectors must be changed to
 * the vectors specific to that secure payload.
 */
tst sp, #4
bne 1f
tst sp, #2
bne 3f
/* Expected encoding: 0x1 and 0x0 */
tst sp, #1
/* Restore original value of SP by clearing the bottom 3 bits */
/*
 * `bic` (without the s-suffix) does not update the flags, so the
 * Z flag set by the `tst` above still selects the branch below.
 */
bic sp, sp, #0x7
bne plat_panic_handler /* IRQ */
b sp_min_handle_fiq /* FIQ */
1:
/* Expected encoding: 0x4 and 0x5 */
tst sp, #2
bne 2f
tst sp, #1
bic sp, sp, #0x7
bne sp_min_handle_smc /* Syscall */
b plat_panic_handler /* Prefetch abort */
2:
/* Expected encoding: 0x7 and 0x6 */
tst sp, #1
bic sp, sp, #0x7
bne sp_min_entrypoint /* Reset */
b plat_panic_handler /* Undef */
3:
/* Expected encoding: 0x2 and 0x3 */
tst sp, #1
bic sp, sp, #0x7
bne plat_panic_handler /* Data abort */
b plat_panic_handler /* Reserved */

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -14,7 +14,7 @@
/*
* Helper macro to initialise EL3 registers we care about.
*/
.macro el3_arch_init_common _exception_vectors
.macro el3_arch_init_common
/* ---------------------------------------------------------------------
* SCTLR has already been initialised - read current value before
* modifying.
@ -33,15 +33,6 @@
stcopr r0, SCTLR
isb
/* ---------------------------------------------------------------------
* Set the exception vectors (VBAR/MVBAR).
* ---------------------------------------------------------------------
*/
ldr r0, =\_exception_vectors
stcopr r0, VBAR
stcopr r0, MVBAR
isb
/* ---------------------------------------------------------------------
* Initialise SCR, setting all fields rather than relying on the hw.
*
@ -210,6 +201,15 @@
bxne r0
.endif /* _warm_boot_mailbox */
/* ---------------------------------------------------------------------
* Set the exception vectors (VBAR/MVBAR).
* ---------------------------------------------------------------------
*/
ldr r0, =\_exception_vectors
stcopr r0, VBAR
stcopr r0, MVBAR
isb
/* ---------------------------------------------------------------------
* It is a cold boot.
* Perform any processor specific actions upon reset e.g. cache, TLB
@ -218,7 +218,7 @@
*/
bl reset_handler
el3_arch_init_common \_exception_vectors
el3_arch_init_common
.if \_secondary_cold_boot
/* -------------------------------------------------------------

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -426,6 +426,8 @@
#define TLBIMVAA p15, 0, c8, c7, 3
#define TLBIMVAAIS p15, 0, c8, c3, 3
#define BPIALLIS p15, 0, c7, c1, 6
#define BPIALL p15, 0, c7, c5, 6
#define ICIALLU p15, 0, c7, c5, 0
#define HSCTLR p15, 4, c1, c0, 0
#define HCR p15, 4, c1, c1, 0
#define HCPTR p15, 4, c1, c1, 2

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -22,7 +22,7 @@
#define SMC_CTX_LR_MON 0x80
#define SMC_CTX_SCR 0x84
#define SMC_CTX_PMCR 0x88
#define SMC_CTX_SIZE 0x8C
#define SMC_CTX_SIZE 0x90
#ifndef __ASSEMBLY__
#include <cassert.h>
@ -75,7 +75,13 @@ typedef struct smc_ctx {
u_register_t lr_mon;
u_register_t scr;
u_register_t pmcr;
} smc_ctx_t;
/*
* The workaround for CVE-2017-5715 requires storing information in
* the bottom 3 bits of the stack pointer. Add a padding field to
* force the size of the struct to be a multiple of 8.
*/
u_register_t pad;
} smc_ctx_t __aligned(8);
/*
* Compile time assertions related to the 'smc_context' structure to
@ -99,6 +105,7 @@ CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \
CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
assert_smc_ctx_spsr_mon_offset_mismatch);
CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
/* Convenience macros to return from SMC handler */

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -15,6 +15,7 @@
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CORTEX_A15_ACTLR_INV_BTB_BIT (1 << 0)
#define CORTEX_A15_ACTLR_SMP_BIT (1 << 6)
#endif /* __CORTEX_A15_H__ */

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -41,7 +41,46 @@ func cortex_a15_enable_smp
bx lr
endfunc cortex_a15_enable_smp
/*
 * Errata check for CVE-2017-5715 on Cortex-A15.
 * Returns ERRATA_APPLIES in r0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: r0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2017_5715
#if REPORT_ERRATA
/*
* Errata printing function for Cortex A15. Must follow AAPCS.
*/
func cortex_a15_errata_report
push {r12, lr}
bl cpu_get_rev_var
/*
 * Stash the revision-variant value; the report_errata macro passes
 * it to each errata checking function. NOTE(review): r4 is
 * callee-saved under AAPCS and is not restored here — presumably
 * the cpu errata-report framework tolerates this; confirm.
 */
mov r4, r0
/*
 * Report all errata. The revision-variant information is passed to
 * checking functions of each errata.
 */
report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
pop {r12, lr}
bx lr
endfunc cortex_a15_errata_report
#endif
/*
 * Cortex-A15 reset handler. When built into BL32 with the
 * CVE-2017-5715 workaround, set the invalidate-BTB bit in ACTLR
 * (CORTEX_A15_ACTLR_INV_BTB_BIT) and install the icache-invalidating
 * exception vectors via VBAR/MVBAR, then enable SMP. Clobbers: r0.
 */
func cortex_a15_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
ldcopr r0, ACTLR
orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
stcopr r0, ACTLR
ldr r0, =workaround_icache_inv_runtime_exceptions
stcopr r0, VBAR
stcopr r0, MVBAR
/* isb will be applied in the course of the reset func */
#endif
b cortex_a15_enable_smp
endfunc cortex_a15_reset_func

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -35,7 +35,43 @@ func cortex_a17_enable_smp
bx lr
endfunc cortex_a17_enable_smp
/*
 * Errata check for CVE-2017-5715 on Cortex-A17.
 * Returns ERRATA_APPLIES in r0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: r0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2017_5715
#if REPORT_ERRATA
/*
* Errata printing function for Cortex A17. Must follow AAPCS.
*/
func cortex_a17_errata_report
push {r12, lr}
bl cpu_get_rev_var
/*
 * Stash the revision-variant value; the report_errata macro passes
 * it to each errata checking function. NOTE(review): r4 is
 * callee-saved under AAPCS and is not restored here — presumably
 * the cpu errata-report framework tolerates this; confirm.
 */
mov r4, r0
/*
 * Report all errata. The revision-variant information is passed to
 * checking functions of each errata.
 */
report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715
pop {r12, lr}
bx lr
endfunc cortex_a17_errata_report
#endif
/*
 * Cortex-A17 reset handler. When built into BL32 with the
 * CVE-2017-5715 workaround, install the BPIALL exception vectors
 * via VBAR/MVBAR, then enable SMP. Clobbers: r0.
 */
func cortex_a17_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
ldr r0, =workaround_bpiall_runtime_exceptions
stcopr r0, VBAR
stcopr r0, MVBAR
/* isb will be applied in the course of the reset func */
#endif
b cortex_a17_enable_smp
endfunc cortex_a17_reset_func

View File

@ -332,6 +332,11 @@ func check_errata_859972
b cpu_rev_var_ls
endfunc check_errata_859972
/*
 * Errata check for CVE-2017-5715 on Cortex-A57 (AArch32 build):
 * always reports ERRATA_MISSING — no workaround is implemented here.
 * Clobbers: r0.
 */
func check_errata_cve_2017_5715
mov r0, #ERRATA_MISSING
bx lr
endfunc check_errata_cve_2017_5715
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: r0-r6
@ -519,6 +524,7 @@ func cortex_a57_errata_report
report_errata ERRATA_A57_829520, cortex_a57, 829520
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
pop {r12, lr}
bx lr

View File

@ -87,6 +87,10 @@ func check_errata_859971
b cpu_rev_var_ls
endfunc check_errata_859971
/*
 * Errata check for CVE-2017-5715 on Cortex-A72 (AArch32 build):
 * always reports ERRATA_MISSING — no workaround is implemented here.
 * Clobbers: r0.
 */
func check_errata_cve_2017_5715
mov r0, #ERRATA_MISSING
bx lr
endfunc check_errata_cve_2017_5715
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
@ -236,6 +240,7 @@ func cortex_a72_errata_report
* checking functions of each errata.
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
pop {r12, lr}
bx lr

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -35,7 +35,43 @@ func cortex_a9_enable_smp
bx lr
endfunc cortex_a9_enable_smp
/*
 * Errata check for CVE-2017-5715 on Cortex-A9.
 * Returns ERRATA_APPLIES in r0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: r0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2017_5715
#if REPORT_ERRATA
/*
* Errata printing function for Cortex A9. Must follow AAPCS.
*/
func cortex_a9_errata_report
push {r12, lr}
bl cpu_get_rev_var
/*
 * Stash the revision-variant value; the report_errata macro passes
 * it to each errata checking function. NOTE(review): r4 is
 * callee-saved under AAPCS and is not restored here — presumably
 * the cpu errata-report framework tolerates this; confirm.
 */
mov r4, r0
/*
 * Report all errata. The revision-variant information is passed to
 * checking functions of each errata.
 */
report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715
pop {r12, lr}
bx lr
endfunc cortex_a9_errata_report
#endif
/*
 * Cortex-A9 reset handler. When built into BL32 with the
 * CVE-2017-5715 workaround, install the BPIALL exception vectors
 * via VBAR/MVBAR, then enable SMP. Clobbers: r0.
 */
func cortex_a9_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
ldr r0, =workaround_bpiall_runtime_exceptions
stcopr r0, VBAR
stcopr r0, MVBAR
/* isb will be applied in the course of the reset func */
#endif
b cortex_a9_enable_smp
endfunc cortex_a9_reset_func

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -328,6 +328,15 @@ func check_errata_859972
b cpu_rev_var_ls
endfunc check_errata_859972
/*
 * Errata check for CVE-2017-5715 on Cortex-A57 (AArch64 build).
 * Returns ERRATA_APPLIES in x0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: x0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2017_5715
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: x0-x19
@ -518,7 +527,7 @@ func cortex_a57_errata_report
report_errata ERRATA_A57_829520, cortex_a57, 829520
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
ldp x8, x30, [sp], #16
ret

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -97,6 +97,15 @@ func check_errata_859971
b cpu_rev_var_ls
endfunc check_errata_859971
/*
 * Errata check for CVE-2017-5715 on Cortex-A72 (AArch64 build).
 * Returns ERRATA_APPLIES in x0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: x0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2017_5715
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
@ -249,6 +258,7 @@ func cortex_a72_errata_report
* checking functions of each errata.
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
ldp x8, x30, [sp], #16
ret

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -114,6 +114,36 @@ func cortex_a73_cluster_pwr_dwn
b cortex_a73_disable_smp
endfunc cortex_a73_cluster_pwr_dwn
/*
 * Errata check for CVE-2017-5715 on Cortex-A73.
 * Returns ERRATA_APPLIES in x0 when the workaround was built in,
 * ERRATA_MISSING otherwise. Clobbers: x0.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
endfunc check_errata_cve_2017_5715
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A73. Must follow AAPCS.
 */
func cortex_a73_errata_report
stp x8, x30, [sp, #-16]!
bl cpu_get_rev_var
/*
 * Stash the revision-variant value in x8 (callee-saved, preserved
 * by the stp above); the report_errata macro passes it to each
 * errata checking function.
 */
mov x8, x0
/*
 * Report all errata. The revision-variant information is passed to
 * checking functions of each errata.
 */
report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
ldp x8, x30, [sp], #16
ret
endfunc cortex_a73_errata_report
#endif
/* ---------------------------------------------
* This function provides cortex_a73 specific
* register information for crash reporting.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -151,6 +151,27 @@ func cortex_a75_reset_func
ret
endfunc cortex_a75_reset_func
/*
 * Errata check for CVE-2017-5715 on Cortex-A75.
 * Reads ID_AA64PFR0_EL1.CSV2: when the field is 1 the hardware
 * already prevents branch targets trained in one context from
 * affecting speculation in another, so the workaround is not needed
 * and ERRATA_NOT_APPLIES is returned. Otherwise returns
 * ERRATA_APPLIES or ERRATA_MISSING depending on whether the
 * workaround was built in. Clobbers: x0, flags.
 */
func check_errata_cve_2017_5715
mrs x0, id_aa64pfr0_el1
ubfx x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
/*
 * If the field equals to 1 then branch targets trained in one
 * context cannot affect speculative execution in a different context.
 */
cmp x0, #1
beq 1f
#if WORKAROUND_CVE_2017_5715
mov x0, #ERRATA_APPLIES
#else
mov x0, #ERRATA_MISSING
#endif
ret
1:
mov x0, #ERRATA_NOT_APPLIES
ret
endfunc check_errata_cve_2017_5715
/* ---------------------------------------------
* HW will do the cache maintenance while powering down
* ---------------------------------------------
@ -167,6 +188,27 @@ func cortex_a75_core_pwr_dwn
ret
endfunc cortex_a75_core_pwr_dwn
#if REPORT_ERRATA
/*
* Errata printing function for Cortex A75. Must follow AAPCS.
*/
func cortex_a75_errata_report
stp x8, x30, [sp, #-16]!
bl cpu_get_rev_var
/*
 * Stash the revision-variant value in x8 (callee-saved, preserved
 * by the stp above); the report_errata macro passes it to each
 * errata checking function.
 */
mov x8, x0
/*
 * Report all errata. The revision-variant information is passed to
 * checking functions of each errata.
 */
report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
ldp x8, x30, [sp], #16
ret
endfunc cortex_a75_errata_report
#endif
/* ---------------------------------------------
* This function provides cortex_a75 specific
* register information for crash reporting.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -27,7 +27,7 @@
#endif
/* Errata format: BL stage, CPU, errata ID, message */
#define ERRATA_FORMAT "%s: %s: errata workaround for %s was %s\n"
#define ERRATA_FORMAT "%s: %s: CPU workaround for %s was %s\n"
/*
* Returns whether errata needs to be reported. Passed arguments are private to