Merge pull request #1392 from dp-arm/dp/cve_2018_3639

Implement workaround for CVE-2018-3639 on Cortex A57/A72/A73 and A75
Dimitris Papastamos 2018-05-29 09:28:05 +01:00 committed by GitHub
commit d003b19093
29 changed files with 424 additions and 165 deletions

View File

@@ -61,8 +61,8 @@ BL31_SOURCES += lib/extensions/sve/sve.c
 endif

 ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL31_SOURCES += lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S \
-               lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
+BL31_SOURCES += lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S \
+               lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
 endif

 BL31_LINKERFILE := bl31/bl31.ld.S

View File

@@ -29,8 +29,8 @@ BL32_SOURCES += lib/extensions/amu/aarch32/amu.c\
 endif

 ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
-               bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+BL32_SOURCES += bl32/sp_min/wa_cve_2017_5715_bpiall.S \
+               bl32/sp_min/wa_cve_2017_5715_icache_inv.S
 endif

 BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S

View File

@@ -6,9 +6,9 @@
 #include <asm_macros.S>

-	.globl workaround_bpiall_runtime_exceptions
+	.globl wa_cve_2017_5715_bpiall_vbar

-vector_base workaround_bpiall_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar
 	/* We encode the exception entry in the bottom 3 bits of SP */
 	add sp, sp, #1 /* Reset: 0b111 */
 	add sp, sp, #1 /* Undef: 0b110 */

View File

@@ -6,9 +6,9 @@
 #include <asm_macros.S>

-	.globl workaround_icache_inv_runtime_exceptions
+	.globl wa_cve_2017_5715_icache_inv_vbar

-vector_base workaround_icache_inv_runtime_exceptions
+vector_base wa_cve_2017_5715_icache_inv_vbar
 	/* We encode the exception entry in the bottom 3 bits of SP */
 	add sp, sp, #1 /* Reset: 0b111 */
 	add sp, sp, #1 /* Undef: 0b110 */

View File

@@ -24,6 +24,17 @@ vulnerability workarounds should be applied at runtime.
    with the recommendation in the spec regarding workaround discovery.
    Defaults to 1.

+-  ``WORKAROUND_CVE_2018_3639``: Enables the security workaround for
+   `CVE-2018-3639`_. Defaults to 1. The TF-A project recommends keeping
+   the default value of 1 even on platforms that are unaffected by
+   CVE-2018-3639, in order to comply with the recommendation in the spec
+   regarding workaround discovery.
+
+-  ``DYNAMIC_WORKAROUND_CVE_2018_3639``: Enables dynamic mitigation for
+   `CVE-2018-3639`_. This build option should be set to 1 if the target
+   platform contains at least one CPU that requires dynamic mitigation.
+   Defaults to 0.
+
 CPU Errata Workarounds
 ----------------------
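
As background for the workaround-discovery recommendation above: a lower exception level would typically probe SMCCC_ARCH_FEATURES before relying on SMCCC_ARCH_WORKAROUND_2. The C sketch below shows one plausible sequence; it is illustrative only and not part of this patch, and smc32() is an assumed conduit helper.

/* Illustrative only -- not part of this patch. smc32() is an assumed
 * conduit that issues an SMC32 fast call and returns w0. */
#include <stdint.h>

#define SMCCC_VERSION           0x80000000U
#define SMCCC_ARCH_FEATURES     0x80000001U
#define SMCCC_ARCH_WORKAROUND_2 0x80007FFFU
#define SMCCC_ARCH_NOT_REQUIRED (-2)

extern int32_t smc32(uint32_t fid, uint32_t arg); /* assumed helper */

static void probe_cve_2018_3639_mitigation(void)
{
	/* SMCCC_ARCH_FEATURES is only guaranteed from SMCCC v1.1 on. */
	if (smc32(SMCCC_VERSION, 0U) < 0x10001)
		return; /* No discovery possible; assume the worst. */

	int32_t ret = smc32(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_2);

	if (ret == SMCCC_ARCH_NOT_REQUIRED) {
		/* CPUs are unaffected or permanently mitigated. */
	} else if (ret >= 0) {
		/* Dynamic mitigation: invoke SMCCC_ARCH_WORKAROUND_2. */
	} /* Any other negative value (SMC_UNK): firmware lacks support. */
}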

View File

@@ -44,6 +44,7 @@
 #define CORTEX_A57_CPUACTLR p15, 0, c15

 #define CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_DMB (ULL(1) << 59)
+#define CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_STORE (ULL(1) << 55)
 #define CORTEX_A57_CPUACTLR_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
 #define CORTEX_A57_CPUACTLR_DIS_OVERREAD (ULL(1) << 52)
 #define CORTEX_A57_CPUACTLR_NO_ALLOC_WBWA (ULL(1) << 49)

View File

@@ -32,6 +32,7 @@
 #define CORTEX_A72_CPUACTLR p15, 0, c15

 #define CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
+#define CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE (ULL(1) << 55)
 #define CORTEX_A72_CPUACTLR_NO_ALLOC_WBWA (ULL(1) << 49)
 #define CORTEX_A72_CPUACTLR_DCC_AS_DCCI (ULL(1) << 44)
 #define CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH (ULL(1) << 32)

View File

@@ -44,6 +44,7 @@
 #define CORTEX_A57_CPUACTLR_EL1 S3_1_C15_C2_0

 #define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_DMB (ULL(1) << 59)
+#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
 #define CORTEX_A57_CPUACTLR_EL1_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
 #define CORTEX_A57_CPUACTLR_EL1_DIS_OVERREAD (ULL(1) << 52)
 #define CORTEX_A57_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)

View File

@@ -32,6 +32,7 @@
 #define CORTEX_A72_CPUACTLR_EL1 S3_1_C15_C2_0

 #define CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
+#define CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
 #define CORTEX_A72_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
 #define CORTEX_A72_CPUACTLR_EL1_DCC_AS_DCCI (ULL(1) << 44)
 #define CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH (ULL(1) << 32)

View File

@@ -22,4 +22,11 @@
  ******************************************************************************/
 #define CORTEX_A73_L2MERRSR_EL1 S3_1_C15_C2_3 /* Instruction def. */

+/*******************************************************************************
+ * CPU implementation defined register specific definitions.
+ ******************************************************************************/
+#define CORTEX_A73_IMP_DEF_REG1 S3_0_C15_C0_0
+
+#define CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE (1 << 3)
+
 #endif /* __CORTEX_A73_H__ */

View File

@@ -16,6 +16,13 @@
 #define CORTEX_A75_CPUPWRCTLR_EL1 S3_0_C15_C2_7
 #define CORTEX_A75_CPUECTLR_EL1 S3_0_C15_C1_4

+/*******************************************************************************
+ * CPU Auxiliary Control register specific definitions.
+ ******************************************************************************/
+#define CORTEX_A75_CPUACTLR_EL1 S3_0_C15_C1_0
+
+#define CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE (1 << 35)
+
 /* Definitions of register field mask in CORTEX_A75_CPUPWRCTLR_EL1 */
 #define CORTEX_A75_CORE_PWRDN_EN_MASK 0x1

View File

@@ -18,6 +18,9 @@
 /* Special constant to specify that CPU has no reset function */
 #define CPU_NO_RESET_FUNC 0

+#define CPU_NO_EXTRA1_FUNC 0
+#define CPU_NO_EXTRA2_FUNC 0
+
 /* Word size for 64-bit CPUs */
 #define CPU_WORD_SIZE 8
@@ -48,6 +51,8 @@ CPU_RESET_FUNC: /* cpu_ops reset_func */
 #endif
 CPU_EXTRA1_FUNC:
 	.space 8
+CPU_EXTRA2_FUNC:
+	.space 8
 #ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
 CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
 	.space (8 * CPU_MAX_PWR_DWN_OPS)
@@ -119,6 +124,10 @@ CPU_OPS_SIZE = .
  *	This is a placeholder for future per CPU operations. Currently,
  *	some CPUs use this entry to set a test function to determine if
  *	the workaround for CVE-2017-5715 needs to be applied or not.
+ * _extra2:
+ *	This is a placeholder for future per CPU operations. Currently
+ *	some CPUs use this entry to set a function to disable the
+ *	workaround for CVE-2018-3639.
  * _power_down_ops:
  *	Comma-separated list of functions to perform power-down
  *	operations on the CPU. At least one, and up to
@@ -129,7 +138,7 @@ CPU_OPS_SIZE = .
  *	used to handle power down at subsequent levels
  */
 	.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
-		_extra1:req, _power_down_ops:vararg
+		_extra1:req, _extra2:req, _power_down_ops:vararg
 	.section cpu_ops, "a"
 	.align 3
 	.type cpu_ops_\_name, %object
@@ -138,6 +147,7 @@ CPU_OPS_SIZE = .
 	.quad \_resetfunc
 #endif
 	.quad \_extra1
+	.quad \_extra2
 #ifdef IMAGE_BL31
 1:
 	/* Insert list of functions */
@@ -196,14 +206,15 @@ CPU_OPS_SIZE = .
 	.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
 		_power_down_ops:vararg
-		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, \
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, \
 			\_power_down_ops
 	.endm

-	.macro declare_cpu_ops_workaround_cve_2017_5715 _name:req, _midr:req, \
-		_resetfunc:req, _extra1:req, _power_down_ops:vararg
+	.macro declare_cpu_ops_wa _name:req, _midr:req, \
+		_resetfunc:req, _extra1:req, _extra2:req, \
+		_power_down_ops:vararg
 		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-			\_extra1, \_power_down_ops
+			\_extra1, \_extra2, \_power_down_ops
 	.endm

 #if REPORT_ERRATA

View File

@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __WA_CVE_2017_5715_H__
+#define __WA_CVE_2017_5715_H__
+
+int check_wa_cve_2017_5715(void);
+
+#endif /* __WA_CVE_2017_5715_H__ */

View File

@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __WA_CVE_2018_3639_H__
+#define __WA_CVE_2018_3639_H__
+
+void *wa_cve_2018_3639_get_disable_ptr(void);
+
+#endif /* __WA_CVE_2018_3639_H__ */
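
Restating the header's contract as a usage sketch (illustrative, not from the patch): on cores that employ dynamic mitigation the returned pointer is non-NULL, so EL3 code can test or invoke it once per-CPU cpu_ops are set up.

/* Illustrative EL3-side caller -- not part of this patch. */
#include <stddef.h>
#include <wa_cve_2018_3639.h>

static int cpu_uses_dynamic_mitigation(void)
{
	/* NULL means this core is unaffected or statically mitigated. */
	return wa_cve_2018_3639_get_disable_ptr() != NULL;
}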

View File

@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-
-#ifndef __WORKAROUND_CVE_2017_5715_H__
-#define __WORKAROUND_CVE_2017_5715_H__
-
-int check_workaround_cve_2017_5715(void);
-
-#endif /* __WORKAROUND_CVE_2017_5715_H__ */

View File

@@ -128,8 +128,8 @@
  * Constants that allow assembler code to access members of and the 'fp_regs'
  * structure at their correct offsets.
  ******************************************************************************/
+#if CTX_INCLUDE_FPREGS
 #define CTX_FPREGS_OFFSET (CTX_SYSREGS_OFFSET + CTX_SYSREGS_END)
-#if CTX_INCLUDE_FPREGS
 #define CTX_FP_Q0 U(0x0)
 #define CTX_FP_Q1 U(0x10)
 #define CTX_FP_Q2 U(0x20)
@@ -170,8 +170,14 @@
 #else
 #define CTX_FPREGS_END U(0x210) /* Align to the next 16 byte boundary */
 #endif
+#else
+#define CTX_FPREGS_END U(0)
 #endif
+
+#define CTX_CVE_2018_3639_OFFSET (CTX_FPREGS_OFFSET + CTX_FPREGS_END)
+#define CTX_CVE_2018_3639_DISABLE U(0)
+#define CTX_CVE_2018_3639_END U(0x10) /* Align to the next 16 byte boundary */

 #ifndef __ASSEMBLY__
 #include <cassert.h>
@@ -195,6 +201,7 @@
 #define CTX_FPREG_ALL (CTX_FPREGS_END >> DWORD_SHIFT)
 #endif
 #define CTX_EL3STATE_ALL (CTX_EL3STATE_END >> DWORD_SHIFT)
+#define CTX_CVE_2018_3639_ALL (CTX_CVE_2018_3639_END >> DWORD_SHIFT)

 /*
  * AArch64 general purpose register context structure. Usually x0-x18,
@@ -227,6 +234,9 @@ DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
  */
 DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

+/* Function pointer used by CVE-2018-3639 dynamic mitigation */
+DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);
+
 /*
  * Macros to access members of any of the above structures using their
  * offsets
@@ -251,6 +261,7 @@ typedef struct cpu_context {
 #if CTX_INCLUDE_FPREGS
 	fp_regs_t fpregs_ctx;
 #endif
+	cve_2018_3639_t cve_2018_3639_ctx;
 } cpu_context_t;

 /* Macros to access members of the 'cpu_context_t' structure */
@@ -276,6 +287,8 @@ CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
 #endif
 CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx), \
	assert_core_context_el3state_offset_mismatch);
+CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \
+	assert_core_context_cve_2018_3639_offset_mismatch);

 /*
  * Helper macro to set the general purpose registers that correspond to

View File

@@ -10,5 +10,8 @@
 #define SMCCC_VERSION U(0x80000000)
 #define SMCCC_ARCH_FEATURES U(0x80000001)
 #define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
+#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
+
+#define SMCCC_ARCH_NOT_REQUIRED -2

 #endif /* __ARM_ARCH_SVC_H__ */
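
For orientation, the new function ID follows the standard SMC Calling Convention layout; the sketch below decodes 0x80007FFF as a fast SMC32 call owned by the Arm Architecture service, function number 0x7FFF. The macro names are illustrative, not TF-A's.

/* Illustrative SMCCC function-ID field decode (names are not TF-A's). */
#define FID_IS_FAST(fid)   (((fid) >> 31) & 0x1U)  /* 1: atomic fast call */
#define FID_IS_SMC64(fid)  (((fid) >> 30) & 0x1U)  /* 0: SMC32 convention */
#define FID_OWNER(fid)     (((fid) >> 24) & 0x3FU) /* 0: Arm Architecture */
#define FID_NUMBER(fid)    ((fid) & 0xFFFFU)       /* 0x7FFF */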

View File

@@ -337,6 +337,15 @@ func check_errata_cve_2017_5715
 	bx lr
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov r0, #ERRATA_APPLIES
+#else
+	mov r0, #ERRATA_MISSING
+#endif
+	bx lr
+endfunc check_errata_cve_2018_3639
+
 /* -------------------------------------------------
  * The CPU Ops reset function for Cortex-A57.
  * Shall clobber: r0-r6
@@ -392,6 +401,14 @@ func cortex_a57_reset_func
 	bl errata_a57_859972_wa
 #endif

+#if WORKAROUND_CVE_2018_3639
+	ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+	orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_STORE
+	stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+	isb
+	dsb sy
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * ---------------------------------------------
@@ -525,6 +542,7 @@ func cortex_a57_errata_report
 	report_errata ERRATA_A57_833471, cortex_a57, 833471
 	report_errata ERRATA_A57_859972, cortex_a57, 859972
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639

 	pop {r12, lr}
 	bx lr

View File

@@ -92,6 +92,15 @@ func check_errata_cve_2017_5715
 	bx lr
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov r0, #ERRATA_APPLIES
+#else
+	mov r0, #ERRATA_MISSING
+#endif
+	bx lr
+endfunc check_errata_cve_2018_3639
+
 /* -------------------------------------------------
  * The CPU Ops reset function for Cortex-A72.
  * -------------------------------------------------
@@ -105,6 +114,15 @@ func cortex_a72_reset_func
 	mov r0, r4
 	bl errata_a72_859971_wa
 #endif

+#if WORKAROUND_CVE_2018_3639
+	ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
+	orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
+	stcopr16 r0, r1, CORTEX_A72_CPUACTLR
+	isb
+	dsb sy
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * ---------------------------------------------
@@ -241,6 +259,7 @@ func cortex_a72_errata_report
 	 */
 	report_errata ERRATA_A72_859971, cortex_a72, 859971
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639

 	pop {r12, lr}
 	bx lr

View File

@@ -337,6 +337,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov x0, #ERRATA_APPLIES
+#else
+	mov x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 /* -------------------------------------------------
  * The CPU Ops reset function for Cortex-A57.
  * Shall clobber: x0-x19
@@ -393,10 +402,18 @@ func cortex_a57_reset_func
 #endif

 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
-	adr x0, workaround_mmu_runtime_exceptions
+	adr x0, wa_cve_2017_5715_mmu_vbar
 	msr vbar_el3, x0
 #endif

+#if WORKAROUND_CVE_2018_3639
+	mrs x0, CORTEX_A57_CPUACTLR_EL1
+	orr x0, x0, #CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
+	msr CORTEX_A57_CPUACTLR_EL1, x0
+	isb
+	dsb sy
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * ---------------------------------------------
@@ -528,6 +545,7 @@ func cortex_a57_errata_report
 	report_errata ERRATA_A57_833471, cortex_a57, 833471
 	report_errata ERRATA_A57_859972, cortex_a57, 859972
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639

 	ldp x8, x30, [sp], #16
 	ret
@@ -555,8 +573,9 @@ func cortex_a57_cpu_reg_dump
 	ret
 endfunc cortex_a57_cpu_reg_dump

-declare_cpu_ops_workaround_cve_2017_5715 cortex_a57, CORTEX_A57_MIDR, \
+declare_cpu_ops_wa cortex_a57, CORTEX_A57_MIDR, \
 	cortex_a57_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a57_core_pwr_dwn, \
 	cortex_a57_cluster_pwr_dwn

View File

@@ -110,6 +110,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov x0, #ERRATA_APPLIES
+#else
+	mov x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 /* -------------------------------------------------
  * The CPU Ops reset function for Cortex-A72.
  * -------------------------------------------------
@@ -126,11 +135,19 @@ func cortex_a72_reset_func

 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
 	cpu_check_csv2 x0, 1f
-	adr x0, workaround_mmu_runtime_exceptions
+	adr x0, wa_cve_2017_5715_mmu_vbar
 	msr vbar_el3, x0
 1:
 #endif

+#if WORKAROUND_CVE_2018_3639
+	mrs x0, CORTEX_A72_CPUACTLR_EL1
+	orr x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
+	msr CORTEX_A72_CPUACTLR_EL1, x0
+	isb
+	dsb sy
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * ---------------------------------------------
@@ -265,6 +282,7 @@ func cortex_a72_errata_report
 	 */
 	report_errata ERRATA_A72_859971, cortex_a72, 859971
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639

 	ldp x8, x30, [sp], #16
 	ret
@@ -292,8 +310,9 @@ func cortex_a72_cpu_reg_dump
 	ret
 endfunc cortex_a72_cpu_reg_dump

-declare_cpu_ops_workaround_cve_2017_5715 cortex_a72, CORTEX_A72_MIDR, \
+declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
 	cortex_a72_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a72_core_pwr_dwn, \
 	cortex_a72_cluster_pwr_dwn

View File

@@ -38,11 +38,18 @@ endfunc cortex_a73_disable_smp
 func cortex_a73_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
 	cpu_check_csv2 x0, 1f
-	adr x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr x0, wa_cve_2017_5715_bpiall_vbar
 	msr vbar_el3, x0
 1:
 #endif

+#if WORKAROUND_CVE_2018_3639
+	mrs x0, CORTEX_A73_IMP_DEF_REG1
+	orr x0, x0, #CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE
+	msr CORTEX_A73_IMP_DEF_REG1, x0
+	isb
+#endif
+
 	/* ---------------------------------------------
 	 * Enable the SMP bit.
 	 * Clobbers : x0
@@ -129,6 +136,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov x0, #ERRATA_APPLIES
+#else
+	mov x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 #if REPORT_ERRATA
 /*
  * Errata printing function for Cortex A75. Must follow AAPCS.
@@ -144,6 +160,7 @@ func cortex_a73_errata_report
 	 * checking functions of each errata.
 	 */
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639

 	ldp x8, x30, [sp], #16
 	ret
@@ -170,8 +187,9 @@ func cortex_a73_cpu_reg_dump
 	ret
 endfunc cortex_a73_cpu_reg_dump

-declare_cpu_ops_workaround_cve_2017_5715 cortex_a73, CORTEX_A73_MIDR, \
+declare_cpu_ops_wa cortex_a73, CORTEX_A73_MIDR, \
 	cortex_a73_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a73_core_pwr_dwn, \
 	cortex_a73_cluster_pwr_dwn

View File

@@ -13,11 +13,18 @@
 func cortex_a75_reset_func
 #if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
 	cpu_check_csv2 x0, 1f
-	adr x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr x0, wa_cve_2017_5715_bpiall_vbar
 	msr vbar_el3, x0
 1:
 #endif

+#if WORKAROUND_CVE_2018_3639
+	mrs x0, CORTEX_A75_CPUACTLR_EL1
+	orr x0, x0, #CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE
+	msr CORTEX_A75_CPUACTLR_EL1, x0
+	isb
+#endif
+
 #if ENABLE_AMU
 	/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
 	mrs x0, actlr_el3
@@ -57,6 +64,15 @@ func check_errata_cve_2017_5715
 	ret
 endfunc check_errata_cve_2017_5715

+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+	mov x0, #ERRATA_APPLIES
+#else
+	mov x0, #ERRATA_MISSING
+#endif
+	ret
+endfunc check_errata_cve_2018_3639
+
 /* ---------------------------------------------
  * HW will do the cache maintenance while powering down
  * ---------------------------------------------
@@ -88,6 +104,7 @@ func cortex_a75_errata_report
 	 * checking functions of each errata.
 	 */
 	report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
+	report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639

 	ldp x8, x30, [sp], #16
 	ret
@@ -113,7 +130,8 @@ func cortex_a75_cpu_reg_dump
 	ret
 endfunc cortex_a75_cpu_reg_dump

-declare_cpu_ops_workaround_cve_2017_5715 cortex_a75, CORTEX_A75_MIDR, \
+declare_cpu_ops_wa cortex_a75, CORTEX_A75_MIDR, \
 	cortex_a75_reset_func, \
 	check_errata_cve_2017_5715, \
+	CPU_NO_EXTRA2_FUNC, \
 	cortex_a75_core_pwr_dwn

View File

@@ -285,7 +285,7 @@ endfunc print_errata_status
 #endif

 /*
- * int check_workaround_cve_2017_5715(void);
+ * int check_wa_cve_2017_5715(void);
  *
  * This function returns:
  *  - ERRATA_APPLIES when firmware mitigation is required.
@@ -296,8 +296,8 @@ endfunc print_errata_status
  * NOTE: Must be called only after cpu_ops have been initialized
  *       in per-CPU data.
  */
-	.globl check_workaround_cve_2017_5715
-func check_workaround_cve_2017_5715
+	.globl check_wa_cve_2017_5715
+func check_wa_cve_2017_5715
 	mrs x0, tpidr_el3
 #if ENABLE_ASSERTIONS
 	cmp x0, #0
@@ -315,4 +315,28 @@ func check_workaround_cve_2017_5715
 1:
 	mov x0, #ERRATA_NOT_APPLIES
 	ret
-endfunc check_workaround_cve_2017_5715
+endfunc check_wa_cve_2017_5715
+
+/*
+ * void *wa_cve_2018_3639_get_disable_ptr(void);
+ *
+ * Returns a function pointer which is used to disable mitigation
+ * for CVE-2018-3639.
+ * The function pointer is only returned on cores that employ
+ * dynamic mitigation. If the core uses static mitigation or is
+ * unaffected by CVE-2018-3639 this function returns NULL.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ *       in per-CPU data.
+ */
+	.globl wa_cve_2018_3639_get_disable_ptr
+func wa_cve_2018_3639_get_disable_ptr
+	mrs x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+	ldr x0, [x0, #CPU_EXTRA2_FUNC]
+	ret
+endfunc wa_cve_2018_3639_get_disable_ptr

View File

@@ -9,13 +9,13 @@
 #include <asm_macros.S>
 #include <context.h>

-	.globl workaround_bpiall_vbar0_runtime_exceptions
+	.globl wa_cve_2017_5715_bpiall_vbar

 #define EMIT_BPIALL 0xee070fd5
 #define EMIT_SMC 0xe1600070
 #define ESR_EL3_A64_SMC0 0x5e000000

-	.macro enter_workaround _from_vector
+	.macro apply_cve_2017_5715_wa _from_vector
 	/*
 	 * Save register state to enable a call to AArch32 S-EL1 and return
 	 * Identify the original calling vector in w2 (==_from_vector)
@@ -66,7 +66,7 @@
 	movz w8, SPSR_MODE32(MODE32_svc, SPSR_T_ARM, SPSR_E_LITTLE, SPSR_AIF_MASK)

 	/* Switch EL3 exception vectors while the workaround is executing. */
-	adr x9, workaround_bpiall_vbar1_runtime_exceptions
+	adr x9, wa_cve_2017_5715_bpiall_ret_vbar

 	/* Setup SCTLR_EL1 with MMU off and I$ on */
 	ldr x10, stub_sel1_sctlr
@@ -93,13 +93,13 @@
 	 * is not enabled, the existing runtime exception vector table is used.
 	 * ---------------------------------------------------------------------
 	 */
-vector_base workaround_bpiall_vbar0_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar

 	/* ---------------------------------------------------------------------
 	 * Current EL with SP_EL0 : 0x0 - 0x200
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_el0
+vector_entry bpiall_sync_exception_sp_el0
 	b sync_exception_sp_el0
 	nop /* to force 8 byte alignment for the following stub */
@@ -114,79 +114,79 @@ aarch32_stub:
 	.word EMIT_BPIALL
 	.word EMIT_SMC

-	check_vector_size workaround_bpiall_vbar0_sync_exception_sp_el0
+	check_vector_size bpiall_sync_exception_sp_el0

-vector_entry workaround_bpiall_vbar0_irq_sp_el0
+vector_entry bpiall_irq_sp_el0
 	b irq_sp_el0
-	check_vector_size workaround_bpiall_vbar0_irq_sp_el0
+	check_vector_size bpiall_irq_sp_el0

-vector_entry workaround_bpiall_vbar0_fiq_sp_el0
+vector_entry bpiall_fiq_sp_el0
 	b fiq_sp_el0
-	check_vector_size workaround_bpiall_vbar0_fiq_sp_el0
+	check_vector_size bpiall_fiq_sp_el0

-vector_entry workaround_bpiall_vbar0_serror_sp_el0
+vector_entry bpiall_serror_sp_el0
 	b serror_sp_el0
-	check_vector_size workaround_bpiall_vbar0_serror_sp_el0
+	check_vector_size bpiall_serror_sp_el0

 	/* ---------------------------------------------------------------------
 	 * Current EL with SP_ELx: 0x200 - 0x400
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_elx
+vector_entry bpiall_sync_exception_sp_elx
 	b sync_exception_sp_elx
-	check_vector_size workaround_bpiall_vbar0_sync_exception_sp_elx
+	check_vector_size bpiall_sync_exception_sp_elx

-vector_entry workaround_bpiall_vbar0_irq_sp_elx
+vector_entry bpiall_irq_sp_elx
 	b irq_sp_elx
-	check_vector_size workaround_bpiall_vbar0_irq_sp_elx
+	check_vector_size bpiall_irq_sp_elx

-vector_entry workaround_bpiall_vbar0_fiq_sp_elx
+vector_entry bpiall_fiq_sp_elx
 	b fiq_sp_elx
-	check_vector_size workaround_bpiall_vbar0_fiq_sp_elx
+	check_vector_size bpiall_fiq_sp_elx

-vector_entry workaround_bpiall_vbar0_serror_sp_elx
+vector_entry bpiall_serror_sp_elx
 	b serror_sp_elx
-	check_vector_size workaround_bpiall_vbar0_serror_sp_elx
+	check_vector_size bpiall_serror_sp_elx

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch64 : 0x400 - 0x600
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch64
-	enter_workaround 1
-	check_vector_size workaround_bpiall_vbar0_sync_exception_aarch64
+vector_entry bpiall_sync_exception_aarch64
+	apply_cve_2017_5715_wa 1
+	check_vector_size bpiall_sync_exception_aarch64

-vector_entry workaround_bpiall_vbar0_irq_aarch64
-	enter_workaround 2
-	check_vector_size workaround_bpiall_vbar0_irq_aarch64
+vector_entry bpiall_irq_aarch64
+	apply_cve_2017_5715_wa 2
+	check_vector_size bpiall_irq_aarch64

-vector_entry workaround_bpiall_vbar0_fiq_aarch64
-	enter_workaround 4
-	check_vector_size workaround_bpiall_vbar0_fiq_aarch64
+vector_entry bpiall_fiq_aarch64
+	apply_cve_2017_5715_wa 4
+	check_vector_size bpiall_fiq_aarch64

-vector_entry workaround_bpiall_vbar0_serror_aarch64
-	enter_workaround 8
-	check_vector_size workaround_bpiall_vbar0_serror_aarch64
+vector_entry bpiall_serror_aarch64
+	apply_cve_2017_5715_wa 8
+	check_vector_size bpiall_serror_aarch64

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch32 : 0x600 - 0x800
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch32
-	enter_workaround 1
-	check_vector_size workaround_bpiall_vbar0_sync_exception_aarch32
+vector_entry bpiall_sync_exception_aarch32
+	apply_cve_2017_5715_wa 1
+	check_vector_size bpiall_sync_exception_aarch32

-vector_entry workaround_bpiall_vbar0_irq_aarch32
-	enter_workaround 2
-	check_vector_size workaround_bpiall_vbar0_irq_aarch32
+vector_entry bpiall_irq_aarch32
+	apply_cve_2017_5715_wa 2
+	check_vector_size bpiall_irq_aarch32

-vector_entry workaround_bpiall_vbar0_fiq_aarch32
-	enter_workaround 4
-	check_vector_size workaround_bpiall_vbar0_fiq_aarch32
+vector_entry bpiall_fiq_aarch32
+	apply_cve_2017_5715_wa 4
+	check_vector_size bpiall_fiq_aarch32

-vector_entry workaround_bpiall_vbar0_serror_aarch32
-	enter_workaround 8
-	check_vector_size workaround_bpiall_vbar0_serror_aarch32
+vector_entry bpiall_serror_aarch32
+	apply_cve_2017_5715_wa 8
+	check_vector_size bpiall_serror_aarch32

 	/* ---------------------------------------------------------------------
 	 * This vector table is used while the workaround is executing. It
@@ -195,73 +195,73 @@ vector_entry workaround_bpiall_vbar0_serror_aarch32
 	 * EL3 state before proceeding with the normal runtime exception vector.
 	 * ---------------------------------------------------------------------
 	 */
-vector_base workaround_bpiall_vbar1_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_ret_vbar

 	/* ---------------------------------------------------------------------
 	 * Current EL with SP_EL0 : 0x0 - 0x200 (UNUSED)
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_el0
+vector_entry bpiall_ret_sync_exception_sp_el0
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_sp_el0
+	check_vector_size bpiall_ret_sync_exception_sp_el0

-vector_entry workaround_bpiall_vbar1_irq_sp_el0
+vector_entry bpiall_ret_irq_sp_el0
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_sp_el0
+	check_vector_size bpiall_ret_irq_sp_el0

-vector_entry workaround_bpiall_vbar1_fiq_sp_el0
+vector_entry bpiall_ret_fiq_sp_el0
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_sp_el0
+	check_vector_size bpiall_ret_fiq_sp_el0

-vector_entry workaround_bpiall_vbar1_serror_sp_el0
+vector_entry bpiall_ret_serror_sp_el0
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_sp_el0
+	check_vector_size bpiall_ret_serror_sp_el0

 	/* ---------------------------------------------------------------------
 	 * Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_elx
+vector_entry bpiall_ret_sync_exception_sp_elx
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_sp_elx
+	check_vector_size bpiall_ret_sync_exception_sp_elx

-vector_entry workaround_bpiall_vbar1_irq_sp_elx
+vector_entry bpiall_ret_irq_sp_elx
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_sp_elx
+	check_vector_size bpiall_ret_irq_sp_elx

-vector_entry workaround_bpiall_vbar1_fiq_sp_elx
+vector_entry bpiall_ret_fiq_sp_elx
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_sp_elx
+	check_vector_size bpiall_ret_fiq_sp_elx

-vector_entry workaround_bpiall_vbar1_serror_sp_elx
+vector_entry bpiall_ret_serror_sp_elx
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_sp_elx
+	check_vector_size bpiall_ret_serror_sp_elx

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch64
+vector_entry bpiall_ret_sync_exception_aarch64
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_sync_exception_aarch64
+	check_vector_size bpiall_ret_sync_exception_aarch64

-vector_entry workaround_bpiall_vbar1_irq_aarch64
+vector_entry bpiall_ret_irq_aarch64
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_irq_aarch64
+	check_vector_size bpiall_ret_irq_aarch64

-vector_entry workaround_bpiall_vbar1_fiq_aarch64
+vector_entry bpiall_ret_fiq_aarch64
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_aarch64
+	check_vector_size bpiall_ret_fiq_aarch64

-vector_entry workaround_bpiall_vbar1_serror_aarch64
+vector_entry bpiall_ret_serror_aarch64
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_aarch64
+	check_vector_size bpiall_ret_serror_aarch64

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch32 : 0x600 - 0x800
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
+vector_entry bpiall_ret_sync_exception_aarch32
 	/*
 	 * w2 indicates which SEL1 stub was run and thus which original vector was used
 	 * w3-w6 contain saved system register state (esr_el3 in w3)
@@ -281,7 +281,7 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
 	 * to workaround entry table in preparation for subsequent
 	 * Sync/IRQ/FIQ/SError exceptions.
 	 */
-	adr x0, workaround_bpiall_vbar0_runtime_exceptions
+	adr x0, wa_cve_2017_5715_bpiall_vbar
 	msr vbar_el3, x0

 	/*
@@ -324,34 +324,34 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
 1:
 	ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b sync_exception_aarch64
-	check_vector_size workaround_bpiall_vbar1_sync_exception_aarch32
+	check_vector_size bpiall_ret_sync_exception_aarch32

-vector_entry workaround_bpiall_vbar1_irq_aarch32
+vector_entry bpiall_ret_irq_aarch32
 	b report_unhandled_interrupt

 	/*
 	 * Post-workaround fan-out for non-sync exceptions
 	 */
 workaround_not_sync:
-	tbnz w2, #3, workaround_bpiall_vbar1_serror
-	tbnz w2, #2, workaround_bpiall_vbar1_fiq
+	tbnz w2, #3, bpiall_ret_serror
+	tbnz w2, #2, bpiall_ret_fiq
 	/* IRQ */
 	ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b irq_aarch64

-workaround_bpiall_vbar1_fiq:
+bpiall_ret_fiq:
 	ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b fiq_aarch64

-workaround_bpiall_vbar1_serror:
+bpiall_ret_serror:
 	ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 	b serror_aarch64
-	check_vector_size workaround_bpiall_vbar1_irq_aarch32
+	check_vector_size bpiall_ret_irq_aarch32

-vector_entry workaround_bpiall_vbar1_fiq_aarch32
+vector_entry bpiall_ret_fiq_aarch32
 	b report_unhandled_interrupt
-	check_vector_size workaround_bpiall_vbar1_fiq_aarch32
+	check_vector_size bpiall_ret_fiq_aarch32

-vector_entry workaround_bpiall_vbar1_serror_aarch32
+vector_entry bpiall_ret_serror_aarch32
 	b report_unhandled_exception
-	check_vector_size workaround_bpiall_vbar1_serror_aarch32
+	check_vector_size bpiall_ret_serror_aarch32

View File

@@ -9,13 +9,13 @@
 #include <asm_macros.S>
 #include <context.h>

-	.globl workaround_mmu_runtime_exceptions
+	.globl wa_cve_2017_5715_mmu_vbar

 #define ESR_EL3_A64_SMC0 0x5e000000

-vector_base workaround_mmu_runtime_exceptions
-	.macro apply_workaround _is_sync_exception
+vector_base wa_cve_2017_5715_mmu_vbar
+	.macro apply_cve_2017_5715_wa _is_sync_exception
 	stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 	mrs x1, sctlr_el3
 	/* Disable MMU */
@@ -63,86 +63,86 @@ vector_base workaround_mmu_runtime_exceptions
 	 * Current EL with SP_EL0 : 0x0 - 0x200
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_mmu_sync_exception_sp_el0
+vector_entry mmu_sync_exception_sp_el0
 	b sync_exception_sp_el0
-	check_vector_size workaround_mmu_sync_exception_sp_el0
+	check_vector_size mmu_sync_exception_sp_el0

-vector_entry workaround_mmu_irq_sp_el0
+vector_entry mmu_irq_sp_el0
 	b irq_sp_el0
-	check_vector_size workaround_mmu_irq_sp_el0
+	check_vector_size mmu_irq_sp_el0

-vector_entry workaround_mmu_fiq_sp_el0
+vector_entry mmu_fiq_sp_el0
 	b fiq_sp_el0
-	check_vector_size workaround_mmu_fiq_sp_el0
+	check_vector_size mmu_fiq_sp_el0

-vector_entry workaround_mmu_serror_sp_el0
+vector_entry mmu_serror_sp_el0
 	b serror_sp_el0
-	check_vector_size workaround_mmu_serror_sp_el0
+	check_vector_size mmu_serror_sp_el0

 	/* ---------------------------------------------------------------------
 	 * Current EL with SP_ELx: 0x200 - 0x400
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_mmu_sync_exception_sp_elx
+vector_entry mmu_sync_exception_sp_elx
 	b sync_exception_sp_elx
-	check_vector_size workaround_mmu_sync_exception_sp_elx
+	check_vector_size mmu_sync_exception_sp_elx

-vector_entry workaround_mmu_irq_sp_elx
+vector_entry mmu_irq_sp_elx
 	b irq_sp_elx
-	check_vector_size workaround_mmu_irq_sp_elx
+	check_vector_size mmu_irq_sp_elx

-vector_entry workaround_mmu_fiq_sp_elx
+vector_entry mmu_fiq_sp_elx
 	b fiq_sp_elx
-	check_vector_size workaround_mmu_fiq_sp_elx
+	check_vector_size mmu_fiq_sp_elx

-vector_entry workaround_mmu_serror_sp_elx
+vector_entry mmu_serror_sp_elx
 	b serror_sp_elx
-	check_vector_size workaround_mmu_serror_sp_elx
+	check_vector_size mmu_serror_sp_elx

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch64 : 0x400 - 0x600
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_mmu_sync_exception_aarch64
-	apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=1
 	b sync_exception_aarch64
-	check_vector_size workaround_mmu_sync_exception_aarch64
+	check_vector_size mmu_sync_exception_aarch64

-vector_entry workaround_mmu_irq_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b irq_aarch64
-	check_vector_size workaround_mmu_irq_aarch64
+	check_vector_size mmu_irq_aarch64

-vector_entry workaround_mmu_fiq_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b fiq_aarch64
-	check_vector_size workaround_mmu_fiq_aarch64
+	check_vector_size mmu_fiq_aarch64

-vector_entry workaround_mmu_serror_aarch64
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch64
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b serror_aarch64
-	check_vector_size workaround_mmu_serror_aarch64
+	check_vector_size mmu_serror_aarch64

 	/* ---------------------------------------------------------------------
 	 * Lower EL using AArch32 : 0x600 - 0x800
 	 * ---------------------------------------------------------------------
 	 */
-vector_entry workaround_mmu_sync_exception_aarch32
-	apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=1
 	b sync_exception_aarch32
-	check_vector_size workaround_mmu_sync_exception_aarch32
+	check_vector_size mmu_sync_exception_aarch32

-vector_entry workaround_mmu_irq_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b irq_aarch32
-	check_vector_size workaround_mmu_irq_aarch32
+	check_vector_size mmu_irq_aarch32

-vector_entry workaround_mmu_fiq_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b fiq_aarch32
-	check_vector_size workaround_mmu_fiq_aarch32
+	check_vector_size mmu_fiq_aarch32

-vector_entry workaround_mmu_serror_aarch32
-	apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch32
+	apply_cve_2017_5715_wa _is_sync_exception=0
 	b serror_aarch32
-	check_vector_size workaround_mmu_serror_aarch32
+	check_vector_size mmu_serror_aarch32

View File

@@ -17,6 +17,8 @@ A53_DISABLE_NON_TEMPORAL_HINT	?=1
 A57_DISABLE_NON_TEMPORAL_HINT	?=1

 WORKAROUND_CVE_2017_5715	?=1
+WORKAROUND_CVE_2018_3639	?=1
+DYNAMIC_WORKAROUND_CVE_2018_3639	?=0

 # Process SKIP_A57_L1_FLUSH_PWR_DWN flag
 $(eval $(call assert_boolean,SKIP_A57_L1_FLUSH_PWR_DWN))
@@ -34,6 +36,19 @@ $(eval $(call add_define,A57_DISABLE_NON_TEMPORAL_HINT))
 $(eval $(call assert_boolean,WORKAROUND_CVE_2017_5715))
 $(eval $(call add_define,WORKAROUND_CVE_2017_5715))

+# Process WORKAROUND_CVE_2018_3639 flag
+$(eval $(call assert_boolean,WORKAROUND_CVE_2018_3639))
+$(eval $(call add_define,WORKAROUND_CVE_2018_3639))
+
+$(eval $(call assert_boolean,DYNAMIC_WORKAROUND_CVE_2018_3639))
+$(eval $(call add_define,DYNAMIC_WORKAROUND_CVE_2018_3639))
+
+ifneq (${DYNAMIC_WORKAROUND_CVE_2018_3639},0)
+ifeq (${WORKAROUND_CVE_2018_3639},0)
+$(error "Error: WORKAROUND_CVE_2018_3639 must be 1 if DYNAMIC_WORKAROUND_CVE_2018_3639 is 1")
+endif
+endif
+
 # CPU Errata Build flags.
 # These should be enabled by the platform if the erratum workaround needs to be
 # applied.

View File

@@ -404,6 +404,15 @@ func el3_exit
 	msr spsr_el3, x16
 	msr elr_el3, x17

+#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
+	/* Restore mitigation state as it was on entry to EL3 */
+	ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
+	cmp x17, xzr
+	beq 1f
+	blr x17
+1:
+#endif
+
 	/* Restore saved general purpose registers and return */
 	b restore_gp_registers_eret
 endfunc el3_exit
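
Read as C, the fragment above does roughly the following (an illustrative analog only, with hypothetical type and function names; the real logic operates in assembly on the EL3 stack-held cpu_context):

/* Illustrative C analog of the el3_exit fragment above. */
#include <stddef.h>

typedef void (*ssbd_disable_fn_t)(void);

struct cve_2018_3639_ctx {
	ssbd_disable_fn_t disable; /* CTX_CVE_2018_3639_DISABLE slot */
};

static void restore_mitigation_state(struct cve_2018_3639_ctx *ctx)
{
	/* A non-NULL slot records that the lower EL ran with the
	 * mitigation disabled; call the CPU's disable function again
	 * before returning to it. */
	if (ctx->disable != NULL)
		ctx->disable();
}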

View File

@@ -10,7 +10,8 @@
 #include <runtime_svc.h>
 #include <smccc.h>
 #include <smccc_helpers.h>
-#include <workaround_cve_2017_5715.h>
+#include <wa_cve_2017_5715.h>
+#include <wa_cve_2018_3639.h>

 static int32_t smccc_version(void)
 {
@@ -25,9 +26,30 @@ static int32_t smccc_arch_features(u_register_t arg)
 		return SMC_OK;
 #if WORKAROUND_CVE_2017_5715
 	case SMCCC_ARCH_WORKAROUND_1:
-		if (check_workaround_cve_2017_5715() == ERRATA_NOT_APPLIES)
+		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
 			return 1;
 		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
+#endif
+#if WORKAROUND_CVE_2018_3639
+	case SMCCC_ARCH_WORKAROUND_2:
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
+		/*
+		 * On a platform where at least one CPU requires
+		 * dynamic mitigation but others are either unaffected
+		 * or permanently mitigated, report the latter as not
+		 * needing dynamic mitigation.
+		 */
+		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
+			return 1;
+		/*
+		 * If we get here, this CPU requires dynamic mitigation
+		 * so report it as such.
+		 */
+		return 0;
+#else
+		/* Either the CPUs are unaffected or permanently mitigated */
+		return SMCCC_ARCH_NOT_REQUIRED;
+#endif
 #endif
 	default:
 		return SMC_UNK;
@@ -59,6 +81,16 @@ static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
 		 * has no effect.
 		 */
 		SMC_RET0(handle);
+#endif
+#if WORKAROUND_CVE_2018_3639
+	case SMCCC_ARCH_WORKAROUND_2:
+		/*
+		 * The workaround has already been applied on affected PEs
+		 * requiring dynamic mitigation during entry to EL3.
+		 * On unaffected or statically mitigated PEs, this function
+		 * has no effect.
+		 */
+		SMC_RET0(handle);
 #endif
 	default:
 		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",