From e8383be4a167d81256fa9d673fcee248412b6aa0 Mon Sep 17 00:00:00 2001
From: Ambroise Vincent <ambroise.vincent@arm.com>
Date: Thu, 7 Mar 2019 14:31:33 +0000
Subject: [PATCH 1/2] Cortex-A76: fix spelling

Change-Id: I6adf7c14e8a974a7d40d51615b5e69eab1a7436f
Signed-off-by: Ambroise Vincent <ambroise.vincent@arm.com>
---
 lib/cpus/aarch64/cortex_a76.S | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index ac513432f..322e2f929 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -22,11 +22,11 @@
 
 /*
  * This macro applies the mitigation for CVE-2018-3639.
- * It implements a fash path where `SMCCC_ARCH_WORKAROUND_2`
+ * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
  * SMC calls from a lower EL running in AArch32 or AArch64
  * will go through the fast and return early.
  *
- * The macro saves x2-x3 to the context.  In the fast path
+ * The macro saves x2-x3 to the context. In the fast path
  * x0-x3 registers do not need to be restored as the calling
  * context will have saved them.
  */
@@ -63,7 +63,7 @@
 	 * When the calling context wants mitigation disabled,
 	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
-	 * EL3 via the `el3_exit` function.  Otherwise NULL is
+	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in caller's
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
@@ -82,7 +82,7 @@
 	.endif
 1:
 	/*
-	 * Always enable v4 mitigation during EL3 execution.  This is not
+	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
@@ -319,7 +319,7 @@ func cortex_a76_reset_func
 	/* If the PE implements SSBS, we don't need the dynamic workaround */
 	mrs	x0, id_aa64pfr1_el1
 	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
-	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK 
+	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
 	cbnz	x0, 1f
 
 	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
@@ -330,7 +330,7 @@ func cortex_a76_reset_func
 #ifdef IMAGE_BL31
 	/*
 	 * The Cortex-A76 generic vectors are overwritten to use the vectors
-	 * defined above.  This is required in order to apply mitigation
+	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
 	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar

From d0d115e21428295c0f63cab9f38a7b2de8104242 Mon Sep 17 00:00:00 2001
From: Ambroise Vincent <ambroise.vincent@arm.com>
Date: Thu, 7 Mar 2019 14:33:02 +0000
Subject: [PATCH 2/2] Cortex-A76: Optimize CVE_2018_3639 workaround

Switched from a static check to a runtime assert to make sure a
workaround is implemented for CVE_2018_3639. This allows platforms that
know they have the SSBS hardware workaround in the CPU to compile out
code under DYNAMIC_WORKAROUND_CVE_2018_3639.

The gain in memory size without the dynamic workaround is 4KB in bl31.
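
As a rough illustration of the new runtime check (a C sketch only, not
code from this patch; read_id_aa64pfr1_el1() is assumed to be the usual
TF-A sysreg accessor, and the assert mirrors the new ASM_ASSERT in the
reset function):

	#include <assert.h>
	#include <arch.h>		/* ID_AA64PFR1_EL1_SSBS_* */
	#include <arch_helpers.h>	/* read_id_aa64pfr1_el1() */

	static void check_ssbs_present(void)
	{
		/* SSBS != 0 in ID_AA64PFR1_EL1 means the PE mitigates
		 * CVE-2018-3639 in hardware, so the dynamic workaround
		 * code is not needed and this check must hold. */
		assert(((read_id_aa64pfr1_el1() >>
			 ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK) != 0U);
	}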
Change-Id: I61bb7d87c59964b0c7faac5d6bc7fc5c4651cbf3
Signed-off-by: Ambroise Vincent <ambroise.vincent@arm.com>
---
 lib/cpus/aarch64/cortex_a76.S | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 322e2f929..e544018c8 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -13,13 +13,10 @@
 #include <plat_macros.S>
 #include <services/arm_arch_svc.h>
 
-#if !DYNAMIC_WORKAROUND_CVE_2018_3639
-#error Cortex A76 requires DYNAMIC_WORKAROUND_CVE_2018_3639=1
-#endif
-
 #define ESR_EL3_A64_SMC0	0x5e000000
 #define ESR_EL3_A32_SMC0	0x4e000000
 
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
 /*
  * This macro applies the mitigation for CVE-2018-3639.
  * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
@@ -188,6 +185,7 @@ vector_entry cortex_a76_serror_aarch32
 	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
 	b	serror_aarch32
 end_vector_entry cortex_a76_serror_aarch32
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
 
 /* --------------------------------------------------
  * Errata Workaround for Cortex A76 Errata #1073348.
@@ -320,8 +318,12 @@ func cortex_a76_reset_func
 	mrs	x0, id_aa64pfr1_el1
 	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
 	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
+#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
+	cmp	x0, 0
+	ASM_ASSERT(ne)
+#endif
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
 	cbnz	x0, 1f
-
 	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
 	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
 	msr	CORTEX_A76_CPUACTLR2_EL1, x0
@@ -336,10 +338,11 @@ func cortex_a76_reset_func
 	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
 	msr	vbar_el3, x0
 	isb
-#endif
+#endif /* IMAGE_BL31 */
 
 1:
-#endif
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+#endif /* WORKAROUND_CVE_2018_3639 */
 
 #if ERRATA_DSU_936184
 	bl	errata_dsu_936184_wa
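
Note: a rough C sketch of the fast path that stays compiled in when
DYNAMIC_WORKAROUND_CVE_2018_3639=1 (illustration only; esr_el3, x0 and
x1 are the trapped context, and set_load_pass_store_disable() and
eret() are hypothetical stand-ins for the actual assembly in
cortex_a76.S):

	/* SMCCC_ARCH_WORKAROUND_2 (function ID 0x80007fff): a lower EL
	 * passes 1 in w1 to enable the mitigation, 0 to disable it. */
	if ((esr_el3 == ESR_EL3_A64_SMC0 || esr_el3 == ESR_EL3_A32_SMC0) &&
	    (uint32_t)x0 == 0x80007fffU) {
		set_load_pass_store_disable(x1 != 0ULL);
		eret();	/* return early; x0-x3 need not be restored as
			 * the calling context has saved them */
	}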