From 596d20d9e4d50c02b5a0cce8cad2a1c205cd687a Mon Sep 17 00:00:00 2001
From: Zelalem Aweke
Date: Fri, 15 Oct 2021 17:25:52 -0500
Subject: [PATCH] fix(pie): invalidate data cache in the entire image range if
 PIE is enabled

Currently, on image entry, the data cache in the RW address range is
invalidated before the MMU is enabled, to safeguard against potential
stale data from a previous firmware stage. If PIE is enabled, however,
RO sections, including the GOT, may also be modified during PIE fixup.
Therefore, to be on the safe side, invalidate the entire image region
if PIE is enabled.

Signed-off-by: Zelalem Aweke
Change-Id: I7ee2a324fe4377b026e32f9ab842617ad4e09d89
---
 bl32/tsp/aarch64/tsp_entrypoint.S        | 22 +++++++++++++++++++---
 include/arch/aarch32/el3_common_macros.S | 13 ++++++++++++-
 include/arch/aarch64/el3_common_macros.S | 15 ++++++++++++++-
 3 files changed, 45 insertions(+), 5 deletions(-)

diff --git a/bl32/tsp/aarch64/tsp_entrypoint.S b/bl32/tsp/aarch64/tsp_entrypoint.S
index 795c5865e..7d77f478b 100644
--- a/bl32/tsp/aarch64/tsp_entrypoint.S
+++ b/bl32/tsp/aarch64/tsp_entrypoint.S
@@ -100,11 +100,27 @@ func tsp_entrypoint _align=3
 	 * sections. This is done to safeguard against
 	 * possible corruption of this memory by dirty
 	 * cache lines in a system cache as a result of
-	 * use by an earlier boot loader stage.
+	 * use by an earlier boot loader stage. If PIE
+	 * is enabled however, RO sections including the
+	 * GOT may be modified during pie fixup.
+	 * Therefore, to be on the safe side, invalidate
+	 * the entire image region if PIE is enabled.
 	 * ---------------------------------------------
 	 */
-	adr	x0, __RW_START__
-	adr	x1, __RW_END__
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+	adrp	x0, __TEXT_START__
+	add	x0, x0, :lo12:__TEXT_START__
+#else
+	adrp	x0, __RO_START__
+	add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
+	adrp	x0, __RW_START__
+	add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
+	adrp	x1, __RW_END__
+	add	x1, x1, :lo12:__RW_END__
 	sub	x1, x1, x0
 	bl	inv_dcache_range
 
diff --git a/include/arch/aarch32/el3_common_macros.S b/include/arch/aarch32/el3_common_macros.S
index 65f9a8e6b..ad2a03911 100644
--- a/include/arch/aarch32/el3_common_macros.S
+++ b/include/arch/aarch32/el3_common_macros.S
@@ -380,10 +380,21 @@
 	 * includes the data and NOBITS sections. This is done to
 	 * safeguard against possible corruption of this memory by
 	 * dirty cache lines in a system cache as a result of use by
-	 * an earlier boot loader stage.
+	 * an earlier boot loader stage. If PIE is enabled however,
+	 * RO sections including the GOT may be modified during
+	 * pie fixup. Therefore, to be on the safe side, invalidate
+	 * the entire image region if PIE is enabled.
	 * -----------------------------------------------------------------
	 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+	ldr	r0, =__TEXT_START__
+#else
+	ldr	r0, =__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
 	ldr	r0, =__RW_START__
+#endif /* ENABLE_PIE */
 	ldr	r1, =__RW_END__
 	sub	r1, r1, r0
 	bl	inv_dcache_range
diff --git a/include/arch/aarch64/el3_common_macros.S b/include/arch/aarch64/el3_common_macros.S
index 7d6a9638d..8e8d33480 100644
--- a/include/arch/aarch64/el3_common_macros.S
+++ b/include/arch/aarch64/el3_common_macros.S
@@ -430,11 +430,24 @@
 	 * includes the data and NOBITS sections. This is done to
 	 * safeguard against possible corruption of this memory by
 	 * dirty cache lines in a system cache as a result of use by
-	 * an earlier boot loader stage.
+	 * an earlier boot loader stage. If PIE is enabled however,
+	 * RO sections including the GOT may be modified during
+	 * pie fixup. Therefore, to be on the safe side, invalidate
+	 * the entire image region if PIE is enabled.
 	 * -------------------------------------------------------------
 	 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+	adrp	x0, __TEXT_START__
+	add	x0, x0, :lo12:__TEXT_START__
+#else
+	adrp	x0, __RO_START__
+	add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
 	adrp	x0, __RW_START__
 	add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
 	adrp	x1, __RW_END__
 	add	x1, x1, :lo12:__RW_END__
 	sub	x1, x1, x0
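
For reference, the range selection that all three hunks implement can be
sketched in C as below. This is a minimal illustration, not code from the
patch: the function name invalidate_image_dcache is made up, while
inv_dcache_range and the linker-defined symbols are the ones the assembly
above already uses (inv_dcache_range takes a base address and a size,
matching the x0/x1 arguments set up before the bl). In the real boot path
this logic must stay in assembly, since it runs before the MMU, the stack,
and the C runtime are available.

#include <stddef.h>
#include <stdint.h>

/* Linker-defined section boundaries referenced by the assembly above. */
extern char __TEXT_START__[];
extern char __RO_START__[];
extern char __RW_START__[];
extern char __RW_END__[];

/* TF-A cache helper: invalidates the data cache over [addr, addr + size). */
void inv_dcache_range(uintptr_t addr, size_t size);

static void invalidate_image_dcache(void)
{
	uintptr_t start;

#if ENABLE_PIE
	/*
	 * PIE fixup patches the GOT, which lives in an RO section, so the
	 * invalidated range must start at the very beginning of the image.
	 */
#if SEPARATE_CODE_AND_RODATA
	start = (uintptr_t)__TEXT_START__;	/* image starts with code */
#else
	start = (uintptr_t)__RO_START__;	/* combined RO region first */
#endif
#else
	/* Without PIE, RO sections are never written; the RW range suffices. */
	start = (uintptr_t)__RW_START__;
#endif
	inv_dcache_range(start, (size_t)((uintptr_t)__RW_END__ - start));
}

Note that the end of the range is __RW_END__ in every configuration; only
the start moves, because enabling PIE widens the set of sections that may
be dirty before the MMU comes on.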