diff --git a/bl32/tsp/aarch64/tsp_entrypoint.S b/bl32/tsp/aarch64/tsp_entrypoint.S
index 795c5865e..7d77f478b 100644
--- a/bl32/tsp/aarch64/tsp_entrypoint.S
+++ b/bl32/tsp/aarch64/tsp_entrypoint.S
@@ -100,11 +100,27 @@ func tsp_entrypoint _align=3
	 * sections. This is done to safeguard against
	 * possible corruption of this memory by dirty
	 * cache lines in a system cache as a result of
-	 * use by an earlier boot loader stage.
+	 * use by an earlier boot loader stage. If PIE
+	 * is enabled however, RO sections including the
+	 * GOT may be modified during pie fixup.
+	 * Therefore, to be on the safe side, invalidate
+	 * the entire image region if PIE is enabled.
	 * ---------------------------------------------
	 */
-	adr	x0, __RW_START__
-	adr	x1, __RW_END__
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+	adrp	x0, __TEXT_START__
+	add	x0, x0, :lo12:__TEXT_START__
+#else
+	adrp	x0, __RO_START__
+	add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
+	adrp	x0, __RW_START__
+	add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
+	adrp	x1, __RW_END__
+	add	x1, x1, :lo12:__RW_END__
	sub	x1, x1, x0
	bl	inv_dcache_range
 
diff --git a/include/arch/aarch32/el3_common_macros.S b/include/arch/aarch32/el3_common_macros.S
index 65f9a8e6b..ad2a03911 100644
--- a/include/arch/aarch32/el3_common_macros.S
+++ b/include/arch/aarch32/el3_common_macros.S
@@ -380,10 +380,21 @@
		 * includes the data and NOBITS sections. This is done to
		 * safeguard against possible corruption of this memory by
		 * dirty cache lines in a system cache as a result of use by
-		 * an earlier boot loader stage.
+		 * an earlier boot loader stage. If PIE is enabled however,
+		 * RO sections including the GOT may be modified during
+		 * pie fixup. Therefore, to be on the safe side, invalidate
+		 * the entire image region if PIE is enabled.
		 * -----------------------------------------------------------------
		 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+		ldr	r0, =__TEXT_START__
+#else
+		ldr	r0, =__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
		ldr	r0, =__RW_START__
+#endif /* ENABLE_PIE */
		ldr	r1, =__RW_END__
		sub	r1, r1, r0
		bl	inv_dcache_range
diff --git a/include/arch/aarch64/el3_common_macros.S b/include/arch/aarch64/el3_common_macros.S
index 7d6a9638d..8e8d33480 100644
--- a/include/arch/aarch64/el3_common_macros.S
+++ b/include/arch/aarch64/el3_common_macros.S
@@ -430,11 +430,24 @@
		 * includes the data and NOBITS sections. This is done to
		 * safeguard against possible corruption of this memory by
		 * dirty cache lines in a system cache as a result of use by
-		 * an earlier boot loader stage.
+		 * an earlier boot loader stage. If PIE is enabled however,
+		 * RO sections including the GOT may be modified during
+		 * pie fixup. Therefore, to be on the safe side, invalidate
+		 * the entire image region if PIE is enabled.
		 * -------------------------------------------------------------
		 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+		adrp	x0, __TEXT_START__
+		add	x0, x0, :lo12:__TEXT_START__
+#else
+		adrp	x0, __RO_START__
+		add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
		adrp	x0, __RW_START__
		add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
		adrp	x1, __RW_END__
		add	x1, x1, :lo12:__RW_END__
		sub	x1, x1, x0
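
For readers less familiar with the assembly, the range selection implemented by the hunks above boils down to the C sketch below. This is illustrative only and not part of the patch; the extern section markers and the inv_dcache_range() prototype are assumptions made for the example, loosely mirroring how TF-A exposes its linker symbols and cache helpers to C.

/*
 * Illustrative C sketch (not part of the patch) of the invalidation-range
 * selection performed by the assembly above. The extern arrays stand in for
 * the linker-script symbols referenced by the assembly, and the
 * inv_dcache_range() prototype is assumed here for the example.
 */
#include <stddef.h>
#include <stdint.h>

extern char __TEXT_START__[], __RO_START__[], __RW_START__[], __RW_END__[];

/* Assumed prototype: invalidate the data cache over [base, base + size). */
void inv_dcache_range(uintptr_t base, size_t size);

static void invalidate_image_memory(void)
{
	uintptr_t start;

#if ENABLE_PIE
	/*
	 * PIE fixup patches the GOT, which lives in a read-only section, so
	 * the invalidation has to start at the beginning of the image.
	 */
#if SEPARATE_CODE_AND_RODATA
	start = (uintptr_t)__TEXT_START__;
#else
	start = (uintptr_t)__RO_START__;
#endif
#else
	/* Without PIE, only the RW part of the image can have been dirtied. */
	start = (uintptr_t)__RW_START__;
#endif

	inv_dcache_range(start, (size_t)((uintptr_t)__RW_END__ - start));
}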