Merge pull request #775 from soby-mathew/sm/AArch32_stack_align

AArch32: Fix the stack alignment issue

Merged by danh-arm on 2016-12-14 09:25:15 +00:00 (committed by GitHub, commit 9509f4f67a)
7 changed files with 32 additions and 22 deletions
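The underlying problem: the AArch32 Procedure Call Standard (AAPCS) requires the stack pointer to be 8-byte aligned at any public interface, but each AArch32 `push` slot is only 4 bytes. A function that pushes an odd number of registers therefore calls its callees with a misaligned SP, which can break any callee (compiled C in particular) that assumes 8-byte aligned stack slots for 64-bit data. A minimal sketch of the broken and fixed patterns (the labels and `callee` are illustrative, not from this patch):

	.syntax unified
	.text

/* Broken: one 4-byte slot leaves SP only 4-byte aligned across the call. */
misaligned_helper:
	push	{lr}		/* SP -= 4 */
	bl	callee		/* callee sees a misaligned SP */
	pop	{pc}

/* Fixed: an even register count keeps SP 8-byte aligned. */
aligned_helper:
	push	{r12, lr}	/* SP -= 8 */
	bl	callee
	pop	{r12, pc}

Every hunk below is an instance of one of these two conversions.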


@@ -168,8 +168,11 @@ func handle_smc
 	mov	r2, r0				/* handle */
 	ldcopr	r0, SCR
 
-	/* Save SCR in stack */
-	push	{r0}
+	/*
+	 * Save SCR in stack. r1 is pushed to meet the 8 byte
+	 * stack alignment requirement.
+	 */
+	push	{r0, r1}
 	and	r3, r0, #SCR_NS_BIT		/* flags */
 
 	/* Switch to Secure Mode*/
@@ -191,7 +194,7 @@ func handle_smc
 	/* r0 points to smc context */
 
 	/* Restore SCR from stack */
-	pop	{r1}
+	pop	{r1, r2}
 	stcopr	r1, SCR
 	isb
 
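The pushed r1 is pure padding: its value is never consumed. On the restore path the dummy slot is popped into r2, which the following code never reads (only r1, now holding SCR, feeds the `stcopr`). The generic shape of this dummy push/pop pairing, as a sketch:

	push	{r0, r1}	/* r0 = live value, r1 = 4 bytes of padding */
	/* ... intervening calls now run with an 8-byte aligned SP ... */
	pop	{r0, r2}	/* r0 restored; r2 soaks up the padding and is ignored */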


@@ -39,6 +39,7 @@
 #define SMC_CTX_GPREG_R2	0x8
 #define SMC_CTX_GPREG_R3	0xC
 #define SMC_CTX_GPREG_R4	0x10
+#define SMC_CTX_GPREG_R5	0x14
 #define SMC_CTX_SP_USR		0x34
 #define SMC_CTX_SPSR_MON	0x78
 #define SMC_CTX_LR_MON		0x7C
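Each 32-bit GP register occupies one 4-byte slot in the SMC context, so SMC_CTX_GPREG_Rn is simply 4 * n; the new R5 slot lands at 0x14, between R4 at 0x10 and the banked-register fields further up. A one-line usage sketch (r0 as the context base is an assumption carried over from the macro below):

	str	r5, [r0, #SMC_CTX_GPREG_R5]	/* store r5 at context offset 0x14 */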


@ -38,22 +38,22 @@
* contains the pointer to the `smc_context_t`. * contains the pointer to the `smc_context_t`.
*/ */
.macro smcc_save_gp_mode_regs .macro smcc_save_gp_mode_regs
push {r0-r3, lr} push {r0-r4, lr}
ldcopr r0, SCR ldcopr r0, SCR
and r0, r0, #SCR_NS_BIT and r0, r0, #SCR_NS_BIT
bl smc_get_ctx bl smc_get_ctx
/* Save r4 - r12 in the SMC context */ /* Save r5 - r12 in the SMC context */
add r1, r0, #SMC_CTX_GPREG_R4 add r1, r0, #SMC_CTX_GPREG_R5
stm r1!, {r4-r12} stm r1!, {r5-r12}
/* /*
* Pop r0 - r3, lr to r4 - r7, lr from stack and then save * Pop r0 - r4, lr to r4 - r8, lr from stack and then save
* it to SMC context. * it to SMC context.
*/ */
pop {r4-r7, lr} pop {r4-r8, lr}
stm r0, {r4-r7} stm r0, {r4-r8}
/* Save the banked registers including the current SPSR and LR */ /* Save the banked registers including the current SPSR and LR */
mrs r4, sp_usr mrs r4, sp_usr
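The arithmetic behind this hunk: the old `push {r0-r3, lr}` stored five registers (20 bytes), leaving SP only 4-byte aligned across `bl smc_get_ctx`; adding r4 makes six registers (24 bytes), a multiple of 8. Because r4 is now stashed on the stack rather than live, the block store into the context starts at r5, and the final pop/stm pair widens by one register to carry r4 along. A sketch of the resulting stack layout and the pop mapping:

	/*
	 * After push {r0-r4, lr}, lowest address first:
	 *   [sp, #0x00] r0    [sp, #0x04] r1    [sp, #0x08] r2
	 *   [sp, #0x0C] r3    [sp, #0x10] r4    [sp, #0x14] lr
	 */
	pop	{r4-r8, lr}	/* r4 <- old r0, r5 <- old r1, ..., r8 <- old r4 */
	stm	r0, {r4-r8}	/* writes context offsets 0x0 (R0) through 0x10 (R4) */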


@@ -72,7 +72,8 @@ endfunc cortex_a32_reset_func
  * ----------------------------------------------------
  */
 func cortex_a32_core_pwr_dwn
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 
 	/* Assert if cache is enabled */
 #if ASM_ASSERTION
@@ -92,7 +93,7 @@ func cortex_a32_core_pwr_dwn
 	 * Come out of intra cluster coherency
 	 * ---------------------------------------------
 	 */
-	pop	{lr}
+	pop	{r12, lr}
 	b	cortex_a32_disable_smp
 endfunc cortex_a32_core_pwr_dwn
 
@@ -102,7 +103,8 @@ endfunc cortex_a32_core_pwr_dwn
  * -------------------------------------------------------
  */
 func cortex_a32_cluster_pwr_dwn
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 
 	/* Assert if cache is enabled */
 #if ASM_ASSERTION
@@ -135,7 +137,7 @@ func cortex_a32_cluster_pwr_dwn
 	 * Come out of intra cluster coherency
 	 * ---------------------------------------------
 	 */
-	pop	{lr}
+	pop	{r12, lr}
 	b	cortex_a32_disable_smp
 endfunc cortex_a32_cluster_pwr_dwn
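The remaining hunks all apply the same one-line conversion: every function that pushed `lr` alone now pushes `r12` alongside it. r12 (ip) is the natural choice of dummy because the AAPCS designates it a corruptible intra-procedure-call scratch register, so saving and restoring a stale value in it costs nothing and creates no new obligations. The recurring pattern, sketched with hypothetical names:

example_pwr_dwn:
	push	{r12, lr}	/* 2 x 4 bytes = 8: SP stays 8-byte aligned */
	bl	do_maintenance	/* hypothetical helper needing an aligned SP */
	pop	{r12, lr}
	b	finish_pwr_dwn	/* tail call with lr intact, as in the hunks above */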


@@ -76,9 +76,10 @@ endfunc reset_handler
  */
 	.globl	prepare_core_pwr_dwn
 func prepare_core_pwr_dwn
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 	bl	_cpu_data
-	pop	{lr}
+	pop	{r12, lr}
 
 	ldr	r1, [r0, #CPU_DATA_CPU_OPS_PTR]
 #if ASM_ASSERTION
@@ -98,9 +99,10 @@ endfunc prepare_core_pwr_dwn
  */
 	.globl	prepare_cluster_pwr_dwn
 func prepare_cluster_pwr_dwn
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 	bl	_cpu_data
-	pop	{lr}
+	pop	{r12, lr}
 
 	ldr	r1, [r0, #CPU_DATA_CPU_OPS_PTR]
 #if ASM_ASSERTION


@@ -41,9 +41,10 @@
  * -----------------------------------------------------------------
  */
 func _cpu_data
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 	bl	plat_my_core_pos
-	pop	{lr}
+	pop	{r12, lr}
 	b	_cpu_data_by_index
 endfunc _cpu_data


@@ -93,7 +93,8 @@ endfunc psci_do_pwrdown_cache_maintenance
  * -----------------------------------------------------------------------
  */
 func psci_do_pwrup_cache_maintenance
-	push	{lr}
+	/* r12 is pushed to meet the 8 byte stack alignment requirement */
+	push	{r12, lr}
 
 	/* ---------------------------------------------
 	 * Ensure any inflight stack writes have made it
@@ -123,7 +124,7 @@ func psci_do_pwrup_cache_maintenance
 	stcopr	r0, SCTLR
 	isb
 
-	pop	{pc}
+	pop	{r12, pc}
 endfunc psci_do_pwrup_cache_maintenance
 
 /* ---------------------------------------------
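One variation in this last hunk: `psci_do_pwrup_cache_maintenance` returns at this point, so the epilogue pops directly into `pc`, folding the padding restore and the return into a single instruction. The pop list must still mirror the `push {r12, lr}` at function entry so SP comes back to its entry value. Sketched side by side:

	pop	{pc}		/* old: mirrors push {lr} */
	pop	{r12, pc}	/* new: mirrors push {r12, lr}, returns identically */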