AArch32: Add SMCC context

This patch defines an SMCC context to save and restore
registers during an SMC call. It also adds the appropriate helpers
to save to and restore from this context, for use by the AArch32
secure payload and BL stages.

Change-Id: I64c8d6fe1d6cac22e1f1f39ea1b54ee1b1b72248
This commit is contained in:
Soby Mathew 2016-05-05 12:53:53 +01:00
parent 1ae0a49a37
commit 3e3616ab21
2 changed files with 289 additions and 0 deletions

View File

@ -0,0 +1,171 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __SMCC_HELPERS_H__
#define __SMCC_HELPERS_H__
#include <smcc.h>
/* These are offsets to registers in smc_ctx_t */
/*
 * Byte offsets of members of `smc_ctx_t`, for use from assembly.
 * They must match the C structure layout below; the CASSERTs in this
 * header enforce the correspondence at compile time.
 */
#define SMC_CTX_GPREG_R0 0x0
#define SMC_CTX_GPREG_R1 0x4
#define SMC_CTX_GPREG_R2 0x8
#define SMC_CTX_GPREG_R3 0xC
#define SMC_CTX_GPREG_R4 0x10
/* r5 - r12 follow contiguously after r4 (9 x 4 bytes up to 0x34) */
#define SMC_CTX_SP_USR 0x34
#define SMC_CTX_SPSR_MON 0x78
#define SMC_CTX_LR_MON 0x7C
/* Total size of smc_ctx_t, i.e. offset one past `lr_mon` */
#define SMC_CTX_SIZE 0x80
#ifndef __ASSEMBLY__
#include <cassert.h>
#include <types.h>
/*
 * The generic structure to save arguments and callee saved registers during
 * an SMC. Also this structure is used to store the result return values after
 * the completion of SMC service.
 *
 * NOTE: the member order fixes the byte offsets named by the SMC_CTX_*
 * defines above and relied upon by the assembly save/restore macros;
 * do not reorder fields without updating both.
 */
typedef struct smc_ctx {
/* r0 - r3: SMC arguments on entry, results on exit (see SMC_RET1..4) */
u_register_t r0;
u_register_t r1;
u_register_t r2;
u_register_t r3;
/* r4 - r12: remaining general purpose registers of the caller */
u_register_t r4;
u_register_t r5;
u_register_t r6;
u_register_t r7;
u_register_t r8;
u_register_t r9;
u_register_t r10;
u_register_t r11;
u_register_t r12;
/* Banked registers, grouped per mode: usr, irq, fiq, svc, abt, und */
/* spsr_usr doesn't exist */
u_register_t sp_usr;
u_register_t lr_usr;
u_register_t spsr_irq;
u_register_t sp_irq;
u_register_t lr_irq;
u_register_t spsr_fiq;
u_register_t sp_fiq;
u_register_t lr_fiq;
u_register_t spsr_svc;
u_register_t sp_svc;
u_register_t lr_svc;
u_register_t spsr_abt;
u_register_t sp_abt;
u_register_t lr_abt;
u_register_t spsr_und;
u_register_t sp_und;
u_register_t lr_und;
/* Monitor mode state; offset 0x78 (SMC_CTX_SPSR_MON) */
u_register_t spsr_mon;
/* No need to save 'sp_mon' because we are already in monitor mode */
u_register_t lr_mon;
} smc_ctx_t;
/*
 * Compile time assertions related to the 'smc_context' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 * Only the offsets that the assembly macros actually reference by name
 * are asserted here; the rest of the layout is covered by the final
 * size assertion.
 */
CASSERT(SMC_CTX_GPREG_R0 == __builtin_offsetof(smc_ctx_t, r0), \
assert_smc_ctx_greg_r0_offset_mismatch);
CASSERT(SMC_CTX_GPREG_R1 == __builtin_offsetof(smc_ctx_t, r1), \
assert_smc_ctx_greg_r1_offset_mismatch);
CASSERT(SMC_CTX_GPREG_R2 == __builtin_offsetof(smc_ctx_t, r2), \
assert_smc_ctx_greg_r2_offset_mismatch);
CASSERT(SMC_CTX_GPREG_R3 == __builtin_offsetof(smc_ctx_t, r3), \
assert_smc_ctx_greg_r3_offset_mismatch);
CASSERT(SMC_CTX_GPREG_R4 == __builtin_offsetof(smc_ctx_t, r4), \
assert_smc_ctx_greg_r4_offset_mismatch);
CASSERT(SMC_CTX_SP_USR == __builtin_offsetof(smc_ctx_t, sp_usr), \
assert_smc_ctx_sp_usr_offset_mismatch);
CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \
assert_smc_ctx_lr_mon_offset_mismatch);
CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
assert_smc_ctx_spsr_mon_offset_mismatch);
/* Catches any field added/removed without updating SMC_CTX_SIZE */
CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
/* Convenience macros to return from SMC handler */
/*
 * Each SMC_RETn(_h, ...) stores its result arguments into r0 - r(n-1) of
 * the smc_ctx_t pointed to by `_h`, then returns `_h` (as uintptr_t) from
 * the enclosing handler. SMC_RETn chains down to SMC_RET(n-1) so every
 * register field is written exactly once.
 */
#define SMC_RET0(_h) { \
return (uintptr_t)(_h); \
}
#define SMC_RET1(_h, _r0) { \
((smc_ctx_t *)(_h))->r0 = (_r0); \
SMC_RET0(_h); \
}
#define SMC_RET2(_h, _r0, _r1) { \
((smc_ctx_t *)(_h))->r1 = (_r1); \
SMC_RET1(_h, (_r0)); \
}
#define SMC_RET3(_h, _r0, _r1, _r2) { \
((smc_ctx_t *)(_h))->r2 = (_r2); \
SMC_RET2(_h, (_r0), (_r1)); \
}
#define SMC_RET4(_h, _r0, _r1, _r2, _r3) { \
((smc_ctx_t *)(_h))->r3 = (_r3); \
SMC_RET3(_h, (_r0), (_r1), (_r2)); \
}
/*
 * Return a UUID in the SMC return registers r0 - r3, via the handler
 * context `_h`. `_uuid` is read as four consecutive 32-bit words.
 *
 * Fix: the expansion previously used a hard-coded `handle` identifier
 * instead of the `_h` parameter, so the macro only compiled when the
 * caller happened to have a variable named `handle` in scope and
 * silently ignored the handle actually passed in.
 */
#define SMC_UUID_RET(_h, _uuid) \
	SMC_RET4((_h), ((const uint32_t *) &(_uuid))[0], \
		 ((const uint32_t *) &(_uuid))[1], \
		 ((const uint32_t *) &(_uuid))[2], \
		 ((const uint32_t *) &(_uuid))[3])
/*
 * Helper macro to retrieve the SMC parameters (r1 - r4) from the
 * smc_ctx_t pointed to by `_hdl` into the given lvalues.
 *
 * `_hdl` and the output lvalues are parenthesized in the expansion so
 * that passing expressions (e.g. `ctx + 1` or `*p`) cannot change the
 * intended grouping.
 */
#define get_smc_params_from_ctx(_hdl, _r1, _r2, _r3, _r4) { \
	(_r1) = ((smc_ctx_t *)(_hdl))->r1; \
	(_r2) = ((smc_ctx_t *)(_hdl))->r2; \
	(_r3) = ((smc_ctx_t *)(_hdl))->r3; \
	(_r4) = ((smc_ctx_t *)(_hdl))->r4; \
}
/* ------------------------------------------------------------------------
 * Helper APIs for setting and retrieving appropriate `smc_ctx_t`.
 * These functions need to be implemented by the BL including this library.
 * `security_state` is the value of the SCR NS bit (0 = Secure,
 * non-zero = Non-secure) as passed by the assembly save macro.
 * ------------------------------------------------------------------------
 */
/* Get the pointer to `smc_ctx_t` corresponding to the security state. */
void *smc_get_ctx(int security_state);
/* Set the next `smc_ctx_t` corresponding to the security state. */
void smc_set_next_ctx(int security_state);
/* Get the pointer to next `smc_ctx_t` already set by `smc_set_next_ctx()`. */
void *smc_get_next_ctx(void);
#endif /*__ASSEMBLY__*/
#endif /* __SMCC_HELPERS_H__ */

View File

@ -0,0 +1,118 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __SMCC_MACROS_S__
#define __SMCC_MACROS_S__
#include <arch.h>
/*
 * Macro to save the General purpose registers including the banked
 * registers to the SMC context on entry due to an SMC call. Runs in
 * Monitor mode (hence sp_mon is not saved; see smc_ctx_t). On return,
 * r0 contains the pointer to the `smc_ctx_t` of the interrupted
 * security state.
 * Clobbers: r1 - r12 (lr is restored to its entry value by the pop).
 */
.macro smcc_save_gp_mode_regs
/* Preserve the SMC arguments r0 - r3 and lr while r0/lr are reused below */
push {r0-r3, lr}
/* r0 = SCR.NS bit = security state argument expected by smc_get_ctx() */
ldcopr r0, SCR
and r0, r0, #SCR_NS_BIT
bl smc_get_ctx
/* Save r4 - r12 in the SMC context */
add r1, r0, #SMC_CTX_GPREG_R4
stm r1!, {r4-r12}
/*
 * Pop r0 - r3, lr to r4 - r7, lr from stack and then save
 * it to SMC context.
 */
pop {r4-r7, lr}
/* Base r0 = ctx; stores entry r0 - r3 at offsets SMC_CTX_GPREG_R0..R3 */
stm r0, {r4-r7}
/* Save the banked registers including the current SPSR and LR */
/* r1 now points at SMC_CTX_SP_USR; fill usr/irq/fiq state + spsr_svc */
mrs r4, sp_usr
mrs r5, lr_usr
mrs r6, spsr_irq
mrs r7, sp_irq
mrs r8, lr_irq
mrs r9, spsr_fiq
mrs r10, sp_fiq
mrs r11, lr_fiq
mrs r12, spsr_svc
stm r1!, {r4-r12}
/* Remaining svc/abt/und state, then the current (Monitor) SPSR and lr */
mrs r4, sp_svc
mrs r5, lr_svc
mrs r6, spsr_abt
mrs r7, sp_abt
mrs r8, lr_abt
mrs r9, spsr_und
mrs r10, sp_und
mrs r11, lr_und
/* `spsr` here is the current mode's SPSR, stored at SMC_CTX_SPSR_MON */
mrs r12, spsr
/* Final stm ends exactly at SMC_CTX_SIZE; lr lands at SMC_CTX_LR_MON */
stm r1!, {r4-r12, lr}
.endm
/*
 * Macro to restore the General purpose registers including the banked
 * registers from the SMC context prior to exit from the SMC call.
 * r0 must point to the `smc_ctx_t` to restore from.
 * Mirrors smcc_save_gp_mode_regs; restores r0 - r12, lr (lr_mon) and
 * all banked usr/irq/fiq/svc/abt/und state plus the current SPSR.
 */
.macro smcc_restore_gp_mode_regs
/* Restore the banked registers including the current SPSR and LR */
add r1, r0, #SMC_CTX_SP_USR
/* First nine words: sp_usr .. spsr_svc */
ldm r1!, {r4-r12}
msr sp_usr, r4
msr lr_usr, r5
msr spsr_irq, r6
msr sp_irq, r7
msr lr_irq, r8
msr spsr_fiq, r9
msr sp_fiq, r10
msr lr_fiq, r11
msr spsr_svc, r12
/* Next ten words: sp_svc .. spsr_mon, then lr is loaded with lr_mon */
ldm r1!, {r4-r12, lr}
msr sp_svc, r4
msr lr_svc, r5
msr spsr_abt, r6
msr sp_abt, r7
msr lr_abt, r8
msr spsr_und, r9
msr sp_und, r10
msr lr_und, r11
/* Writes the current mode's SPSR, i.e. SPSR_mon (saved counterpart above) */
msr spsr, r12
/* Restore the rest of the general purpose registers */
/*
 * Base r0 is in the register list without writeback: r0 - r12 are
 * loaded from the context and r0 itself ends up holding ctx->r0,
 * overwriting the context pointer last-use here is intentional.
 */
ldm r0, {r0-r12}
.endm
#endif /* __SMCC_MACROS_S__ */