SPMD: save/restore EL2 system registers.

NOTE: Not all EL2 system registers are saved/restored.
This subset includes registers recognized by Armv8.0.

Change-Id: I9993c7d78d8f5f8e72d1c6c8d6fd871283aa3ce0
Signed-off-by: Jose Marinho <jose.marinho@arm.com>
Signed-off-by: Olivier Deprez <olivier.deprez@arm.com>
Signed-off-by: Artsem Artsemenka <artsem.artsemenka@arm.com>
Signed-off-by: Max Shvetsov <maksims.svecovs@arm.com>
This commit is contained in:
Max Shvetsov 2020-02-25 13:56:19 +00:00
parent 2403813779
commit 28f39f02ad
8 changed files with 608 additions and 36 deletions

View File

@ -412,40 +412,45 @@ INCLUDE_TBBR_MK := 1
################################################################################
ifneq (${SPD},none)
ifeq (${ARCH},aarch32)
ifeq (${ARCH},aarch32)
$(error "Error: SPD is incompatible with AArch32.")
endif
ifdef EL3_PAYLOAD_BASE
endif
ifdef EL3_PAYLOAD_BASE
$(warning "SPD and EL3_PAYLOAD_BASE are incompatible build options.")
$(warning "The SPD and its BL32 companion will be present but ignored.")
endif
ifeq (${SPD},spmd)
# SPMD is located in std_svc directory
SPD_DIR := std_svc
else
# All other SPDs in spd directory
SPD_DIR := spd
endif
endif
# We expect to locate an spd.mk under the specified SPD directory
SPD_MAKE := $(wildcard services/${SPD_DIR}/${SPD}/${SPD}.mk)
ifeq (${SPD},spmd)
# SPMD is located in std_svc directory
SPD_DIR := std_svc
ifeq (${SPD_MAKE},)
$(error Error: No services/${SPD_DIR}/${SPD}/${SPD}.mk located)
ifeq ($(CTX_INCLUDE_EL2_REGS),0)
$(error spmd requires CTX_INCLUDE_EL2_REGS option)
endif
$(info Including ${SPD_MAKE})
include ${SPD_MAKE}
else
# All other SPDs in spd directory
SPD_DIR := spd
endif
# If there's BL32 companion for the chosen SPD, we expect that the SPD's
# Makefile would set NEED_BL32 to "yes". In this case, the build system
# supports two mutually exclusive options:
# * BL32 is built from source: then BL32_SOURCES must contain the list
# of source files to build BL32
# * BL32 is a prebuilt binary: then BL32 must point to the image file
# that will be included in the FIP
# If both BL32_SOURCES and BL32 are defined, the binary takes precedence
# over the sources.
# We expect to locate an spd.mk under the specified SPD directory
SPD_MAKE := $(wildcard services/${SPD_DIR}/${SPD}/${SPD}.mk)
ifeq (${SPD_MAKE},)
$(error Error: No services/${SPD_DIR}/${SPD}/${SPD}.mk located)
endif
$(info Including ${SPD_MAKE})
include ${SPD_MAKE}
# If there's BL32 companion for the chosen SPD, we expect that the SPD's
# Makefile would set NEED_BL32 to "yes". In this case, the build system
# supports two mutually exclusive options:
# * BL32 is built from source: then BL32_SOURCES must contain the list
# of source files to build BL32
# * BL32 is a prebuilt binary: then BL32 must point to the image file
# that will be included in the FIP
# If both BL32_SOURCES and BL32 are defined, the binary takes precedence
# over the sources.
endif
################################################################################
@ -761,6 +766,7 @@ $(eval $(call assert_boolean,CTX_INCLUDE_AARCH32_REGS))
$(eval $(call assert_boolean,CTX_INCLUDE_FPREGS))
$(eval $(call assert_boolean,CTX_INCLUDE_PAUTH_REGS))
$(eval $(call assert_boolean,CTX_INCLUDE_MTE_REGS))
$(eval $(call assert_boolean,CTX_INCLUDE_EL2_REGS))
$(eval $(call assert_boolean,DEBUG))
$(eval $(call assert_boolean,DYN_DISABLE_AUTH))
$(eval $(call assert_boolean,EL3_EXCEPTION_HANDLING))
@ -832,6 +838,7 @@ $(eval $(call add_define,CTX_INCLUDE_FPREGS))
$(eval $(call add_define,CTX_INCLUDE_PAUTH_REGS))
$(eval $(call add_define,EL3_EXCEPTION_HANDLING))
$(eval $(call add_define,CTX_INCLUDE_MTE_REGS))
$(eval $(call add_define,CTX_INCLUDE_EL2_REGS))
$(eval $(call add_define,ENABLE_AMU))
$(eval $(call add_define,ENABLE_ASSERTIONS))
$(eval $(call add_define,ENABLE_BTI))

View File

@ -96,6 +96,33 @@
#define ICC_EOIR1_EL1 S3_0_c12_c12_1
#define ICC_SGI0R_EL1 S3_0_c12_c11_7
/*******************************************************************************
 * Definitions for EL2 system registers for save/restore routine
 ******************************************************************************/
/*
 * Raw S3_<op1>_<Cn>_<Cm>_<op2> encodings are used so that MRS/MSR accesses
 * assemble even when the toolchain does not know these registers by name.
 *
 * NOTE(review): several of these belong to optional architecture extensions
 * (CNTPOFF_EL2: FEAT_ECV; H*FG*TR_EL2: FEAT_FGT/FEAT_AMU; MPAM*: FEAT_MPAM) —
 * confirm the access sites are gated on the corresponding feature.
 */
/* Counter-timer physical offset */
#define CNTPOFF_EL2 S3_4_C14_C0_6
/* Fine-grained trap registers */
#define HAFGRTR_EL2 S3_4_C3_C1_6
#define HDFGRTR_EL2 S3_4_C3_C1_4
#define HDFGWTR_EL2 S3_4_C3_C1_5
#define HFGITR_EL2 S3_4_C1_C1_6
#define HFGRTR_EL2 S3_4_C1_C1_4
#define HFGWTR_EL2 S3_4_C1_C1_5
/* GICv3 virtual interface control registers */
#define ICH_EISR_EL2 S3_4_C12_C11_3
#define ICH_ELRSR_EL2 S3_4_C12_C11_5
#define ICH_HCR_EL2 S3_4_C12_C11_0
#define ICH_MISR_EL2 S3_4_C12_C11_2
#define ICH_VMCR_EL2 S3_4_C12_C11_7
#define ICH_VTR_EL2 S3_4_C12_C11_1
/* MPAM virtual PARTID mapping registers */
#define MPAMVPM0_EL2 S3_4_C10_C5_0
#define MPAMVPM1_EL2 S3_4_C10_C5_1
#define MPAMVPM2_EL2 S3_4_C10_C5_2
#define MPAMVPM3_EL2 S3_4_C10_C5_3
#define MPAMVPM4_EL2 S3_4_C10_C5_4
#define MPAMVPM5_EL2 S3_4_C10_C5_5
#define MPAMVPM6_EL2 S3_4_C10_C5_6
#define MPAMVPM7_EL2 S3_4_C10_C5_7
#define MPAMVPMV_EL2 S3_4_C10_C4_1
/*******************************************************************************
* Generic timer memory mapped registers & offsets
******************************************************************************/

View File

@ -135,10 +135,88 @@
#define CTX_MTE_REGS_END CTX_TIMER_SYSREGS_END
#endif /* CTX_INCLUDE_MTE_REGS */
/*
 * S-EL2 register set
 */
#if CTX_INCLUDE_EL2_REGS
/* For later discussion
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_LR<n>_EL2
 */
/*
 * Byte offsets of each saved EL2 system register within the 'sys_regs'
 * context area.  Each register occupies 8 bytes; the offsets are
 * contiguous and relative to CTX_MTE_REGS_END.
 */
#define CTX_ACTLR_EL2 (CTX_MTE_REGS_END + U(0x0))
#define CTX_AFSR0_EL2 (CTX_MTE_REGS_END + U(0x8))
#define CTX_AFSR1_EL2 (CTX_MTE_REGS_END + U(0x10))
#define CTX_AMAIR_EL2 (CTX_MTE_REGS_END + U(0x18))
#define CTX_CNTHCTL_EL2 (CTX_MTE_REGS_END + U(0x20))
#define CTX_CNTHP_CTL_EL2 (CTX_MTE_REGS_END + U(0x28))
#define CTX_CNTHP_CVAL_EL2 (CTX_MTE_REGS_END + U(0x30))
#define CTX_CNTHP_TVAL_EL2 (CTX_MTE_REGS_END + U(0x38))
#define CTX_CNTPOFF_EL2 (CTX_MTE_REGS_END + U(0x40))
#define CTX_CNTVOFF_EL2 (CTX_MTE_REGS_END + U(0x48))
#define CTX_CPTR_EL2 (CTX_MTE_REGS_END + U(0x50))
#define CTX_DBGVCR32_EL2 (CTX_MTE_REGS_END + U(0x58))
#define CTX_ELR_EL2 (CTX_MTE_REGS_END + U(0x60))
#define CTX_ESR_EL2 (CTX_MTE_REGS_END + U(0x68))
#define CTX_FAR_EL2 (CTX_MTE_REGS_END + U(0x70))
#define CTX_FPEXC32_EL2 (CTX_MTE_REGS_END + U(0x78))
#define CTX_HACR_EL2 (CTX_MTE_REGS_END + U(0x80))
#define CTX_HAFGRTR_EL2 (CTX_MTE_REGS_END + U(0x88))
#define CTX_HCR_EL2 (CTX_MTE_REGS_END + U(0x90))
#define CTX_HDFGRTR_EL2 (CTX_MTE_REGS_END + U(0x98))
#define CTX_HDFGWTR_EL2 (CTX_MTE_REGS_END + U(0xA0))
#define CTX_HFGITR_EL2 (CTX_MTE_REGS_END + U(0xA8))
#define CTX_HFGRTR_EL2 (CTX_MTE_REGS_END + U(0xB0))
#define CTX_HFGWTR_EL2 (CTX_MTE_REGS_END + U(0xB8))
#define CTX_HPFAR_EL2 (CTX_MTE_REGS_END + U(0xC0))
#define CTX_HSTR_EL2 (CTX_MTE_REGS_END + U(0xC8))
#define CTX_ICC_SRE_EL2 (CTX_MTE_REGS_END + U(0xD0))
#define CTX_ICH_EISR_EL2 (CTX_MTE_REGS_END + U(0xD8))
#define CTX_ICH_ELRSR_EL2 (CTX_MTE_REGS_END + U(0xE0))
#define CTX_ICH_HCR_EL2 (CTX_MTE_REGS_END + U(0xE8))
#define CTX_ICH_MISR_EL2 (CTX_MTE_REGS_END + U(0xF0))
#define CTX_ICH_VMCR_EL2 (CTX_MTE_REGS_END + U(0xF8))
#define CTX_ICH_VTR_EL2 (CTX_MTE_REGS_END + U(0x100))
#define CTX_MAIR_EL2 (CTX_MTE_REGS_END + U(0x108))
#define CTX_MDCR_EL2 (CTX_MTE_REGS_END + U(0x110))
#define CTX_MPAM2_EL2 (CTX_MTE_REGS_END + U(0x118))
#define CTX_MPAMHCR_EL2 (CTX_MTE_REGS_END + U(0x120))
#define CTX_MPAMVPM0_EL2 (CTX_MTE_REGS_END + U(0x128))
#define CTX_MPAMVPM1_EL2 (CTX_MTE_REGS_END + U(0x130))
#define CTX_MPAMVPM2_EL2 (CTX_MTE_REGS_END + U(0x138))
#define CTX_MPAMVPM3_EL2 (CTX_MTE_REGS_END + U(0x140))
#define CTX_MPAMVPM4_EL2 (CTX_MTE_REGS_END + U(0x148))
#define CTX_MPAMVPM5_EL2 (CTX_MTE_REGS_END + U(0x150))
#define CTX_MPAMVPM6_EL2 (CTX_MTE_REGS_END + U(0x158))
#define CTX_MPAMVPM7_EL2 (CTX_MTE_REGS_END + U(0x160))
#define CTX_MPAMVPMV_EL2 (CTX_MTE_REGS_END + U(0x168))
#define CTX_RMR_EL2 (CTX_MTE_REGS_END + U(0x170))
#define CTX_SCTLR_EL2 (CTX_MTE_REGS_END + U(0x178))
#define CTX_SPSR_EL2 (CTX_MTE_REGS_END + U(0x180))
#define CTX_SP_EL2 (CTX_MTE_REGS_END + U(0x188))
#define CTX_TCR_EL2 (CTX_MTE_REGS_END + U(0x190))
#define CTX_TPIDR_EL2 (CTX_MTE_REGS_END + U(0x198))
#define CTX_TTBR0_EL2 (CTX_MTE_REGS_END + U(0x1A0))
#define CTX_VBAR_EL2 (CTX_MTE_REGS_END + U(0x1A8))
#define CTX_VMPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B0))
#define CTX_VPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B8))
#define CTX_VTCR_EL2 (CTX_MTE_REGS_END + U(0x1C0))
#define CTX_VTTBR_EL2 (CTX_MTE_REGS_END + U(0x1C8))
/* Fix: was U(0x1B0), which collided with CTX_VMPIDR_EL2 */
#define CTX_ZCR_EL2 (CTX_MTE_REGS_END + U(0x1D0))
/*
 * Align to the next 16 byte boundary.  The last register (ZCR_EL2) ends at
 * +0x1D8; the previous value U(0x1C0) was *below* CTX_VTTBR_EL2, so the
 * final registers were stored outside the reserved area.
 */
#define CTX_EL2_REGS_END (CTX_MTE_REGS_END + U(0x1E0))
#else
#define CTX_EL2_REGS_END CTX_MTE_REGS_END
#endif /* CTX_INCLUDE_EL2_REGS */
/*
* End of system registers.
*/
#define CTX_SYSREGS_END CTX_MTE_REGS_END
#define CTX_SYSREGS_END CTX_EL2_REGS_END
/*******************************************************************************
* Constants that allow assembler code to access members of and the 'fp_regs'
@ -255,11 +333,10 @@
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
/*
* AArch64 EL1 system register context structure for preserving the
* architectural state during switches from one security state to
* another in EL1.
* AArch64 EL1/EL2 system register context structure for preserving the
* architectural state during world switches.
*/
DEFINE_REG_STRUCT(el1_sys_regs, CTX_SYSREG_ALL);
DEFINE_REG_STRUCT(sys_regs, CTX_SYSREG_ALL);
/*
* AArch64 floating point register context structure for preserving
@ -304,7 +381,7 @@ DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
typedef struct cpu_context {
gp_regs_t gpregs_ctx;
el3_state_t el3state_ctx;
el1_sys_regs_t sysregs_ctx;
sys_regs_t sysregs_ctx;
#if CTX_INCLUDE_FPREGS
fp_regs_t fpregs_ctx;
#endif
@ -387,8 +464,14 @@ CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \
/*******************************************************************************
* Function prototypes
******************************************************************************/
void el1_sysregs_context_save(el1_sys_regs_t *regs);
void el1_sysregs_context_restore(el1_sys_regs_t *regs);
void el1_sysregs_context_save(sys_regs_t *regs);
void el1_sysregs_context_restore(sys_regs_t *regs);
#if CTX_INCLUDE_EL2_REGS
void el2_sysregs_context_save(sys_regs_t *regs);
void el2_sysregs_context_restore(sys_regs_t *regs);
#endif
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);

View File

@ -36,6 +36,11 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep);
void cm_prepare_el3_exit(uint32_t security_state);
#ifdef __aarch64__
#if CTX_INCLUDE_EL2_REGS
void cm_el2_sysregs_context_save(uint32_t security_state);
void cm_el2_sysregs_context_restore(uint32_t security_state);
#endif
void cm_el1_sysregs_context_save(uint32_t security_state);
void cm_el1_sysregs_context_restore(uint32_t security_state);
void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -9,6 +9,11 @@
#include <assert_macros.S>
#include <context.h>
#if CTX_INCLUDE_EL2_REGS
.global el2_sysregs_context_save
.global el2_sysregs_context_restore
#endif
.global el1_sysregs_context_save
.global el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
@ -19,6 +24,390 @@
.global restore_gp_pmcr_pauth_regs
.global el3_exit
#if CTX_INCLUDE_EL2_REGS
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to the 'sys_regs' context area where
 * the register context will be saved.
 *
 * NOTE(review): several registers saved below belong to
 * optional extensions (CNTPOFF_EL2: FEAT_ECV, H*FG*TR:
 * FEAT_FGT, MPAM*: FEAT_MPAM, ZCR_EL2: SVE); accessing
 * them on a core without the feature is UNDEFINED —
 * confirm feature gating, since the commit note says
 * this subset targets Armv8.0.
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
mrs x9, actlr_el2
str x9, [x0, #CTX_ACTLR_EL2]
mrs x9, afsr0_el2
str x9, [x0, #CTX_AFSR0_EL2]
mrs x9, afsr1_el2
str x9, [x0, #CTX_AFSR1_EL2]
mrs x9, amair_el2
str x9, [x0, #CTX_AMAIR_EL2]
mrs x9, cnthctl_el2
str x9, [x0, #CTX_CNTHCTL_EL2]
mrs x9, cnthp_ctl_el2
str x9, [x0, #CTX_CNTHP_CTL_EL2]
mrs x9, cnthp_cval_el2
str x9, [x0, #CTX_CNTHP_CVAL_EL2]
mrs x9, cnthp_tval_el2
str x9, [x0, #CTX_CNTHP_TVAL_EL2]
mrs x9, CNTPOFF_EL2
str x9, [x0, #CTX_CNTPOFF_EL2]
mrs x9, cntvoff_el2
str x9, [x0, #CTX_CNTVOFF_EL2]
mrs x9, cptr_el2
str x9, [x0, #CTX_CPTR_EL2]
mrs x9, dbgvcr32_el2
str x9, [x0, #CTX_DBGVCR32_EL2]
mrs x9, elr_el2
str x9, [x0, #CTX_ELR_EL2]
mrs x9, esr_el2
str x9, [x0, #CTX_ESR_EL2]
mrs x9, far_el2
str x9, [x0, #CTX_FAR_EL2]
mrs x9, fpexc32_el2
str x9, [x0, #CTX_FPEXC32_EL2]
mrs x9, hacr_el2
str x9, [x0, #CTX_HACR_EL2]
mrs x9, HAFGRTR_EL2
str x9, [x0, #CTX_HAFGRTR_EL2]
mrs x9, hcr_el2
str x9, [x0, #CTX_HCR_EL2]
mrs x9, HDFGRTR_EL2
str x9, [x0, #CTX_HDFGRTR_EL2]
mrs x9, HDFGWTR_EL2
str x9, [x0, #CTX_HDFGWTR_EL2]
mrs x9, HFGITR_EL2
str x9, [x0, #CTX_HFGITR_EL2]
mrs x9, HFGRTR_EL2
str x9, [x0, #CTX_HFGRTR_EL2]
mrs x9, HFGWTR_EL2
str x9, [x0, #CTX_HFGWTR_EL2]
mrs x9, hpfar_el2
str x9, [x0, #CTX_HPFAR_EL2]
mrs x9, hstr_el2
str x9, [x0, #CTX_HSTR_EL2]
mrs x9, ICC_SRE_EL2
str x9, [x0, #CTX_ICC_SRE_EL2]
/* ICH_EISR/ELRSR/MISR/VTR are read-only status regs: saved for inspection */
mrs x9, ICH_EISR_EL2
str x9, [x0, #CTX_ICH_EISR_EL2]
mrs x9, ICH_ELRSR_EL2
str x9, [x0, #CTX_ICH_ELRSR_EL2]
mrs x9, ICH_HCR_EL2
str x9, [x0, #CTX_ICH_HCR_EL2]
mrs x9, ICH_MISR_EL2
str x9, [x0, #CTX_ICH_MISR_EL2]
mrs x9, ICH_VMCR_EL2
str x9, [x0, #CTX_ICH_VMCR_EL2]
mrs x9, ICH_VTR_EL2
str x9, [x0, #CTX_ICH_VTR_EL2]
mrs x9, mair_el2
str x9, [x0, #CTX_MAIR_EL2]
mrs x9, mdcr_el2
str x9, [x0, #CTX_MDCR_EL2]
mrs x9, MPAM2_EL2
str x9, [x0, #CTX_MPAM2_EL2]
mrs x9, MPAMHCR_EL2
str x9, [x0, #CTX_MPAMHCR_EL2]
mrs x9, MPAMVPM0_EL2
str x9, [x0, #CTX_MPAMVPM0_EL2]
mrs x9, MPAMVPM1_EL2
str x9, [x0, #CTX_MPAMVPM1_EL2]
mrs x9, MPAMVPM2_EL2
str x9, [x0, #CTX_MPAMVPM2_EL2]
mrs x9, MPAMVPM3_EL2
str x9, [x0, #CTX_MPAMVPM3_EL2]
mrs x9, MPAMVPM4_EL2
str x9, [x0, #CTX_MPAMVPM4_EL2]
mrs x9, MPAMVPM5_EL2
str x9, [x0, #CTX_MPAMVPM5_EL2]
mrs x9, MPAMVPM6_EL2
str x9, [x0, #CTX_MPAMVPM6_EL2]
mrs x9, MPAMVPM7_EL2
str x9, [x0, #CTX_MPAMVPM7_EL2]
mrs x9, MPAMVPMV_EL2
str x9, [x0, #CTX_MPAMVPMV_EL2]
mrs x9, rmr_el2
str x9, [x0, #CTX_RMR_EL2]
mrs x9, sctlr_el2
str x9, [x0, #CTX_SCTLR_EL2]
mrs x9, spsr_el2
str x9, [x0, #CTX_SPSR_EL2]
mrs x9, sp_el2
str x9, [x0, #CTX_SP_EL2]
mrs x9, tcr_el2
str x9, [x0, #CTX_TCR_EL2]
mrs x9, tpidr_el2
str x9, [x0, #CTX_TPIDR_EL2]
mrs x9, ttbr0_el2
str x9, [x0, #CTX_TTBR0_EL2]
mrs x9, vbar_el2
str x9, [x0, #CTX_VBAR_EL2]
mrs x9, vmpidr_el2
str x9, [x0, #CTX_VMPIDR_EL2]
mrs x9, vpidr_el2
str x9, [x0, #CTX_VPIDR_EL2]
mrs x9, vtcr_el2
str x9, [x0, #CTX_VTCR_EL2]
mrs x9, vttbr_el2
str x9, [x0, #CTX_VTTBR_EL2]
mrs x9, ZCR_EL2
str x9, [x0, #CTX_ZCR_EL2]
ret
endfunc el2_sysregs_context_save
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to the 'sys_regs' context area
 * from where the register context will be restored.
 *
 * Fix: the read-only GICv3 status registers
 * ICH_EISR_EL2, ICH_ELRSR_EL2, ICH_MISR_EL2 and
 * ICH_VTR_EL2 are deliberately NOT written back — an
 * MSR to them is UNDEFINED.  They remain saved in the
 * context for inspection only.
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
ldr x9, [x0, #CTX_ACTLR_EL2]
msr actlr_el2, x9
ldr x9, [x0, #CTX_AFSR0_EL2]
msr afsr0_el2, x9
ldr x9, [x0, #CTX_AFSR1_EL2]
msr afsr1_el2, x9
ldr x9, [x0, #CTX_AMAIR_EL2]
msr amair_el2, x9
ldr x9, [x0, #CTX_CNTHCTL_EL2]
msr cnthctl_el2, x9
ldr x9, [x0, #CTX_CNTHP_CTL_EL2]
msr cnthp_ctl_el2, x9
ldr x9, [x0, #CTX_CNTHP_CVAL_EL2]
msr cnthp_cval_el2, x9
ldr x9, [x0, #CTX_CNTHP_TVAL_EL2]
msr cnthp_tval_el2, x9
ldr x9, [x0, #CTX_CNTPOFF_EL2]
msr CNTPOFF_EL2, x9
ldr x9, [x0, #CTX_CNTVOFF_EL2]
msr cntvoff_el2, x9
ldr x9, [x0, #CTX_CPTR_EL2]
msr cptr_el2, x9
ldr x9, [x0, #CTX_DBGVCR32_EL2]
msr dbgvcr32_el2, x9
ldr x9, [x0, #CTX_ELR_EL2]
msr elr_el2, x9
ldr x9, [x0, #CTX_ESR_EL2]
msr esr_el2, x9
ldr x9, [x0, #CTX_FAR_EL2]
msr far_el2, x9
ldr x9, [x0, #CTX_FPEXC32_EL2]
msr fpexc32_el2, x9
ldr x9, [x0, #CTX_HACR_EL2]
msr hacr_el2, x9
ldr x9, [x0, #CTX_HAFGRTR_EL2]
msr HAFGRTR_EL2, x9
ldr x9, [x0, #CTX_HCR_EL2]
msr hcr_el2, x9
ldr x9, [x0, #CTX_HDFGRTR_EL2]
msr HDFGRTR_EL2, x9
ldr x9, [x0, #CTX_HDFGWTR_EL2]
msr HDFGWTR_EL2, x9
ldr x9, [x0, #CTX_HFGITR_EL2]
msr HFGITR_EL2, x9
ldr x9, [x0, #CTX_HFGRTR_EL2]
msr HFGRTR_EL2, x9
ldr x9, [x0, #CTX_HFGWTR_EL2]
msr HFGWTR_EL2, x9
ldr x9, [x0, #CTX_HPFAR_EL2]
msr hpfar_el2, x9
ldr x9, [x0, #CTX_HSTR_EL2]
msr hstr_el2, x9
ldr x9, [x0, #CTX_ICC_SRE_EL2]
msr ICC_SRE_EL2, x9
/* ICH_EISR_EL2, ICH_ELRSR_EL2: read-only, not restored */
ldr x9, [x0, #CTX_ICH_HCR_EL2]
msr ICH_HCR_EL2, x9
/* ICH_MISR_EL2: read-only, not restored */
ldr x9, [x0, #CTX_ICH_VMCR_EL2]
msr ICH_VMCR_EL2, x9
/* ICH_VTR_EL2: read-only, not restored */
ldr x9, [x0, #CTX_MAIR_EL2]
msr mair_el2, x9
ldr x9, [x0, #CTX_MDCR_EL2]
msr mdcr_el2, x9
ldr x9, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x9
ldr x9, [x0, #CTX_MPAMHCR_EL2]
msr MPAMHCR_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM0_EL2]
msr MPAMVPM0_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM1_EL2]
msr MPAMVPM1_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM2_EL2]
msr MPAMVPM2_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM3_EL2]
msr MPAMVPM3_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM4_EL2]
msr MPAMVPM4_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM5_EL2]
msr MPAMVPM5_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM6_EL2]
msr MPAMVPM6_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM7_EL2]
msr MPAMVPM7_EL2, x9
ldr x9, [x0, #CTX_MPAMVPMV_EL2]
msr MPAMVPMV_EL2, x9
ldr x9, [x0, #CTX_RMR_EL2]
msr rmr_el2, x9
ldr x9, [x0, #CTX_SCTLR_EL2]
msr sctlr_el2, x9
ldr x9, [x0, #CTX_SPSR_EL2]
msr spsr_el2, x9
ldr x9, [x0, #CTX_SP_EL2]
msr sp_el2, x9
ldr x9, [x0, #CTX_TCR_EL2]
msr tcr_el2, x9
ldr x9, [x0, #CTX_TPIDR_EL2]
msr tpidr_el2, x9
ldr x9, [x0, #CTX_TTBR0_EL2]
msr ttbr0_el2, x9
ldr x9, [x0, #CTX_VBAR_EL2]
msr vbar_el2, x9
ldr x9, [x0, #CTX_VMPIDR_EL2]
msr vmpidr_el2, x9
ldr x9, [x0, #CTX_VPIDR_EL2]
msr vpidr_el2, x9
ldr x9, [x0, #CTX_VTCR_EL2]
msr vtcr_el2, x9
ldr x9, [x0, #CTX_VTTBR_EL2]
msr vttbr_el2, x9
ldr x9, [x0, #CTX_ZCR_EL2]
msr ZCR_EL2, x9
ret
endfunc el2_sysregs_context_restore
#endif /* CTX_INCLUDE_EL2_REGS */
/* ------------------------------------------------------------------
* The following function strictly follows the AArch64 PCS to use
* x9-x17 (temporary caller-saved registers) to save EL1 system

View File

@ -530,6 +530,52 @@ void cm_prepare_el3_exit(uint32_t security_state)
cm_set_next_eret_context(security_state);
}
#if CTX_INCLUDE_EL2_REGS
/*******************************************************************************
 * Save the EL2 system register context for the given security state into its
 * 'cpu_context' structure.
 ******************************************************************************/
void cm_el2_sysregs_context_save(uint32_t security_state)
{
	u_register_t scr_el3 = read_scr();

	/*
	 * The non-secure EL2 context is always saved; the secure one is
	 * saved only when S-EL2 is enabled (SCR_EL3.EEL2 set).
	 */
	if ((security_state != NON_SECURE) &&
	    ((scr_el3 & SCR_EEL2_BIT) == 0U)) {
		return;
	}

	cpu_context_t *state = cm_get_context(security_state);

	assert(state != NULL);
	el2_sysregs_context_save(get_sysregs_ctx(state));
}
/*******************************************************************************
 * Restore the EL2 system register context for the given security state from
 * its 'cpu_context' structure.
 ******************************************************************************/
void cm_el2_sysregs_context_restore(uint32_t security_state)
{
	u_register_t scr_el3 = read_scr();

	/*
	 * The non-secure EL2 context is always restored; the secure one is
	 * restored only when S-EL2 is enabled (SCR_EL3.EEL2 set).
	 */
	if ((security_state != NON_SECURE) &&
	    ((scr_el3 & SCR_EEL2_BIT) == 0U)) {
		return;
	}

	cpu_context_t *state = cm_get_context(security_state);

	assert(state != NULL);
	el2_sysregs_context_restore(get_sysregs_ctx(state));
}
#endif /* CTX_INCLUDE_EL2_REGS */
/*******************************************************************************
* The next four functions are used by runtime services to save and restore
* EL1 context on the 'cpu_context' structure for the specified security

View File

@ -262,3 +262,8 @@ USE_SPINLOCK_CAS := 0
# Enable Link Time Optimization
ENABLE_LTO := 0
# Build flag to include EL2 registers in cpu context save and restore during
# S-EL2 firmware entry/exit. This flag is to be used with SPD=spmd option.
# Default is 0.
CTX_INCLUDE_EL2_REGS := 0

View File

@ -49,6 +49,7 @@ uint64_t spmd_spm_core_sync_entry(spmd_spm_core_context_t *spmc_ctx)
/* Restore the context assigned above */
cm_el1_sysregs_context_restore(SECURE);
cm_el2_sysregs_context_restore(SECURE);
cm_set_next_eret_context(SECURE);
/* Invalidate TLBs at EL1. */
@ -60,6 +61,7 @@ uint64_t spmd_spm_core_sync_entry(spmd_spm_core_context_t *spmc_ctx)
/* Save secure state */
cm_el1_sysregs_context_save(SECURE);
cm_el2_sysregs_context_save(SECURE);
return rc;
}
@ -321,9 +323,11 @@ uint64_t spmd_smc_handler(uint32_t smc_fid, uint64_t x1, uint64_t x2,
/* Save incoming security state */
cm_el1_sysregs_context_save(in_sstate);
cm_el2_sysregs_context_save(in_sstate);
/* Restore outgoing security state */
cm_el1_sysregs_context_restore(out_sstate);
cm_el2_sysregs_context_restore(out_sstate);
cm_set_next_eret_context(out_sstate);
SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,
@ -366,9 +370,11 @@ uint64_t spmd_smc_handler(uint32_t smc_fid, uint64_t x1, uint64_t x2,
if (in_sstate == NON_SECURE) {
/* Save incoming security state */
cm_el1_sysregs_context_save(in_sstate);
cm_el2_sysregs_context_save(in_sstate);
/* Restore outgoing security state */
cm_el1_sysregs_context_restore(out_sstate);
cm_el2_sysregs_context_restore(out_sstate);
cm_set_next_eret_context(out_sstate);
SMC_RET8(cm_get_context(out_sstate), smc_fid,
@ -432,9 +438,11 @@ uint64_t spmd_smc_handler(uint32_t smc_fid, uint64_t x1, uint64_t x2,
/* Save incoming security state */
cm_el1_sysregs_context_save(in_sstate);
cm_el2_sysregs_context_save(in_sstate);
/* Restore outgoing security state */
cm_el1_sysregs_context_restore(out_sstate);
cm_el2_sysregs_context_restore(out_sstate);
cm_set_next_eret_context(out_sstate);
SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,
@ -466,9 +474,11 @@ uint64_t spmd_smc_handler(uint32_t smc_fid, uint64_t x1, uint64_t x2,
/* Save incoming security state */
cm_el1_sysregs_context_save(in_sstate);
cm_el2_sysregs_context_save(in_sstate);
/* Restore outgoing security state */
cm_el1_sysregs_context_restore(out_sstate);
cm_el2_sysregs_context_restore(out_sstate);
cm_set_next_eret_context(out_sstate);
SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,