SPMD: Adds partially supported EL2 registers.

This patch adds EL2 registers that are supported up to ARMv8.6.
ARM_ARCH_MINOR has to be specified to enable the save/restore routines (see the sketch below the note).

Note: The following registers are still not covered by save/restore.
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
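
For reference, the ARM_ARCH_MINOR gating mentioned above is evaluated through
the ARM_ARCH_AT_LEAST() helper used in the new save/restore blocks. A minimal
sketch of that macro, assuming the usual TF-A definition in utils_def.h:

    /* ARM_ARCH_MAJOR/ARM_ARCH_MINOR come from the build system,
     * e.g. "make ... ARM_ARCH_MINOR=6 CTX_INCLUDE_EL2_REGS=1". */
    #define ARM_ARCH_AT_LEAST(_maj, _min) \
        ((ARM_ARCH_MAJOR > (_maj)) || \
         ((ARM_ARCH_MAJOR == (_maj)) && (ARM_ARCH_MINOR >= (_min))))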

Change-Id: I4813f3243e56e21cb297b31ef549a4b38d4876e1
Signed-off-by: Max Shvetsov <maksims.svecovs@arm.com>
Commit 2825946e92 (parent 28f39f02ad), authored by Max Shvetsov, 2020-02-17 16:15:47 +00:00
6 changed files with 455 additions and 412 deletions

@@ -107,12 +107,8 @@
#define HFGITR_EL2 S3_4_C1_C1_6
#define HFGRTR_EL2 S3_4_C1_C1_4
#define HFGWTR_EL2 S3_4_C1_C1_5
#define ICH_EISR_EL2 S3_4_C12_C11_3
#define ICH_ELRSR_EL2 S3_4_C12_C11_5
#define ICH_HCR_EL2 S3_4_C12_C11_0
#define ICH_MISR_EL2 S3_4_C12_C11_2
#define ICH_VMCR_EL2 S3_4_C12_C11_7
#define ICH_VTR_EL2 S3_4_C12_C11_1
#define MPAMVPM0_EL2 S3_4_C10_C5_0
#define MPAMVPM1_EL2 S3_4_C10_C5_1
#define MPAMVPM2_EL2 S3_4_C10_C5_2
@@ -122,6 +118,9 @@
#define MPAMVPM6_EL2 S3_4_C10_C5_6
#define MPAMVPM7_EL2 S3_4_C10_C5_7
#define MPAMVPMV_EL2 S3_4_C10_C4_1
#define TRFCR_EL2 S3_4_C1_C2_1
#define PMSCR_EL2 S3_4_C9_C9_0
#define TFSR_EL2 S3_4_C5_C6_0
/*******************************************************************************
* Generic timer memory mapped registers & offsets

@@ -68,7 +68,7 @@
* registers are only 32-bits wide but are stored as 64-bit values for
* convenience
******************************************************************************/
#define CTX_SYSREGS_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_EL1_SYSREGS_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1 U(0x0)
#define CTX_ELR_EL1 U(0x8)
#define CTX_SCTLR_EL1 U(0x10)
@@ -136,7 +136,12 @@
#endif /* CTX_INCLUDE_MTE_REGS */
/*
* S-EL2 register set
* End of system registers.
*/
#define CTX_EL1_SYSREGS_END CTX_MTE_REGS_END
/*
* EL2 register set
*/
#if CTX_INCLUDE_EL2_REGS
@@ -147,82 +152,104 @@
* AMEVCNTVOFF1<n>_EL2
* ICH_LR<n>_EL2
*/
#define CTX_ACTLR_EL2 (CTX_MTE_REGS_END + U(0x0))
#define CTX_AFSR0_EL2 (CTX_MTE_REGS_END + U(0x8))
#define CTX_AFSR1_EL2 (CTX_MTE_REGS_END + U(0x10))
#define CTX_AMAIR_EL2 (CTX_MTE_REGS_END + U(0x18))
#define CTX_CNTHCTL_EL2 (CTX_MTE_REGS_END + U(0x20))
#define CTX_CNTHP_CTL_EL2 (CTX_MTE_REGS_END + U(0x28))
#define CTX_CNTHP_CVAL_EL2 (CTX_MTE_REGS_END + U(0x30))
#define CTX_CNTHP_TVAL_EL2 (CTX_MTE_REGS_END + U(0x38))
#define CTX_CNTPOFF_EL2 (CTX_MTE_REGS_END + U(0x40))
#define CTX_CNTVOFF_EL2 (CTX_MTE_REGS_END + U(0x48))
#define CTX_CPTR_EL2 (CTX_MTE_REGS_END + U(0x50))
#define CTX_DBGVCR32_EL2 (CTX_MTE_REGS_END + U(0x58))
#define CTX_ELR_EL2 (CTX_MTE_REGS_END + U(0x60))
#define CTX_ESR_EL2 (CTX_MTE_REGS_END + U(0x68))
#define CTX_FAR_EL2 (CTX_MTE_REGS_END + U(0x70))
#define CTX_FPEXC32_EL2 (CTX_MTE_REGS_END + U(0x78))
#define CTX_HACR_EL2 (CTX_MTE_REGS_END + U(0x80))
#define CTX_HAFGRTR_EL2 (CTX_MTE_REGS_END + U(0x88))
#define CTX_HCR_EL2 (CTX_MTE_REGS_END + U(0x90))
#define CTX_HDFGRTR_EL2 (CTX_MTE_REGS_END + U(0x98))
#define CTX_HDFGWTR_EL2 (CTX_MTE_REGS_END + U(0xA0))
#define CTX_HFGITR_EL2 (CTX_MTE_REGS_END + U(0xA8))
#define CTX_HFGRTR_EL2 (CTX_MTE_REGS_END + U(0xB0))
#define CTX_HFGWTR_EL2 (CTX_MTE_REGS_END + U(0xB8))
#define CTX_HPFAR_EL2 (CTX_MTE_REGS_END + U(0xC0))
#define CTX_HSTR_EL2 (CTX_MTE_REGS_END + U(0xC8))
#define CTX_ICC_SRE_EL2 (CTX_MTE_REGS_END + U(0xD0))
#define CTX_ICH_EISR_EL2 (CTX_MTE_REGS_END + U(0xD8))
#define CTX_ICH_ELRSR_EL2 (CTX_MTE_REGS_END + U(0xE0))
#define CTX_ICH_HCR_EL2 (CTX_MTE_REGS_END + U(0xE8))
#define CTX_ICH_MISR_EL2 (CTX_MTE_REGS_END + U(0xF0))
#define CTX_ICH_VMCR_EL2 (CTX_MTE_REGS_END + U(0xF8))
#define CTX_ICH_VTR_EL2 (CTX_MTE_REGS_END + U(0x100))
#define CTX_MAIR_EL2 (CTX_MTE_REGS_END + U(0x108))
#define CTX_MDCR_EL2 (CTX_MTE_REGS_END + U(0x110))
#define CTX_MPAM2_EL2 (CTX_MTE_REGS_END + U(0x118))
#define CTX_MPAMHCR_EL2 (CTX_MTE_REGS_END + U(0x120))
#define CTX_MPAMVPM0_EL2 (CTX_MTE_REGS_END + U(0x128))
#define CTX_MPAMVPM1_EL2 (CTX_MTE_REGS_END + U(0x130))
#define CTX_MPAMVPM2_EL2 (CTX_MTE_REGS_END + U(0x138))
#define CTX_MPAMVPM3_EL2 (CTX_MTE_REGS_END + U(0x140))
#define CTX_MPAMVPM4_EL2 (CTX_MTE_REGS_END + U(0x148))
#define CTX_MPAMVPM5_EL2 (CTX_MTE_REGS_END + U(0x150))
#define CTX_MPAMVPM6_EL2 (CTX_MTE_REGS_END + U(0x158))
#define CTX_MPAMVPM7_EL2 (CTX_MTE_REGS_END + U(0x160))
#define CTX_MPAMVPMV_EL2 (CTX_MTE_REGS_END + U(0x168))
#define CTX_RMR_EL2 (CTX_MTE_REGS_END + U(0x170))
#define CTX_SCTLR_EL2 (CTX_MTE_REGS_END + U(0x178))
#define CTX_SPSR_EL2 (CTX_MTE_REGS_END + U(0x180))
#define CTX_SP_EL2 (CTX_MTE_REGS_END + U(0x188))
#define CTX_TCR_EL2 (CTX_MTE_REGS_END + U(0x190))
#define CTX_TPIDR_EL2 (CTX_MTE_REGS_END + U(0x198))
#define CTX_TTBR0_EL2 (CTX_MTE_REGS_END + U(0x1A0))
#define CTX_VBAR_EL2 (CTX_MTE_REGS_END + U(0x1A8))
#define CTX_VMPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B0))
#define CTX_VPIDR_EL2 (CTX_MTE_REGS_END + U(0x1B8))
#define CTX_VTCR_EL2 (CTX_MTE_REGS_END + U(0x1C0))
#define CTX_VTTBR_EL2 (CTX_MTE_REGS_END + U(0x1C8))
#define CTX_ZCR_EL2 (CTX_MTE_REGS_END + U(0x1B0))
#define CTX_EL2_SYSREGS_OFFSET (CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#define CTX_ACTLR_EL2 U(0x0)
#define CTX_AFSR0_EL2 U(0x8)
#define CTX_AFSR1_EL2 U(0x10)
#define CTX_AMAIR_EL2 U(0x18)
#define CTX_CNTHCTL_EL2 U(0x20)
#define CTX_CNTHP_CTL_EL2 U(0x28)
#define CTX_CNTHP_CVAL_EL2 U(0x30)
#define CTX_CNTHP_TVAL_EL2 U(0x38)
#define CTX_CNTVOFF_EL2 U(0x40)
#define CTX_CPTR_EL2 U(0x48)
#define CTX_DBGVCR32_EL2 U(0x50)
#define CTX_ELR_EL2 U(0x58)
#define CTX_ESR_EL2 U(0x60)
#define CTX_FAR_EL2 U(0x68)
#define CTX_FPEXC32_EL2 U(0x70)
#define CTX_HACR_EL2 U(0x78)
#define CTX_HCR_EL2 U(0x80)
#define CTX_HPFAR_EL2 U(0x88)
#define CTX_HSTR_EL2 U(0x90)
#define CTX_ICC_SRE_EL2 U(0x98)
#define CTX_ICH_HCR_EL2 U(0xa0)
#define CTX_ICH_VMCR_EL2 U(0xa8)
#define CTX_MAIR_EL2 U(0xb0)
#define CTX_MDCR_EL2 U(0xb8)
#define CTX_PMSCR_EL2 U(0xc0)
#define CTX_SCTLR_EL2 U(0xc8)
#define CTX_SPSR_EL2 U(0xd0)
#define CTX_SP_EL2 U(0xd8)
#define CTX_TCR_EL2 U(0xe0)
#define CTX_TRFCR_EL2 U(0xe8)
#define CTX_TTBR0_EL2 U(0xf0)
#define CTX_VBAR_EL2 U(0xf8)
#define CTX_VMPIDR_EL2 U(0x100)
#define CTX_VPIDR_EL2 U(0x108)
#define CTX_VTCR_EL2 U(0x110)
#define CTX_VTTBR_EL2 U(0x118)
// Only if MTE registers in use
#define CTX_TFSR_EL2 U(0x120)
// Only if ENABLE_MPAM_FOR_LOWER_ELS==1
#define CTX_MPAM2_EL2 U(0x128)
#define CTX_MPAMHCR_EL2 U(0x130)
#define CTX_MPAMVPM0_EL2 U(0x138)
#define CTX_MPAMVPM1_EL2 U(0x140)
#define CTX_MPAMVPM2_EL2 U(0x148)
#define CTX_MPAMVPM3_EL2 U(0x150)
#define CTX_MPAMVPM4_EL2 U(0x158)
#define CTX_MPAMVPM5_EL2 U(0x160)
#define CTX_MPAMVPM6_EL2 U(0x168)
#define CTX_MPAMVPM7_EL2 U(0x170)
#define CTX_MPAMVPMV_EL2 U(0x178)
// Starting with Armv8.6
#define CTX_HAFGRTR_EL2 U(0x180)
#define CTX_HDFGRTR_EL2 U(0x188)
#define CTX_HDFGWTR_EL2 U(0x190)
#define CTX_HFGITR_EL2 U(0x198)
#define CTX_HFGRTR_EL2 U(0x1a0)
#define CTX_HFGWTR_EL2 U(0x1a8)
#define CTX_CNTPOFF_EL2 U(0x1b0)
// Starting with Armv8.4
#define CTX_CNTHPS_CTL_EL2 U(0x1b8)
#define CTX_CNTHPS_CVAL_EL2 U(0x1c0)
#define CTX_CNTHPS_TVAL_EL2 U(0x1c8)
#define CTX_CNTHVS_CTL_EL2 U(0x1d0)
#define CTX_CNTHVS_CVAL_EL2 U(0x1d8)
#define CTX_CNTHVS_TVAL_EL2 U(0x1e0)
#define CTX_CNTHV_CTL_EL2 U(0x1e8)
#define CTX_CNTHV_CVAL_EL2 U(0x1f0)
#define CTX_CNTHV_TVAL_EL2 U(0x1f8)
#define CTX_CONTEXTIDR_EL2 U(0x200)
#define CTX_SDER32_EL2 U(0x208)
#define CTX_TTBR1_EL2 U(0x210)
#define CTX_VDISR_EL2 U(0x218)
#define CTX_VNCR_EL2 U(0x220)
#define CTX_VSESR_EL2 U(0x228)
#define CTX_VSTCR_EL2 U(0x230)
#define CTX_VSTTBR_EL2 U(0x238)
// Starting with Armv8.5
#define CTX_SCXTNUM_EL2 U(0x240)
/* Align to the next 16 byte boundary */
#define CTX_EL2_REGS_END (CTX_MTE_REGS_END + U(0x1C0))
#else
#define CTX_EL2_REGS_END CTX_MTE_REGS_END
#define CTX_EL2_SYSREGS_END U(0x250)
#endif /* CTX_INCLUDE_EL2_REGS */
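
A quick worked check of the alignment note above (illustration only, not part
of the patch): the last EL2 slot, CTX_SCXTNUM_EL2 at 0x240, is 8 bytes wide,
so the used size is 0x248; rounding that up to the next 16-byte boundary gives
the 0x250 used for CTX_EL2_SYSREGS_END.

/* Hypothetical names, shown only to make the rounding explicit. */
#define EXAMPLE_EL2_USED_SIZE	(U(0x240) + U(0x8))				/* 0x248 */
#define EXAMPLE_EL2_ALIGNED_END	((EXAMPLE_EL2_USED_SIZE + U(0xf)) & ~U(0xf))	/* 0x250 */
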
/*
* End of system registers.
*/
#define CTX_SYSREGS_END CTX_EL2_REGS_END
/*******************************************************************************
* Constants that allow assembler code to access members of and the 'fp_regs'
* structure at their correct offsets.
******************************************************************************/
#define CTX_FPREGS_OFFSET (CTX_SYSREGS_OFFSET + CTX_SYSREGS_END)
#if CTX_INCLUDE_EL2_REGS
# define CTX_FPREGS_OFFSET (CTX_EL2_SYSREGS_OFFSET + CTX_EL2_SYSREGS_END)
#else
# define CTX_FPREGS_OFFSET (CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#endif
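
Net effect of the chained offsets above (an illustration, not text from the
patch): when CTX_INCLUDE_EL2_REGS is set, the EL2 block is slotted in between
the EL1 system registers and the FP registers, and everything downstream
shifts accordingly.

/*
 * Illustrative cpu_context_t layout with CTX_INCLUDE_EL2_REGS=1:
 *
 *   gpregs_ctx        at CTX_GPREGS_OFFSET
 *   el3state_ctx      at CTX_EL3STATE_OFFSET
 *   el1_sysregs_ctx   at CTX_EL1_SYSREGS_OFFSET
 *   el2_sysregs_ctx   at CTX_EL2_SYSREGS_OFFSET   (0x250 bytes)
 *   fpregs_ctx        at CTX_FPREGS_OFFSET
 *   ...remaining members unchanged...
 */
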
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0 U(0x0)
#define CTX_FP_Q1 U(0x10)
@@ -313,7 +340,10 @@
/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL (CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_SYSREG_ALL (CTX_SYSREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL (CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
#if CTX_INCLUDE_EL2_REGS
# define CTX_EL2_SYSREGS_ALL (CTX_EL2_SYSREGS_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL (CTX_FPREGS_END >> DWORD_SHIFT)
#endif
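
For intuition, the *_ALL values are just the number of 64-bit slots in each
region, assuming DWORD_SHIFT is 3 (8-byte entries) as elsewhere in TF-A: the
new EL2 region contributes 0x250 >> 3 = 74 entries.

/* Illustration only. */
#define EXAMPLE_EL2_SYSREG_COUNT	(U(0x250) >> 3)		/* 74 slots */
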
@@ -333,10 +363,19 @@
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
/*
* AArch64 EL1/EL2 system register context structure for preserving the
* AArch64 EL1 system register context structure for preserving the
* architectural state during world switches.
*/
DEFINE_REG_STRUCT(sys_regs, CTX_SYSREG_ALL);
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);
/*
* AArch64 EL2 system register context structure for preserving the
* architectural state during world switches.
*/
#if CTX_INCLUDE_EL2_REGS
DEFINE_REG_STRUCT(el2_sysregs, CTX_EL2_SYSREGS_ALL);
#endif
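
For readers unfamiliar with DEFINE_REG_STRUCT: each declaration above expands
to a 16-byte-aligned array of 64-bit slots that the CTX_* offsets index into.
A sketch of the expansion, assuming the usual definition earlier in context.h:

#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	}  __aligned(16) name##_t
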
/*
* AArch64 floating point register context structure for preserving
@@ -381,7 +420,10 @@ DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
typedef struct cpu_context {
gp_regs_t gpregs_ctx;
el3_state_t el3state_ctx;
sys_regs_t sysregs_ctx;
el1_sysregs_t el1_sysregs_ctx;
#if CTX_INCLUDE_EL2_REGS
el2_sysregs_t el2_sysregs_ctx;
#endif
#if CTX_INCLUDE_FPREGS
fp_regs_t fpregs_ctx;
#endif
@@ -396,7 +438,10 @@ typedef struct cpu_context {
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h) (&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_sysregs_ctx(h) (&((cpu_context_t *) h)->sysregs_ctx)
#define get_el1_sysregs_ctx(h) (&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h) (&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h) (&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h) (&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
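
The read_ctx_reg()/write_ctx_reg() accessors used in the later hunks take one
of the context pointers returned by the get_*_ctx() macros above plus a CTX_*
byte offset. A minimal sketch of how they index the ctx_regs array, assuming
the usual context.h definitions:

#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (val))
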
@@ -410,8 +455,12 @@ typedef struct cpu_context {
*/
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx), \
assert_core_context_gp_offset_mismatch);
CASSERT(CTX_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, sysregs_ctx), \
assert_core_context_sys_offset_mismatch);
CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx), \
assert_core_context_el1_sys_offset_mismatch);
#if CTX_INCLUDE_EL2_REGS
CASSERT(CTX_EL2_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el2_sysregs_ctx), \
assert_core_context_el2_sys_offset_mismatch);
#endif
#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
assert_core_context_fp_offset_mismatch);
@@ -464,12 +513,12 @@ CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \
/*******************************************************************************
* Function prototypes
******************************************************************************/
void el1_sysregs_context_save(sys_regs_t *regs);
void el1_sysregs_context_restore(sys_regs_t *regs);
void el1_sysregs_context_save(el1_sysregs_t *regs);
void el1_sysregs_context_restore(el1_sysregs_t *regs);
#if CTX_INCLUDE_EL2_REGS
void el2_sysregs_context_save(sys_regs_t *regs);
void el2_sysregs_context_restore(sys_regs_t *regs);
void el2_sysregs_context_save(el2_sysregs_t *regs);
void el2_sysregs_context_restore(el2_sysregs_t *regs);
#endif
#if CTX_INCLUDE_FPREGS

@@ -29,189 +29,187 @@
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
* to save EL1 system register context. It assumes that
* 'x0' is pointing to a 'el1_sys_regs' structure where
* to save EL2 system register context. It assumes that
* 'x0' is pointing to a 'el2_sys_regs' structure where
* the register context will be saved.
*
* The following registers are not added.
* AMEVCNTVOFF0<n>_EL2
* AMEVCNTVOFF1<n>_EL2
* ICH_AP0R<n>_EL2
* ICH_AP1R<n>_EL2
* ICH_LR<n>_EL2
* -----------------------------------------------------
*/
func el2_sysregs_context_save
mrs x9, actlr_el2
str x9, [x0, #CTX_ACTLR_EL2]
mrs x10, afsr0_el2
stp x9, x10, [x0, #CTX_ACTLR_EL2]
mrs x9, afsr0_el2
str x9, [x0, #CTX_AFSR0_EL2]
mrs x11, afsr1_el2
mrs x12, amair_el2
stp x11, x12, [x0, #CTX_AFSR1_EL2]
mrs x9, afsr1_el2
str x9, [x0, #CTX_AFSR1_EL2]
mrs x13, cnthctl_el2
mrs x14, cnthp_ctl_el2
stp x13, x14, [x0, #CTX_CNTHCTL_EL2]
mrs x9, amair_el2
str x9, [x0, #CTX_AMAIR_EL2]
mrs x9, cnthctl_el2
str x9, [x0, #CTX_CNTHCTL_EL2]
mrs x9, cnthp_ctl_el2
str x9, [x0, #CTX_CNTHP_CTL_EL2]
mrs x9, cnthp_cval_el2
str x9, [x0, #CTX_CNTHP_CVAL_EL2]
mrs x9, cnthp_tval_el2
str x9, [x0, #CTX_CNTHP_TVAL_EL2]
mrs x9, CNTPOFF_EL2
str x9, [x0, #CTX_CNTPOFF_EL2]
mrs x9, cntvoff_el2
str x9, [x0, #CTX_CNTVOFF_EL2]
mrs x15, cnthp_cval_el2
mrs x16, cnthp_tval_el2
stp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
mrs x17, cntvoff_el2
mrs x9, cptr_el2
str x9, [x0, #CTX_CPTR_EL2]
stp x17, x9, [x0, #CTX_CNTVOFF_EL2]
mrs x9, dbgvcr32_el2
str x9, [x0, #CTX_DBGVCR32_EL2]
mrs x10, dbgvcr32_el2
mrs x11, elr_el2
stp x10, x11, [x0, #CTX_DBGVCR32_EL2]
mrs x9, elr_el2
str x9, [x0, #CTX_ELR_EL2]
mrs x14, esr_el2
mrs x15, far_el2
stp x14, x15, [x0, #CTX_ESR_EL2]
mrs x9, esr_el2
str x9, [x0, #CTX_ESR_EL2]
mrs x9, far_el2
str x9, [x0, #CTX_FAR_EL2]
mrs x9, fpexc32_el2
str x9, [x0, #CTX_FPEXC32_EL2]
mrs x9, hacr_el2
str x9, [x0, #CTX_HACR_EL2]
mrs x9, HAFGRTR_EL2
str x9, [x0, #CTX_HAFGRTR_EL2]
mrs x16, fpexc32_el2
mrs x17, hacr_el2
stp x16, x17, [x0, #CTX_FPEXC32_EL2]
mrs x9, hcr_el2
str x9, [x0, #CTX_HCR_EL2]
mrs x10, hpfar_el2
stp x9, x10, [x0, #CTX_HCR_EL2]
mrs x9, HDFGRTR_EL2
str x9, [x0, #CTX_HDFGRTR_EL2]
mrs x11, hstr_el2
mrs x12, ICC_SRE_EL2
stp x11, x12, [x0, #CTX_HSTR_EL2]
mrs x9, HDFGWTR_EL2
str x9, [x0, #CTX_HDFGWTR_EL2]
mrs x13, ICH_HCR_EL2
mrs x14, ICH_VMCR_EL2
stp x13, x14, [x0, #CTX_ICH_HCR_EL2]
mrs x9, HFGITR_EL2
str x9, [x0, #CTX_HFGITR_EL2]
mrs x9, HFGRTR_EL2
str x9, [x0, #CTX_HFGRTR_EL2]
mrs x9, HFGWTR_EL2
str x9, [x0, #CTX_HFGWTR_EL2]
mrs x9, hpfar_el2
str x9, [x0, #CTX_HPFAR_EL2]
mrs x9, hstr_el2
str x9, [x0, #CTX_HSTR_EL2]
mrs x9, ICC_SRE_EL2
str x9, [x0, #CTX_ICC_SRE_EL2]
mrs x9, ICH_EISR_EL2
str x9, [x0, #CTX_ICH_EISR_EL2]
mrs x9, ICH_ELRSR_EL2
str x9, [x0, #CTX_ICH_ELRSR_EL2]
mrs x9, ICH_HCR_EL2
str x9, [x0, #CTX_ICH_HCR_EL2]
mrs x9, ICH_MISR_EL2
str x9, [x0, #CTX_ICH_MISR_EL2]
mrs x9, ICH_VMCR_EL2
str x9, [x0, #CTX_ICH_VMCR_EL2]
mrs x9, ICH_VTR_EL2
str x9, [x0, #CTX_ICH_VTR_EL2]
mrs x9, mair_el2
str x9, [x0, #CTX_MAIR_EL2]
mrs x9, mdcr_el2
str x9, [x0, #CTX_MDCR_EL2]
mrs x9, MPAM2_EL2
str x9, [x0, #CTX_MPAM2_EL2]
mrs x9, MPAMHCR_EL2
str x9, [x0, #CTX_MPAMHCR_EL2]
mrs x9, MPAMVPM0_EL2
str x9, [x0, #CTX_MPAMVPM0_EL2]
mrs x9, MPAMVPM1_EL2
str x9, [x0, #CTX_MPAMVPM1_EL2]
mrs x9, MPAMVPM2_EL2
str x9, [x0, #CTX_MPAMVPM2_EL2]
mrs x9, MPAMVPM3_EL2
str x9, [x0, #CTX_MPAMVPM3_EL2]
mrs x9, MPAMVPM4_EL2
str x9, [x0, #CTX_MPAMVPM4_EL2]
mrs x9, MPAMVPM5_EL2
str x9, [x0, #CTX_MPAMVPM5_EL2]
mrs x9, MPAMVPM6_EL2
str x9, [x0, #CTX_MPAMVPM6_EL2]
mrs x9, MPAMVPM7_EL2
str x9, [x0, #CTX_MPAMVPM7_EL2]
mrs x9, MPAMVPMV_EL2
str x9, [x0, #CTX_MPAMVPMV_EL2]
mrs x9, rmr_el2
str x9, [x0, #CTX_RMR_EL2]
mrs x15, mair_el2
mrs x16, mdcr_el2
stp x15, x16, [x0, #CTX_MAIR_EL2]
mrs x17, PMSCR_EL2
mrs x9, sctlr_el2
str x9, [x0, #CTX_SCTLR_EL2]
stp x17, x9, [x0, #CTX_PMSCR_EL2]
mrs x9, spsr_el2
str x9, [x0, #CTX_SPSR_EL2]
mrs x10, spsr_el2
mrs x11, sp_el2
stp x10, x11, [x0, #CTX_SPSR_EL2]
mrs x9, sp_el2
str x9, [x0, #CTX_SP_EL2]
mrs x12, tcr_el2
mrs x13, TRFCR_EL2
stp x12, x13, [x0, #CTX_TCR_EL2]
mrs x9, tcr_el2
str x9, [x0, #CTX_TCR_EL2]
mrs x14, ttbr0_el2
mrs x15, vbar_el2
stp x14, x15, [x0, #CTX_TTBR0_EL2]
mrs x9, tpidr_el2
str x9, [x0, #CTX_TPIDR_EL2]
mrs x9, ttbr0_el2
str x9, [x0, #CTX_TTBR0_EL2]
mrs x9, vbar_el2
str x9, [x0, #CTX_VBAR_EL2]
mrs x9, vmpidr_el2
str x9, [x0, #CTX_VMPIDR_EL2]
mrs x9, vpidr_el2
str x9, [x0, #CTX_VPIDR_EL2]
mrs x16, vmpidr_el2
mrs x17, vpidr_el2
stp x16, x17, [x0, #CTX_VMPIDR_EL2]
mrs x9, vtcr_el2
str x9, [x0, #CTX_VTCR_EL2]
mrs x10, vttbr_el2
stp x9, x10, [x0, #CTX_VTCR_EL2]
mrs x9, vttbr_el2
str x9, [x0, #CTX_VTTBR_EL2]
#if CTX_INCLUDE_MTE_REGS
mrs x11, TFSR_EL2
str x11, [x0, #CTX_TFSR_EL2]
#endif
mrs x9, ZCR_EL2
str x9, [x0, #CTX_ZCR_EL2]
#if ENABLE_MPAM_FOR_LOWER_ELS
mrs x9, MPAM2_EL2
mrs x10, MPAMHCR_EL2
stp x9, x10, [x0, #CTX_MPAM2_EL2]
mrs x11, MPAMVPM0_EL2
mrs x12, MPAMVPM1_EL2
stp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
mrs x13, MPAMVPM2_EL2
mrs x14, MPAMVPM3_EL2
stp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
mrs x15, MPAMVPM4_EL2
mrs x16, MPAMVPM5_EL2
stp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
mrs x17, MPAMVPM6_EL2
mrs x9, MPAMVPM7_EL2
stp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
mrs x10, MPAMVPMV_EL2
str x10, [x0, #CTX_MPAMVPMV_EL2]
#endif
#if ARM_ARCH_AT_LEAST(8, 6)
mrs x11, HAFGRTR_EL2
mrs x12, HDFGRTR_EL2
stp x11, x12, [x0, #CTX_HAFGRTR_EL2]
mrs x13, HDFGWTR_EL2
mrs x14, HFGITR_EL2
stp x13, x14, [x0, #CTX_HDFGWTR_EL2]
mrs x15, HFGRTR_EL2
mrs x16, HFGWTR_EL2
stp x15, x16, [x0, #CTX_HFGRTR_EL2]
mrs x17, CNTPOFF_EL2
str x17, [x0, #CTX_CNTPOFF_EL2]
#endif
#if ARM_ARCH_AT_LEAST(8, 4)
mrs x9, cnthps_ctl_el2
mrs x10, cnthps_cval_el2
stp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
mrs x11, cnthps_tval_el2
mrs x12, cnthvs_ctl_el2
stp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
mrs x13, cnthvs_cval_el2
mrs x14, cnthvs_tval_el2
stp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
mrs x15, cnthv_ctl_el2
mrs x16, cnthv_cval_el2
stp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
mrs x17, cnthv_tval_el2
mrs x9, contextidr_el2
stp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
mrs x10, sder32_el2
str x10, [x0, #CTX_SDER32_EL2]
mrs x11, ttbr1_el2
str x11, [x0, #CTX_TTBR1_EL2]
mrs x12, vdisr_el2
str x12, [x0, #CTX_VDISR_EL2]
mrs x13, vncr_el2
str x13, [x0, #CTX_VNCR_EL2]
mrs x14, vsesr_el2
str x14, [x0, #CTX_VSESR_EL2]
mrs x15, vstcr_el2
str x15, [x0, #CTX_VSTCR_EL2]
mrs x16, vsttbr_el2
str x16, [x0, #CTX_VSTTBR_EL2]
#endif
#if ARM_ARCH_AT_LEAST(8, 5)
mrs x17, scxtnum_el2
str x17, [x0, #CTX_SCXTNUM_EL2]
#endif
ret
endfunc el2_sysregs_context_save
@@ -219,189 +217,186 @@ endfunc el2_sysregs_context_save
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
* to restore EL1 system register context. It assumes
* that 'x0' is pointing to a 'el1_sys_regs' structure
* to restore EL2 system register context. It assumes
* that 'x0' is pointing to a 'el2_sys_regs' structure
* from where the register context will be restored
* The following registers are not restored
* AMEVCNTVOFF0<n>_EL2
* AMEVCNTVOFF1<n>_EL2
* ICH_AP0R<n>_EL2
* ICH_AP1R<n>_EL2
* ICH_LR<n>_EL2
* -----------------------------------------------------
*/
func el2_sysregs_context_restore
ldr x9, [x0, #CTX_ACTLR_EL2]
ldp x9, x10, [x0, #CTX_ACTLR_EL2]
msr actlr_el2, x9
msr afsr0_el2, x10
ldr x9, [x0, #CTX_AFSR0_EL2]
msr afsr0_el2, x9
ldp x11, x12, [x0, #CTX_AFSR1_EL2]
msr afsr1_el2, x11
msr amair_el2, x12
ldr x9, [x0, #CTX_AFSR1_EL2]
msr afsr1_el2, x9
ldp x13, x14, [x0, #CTX_CNTHCTL_EL2]
msr cnthctl_el2, x13
msr cnthp_ctl_el2, x14
ldr x9, [x0, #CTX_AMAIR_EL2]
msr amair_el2, x9
ldp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
msr cnthp_cval_el2, x15
msr cnthp_tval_el2, x16
ldr x9, [x0, #CTX_CNTHCTL_EL2]
msr cnthctl_el2, x9
ldr x9, [x0, #CTX_CNTHP_CTL_EL2]
msr cnthp_ctl_el2, x9
ldr x9, [x0, #CTX_CNTHP_CVAL_EL2]
msr cnthp_cval_el2, x9
ldr x9, [x0, #CTX_CNTHP_TVAL_EL2]
msr cnthp_tval_el2, x9
ldr x9, [x0, #CTX_CNTPOFF_EL2]
msr CNTPOFF_EL2, x9
ldr x9, [x0, #CTX_CNTVOFF_EL2]
msr cntvoff_el2, x9
ldr x9, [x0, #CTX_CPTR_EL2]
ldp x17, x9, [x0, #CTX_CNTVOFF_EL2]
msr cntvoff_el2, x17
msr cptr_el2, x9
ldr x9, [x0, #CTX_DBGVCR32_EL2]
msr dbgvcr32_el2, x9
ldp x10, x11, [x0, #CTX_DBGVCR32_EL2]
msr dbgvcr32_el2, x10
msr elr_el2, x11
ldr x9, [x0, #CTX_ELR_EL2]
msr elr_el2, x9
ldp x14, x15, [x0, #CTX_ESR_EL2]
msr esr_el2, x14
msr far_el2, x15
ldr x9, [x0, #CTX_ESR_EL2]
msr esr_el2, x9
ldp x16, x17, [x0, #CTX_FPEXC32_EL2]
msr fpexc32_el2, x16
msr hacr_el2, x17
ldr x9, [x0, #CTX_FAR_EL2]
msr far_el2, x9
ldr x9, [x0, #CTX_FPEXC32_EL2]
msr fpexc32_el2, x9
ldr x9, [x0, #CTX_HACR_EL2]
msr hacr_el2, x9
ldr x9, [x0, #CTX_HAFGRTR_EL2]
msr HAFGRTR_EL2, x9
ldr x9, [x0, #CTX_HCR_EL2]
ldp x9, x10, [x0, #CTX_HCR_EL2]
msr hcr_el2, x9
msr hpfar_el2, x10
ldr x9, [x0, #CTX_HDFGRTR_EL2]
msr HDFGRTR_EL2, x9
ldp x11, x12, [x0, #CTX_HSTR_EL2]
msr hstr_el2, x11
msr ICC_SRE_EL2, x12
ldr x9, [x0, #CTX_HDFGWTR_EL2]
msr HDFGWTR_EL2, x9
ldp x13, x14, [x0, #CTX_ICH_HCR_EL2]
msr ICH_HCR_EL2, x13
msr ICH_VMCR_EL2, x14
ldr x9, [x0, #CTX_HFGITR_EL2]
msr HFGITR_EL2, x9
ldp x15, x16, [x0, #CTX_MAIR_EL2]
msr mair_el2, x15
msr mdcr_el2, x16
ldr x9, [x0, #CTX_HFGRTR_EL2]
msr HFGRTR_EL2, x9
ldr x9, [x0, #CTX_HFGWTR_EL2]
msr HFGWTR_EL2, x9
ldr x9, [x0, #CTX_HPFAR_EL2]
msr hpfar_el2, x9
ldr x9, [x0, #CTX_HSTR_EL2]
msr hstr_el2, x9
ldr x9, [x0, #CTX_ICC_SRE_EL2]
msr ICC_SRE_EL2, x9
ldr x9, [x0, #CTX_ICH_EISR_EL2]
msr ICH_EISR_EL2, x9
ldr x9, [x0, #CTX_ICH_ELRSR_EL2]
msr ICH_ELRSR_EL2, x9
ldr x9, [x0, #CTX_ICH_HCR_EL2]
msr ICH_HCR_EL2, x9
ldr x9, [x0, #CTX_ICH_MISR_EL2]
msr ICH_MISR_EL2, x9
ldr x9, [x0, #CTX_ICH_VMCR_EL2]
msr ICH_VMCR_EL2, x9
ldr x9, [x0, #CTX_ICH_VTR_EL2]
msr ICH_VTR_EL2, x9
ldr x9, [x0, #CTX_MAIR_EL2]
msr mair_el2, x9
ldr x9, [x0, #CTX_MDCR_EL2]
msr mdcr_el2, x9
ldr x9, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x9
ldr x9, [x0, #CTX_MPAMHCR_EL2]
msr MPAMHCR_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM0_EL2]
msr MPAMVPM0_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM1_EL2]
msr MPAMVPM1_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM2_EL2]
msr MPAMVPM2_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM3_EL2]
msr MPAMVPM3_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM4_EL2]
msr MPAMVPM4_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM5_EL2]
msr MPAMVPM5_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM6_EL2]
msr MPAMVPM6_EL2, x9
ldr x9, [x0, #CTX_MPAMVPM7_EL2]
msr MPAMVPM7_EL2, x9
ldr x9, [x0, #CTX_MPAMVPMV_EL2]
msr MPAMVPMV_EL2, x9
ldr x9, [x0, #CTX_RMR_EL2]
msr rmr_el2, x9
ldr x9, [x0, #CTX_SCTLR_EL2]
ldp x17, x9, [x0, #CTX_PMSCR_EL2]
msr PMSCR_EL2, x17
msr sctlr_el2, x9
ldr x9, [x0, #CTX_SPSR_EL2]
msr spsr_el2, x9
ldp x10, x11, [x0, #CTX_SPSR_EL2]
msr spsr_el2, x10
msr sp_el2, x11
ldr x9, [x0, #CTX_SP_EL2]
msr sp_el2, x9
ldp x12, x13, [x0, #CTX_TCR_EL2]
msr tcr_el2, x12
msr TRFCR_EL2, x13
ldr x9, [x0, #CTX_TCR_EL2]
msr tcr_el2, x9
ldp x14, x15, [x0, #CTX_TTBR0_EL2]
msr ttbr0_el2, x14
msr vbar_el2, x15
ldr x9, [x0, #CTX_TPIDR_EL2]
msr tpidr_el2, x9
ldp x16, x17, [x0, #CTX_VMPIDR_EL2]
msr vmpidr_el2, x16
msr vpidr_el2, x17
ldr x9, [x0, #CTX_TTBR0_EL2]
msr ttbr0_el2, x9
ldr x9, [x0, #CTX_VBAR_EL2]
msr vbar_el2, x9
ldr x9, [x0, #CTX_VMPIDR_EL2]
msr vmpidr_el2, x9
ldr x9, [x0, #CTX_VPIDR_EL2]
msr vpidr_el2, x9
ldr x9, [x0, #CTX_VTCR_EL2]
ldp x9, x10, [x0, #CTX_VTCR_EL2]
msr vtcr_el2, x9
msr vttbr_el2, x10
ldr x9, [x0, #CTX_VTTBR_EL2]
msr vttbr_el2, x9
#if CTX_INCLUDE_MTE_REGS
ldr x11, [x0, #CTX_TFSR_EL2]
msr TFSR_EL2, x11
#endif
ldr x9, [x0, #CTX_ZCR_EL2]
msr ZCR_EL2, x9
#if ENABLE_MPAM_FOR_LOWER_ELS
ldp x9, x10, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x9
msr MPAMHCR_EL2, x10
ldp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
msr MPAMVPM0_EL2, x11
msr MPAMVPM1_EL2, x12
ldp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
msr MPAMVPM2_EL2, x13
msr MPAMVPM3_EL2, x14
ldp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
msr MPAMVPM4_EL2, x15
msr MPAMVPM5_EL2, x16
ldp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
msr MPAMVPM6_EL2, x17
msr MPAMVPM7_EL2, x9
ldr x10, [x0, #CTX_MPAMVPMV_EL2]
msr MPAMVPMV_EL2, x10
#endif
#if ARM_ARCH_AT_LEAST(8, 6)
ldp x11, x12, [x0, #CTX_HAFGRTR_EL2]
msr HAFGRTR_EL2, x11
msr HDFGRTR_EL2, x12
ldp x13, x14, [x0, #CTX_HDFGWTR_EL2]
msr HDFGWTR_EL2, x13
msr HFGITR_EL2, x14
ldp x15, x16, [x0, #CTX_HFGRTR_EL2]
msr HFGRTR_EL2, x15
msr HFGWTR_EL2, x16
ldr x17, [x0, #CTX_CNTPOFF_EL2]
msr CNTPOFF_EL2, x17
#endif
#if ARM_ARCH_AT_LEAST(8, 4)
ldp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
msr cnthps_ctl_el2, x9
msr cnthps_cval_el2, x10
ldp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
msr cnthps_tval_el2, x11
msr cnthvs_ctl_el2, x12
ldp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
msr cnthvs_cval_el2, x13
msr cnthvs_tval_el2, x14
ldp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
msr cnthv_ctl_el2, x15
msr cnthv_cval_el2, x16
ldp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
msr cnthv_tval_el2, x17
msr contextidr_el2, x9
ldr x10, [x0, #CTX_SDER32_EL2]
msr sder32_el2, x10
ldr x11, [x0, #CTX_TTBR1_EL2]
msr ttbr1_el2, x11
ldr x12, [x0, #CTX_VDISR_EL2]
msr vdisr_el2, x12
ldr x13, [x0, #CTX_VNCR_EL2]
msr vncr_el2, x13
ldr x14, [x0, #CTX_VSESR_EL2]
msr vsesr_el2, x14
ldr x15, [x0, #CTX_VSTCR_EL2]
msr vstcr_el2, x15
ldr x16, [x0, #CTX_VSTTBR_EL2]
msr vsttbr_el2, x16
#endif
#if ARM_ARCH_AT_LEAST(8, 5)
ldr x17, [x0, #CTX_SCXTNUM_EL2]
msr scxtnum_el2, x17
#endif
ret
endfunc el2_sysregs_context_restore

@@ -234,7 +234,7 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
* and other EL2 registers are set up by cm_prepare_ns_entry() as they
* are not part of the stored cpu_context.
*/
write_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_elx);
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_elx);
/*
* Base the context ACTLR_EL1 on the current value, as it is
@@ -244,7 +244,7 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
* be zero.
*/
actlr_elx = read_actlr_el1();
write_ctx_reg((get_sysregs_ctx(ctx)), (CTX_ACTLR_EL1), (actlr_elx));
write_ctx_reg((get_el1_sysregs_ctx(ctx)), (CTX_ACTLR_EL1), (actlr_elx));
/*
* Populate EL3 state so that we've the right context
@@ -336,7 +336,7 @@ void cm_prepare_el3_exit(uint32_t security_state)
CTX_SCR_EL3);
if ((scr_el3 & SCR_HCE_BIT) != 0U) {
/* Use SCTLR_EL1.EE value to initialise sctlr_el2 */
sctlr_elx = read_ctx_reg(get_sysregs_ctx(ctx),
sctlr_elx = read_ctx_reg(get_el1_sysregs_ctx(ctx),
CTX_SCTLR_EL1);
sctlr_elx &= SCTLR_EE_BIT;
sctlr_elx |= SCTLR_EL2_RES1;
@@ -549,7 +549,7 @@ void cm_el2_sysregs_context_save(uint32_t security_state)
ctx = cm_get_context(security_state);
assert(ctx != NULL);
el2_sysregs_context_save(get_sysregs_ctx(ctx));
el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
}
}
@@ -571,7 +571,7 @@ void cm_el2_sysregs_context_restore(uint32_t security_state)
ctx = cm_get_context(security_state);
assert(ctx != NULL);
el2_sysregs_context_restore(get_sysregs_ctx(ctx));
el2_sysregs_context_restore(get_el2_sysregs_ctx(ctx));
}
}
#endif /* CTX_INCLUDE_EL2_REGS */
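
Taken together, the functions above give the SPMD everything it needs to swap
the full EL2 state on a world switch. A hypothetical call sequence (the real
flow lives in the SPMD dispatcher; names other than the cm_* functions are
made up for illustration):

/* Sketch: save the outgoing world's EL1/EL2 state, restore the incoming one. */
static void example_world_switch(uint32_t from, uint32_t to)
{
	cm_el1_sysregs_context_save(from);
#if CTX_INCLUDE_EL2_REGS
	cm_el2_sysregs_context_save(from);
	cm_el2_sysregs_context_restore(to);
#endif
	cm_el1_sysregs_context_restore(to);
	cm_set_next_eret_context(to);
}
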
@@ -588,7 +588,7 @@ void cm_el1_sysregs_context_save(uint32_t security_state)
ctx = cm_get_context(security_state);
assert(ctx != NULL);
el1_sysregs_context_save(get_sysregs_ctx(ctx));
el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
#if IMAGE_BL31
if (security_state == SECURE)
@@ -605,7 +605,7 @@ void cm_el1_sysregs_context_restore(uint32_t security_state)
ctx = cm_get_context(security_state);
assert(ctx != NULL);
el1_sysregs_context_restore(get_sysregs_ctx(ctx));
el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx));
#if IMAGE_BL31
if (security_state == SECURE)

@@ -150,9 +150,9 @@ static uint64_t trusty_fiq_handler(uint32_t id,
(void)memcpy(&ctx->fiq_gpregs, get_gpregs_ctx(handle), sizeof(ctx->fiq_gpregs));
ctx->fiq_pc = SMC_GET_EL3(handle, CTX_ELR_EL3);
ctx->fiq_cpsr = SMC_GET_EL3(handle, CTX_SPSR_EL3);
ctx->fiq_sp_el1 = read_ctx_reg(get_sysregs_ctx(handle), CTX_SP_EL1);
ctx->fiq_sp_el1 = read_ctx_reg(get_el1_sysregs_ctx(handle), CTX_SP_EL1);
write_ctx_reg(get_sysregs_ctx(handle), CTX_SP_EL1, ctx->fiq_handler_sp);
write_ctx_reg(get_el1_sysregs_ctx(handle), CTX_SP_EL1, ctx->fiq_handler_sp);
cm_set_elr_spsr_el3(NON_SECURE, ctx->fiq_handler_pc, (uint32_t)ctx->fiq_handler_cpsr);
SMC_RET0(handle);
@@ -211,7 +211,7 @@ static uint64_t trusty_fiq_exit(void *handle, uint64_t x1, uint64_t x2, uint64_t
*/
(void)memcpy(get_gpregs_ctx(handle), &ctx->fiq_gpregs, sizeof(ctx->fiq_gpregs));
ctx->fiq_handler_active = 0;
write_ctx_reg(get_sysregs_ctx(handle), CTX_SP_EL1, ctx->fiq_sp_el1);
write_ctx_reg(get_el1_sysregs_ctx(handle), CTX_SP_EL1, ctx->fiq_sp_el1);
cm_set_elr_spsr_el3(NON_SECURE, ctx->fiq_pc, (uint32_t)ctx->fiq_cpsr);
SMC_RET0(handle);

@@ -116,17 +116,17 @@ void spm_sp_setup(sp_context_t *sp_ctx)
xlat_ctx->pa_max_address, xlat_ctx->va_max_address,
EL1_EL0_REGIME);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_MAIR_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_MAIR_EL1,
mmu_cfg_params[MMU_CFG_MAIR]);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TCR_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TCR_EL1,
mmu_cfg_params[MMU_CFG_TCR]);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR0_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TTBR0_EL1,
mmu_cfg_params[MMU_CFG_TTBR0]);
/* Setup SCTLR_EL1 */
u_register_t sctlr_el1 = read_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1);
u_register_t sctlr_el1 = read_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1);
sctlr_el1 |=
/*SCTLR_EL1_RES1 |*/
@@ -160,7 +160,7 @@ void spm_sp_setup(sp_context_t *sp_ctx)
SCTLR_UMA_BIT
);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_el1);
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_el1);
/*
* Setup other system registers
@@ -168,10 +168,10 @@ void spm_sp_setup(sp_context_t *sp_ctx)
*/
/* Shim Exception Vector Base Address */
write_ctx_reg(get_sysregs_ctx(ctx), CTX_VBAR_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_VBAR_EL1,
SPM_SHIM_EXCEPTIONS_PTR);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_CNTKCTL_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CNTKCTL_EL1,
EL0PTEN_BIT | EL0VTEN_BIT | EL0PCTEN_BIT | EL0VCTEN_BIT);
/*
@@ -181,7 +181,7 @@ void spm_sp_setup(sp_context_t *sp_ctx)
* TTA: Enable access to trace registers.
* ZEN (v8.2): Trap SVE instructions and access to SVE registers.
*/
write_ctx_reg(get_sysregs_ctx(ctx), CTX_CPACR_EL1,
write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CPACR_EL1,
CPACR_EL1_FPEN(CPACR_EL1_FP_TRAP_NONE));
/*