Fix execute-never permissions in xlat tables libs

Translation regimes that support only one virtual address space (such
as the EL2 and EL3 regimes) can mark memory regions as execute-never by
setting the XN bit to 1 in the Upper Attributes field of the
translation table descriptors. Translation regimes that support two
virtual address spaces (such as the one shared by EL1 and EL0) use the
PXN and UXN bits instead.
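
As a reference, here is a minimal sketch of where these bits land in a
block/page descriptor, based on the UPPER_ATTRS() macro the library
already uses (illustrative only; it restates the definitions added in
the diff below rather than introducing new ones):

/* Upper Attributes occupy bits [63:52] of a block/page descriptor. */
#define UPPER_ATTRS(x)	(((x) & 0x7ULL) << 52)
#define XN		(1ULL << 2)	/* One VA range (EL2/EL3): bit 54 */
#define UXN		(1ULL << 2)	/* Two VA ranges (EL1&0):  bit 54 */
#define PXN		(1ULL << 1)	/* Two VA ranges (EL1&0):  bit 53 */

Because XN and UXN share descriptor bit 54, writing UPPER_ATTRS(XN)
into an EL1&0 descriptor restricts EL0 rather than EL1.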

The Trusted Firmware runs at both EL3 and EL1, so it has to handle
translation tables of both translation regimes. However, the previous
code handled both regimes the same way, as if each had only one VA
range.

When trying to mark a descriptor as execute-never, the code set the XN
bit correctly at EL3, but it used the same XN mask at EL1 as well. XN
occupies the same bit position as UXN, so it was EL0 that was being
prevented from executing code in this region, not EL1 as the code
intended, and the PXN bit was always left at 0. The result is that, in
AArch64 mode, the read-only data sections of BL2 weren't protected from
being executed.

This patch adds support for translation regimes with two virtual
address spaces to both versions of the translation tables library,
fixing the execute-never permissions for translation tables at EL1.
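
In outline, each library now asks the architecture code for a
per-regime execute-never mask once and ORs that mask into descriptors
instead of hard-coding UPPER_ATTRS(XN). A condensed sketch of the
approach taken in the diff below (no interfaces beyond those shown
there):

uint64_t xlat_arch_get_xn_desc(int el)
{
	if (el == 3)
		return UPPER_ATTRS(XN);		/* One VA range: XN */
	assert(el == 1);
	return UPPER_ATTRS(PXN);		/* Two VA ranges: PXN */
}

/* At table initialization time: */
execute_never_mask = xlat_arch_get_xn_desc(xlat_arch_current_el());

/* When building a descriptor for a non-executable region: */
desc |= execute_never_mask;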

The library currently does not support initializing translation tables
for EL0 software, so it neither sets nor clears the UXN bit. If EL1
software needs to initialize translation tables for EL0 software, it
should use a different library instead.
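
Purely as a hypothetical illustration (not part of this patch): if EL0
support were ever added, the EL1 path would also have to decide whether
to set UXN, along these lines, where the EL0 execute-never attribute is
an assumed flag that does not exist in the library today:

/* Hypothetical only: also control execution at EL0. */
uint64_t mask = UPPER_ATTRS(PXN);	/* never executable at EL1 */
if (el0_execute_never)			/* hypothetical attribute */
	mask |= UPPER_ATTRS(UXN);	/* also block execution at EL0 */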

Change-Id: If27588f9820ff42988851d90dc92801c8ecbe0c9
Signed-off-by: Antonio Nino Diaz <antonio.ninodiaz@arm.com>
Antonio Nino Diaz 2017-04-27 13:30:22 +01:00
parent 062dd37828
commit a56402521f
10 changed files with 124 additions and 17 deletions

View File

@@ -56,7 +56,10 @@
#define SECOND_LEVEL_DESC_N TWO_MB_SHIFT
#define THIRD_LEVEL_DESC_N FOUR_KB_SHIFT
/* XN: Translation regimes that support one VA range (EL2 and EL3). */
#define XN (ULL(1) << 2)
/* UXN, PXN: Translation regimes that support two VA ranges (EL1&0). */
#define UXN (ULL(1) << 2)
#define PXN (ULL(1) << 1)
#define CONT_HINT (ULL(1) << 0)
#define UPPER_ATTRS(x) (((x) & ULL(0x7)) << 52)

View File

@@ -93,6 +93,20 @@ static unsigned long long get_max_supported_pa(void)
}
#endif /* ENABLE_ASSERTIONS */
int xlat_arch_current_el(void)
{
/*
* If EL3 is in AArch32 mode, all secure PL1 modes (Monitor, System,
* SVC, Abort, UND, IRQ and FIQ modes) execute at EL3.
*/
return 3;
}
uint64_t xlat_arch_get_xn_desc(int el __unused)
{
return UPPER_ATTRS(XN);
}
void init_xlat_tables(void)
{
unsigned long long max_pa;

View File

@@ -146,6 +146,25 @@ static unsigned long long get_max_supported_pa(void)
}
#endif /* ENABLE_ASSERTIONS */
int xlat_arch_current_el(void)
{
int el = GET_EL(read_CurrentEl());
assert(el > 0);
return el;
}
uint64_t xlat_arch_get_xn_desc(int el)
{
if (el == 3) {
return UPPER_ATTRS(XN);
} else {
assert(el == 1);
return UPPER_ATTRS(PXN);
}
}
void init_xlat_tables(void)
{
unsigned long long max_pa;

View File

@@ -64,6 +64,8 @@ static unsigned next_xlat;
static unsigned long long xlat_max_pa;
static uintptr_t xlat_max_va;
static uint64_t execute_never_mask;
/*
* Array of all memory regions stored in order of ascending base address.
* The list is terminated by the first entry with size == 0.
@@ -237,7 +239,8 @@ static uint64_t mmap_desc(mmap_attr_t attr, unsigned long long addr_pa,
* fetch, which could be an issue if this memory region
* corresponds to a read-sensitive peripheral.
*/
desc |= UPPER_ATTRS(XN);
desc |= execute_never_mask;
} else { /* Normal memory */
/*
* Always map read-write normal memory as execute-never.
@@ -245,7 +248,7 @@ static uint64_t mmap_desc(mmap_attr_t attr, unsigned long long addr_pa,
* R/W memory is reserved for data storage, which must not be
* executable.)
* Note that setting the XN bit here is for consistency only.
* The enable_mmu_elx() function sets the SCTLR_EL3.WXN bit,
* The function that enables the MMU sets the SCTLR_ELx.WXN bit,
* which makes any writable memory region to be treated as
* execute-never, regardless of the value of the XN bit in the
* translation table.
@@ -253,8 +256,9 @@ static uint64_t mmap_desc(mmap_attr_t attr, unsigned long long addr_pa,
* For read-only memory, rely on the MT_EXECUTE/MT_EXECUTE_NEVER
* attribute to figure out the value of the XN bit.
*/
if ((attr & MT_RW) || (attr & MT_EXECUTE_NEVER))
desc |= UPPER_ATTRS(XN);
if ((attr & MT_RW) || (attr & MT_EXECUTE_NEVER)) {
desc |= execute_never_mask;
}
if (mem_type == MT_MEMORY) {
desc |= LOWER_ATTRS(ATTR_IWBWA_OWBWA_NTR_INDEX | ISH);
@@ -401,7 +405,7 @@ void init_xlation_table(uintptr_t base_va, uint64_t *table,
int level, uintptr_t *max_va,
unsigned long long *max_pa)
{
execute_never_mask = xlat_arch_get_xn_desc(xlat_arch_current_el());
init_xlation_table_inner(mmap, base_va, table, level);
*max_va = xlat_max_va;
*max_pa = xlat_max_pa;

View File

@@ -89,6 +89,17 @@ CASSERT(IS_POWER_OF_TWO(PLAT_PHY_ADDR_SPACE_SIZE),
#endif /* AARCH32 */
void print_mmap(void);
/* Returns the current Exception Level. The returned EL must be 1 or higher. */
int xlat_arch_current_el(void);
/*
* Returns the bit mask that has to be ORed to the rest of a translation table
* descriptor so that execution of code is prohibited at the given Exception
* Level.
*/
uint64_t xlat_arch_get_xn_desc(int el);
void init_xlation_table(uintptr_t base_va, uint64_t *table,
int level, uintptr_t *max_va,
unsigned long long *max_pa);

View File

@@ -91,6 +91,20 @@ void xlat_arch_tlbi_va_sync(void)
#endif /* PLAT_XLAT_TABLES_DYNAMIC */
int xlat_arch_current_el(void)
{
/*
* If EL3 is in AArch32 mode, all secure PL1 modes (Monitor, System,
* SVC, Abort, UND, IRQ and FIQ modes) execute at EL3.
*/
return 3;
}
uint64_t xlat_arch_get_xn_desc(int el __unused)
{
return UPPER_ATTRS(XN);
}
void init_xlat_tables_arch(unsigned long long max_pa)
{
assert((PLAT_PHY_ADDR_SPACE_SIZE - 1) <=

View File

@@ -151,6 +151,25 @@ void xlat_arch_tlbi_va_sync(void)
#endif /* PLAT_XLAT_TABLES_DYNAMIC */
int xlat_arch_current_el(void)
{
int el = GET_EL(read_CurrentEl());
assert(el > 0);
return el;
}
uint64_t xlat_arch_get_xn_desc(int el)
{
if (el == 3) {
return UPPER_ATTRS(XN);
} else {
assert(el == 1);
return UPPER_ATTRS(PXN);
}
}
void init_xlat_tables_arch(unsigned long long max_pa)
{
assert((PLAT_PHY_ADDR_SPACE_SIZE - 1) <=

View File

@@ -137,6 +137,8 @@ void init_xlat_tables(void)
assert(!is_mmu_enabled());
assert(!tf_xlat_ctx.initialized);
print_mmap(tf_xlat_ctx.mmap);
tf_xlat_ctx.execute_never_mask =
xlat_arch_get_xn_desc(xlat_arch_current_el());
init_xlation_table(&tf_xlat_ctx);
xlat_tables_print(&tf_xlat_ctx);

View File

@@ -116,7 +116,7 @@ static uint64_t *xlat_table_get_empty(xlat_ctx_t *ctx)
/* Returns a block/page table descriptor for the given level and attributes. */
static uint64_t xlat_desc(mmap_attr_t attr, unsigned long long addr_pa,
int level)
int level, uint64_t execute_never_mask)
{
uint64_t desc;
int mem_type;
@@ -158,7 +158,8 @@ static uint64_t xlat_desc(mmap_attr_t attr, unsigned long long addr_pa,
* fetch, which could be an issue if this memory region
* corresponds to a read-sensitive peripheral.
*/
desc |= UPPER_ATTRS(XN);
desc |= execute_never_mask;
} else { /* Normal memory */
/*
* Always map read-write normal memory as execute-never.
@@ -166,7 +167,7 @@ static uint64_t xlat_desc(mmap_attr_t attr, unsigned long long addr_pa,
* R/W memory is reserved for data storage, which must not be
* executable.)
* Note that setting the XN bit here is for consistency only.
* The enable_mmu_elx() function sets the SCTLR_EL3.WXN bit,
* The function that enables the MMU sets the SCTLR_ELx.WXN bit,
* which makes any writable memory region to be treated as
* execute-never, regardless of the value of the XN bit in the
* translation table.
@@ -174,8 +175,9 @@ static uint64_t xlat_desc(mmap_attr_t attr, unsigned long long addr_pa,
* For read-only memory, rely on the MT_EXECUTE/MT_EXECUTE_NEVER
* attribute to figure out the value of the XN bit.
*/
if ((attr & MT_RW) || (attr & MT_EXECUTE_NEVER))
desc |= UPPER_ATTRS(XN);
if ((attr & MT_RW) || (attr & MT_EXECUTE_NEVER)) {
desc |= execute_never_mask;
}
if (mem_type == MT_MEMORY) {
desc |= LOWER_ATTRS(ATTR_IWBWA_OWBWA_NTR_INDEX | ISH);
@@ -535,7 +537,8 @@ static uintptr_t xlat_tables_map_region(xlat_ctx_t *ctx, mmap_region_t *mm,
if (action == ACTION_WRITE_BLOCK_ENTRY) {
table_base[table_idx] =
xlat_desc(mm->attr, table_idx_pa, level);
xlat_desc(mm->attr, table_idx_pa, level,
ctx->execute_never_mask);
} else if (action == ACTION_CREATE_NEW_TABLE) {
@@ -940,7 +943,7 @@ int mmap_remove_dynamic_region_ctx(xlat_ctx_t *ctx, uintptr_t base_va,
#if LOG_LEVEL >= LOG_LEVEL_VERBOSE
/* Print the attributes of the specified block descriptor. */
static void xlat_desc_print(uint64_t desc)
static void xlat_desc_print(uint64_t desc, uint64_t execute_never_mask)
{
int mem_type_index = ATTR_INDEX_GET(desc);
@@ -955,7 +958,7 @@ static void xlat_desc_print(uint64_t desc)
tf_printf(LOWER_ATTRS(AP_RO) & desc ? "-RO" : "-RW");
tf_printf(LOWER_ATTRS(NS) & desc ? "-NS" : "-S");
tf_printf(UPPER_ATTRS(XN) & desc ? "-XN" : "-EXEC");
tf_printf(execute_never_mask & desc ? "-XN" : "-EXEC");
}
static const char * const level_spacers[] = {
@@ -974,7 +977,7 @@ static const char *invalid_descriptors_ommited =
*/
static void xlat_tables_print_internal(const uintptr_t table_base_va,
uint64_t *const table_base, const int table_entries,
const int level)
const int level, const uint64_t execute_never_mask)
{
assert(level <= XLAT_TABLE_LEVEL_MAX);
@@ -1035,14 +1038,15 @@ static void xlat_tables_print_internal(const uintptr_t table_base_va,
xlat_tables_print_internal(table_idx_va,
(uint64_t *)addr_inner,
XLAT_TABLE_ENTRIES, level+1);
XLAT_TABLE_ENTRIES, level+1,
execute_never_mask);
} else {
tf_printf("%sVA:%p PA:0x%llx size:0x%zx ",
level_spacers[level],
(void *)table_idx_va,
(unsigned long long)(desc & TABLE_ADDR_MASK),
level_size);
xlat_desc_print(desc);
xlat_desc_print(desc, execute_never_mask);
tf_printf("\n");
}
}
@@ -1063,7 +1067,7 @@ void xlat_tables_print(xlat_ctx_t *ctx)
{
#if LOG_LEVEL >= LOG_LEVEL_VERBOSE
xlat_tables_print_internal(0, ctx->base_table, ctx->base_table_entries,
ctx->base_level);
ctx->base_level, ctx->execute_never_mask);
#endif /* LOG_LEVEL >= LOG_LEVEL_VERBOSE */
}

View File

@@ -108,6 +108,13 @@ typedef struct {
/* Set to 1 when the translation tables are initialized. */
int initialized;
/*
* Bit mask that has to be ORed to the rest of a translation table
* descriptor in order to prohibit execution of code at the exception
* level of this translation context.
*/
uint64_t execute_never_mask;
} xlat_ctx_t;
#if PLAT_XLAT_TABLES_DYNAMIC
@@ -178,6 +185,16 @@ void mmap_add_region_ctx(xlat_ctx_t *ctx, mmap_region_t *mm);
* Architecture-specific initialization code.
*/
/* Returns the current Exception Level. The returned EL must be 1 or higher. */
int xlat_arch_current_el(void);
/*
* Returns the bit mask that has to be ORed to the rest of a translation table
* descriptor so that execution of code is prohibited at the given Exception
* Level.
*/
uint64_t xlat_arch_get_xn_desc(int el);
/* Execute architecture-specific translation table initialization code. */
void init_xlat_tables_arch(unsigned long long max_pa);