author     Antonio Nino Diaz <antonio.ninodiaz@arm.com>	2019-03-27 11:10:31 +0000
committer  Antonio Nino Diaz <antonio.ninodiaz@arm.com>	2019-04-03 10:51:31 +0100
commit     6de6965b2fcaffec01b2679118d16eabfde4d9c9 (patch)
tree       96734032d5d6de50bfa2ba210618c8e88344f061
parent     5db5930bafe00db095c2aed150fdbc45059b692a (diff)
SPM: Move shim layer to TTBR1_EL1
This gives each Secure Partition complete freedom on its address space.

Previously, the memory used by the exception vectors was reserved and
couldn't be used. Also, it always had to be mapped, forcing SPM to
generate translation tables that included the exception vectors as well
as the partition memory regions. With this change, partitions can reduce
their address space size easily.

Change-Id: I67fb5e9bdf2870b73347f23bff702fab0a8f8711
Signed-off-by: Antonio Nino Diaz <antonio.ninodiaz@arm.com>
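As a rough illustration of the resulting layout (not part of the patch; the
helper name is hypothetical), the shim vectors sit at the start of the window
translated through TTBR1_EL1, i.e. the top SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE
bytes of the 64-bit address space, which is how spm_sp_setup() below derives
the value written to VBAR_EL1:

#include <stdint.h>

/* Illustrative sketch only: mirrors the VBAR_EL1 value programmed in
 * spm_sp_setup() for a given TTBR1_EL1 window size. */
static uint64_t shim_vbar_el1(uint64_t ttbr1_window_size)
{
	/* TCR_EL1.T1SZ = 64 - log2(size), so TTBR1_EL1 covers the top
	 * 'size' bytes of the virtual address space; the shim vectors
	 * are mapped at the first address of that window. */
	return UINT64_MAX - (ttbr1_window_size - 1ULL);
}

/* With the 2^25-byte window defined in spm_shim_private.h this yields
 * 0xfffffffffe000000. */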
-rw-r--r--  include/arch/aarch64/arch.h                    | 23
-rw-r--r--  lib/xlat_tables/aarch64/xlat_tables.c          |  6
-rw-r--r--  lib/xlat_tables_v2/aarch64/xlat_tables_arch.c  |  2
-rw-r--r--  services/std_svc/spm/spm_main.c                |  3
-rw-r--r--  services/std_svc/spm/spm_private.h             |  6
-rw-r--r--  services/std_svc/spm/spm_setup.c               | 23
-rw-r--r--  services/std_svc/spm/spm_shim_private.h        | 10
-rw-r--r--  services/std_svc/spm/spm_xlat.c                | 37
8 files changed, 92 insertions(+), 18 deletions(-)
diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index d3c5beaa..d15851d8 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -433,6 +433,9 @@
#define TCR_TxSZ_MAX ULL(39)
#define TCR_TxSZ_MAX_TTST ULL(48)
+#define TCR_T0SZ_SHIFT U(0)
+#define TCR_T1SZ_SHIFT U(16)
+
/* (internal) physical address size bits in EL3/EL1 */
#define TCR_PS_BITS_4GB ULL(0x0)
#define TCR_PS_BITS_64GB ULL(0x1)
@@ -462,12 +465,32 @@
#define TCR_SH_OUTER_SHAREABLE (ULL(0x2) << 12)
#define TCR_SH_INNER_SHAREABLE (ULL(0x3) << 12)
+#define TCR_RGN1_INNER_NC (ULL(0x0) << 24)
+#define TCR_RGN1_INNER_WBA (ULL(0x1) << 24)
+#define TCR_RGN1_INNER_WT (ULL(0x2) << 24)
+#define TCR_RGN1_INNER_WBNA (ULL(0x3) << 24)
+
+#define TCR_RGN1_OUTER_NC (ULL(0x0) << 26)
+#define TCR_RGN1_OUTER_WBA (ULL(0x1) << 26)
+#define TCR_RGN1_OUTER_WT (ULL(0x2) << 26)
+#define TCR_RGN1_OUTER_WBNA (ULL(0x3) << 26)
+
+#define TCR_SH1_NON_SHAREABLE (ULL(0x0) << 28)
+#define TCR_SH1_OUTER_SHAREABLE (ULL(0x2) << 28)
+#define TCR_SH1_INNER_SHAREABLE (ULL(0x3) << 28)
+
#define TCR_TG0_SHIFT U(14)
#define TCR_TG0_MASK ULL(3)
#define TCR_TG0_4K (ULL(0) << TCR_TG0_SHIFT)
#define TCR_TG0_64K (ULL(1) << TCR_TG0_SHIFT)
#define TCR_TG0_16K (ULL(2) << TCR_TG0_SHIFT)
+#define TCR_TG1_SHIFT U(30)
+#define TCR_TG1_MASK ULL(3)
+#define TCR_TG1_16K (ULL(1) << TCR_TG1_SHIFT)
+#define TCR_TG1_4K (ULL(2) << TCR_TG1_SHIFT)
+#define TCR_TG1_64K (ULL(3) << TCR_TG1_SHIFT)
+
#define TCR_EPD0_BIT (ULL(1) << 7)
#define TCR_EPD1_BIT (ULL(1) << 23)
diff --git a/lib/xlat_tables/aarch64/xlat_tables.c b/lib/xlat_tables/aarch64/xlat_tables.c
index e64fd3ef..c86412c9 100644
--- a/lib/xlat_tables/aarch64/xlat_tables.c
+++ b/lib/xlat_tables/aarch64/xlat_tables.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -174,12 +174,12 @@ void init_xlat_tables(void)
/* Inner & outer non-cacheable non-shareable. */\
tcr = TCR_SH_NON_SHAREABLE | \
TCR_RGN_OUTER_NC | TCR_RGN_INNER_NC | \
- (uint64_t) t0sz; \
+ ((uint64_t)t0sz << TCR_T0SZ_SHIFT); \
} else { \
/* Inner & outer WBWA & shareable. */ \
tcr = TCR_SH_INNER_SHAREABLE | \
TCR_RGN_OUTER_WBA | TCR_RGN_INNER_WBA | \
- (uint64_t) t0sz; \
+ ((uint64_t)t0sz << TCR_T0SZ_SHIFT); \
} \
tcr |= _tcr_extra; \
write_tcr_el##_el(tcr); \
diff --git a/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c b/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
index e7593dde..8eeeea1d 100644
--- a/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
+++ b/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
@@ -248,7 +248,7 @@ void setup_mmu_cfg(uint64_t *params, unsigned int flags,
*/
int t0sz = 64 - __builtin_ctzll(virtual_addr_space_size);
- tcr = (uint64_t) t0sz;
+ tcr = (uint64_t)t0sz << TCR_T0SZ_SHIFT;
/*
* Set the cacheability and shareability attributes for memory
diff --git a/services/std_svc/spm/spm_main.c b/services/std_svc/spm/spm_main.c
index 6cd77e3d..d740a8dc 100644
--- a/services/std_svc/spm/spm_main.c
+++ b/services/std_svc/spm/spm_main.c
@@ -300,6 +300,9 @@ int32_t spm_setup(void)
panic();
}
+ /* Setup shim layer */
+ spm_exceptions_xlat_init_context();
+
/*
* Setup all Secure Partitions.
*/
diff --git a/services/std_svc/spm/spm_private.h b/services/std_svc/spm/spm_private.h
index 1d5a88e8..8b98e8c0 100644
--- a/services/std_svc/spm/spm_private.h
+++ b/services/std_svc/spm/spm_private.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -95,6 +95,10 @@ void spm_sp_request_increase(sp_context_t *sp_ctx);
void spm_sp_request_decrease(sp_context_t *sp_ctx);
int spm_sp_request_increase_if_zero(sp_context_t *sp_ctx);
+/* Functions related to the shim layer translation tables */
+void spm_exceptions_xlat_init_context(void);
+uint64_t *spm_exceptions_xlat_get_base_table(void);
+
/* Functions related to the translation tables management */
xlat_ctx_t *spm_sp_xlat_context_alloc(void);
void sp_map_memory_regions(sp_context_t *sp_ctx);
diff --git a/services/std_svc/spm/spm_setup.c b/services/std_svc/spm/spm_setup.c
index 3aabc200..6cbbc5b2 100644
--- a/services/std_svc/spm/spm_setup.c
+++ b/services/std_svc/spm/spm_setup.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -77,12 +77,24 @@ void spm_sp_setup(sp_context_t *sp_ctx)
write_ctx_reg(get_sysregs_ctx(ctx), CTX_MAIR_EL1,
mmu_cfg_params[MMU_CFG_MAIR]);
+ /* Enable translations using TTBR1_EL1 */
+ int t1sz = 64 - __builtin_ctzll(SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE);
+ mmu_cfg_params[MMU_CFG_TCR] &= ~TCR_EPD1_BIT;
+ mmu_cfg_params[MMU_CFG_TCR] |=
+ ((uint64_t)t1sz << TCR_T1SZ_SHIFT) |
+ TCR_SH1_INNER_SHAREABLE |
+ TCR_RGN1_OUTER_WBA | TCR_RGN1_INNER_WBA |
+ TCR_TG1_4K;
+
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TCR_EL1,
mmu_cfg_params[MMU_CFG_TCR]);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR0_EL1,
mmu_cfg_params[MMU_CFG_TTBR0]);
+ write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR1_EL1,
+ (uint64_t)spm_exceptions_xlat_get_base_table());
+
/* Setup SCTLR_EL1 */
u_register_t sctlr_el1 = read_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1);
@@ -122,9 +134,14 @@ void spm_sp_setup(sp_context_t *sp_ctx)
* ----------------------------
*/
- /* Shim Exception Vector Base Address */
+ /*
+ * Shim exception vector base address. It is mapped at the start of the
+ * address space accessed by TTBR1_EL1, which means that the base
+ * address of the exception vectors depends on the size of the address
+ * space specified in TCR_EL1.T1SZ.
+ */
write_ctx_reg(get_sysregs_ctx(ctx), CTX_VBAR_EL1,
- SPM_SHIM_EXCEPTIONS_PTR);
+ UINT64_MAX - (SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE - 1ULL));
/*
* FPEN: Allow the Secure Partition to access FP/SIMD registers.
diff --git a/services/std_svc/spm/spm_shim_private.h b/services/std_svc/spm/spm_shim_private.h
index 7fe9692b..fc510b11 100644
--- a/services/std_svc/spm/spm_shim_private.h
+++ b/services/std_svc/spm/spm_shim_private.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -23,4 +23,12 @@ IMPORT_SYM(uintptr_t, __SPM_SHIM_EXCEPTIONS_END__, SPM_SHIM_EXCEPTIONS_END);
#define SPM_SHIM_EXCEPTIONS_SIZE \
(SPM_SHIM_EXCEPTIONS_END - SPM_SHIM_EXCEPTIONS_START)
+/*
+ * Use the smallest virtual address space size allowed in ARMv8.0 for
+ * compatibility.
+ */
+#define SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE (1ULL << 25)
+#define SPM_SHIM_MMAP_REGIONS 1
+#define SPM_SHIM_XLAT_TABLES 1
+
#endif /* SPM_SHIM_PRIVATE_H */
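A quick sanity check of the 2^25 choice above (illustrative only, not part of
the patch): a 2^25-byte (32 MiB) window corresponds to T1SZ = 64 - 25 = 39,
which matches TCR_TxSZ_MAX from arch.h, the largest TxSZ (and therefore the
smallest address space) that a plain ARMv8.0 implementation must support.

#include <assert.h>
#include <stdint.h>

/* Hypothetical self-check, not in the patch: the shim window size must map
 * to a TxSZ value supported by ARMv8.0 (TCR_TxSZ_MAX == 39). */
int main(void)
{
	uint64_t size = 1ULL << 25;	/* SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE */
	unsigned int t1sz = 64U - (unsigned int)__builtin_ctzll(size);

	assert(t1sz == 39U);		/* == TCR_TxSZ_MAX in arch.h */
	return 0;
}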
diff --git a/services/std_svc/spm/spm_xlat.c b/services/std_svc/spm/spm_xlat.c
index 5f830965..58d61fc3 100644
--- a/services/std_svc/spm/spm_xlat.c
+++ b/services/std_svc/spm/spm_xlat.c
@@ -95,6 +95,34 @@ xlat_ctx_t *spm_sp_xlat_context_alloc(void)
};
/*******************************************************************************
+ * Translation table context used for S-EL1 exception vectors
+ ******************************************************************************/
+
+REGISTER_XLAT_CONTEXT2(spm_sel1, SPM_SHIM_MMAP_REGIONS, SPM_SHIM_XLAT_TABLES,
+ SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE, PLAT_PHY_ADDR_SPACE_SIZE,
+ EL1_EL0_REGIME, PLAT_SP_IMAGE_XLAT_SECTION_NAME);
+
+void spm_exceptions_xlat_init_context(void)
+{
+ /* This region contains the exception vectors used at S-EL1. */
+ mmap_region_t sel1_exception_vectors =
+ MAP_REGION(SPM_SHIM_EXCEPTIONS_PTR,
+ 0x0UL,
+ SPM_SHIM_EXCEPTIONS_SIZE,
+ MT_CODE | MT_SECURE | MT_PRIVILEGED);
+
+ mmap_add_region_ctx(&spm_sel1_xlat_ctx,
+ &sel1_exception_vectors);
+
+ init_xlat_tables_ctx(&spm_sel1_xlat_ctx);
+}
+
+uint64_t *spm_exceptions_xlat_get_base_table(void)
+{
+ return spm_sel1_xlat_ctx.base_table;
+}
+
+/*******************************************************************************
* Functions to allocate memory for regions.
******************************************************************************/
@@ -300,15 +328,6 @@ static void map_rdmem(sp_context_t *sp_ctx, struct sp_rd_sect_mem_region *rdmem)
void sp_map_memory_regions(sp_context_t *sp_ctx)
{
- /* This region contains the exception vectors used at S-EL1. */
- const mmap_region_t sel1_exception_vectors =
- MAP_REGION_FLAT(SPM_SHIM_EXCEPTIONS_START,
- SPM_SHIM_EXCEPTIONS_SIZE,
- MT_CODE | MT_SECURE | MT_PRIVILEGED);
-
- mmap_add_region_ctx(sp_ctx->xlat_ctx_handle,
- &sel1_exception_vectors);
-
struct sp_rd_sect_mem_region *rdmem;
for (rdmem = sp_ctx->rd.mem_region; rdmem != NULL; rdmem = rdmem->next) {