diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index d3c5beaadb678a4783aabc03d04a7057bc2f3871..d15851d813ece953553a71ecab652402f6a80300 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -433,6 +433,9 @@
 #define TCR_TxSZ_MAX		ULL(39)
 #define TCR_TxSZ_MAX_TTST	ULL(48)
 
+#define TCR_T0SZ_SHIFT		U(0)
+#define TCR_T1SZ_SHIFT		U(16)
+
 /* (internal) physical address size bits in EL3/EL1 */
 #define TCR_PS_BITS_4GB		ULL(0x0)
 #define TCR_PS_BITS_64GB	ULL(0x1)
@@ -462,12 +465,32 @@
 #define TCR_SH_OUTER_SHAREABLE	(ULL(0x2) << 12)
 #define TCR_SH_INNER_SHAREABLE	(ULL(0x3) << 12)
 
+#define TCR_RGN1_INNER_NC	(ULL(0x0) << 24)
+#define TCR_RGN1_INNER_WBA	(ULL(0x1) << 24)
+#define TCR_RGN1_INNER_WT	(ULL(0x2) << 24)
+#define TCR_RGN1_INNER_WBNA	(ULL(0x3) << 24)
+
+#define TCR_RGN1_OUTER_NC	(ULL(0x0) << 26)
+#define TCR_RGN1_OUTER_WBA	(ULL(0x1) << 26)
+#define TCR_RGN1_OUTER_WT	(ULL(0x2) << 26)
+#define TCR_RGN1_OUTER_WBNA	(ULL(0x3) << 26)
+
+#define TCR_SH1_NON_SHAREABLE	(ULL(0x0) << 28)
+#define TCR_SH1_OUTER_SHAREABLE	(ULL(0x2) << 28)
+#define TCR_SH1_INNER_SHAREABLE	(ULL(0x3) << 28)
+
 #define TCR_TG0_SHIFT		U(14)
 #define TCR_TG0_MASK		ULL(3)
 #define TCR_TG0_4K		(ULL(0) << TCR_TG0_SHIFT)
 #define TCR_TG0_64K		(ULL(1) << TCR_TG0_SHIFT)
 #define TCR_TG0_16K		(ULL(2) << TCR_TG0_SHIFT)
 
+#define TCR_TG1_SHIFT		U(30)
+#define TCR_TG1_MASK		ULL(3)
+#define TCR_TG1_16K		(ULL(1) << TCR_TG1_SHIFT)
+#define TCR_TG1_4K		(ULL(2) << TCR_TG1_SHIFT)
+#define TCR_TG1_64K		(ULL(3) << TCR_TG1_SHIFT)
+
 #define TCR_EPD0_BIT		(ULL(1) << 7)
 #define TCR_EPD1_BIT		(ULL(1) << 23)
diff --git a/lib/xlat_tables/aarch64/xlat_tables.c b/lib/xlat_tables/aarch64/xlat_tables.c
index e64fd3ef73218cbfd57000365dace96116f100b8..c86412c9bdd40f3348ba98a8252a70056a4f9053 100644
--- a/lib/xlat_tables/aarch64/xlat_tables.c
+++ b/lib/xlat_tables/aarch64/xlat_tables.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -174,12 +174,12 @@ void init_xlat_tables(void)
 			/* Inner & outer non-cacheable non-shareable. */\
 			tcr = TCR_SH_NON_SHAREABLE |			\
 				TCR_RGN_OUTER_NC | TCR_RGN_INNER_NC |	\
-				(uint64_t) t0sz;			\
+				((uint64_t)t0sz << TCR_T0SZ_SHIFT);	\
 		} else {						\
 			/* Inner & outer WBWA & shareable. */		\
 			tcr = TCR_SH_INNER_SHAREABLE |			\
 				TCR_RGN_OUTER_WBA | TCR_RGN_INNER_WBA |	\
-				(uint64_t) t0sz;			\
+				((uint64_t)t0sz << TCR_T0SZ_SHIFT);	\
 		}							\
 		tcr |= _tcr_extra;					\
 		write_tcr_el##_el(tcr);					\
diff --git a/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c b/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
index e7593dde76581799be3c0c3d6f8cf8c68a855fe5..8eeeea1dd5aefc04a91d9b37d01b62fb40b884a1 100644
--- a/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
+++ b/lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
@@ -248,7 +248,7 @@ void setup_mmu_cfg(uint64_t *params, unsigned int flags,
 	 */
 	int t0sz = 64 - __builtin_ctzll(virtual_addr_space_size);
 
-	tcr = (uint64_t) t0sz;
+	tcr = (uint64_t)t0sz << TCR_T0SZ_SHIFT;
 
 	/*
 	 * Set the cacheability and shareability attributes for memory
diff --git a/services/std_svc/spm/spm_main.c b/services/std_svc/spm/spm_main.c
index 6cd77e3d971538b2fe163b087bc819dd8802c866..d740a8dc997325521272072215315c4c297784c3 100644
--- a/services/std_svc/spm/spm_main.c
+++ b/services/std_svc/spm/spm_main.c
@@ -300,6 +300,9 @@ int32_t spm_setup(void)
 		panic();
 	}
 
+	/* Setup shim layer */
+	spm_exceptions_xlat_init_context();
+
 	/*
 	 * Setup all Secure Partitions.
 	 */
diff --git a/services/std_svc/spm/spm_private.h b/services/std_svc/spm/spm_private.h
index 1d5a88e88cd9d24041ad4b6e54e8c936a1e727dd..8b98e8c0d34b4b9af4daab9a51c72ee64ce3342b 100644
--- a/services/std_svc/spm/spm_private.h
+++ b/services/std_svc/spm/spm_private.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -95,6 +95,10 @@ void spm_sp_request_increase(sp_context_t *sp_ctx);
 void spm_sp_request_decrease(sp_context_t *sp_ctx);
 int spm_sp_request_increase_if_zero(sp_context_t *sp_ctx);
 
+/* Functions related to the shim layer translation tables */
+void spm_exceptions_xlat_init_context(void);
+uint64_t *spm_exceptions_xlat_get_base_table(void);
+
 /* Functions related to the translation tables management */
 xlat_ctx_t *spm_sp_xlat_context_alloc(void);
 void sp_map_memory_regions(sp_context_t *sp_ctx);
diff --git a/services/std_svc/spm/spm_setup.c b/services/std_svc/spm/spm_setup.c
index 3aabc2007242de845616b6208147bbdd9b339948..6cbbc5b22ecf34b58a28d3e0a86eb54950db7cd0 100644
--- a/services/std_svc/spm/spm_setup.c
+++ b/services/std_svc/spm/spm_setup.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -77,12 +77,24 @@ void spm_sp_setup(sp_context_t *sp_ctx)
 	write_ctx_reg(get_sysregs_ctx(ctx), CTX_MAIR_EL1,
 		      mmu_cfg_params[MMU_CFG_MAIR]);
 
+	/* Enable translations using TTBR1_EL1 */
+	int t1sz = 64 - __builtin_ctzll(SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE);
+	mmu_cfg_params[MMU_CFG_TCR] &= ~TCR_EPD1_BIT;
+	mmu_cfg_params[MMU_CFG_TCR] |=
+		((uint64_t)t1sz << TCR_T1SZ_SHIFT) |
+		TCR_SH1_INNER_SHAREABLE |
+		TCR_RGN1_OUTER_WBA | TCR_RGN1_INNER_WBA |
+		TCR_TG1_4K;
+
 	write_ctx_reg(get_sysregs_ctx(ctx), CTX_TCR_EL1,
 		      mmu_cfg_params[MMU_CFG_TCR]);
 
 	write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR0_EL1,
 		      mmu_cfg_params[MMU_CFG_TTBR0]);
 
+	write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR1_EL1,
+		      (uint64_t)spm_exceptions_xlat_get_base_table());
+
 	/* Setup SCTLR_EL1 */
 	u_register_t sctlr_el1 = read_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1);
@@ -122,9 +134,14 @@ void spm_sp_setup(sp_context_t *sp_ctx)
 	 * ----------------------------
 	 */
 
-	/* Shim Exception Vector Base Address */
+	/*
+	 * Shim exception vector base address. It is mapped at the start of the
+	 * address space accessed by TTBR1_EL1, which means that the base
+	 * address of the exception vectors depends on the size of the address
+	 * space specified in TCR_EL1.T1SZ.
+	 */
 	write_ctx_reg(get_sysregs_ctx(ctx), CTX_VBAR_EL1,
-			SPM_SHIM_EXCEPTIONS_PTR);
+			UINT64_MAX - (SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE - 1ULL));
 
 	/*
 	 * FPEN: Allow the Secure Partition to access FP/SIMD registers.
diff --git a/services/std_svc/spm/spm_shim_private.h b/services/std_svc/spm/spm_shim_private.h
index 7fe9692b4563aae9b7ac4c736c6908f11e44fad9..fc510b111057690bbb2d9c3cdaebeca80c7989b4 100644
--- a/services/std_svc/spm/spm_shim_private.h
+++ b/services/std_svc/spm/spm_shim_private.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -23,4 +23,12 @@ IMPORT_SYM(uintptr_t, __SPM_SHIM_EXCEPTIONS_END__, SPM_SHIM_EXCEPTIONS_END);
 #define SPM_SHIM_EXCEPTIONS_SIZE	\
 	(SPM_SHIM_EXCEPTIONS_END - SPM_SHIM_EXCEPTIONS_START)
 
+/*
+ * Use the smallest virtual address space size allowed in ARMv8.0 for
+ * compatibility.
+ */
+#define SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE	(1ULL << 25)
+#define SPM_SHIM_MMAP_REGIONS			1
+#define SPM_SHIM_XLAT_TABLES			1
+
 #endif /* SPM_SHIM_PRIVATE_H */
diff --git a/services/std_svc/spm/spm_xlat.c b/services/std_svc/spm/spm_xlat.c
index 5f83096507c9b68ce9861000d64174acb24aadd9..58d61fc347af1585c092b66083a11be59dfc1e73 100644
--- a/services/std_svc/spm/spm_xlat.c
+++ b/services/std_svc/spm/spm_xlat.c
@@ -94,6 +94,34 @@ xlat_ctx_t *spm_sp_xlat_context_alloc(void)
 	return ctx;
 };
 
+/*******************************************************************************
+ * Translation table context used for S-EL1 exception vectors
+ ******************************************************************************/
+
+REGISTER_XLAT_CONTEXT2(spm_sel1, SPM_SHIM_MMAP_REGIONS, SPM_SHIM_XLAT_TABLES,
+		       SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE, PLAT_PHY_ADDR_SPACE_SIZE,
+		       EL1_EL0_REGIME, PLAT_SP_IMAGE_XLAT_SECTION_NAME);
+
+void spm_exceptions_xlat_init_context(void)
+{
+	/* This region contains the exception vectors used at S-EL1. */
+	mmap_region_t sel1_exception_vectors =
+		MAP_REGION(SPM_SHIM_EXCEPTIONS_PTR,
+			   0x0UL,
+			   SPM_SHIM_EXCEPTIONS_SIZE,
+			   MT_CODE | MT_SECURE | MT_PRIVILEGED);
+
+	mmap_add_region_ctx(&spm_sel1_xlat_ctx,
+			    &sel1_exception_vectors);
+
+	init_xlat_tables_ctx(&spm_sel1_xlat_ctx);
+}
+
+uint64_t *spm_exceptions_xlat_get_base_table(void)
+{
+	return spm_sel1_xlat_ctx.base_table;
+}
+
 /*******************************************************************************
  * Functions to allocate memory for regions.
  ******************************************************************************/
@@ -300,15 +328,6 @@ static void map_rdmem(sp_context_t *sp_ctx, struct sp_rd_sect_mem_region *rdmem)
 
 void sp_map_memory_regions(sp_context_t *sp_ctx)
 {
-	/* This region contains the exception vectors used at S-EL1. */
-	const mmap_region_t sel1_exception_vectors =
-		MAP_REGION_FLAT(SPM_SHIM_EXCEPTIONS_START,
-				SPM_SHIM_EXCEPTIONS_SIZE,
-				MT_CODE | MT_SECURE | MT_PRIVILEGED);
-
-	mmap_add_region_ctx(sp_ctx->xlat_ctx_handle,
-			    &sel1_exception_vectors);
-
 	struct sp_rd_sect_mem_region *rdmem;
 
 	for (rdmem = sp_ctx->rd.mem_region; rdmem != NULL; rdmem = rdmem->next) {
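
Note on the address arithmetic in the patch: with SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE set to 1ULL << 25 (the 32 MiB minimum implied by TCR_TxSZ_MAX being 39 in ARMv8.0), T1SZ becomes 64 - 25 = 39 and the TTBR1_EL1-translated region occupies the top 32 MiB of the 64-bit address space, so VBAR_EL1 ends up at 0xFFFFFFFFFE000000. The following standalone C sketch (not part of the patch; compute_t1sz and ttbr1_region_base are hypothetical helper names) reproduces that arithmetic:

/*
 * Standalone sketch: derives the T1SZ value and the TTBR1_EL1-mapped vector
 * base from the shim's virtual address space size. Helper names are made up
 * for illustration and do not exist in TF-A.
 */
#include <stdint.h>
#include <stdio.h>

/* Mirrors SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE: 2^25 bytes = 32 MiB. */
#define SHIM_VA_SPACE_SIZE	(1ULL << 25)

/* T1SZ is the number of unused top address bits: 64 - log2(size). */
static unsigned int compute_t1sz(uint64_t va_space_size)
{
	return 64U - (unsigned int)__builtin_ctzll(va_space_size);
}

/*
 * TTBR1_EL1 translates the top of the 64-bit address space, so the region
 * covered by it starts at 2^64 - size. This matches the value the patch
 * writes to VBAR_EL1: UINT64_MAX - (size - 1).
 */
static uint64_t ttbr1_region_base(uint64_t va_space_size)
{
	return UINT64_MAX - (va_space_size - 1ULL);
}

int main(void)
{
	/*
	 * Prints: T1SZ = 39, base = 0xfffffffffe000000. The shim vectors are
	 * mapped at offset 0 of the TTBR1 region, i.e. at the very top of the
	 * 64-bit address space.
	 */
	printf("T1SZ = %u\n", compute_t1sz(SHIM_VA_SPACE_SIZE));
	printf("base = 0x%016llx\n",
	       (unsigned long long)ttbr1_region_base(SHIM_VA_SPACE_SIZE));
	return 0;
}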