Commit fab2a4e3 authored by johpow01, committed by Mark Dykes

feat(RME): Xlat updates and general architectural changes



This patch makes the necessary changes in the Xlat framework as well as
some general architectural changes needed in RME-enabled systems.
Signed-off-by: John Powell <john.powell@arm.com>
Change-Id: I1953de15fc9b8d10a6b2eead100513729f66e2ea
parent c1588782
/*
* Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2021, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
@@ -181,6 +181,11 @@
#define ID_AA64PFR0_CSV2_SHIFT U(56)
#define ID_AA64PFR0_CSV2_MASK ULL(0xf)
#define ID_AA64PFR0_CSV2_LENGTH U(4)
#define ID_AA64PFR0_FEAT_RME_SHIFT U(52)
#define ID_AA64PFR0_FEAT_RME_MASK ULL(0xf)
#define ID_AA64PFR0_FEAT_RME_LENGTH U(4)
#define ID_AA64PFR0_FEAT_RME_NOT_SUPPORTED U(0)
#define ID_AA64PFR0_FEAT_RME_V1 U(1)
/* Exception level handling */
#define EL_IMPL_NONE ULL(0)
@@ -406,7 +411,8 @@
#define CPACR_EL1_FP_TRAP_NONE UL(0x3)
/* SCR definitions */
#define SCR_RES1_BITS ((U(1) << 4) | (U(1) << 5))
#define SCR_RES1_BITS ((U(1) << 5) | (U(1) << 4))
#define SCR_GPF_BIT (UL(1) << 48)
#define SCR_TWEDEL_SHIFT U(30)
#define SCR_TWEDEL_MASK ULL(0xf)
#define SCR_AMVOFFEN_BIT (UL(1) << 35)
@@ -426,6 +432,8 @@
#define SCR_SIF_BIT (UL(1) << 9)
#define SCR_HCE_BIT (UL(1) << 8)
#define SCR_SMD_BIT (UL(1) << 7)
#define SCR_NSE_SHIFT U(62)
#define SCR_NSE_BIT (ULL(1) << SCR_NSE_SHIFT)
#define SCR_EA_BIT (UL(1) << 3)
#define SCR_FIQ_BIT (UL(1) << 2)
#define SCR_IRQ_BIT (UL(1) << 1)
@@ -486,6 +494,7 @@
#define HCR_AMVOFFEN_BIT (ULL(1) << 51)
#define HCR_API_BIT (ULL(1) << 41)
#define HCR_APK_BIT (ULL(1) << 40)
#define HCR_TEA_BIT (ULL(1) << 37)
#define HCR_E2H_BIT (ULL(1) << 34)
#define HCR_TGE_BIT (ULL(1) << 27)
#define HCR_RW_SHIFT U(31)
@@ -501,9 +510,11 @@
/* CNTHCTL_EL2 definitions */
#define CNTHCTL_RESET_VAL U(0x0)
#define EVNTEN_BIT (U(1) << 2)
#define EL1PCEN_BIT (U(1) << 1)
#define EL1PCTEN_BIT (U(1) << 0)
#define CNTHCTL_EL1PTEN_BIT (U(1) << 11)
#define CNTHCTL_EL1PCTEN_BIT (U(1) << 10)
#define CNTHCTL_EVNTEN_BIT (U(1) << 2)
#define CNTHCTL_EL0VCTEN_BIT (U(1) << 1)
#define CNTHCTL_EL0PCTEN_BIT (U(1) << 0)
/* CNTKCTL_EL1 definitions */
#define EL0PTEN_BIT (U(1) << 9)
@@ -527,7 +538,9 @@
#define CPTR_EL2_RES1 ((U(1) << 13) | (U(1) << 12) | (U(0x3ff)))
#define CPTR_EL2_TCPAC_BIT (U(1) << 31)
#define CPTR_EL2_TAM_BIT (U(1) << 30)
#define CPTR_EL2_TTA_BIT (U(1) << 20)
#define CPTR_EL2_TTA_BIT (U(1) << 28)
#define CPTR_EL2_FPEN_DISABLE_EL0 (U(1) << 20)
#define CPTR_EL2_ZEN_DISABLE_EL0 (U(1) << 16)
#define CPTR_EL2_TFP_BIT (U(1) << 10)
#define CPTR_EL2_TZ_BIT (U(1) << 8)
#define CPTR_EL2_RESET_VAL CPTR_EL2_RES1
@@ -557,6 +570,7 @@
#define SPSR_M_MASK U(0x1)
#define SPSR_M_AARCH64 U(0x0)
#define SPSR_M_AARCH32 U(0x1)
#define SPSR_M_EL2H U(0x9)
#define SPSR_EL_SHIFT U(2)
#define SPSR_EL_WIDTH U(2)
@@ -564,6 +578,8 @@
#define SPSR_SSBS_BIT_AARCH64 BIT_64(12)
#define SPSR_SSBS_BIT_AARCH32 BIT_64(23)
#define SPSR_PAN_BIT (U(1) << 22)
#define DISABLE_ALL_EXCEPTIONS \
(DAIF_FIQ_BIT | DAIF_IRQ_BIT | DAIF_ABT_BIT | DAIF_DBG_BIT)
@@ -587,6 +603,7 @@
#define TCR_EL2_RES1 ((ULL(1) << 31) | (ULL(1) << 23))
#define TCR_EL1_IPS_SHIFT U(32)
#define TCR_EL2_PS_SHIFT U(16)
#define TCR_EL2_IPS_SHIFT U(32)
#define TCR_EL3_PS_SHIFT U(16)
#define TCR_TxSZ_MIN ULL(16)
@@ -651,6 +668,11 @@
#define TCR_TG1_4K (ULL(2) << TCR_TG1_SHIFT)
#define TCR_TG1_64K (ULL(3) << TCR_TG1_SHIFT)
#define TCR_AS_BIT (ULL(1) << 36)
#define TCR_HPD0_BIT (ULL(1) << 41)
#define TCR_HPD1_BIT (ULL(1) << 42)
#define TCR_EPD0_BIT (ULL(1) << 7)
#define TCR_EPD1_BIT (ULL(1) << 23)
@@ -1040,6 +1062,90 @@
#define AMEVCNTVOFF1E_EL2 S3_4_C13_C11_6
#define AMEVCNTVOFF1F_EL2 S3_4_C13_C11_7
/*******************************************************************************
* Realm management extension register definitions
******************************************************************************/
/* GPCCR_EL3 definitions */
#define GPCCR_EL3 S3_6_C2_C1_6
/* Least significant address bits protected by each entry in level 0 GPT */
#define GPCCR_L0GPTSZ_SHIFT U(20)
#define GPCCR_L0GPTSZ_MASK U(0xF)
#define GPCCR_L0GPTSZ_30BITS U(0x0)
#define GPCCR_L0GPTSZ_34BITS U(0x4)
#define GPCCR_L0GPTSZ_36BITS U(0x6)
#define GPCCR_L0GPTSZ_39BITS U(0x9)
#define SET_GPCCR_L0GPTSZ(x) \
((x & GPCCR_L0GPTSZ_MASK) << GPCCR_L0GPTSZ_SHIFT)
/* Granule protection check priority bit definitions */
#define GPCCR_GPCP_SHIFT U(17)
#define GPCCR_GPCP_BIT (ULL(1) << GPCCR_GPCP_SHIFT)
/* Granule protection check bit definitions */
#define GPCCR_GPC_SHIFT U(16)
#define GPCCR_GPC_BIT (ULL(1) << GPCCR_GPC_SHIFT)
/* Physical granule size bit definitions */
#define GPCCR_PGS_SHIFT U(14)
#define GPCCR_PGS_MASK U(0x3)
#define GPCCR_PGS_4K U(0x0)
#define GPCCR_PGS_16K U(0x2)
#define GPCCR_PGS_64K U(0x1)
#define SET_GPCCR_PGS(x) \
((x & GPCCR_PGS_MASK) << GPCCR_PGS_SHIFT)
/* GPT fetch shareability attribute bit definitions */
#define GPCCR_SH_SHIFT U(12)
#define GPCCR_SH_MASK U(0x3)
#define GPCCR_SH_NS U(0x0)
#define GPCCR_SH_OS U(0x2)
#define GPCCR_SH_IS U(0x3)
#define SET_GPCCR_SH(x) \
((x & GPCCR_SH_MASK) << GPCCR_SH_SHIFT)
/* GPT fetch outer cacheability attribute bit definitions */
#define GPCCR_ORGN_SHIFT U(10)
#define GPCCR_ORGN_MASK U(0x3)
#define GPCCR_ORGN_NC U(0x0)
#define GPCCR_ORGN_WB_RA_WA U(0x1)
#define GPCCR_ORGN_WT_RA_NWA U(0x2)
#define GPCCR_ORGN_WB_RA_NWA U(0x3)
#define SET_GPCCR_ORGN(x) \
((x & GPCCR_ORGN_MASK) << GPCCR_ORGN_SHIFT)
/* GPT fetch inner cacheability attribute bit definitions */
#define GPCCR_IRGN_SHIFT U(8)
#define GPCCR_IRGN_MASK U(0x3)
#define GPCCR_IRGN_NC U(0x0)
#define GPCCR_IRGN_WB_RA_WA U(0x1)
#define GPCCR_IRGN_WT_RA_NWA U(0x2)
#define GPCCR_IRGN_WB_RA_NWA U(0x3)
#define SET_GPCCR_IRGN(x) \
((x & GPCCR_IRGN_MASK) << GPCCR_IRGN_SHIFT)
/* Protected physical address size bit definitions */
#define GPCCR_PPS_SHIFT U(0)
#define GPCCR_PPS_MASK U(0x7)
#define GPCCR_PPS_4GB U(0x0)
#define GPCCR_PPS_64GB U(0x1)
#define GPCCR_PPS_1TB U(0x2)
#define GPCCR_PPS_4TB U(0x3)
#define GPCCR_PPS_16TB U(0x4)
#define GPCCR_PPS_256TB U(0x5)
#define GPCCR_PPS_4PB U(0x6)
#define SET_GPCCR_PPS(x) \
((x & GPCCR_PPS_MASK) << GPCCR_PPS_SHIFT)
/* GPTBR_EL3 definitions */
#define GPTBR_EL3 S3_6_C2_C1_4
/* Base Address for the GPT bit definitions */
#define GPTBR_BADDR_SHIFT U(0)
#define GPTBR_BADDR_VAL_SHIFT U(12)
#define GPTBR_BADDR_MASK ULL(0xffffffffff)
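Taken together, these GPCCR_EL3 and GPTBR_EL3 fields describe where and how EL3 fetches the Granule Protection Table. A minimal usage sketch follows (not part of this patch): it assumes a 4KB physical granule, a 1TB protected physical address space, inner-shareable write-back GPT fetches, and a hypothetical, suitably aligned level 0 table at l0_base; write_gpccr_el3()/write_gptbr_el3() are the accessors added later in this patch.

static void sketch_enable_gpc(uintptr_t l0_base)
{
        /* GPTBR_EL3.BADDR takes the level 0 table base shifted right by 12. */
        write_gptbr_el3(((uint64_t)l0_base >> GPTBR_BADDR_VAL_SHIFT)
                        << GPTBR_BADDR_SHIFT);

        /* Describe the GPT layout and fetch attributes, then enable checks. */
        write_gpccr_el3(SET_GPCCR_L0GPTSZ(GPCCR_L0GPTSZ_30BITS) |
                        SET_GPCCR_PGS(GPCCR_PGS_4K) |
                        SET_GPCCR_SH(GPCCR_SH_IS) |
                        SET_GPCCR_ORGN(GPCCR_ORGN_WB_RA_WA) |
                        SET_GPCCR_IRGN(GPCCR_IRGN_WB_RA_WA) |
                        SET_GPCCR_PPS(GPCCR_PPS_1TB) |
                        GPCCR_GPC_BIT);
        isb();
}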
/*******************************************************************************
* RAS system registers
******************************************************************************/
......
@@ -105,4 +105,15 @@ static inline unsigned int get_mpam_version(void)
ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}
static inline unsigned int get_armv9_2_feat_rme_support(void)
{
/*
 * Return the RME version, or zero if RME is not implemented. The return
 * value can therefore be used both as the RME version number and as a
 * simple non-zero check for RME presence.
 */
return (unsigned int)(read_id_aa64pfr0_el1() >>
ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}
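A hypothetical wrapper (not part of this patch, assumes <stdbool.h>) shows the zero comparison that the comment above describes:

static inline bool is_feat_rme_present(void)
{
        return get_armv9_2_feat_rme_support() !=
                ID_AA64PFR0_FEAT_RME_NOT_SUPPORTED;
}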
#endif /* ARCH_FEATURES_H */
/*
* Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -535,6 +535,10 @@ DEFINE_SYSREG_READ_FUNC(rndrrs)
/* DynamIQ Shared Unit power management */
DEFINE_RENAME_SYSREG_RW_FUNCS(clusterpwrdn_el1, CLUSTERPWRDN_EL1)
/* Armv9.2 RME Registers */
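/* These expand to read_gptbr_el3()/write_gptbr_el3() and read_gpccr_el3()/write_gpccr_el3() accessors. */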
DEFINE_RENAME_SYSREG_RW_FUNCS(gptbr_el3, GPTBR_EL3)
DEFINE_RENAME_SYSREG_RW_FUNCS(gpccr_el3, GPCCR_EL3)
#define IS_IN_EL(x) \
(GET_EL(read_CurrentEl()) == MODE_EL##x)
@@ -578,7 +582,15 @@ static inline uint64_t el_implemented(unsigned int el)
}
}
/* Previously defined accesor functions with incomplete register names */
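/*
 * This appears to encode TLBI PAALLOS (op1=6, CRn=c8, CRm=c1, op2=4):
 * invalidate GPT information cached in TLBs for all physical addresses,
 * Outer Shareable. The raw SYS form keeps it buildable with assemblers
 * that lack RME mnemonics.
 */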
static inline void tlbipaallos(void)
{
__asm__("SYS #6,c8,c1,#4");
}
void tlbi_by_pa(uint64_t pa);
/* Previously defined accessor functions with incomplete register names */
#define read_current_el() read_CurrentEl()
......
/*
* Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -16,9 +16,16 @@
#include <export/common/ep_info_exp.h>
#define REALM EP_REALM
#define SECURE EP_SECURE
#define NON_SECURE EP_NON_SECURE
#if ENABLE_RME
#define sec_state_is_valid(s) (((s) == SECURE) || \
((s) == NON_SECURE) || \
((s) == REALM))
#else
#define sec_state_is_valid(s) (((s) == SECURE) || ((s) == NON_SECURE))
#endif
#define PARAM_EP_SECURITY_MASK EP_SECURITY_MASK
......
/*
* Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -25,39 +25,64 @@
#endif
/* Security state of the image. */
#if ENABLE_RME
#define EP_SECURITY_MASK UL(0x3)
#else
#define EP_SECURITY_MASK UL(0x1)
#endif
#define EP_SECURITY_SHIFT UL(0)
#define EP_SECURE UL(0x0)
#define EP_NON_SECURE UL(0x1)
#define EP_REALM UL(0x2)
/* Endianness of the image. */
#if ENABLE_RME
#define EP_EE_MASK U(0x4)
#define EP_EE_SHIFT U(2)
#else
#define EP_EE_MASK U(0x2)
#define EP_EE_SHIFT U(1)
#endif
#define EP_EE_LITTLE U(0x0)
#define EP_EE_BIG U(0x2)
#define EP_EE_BIG (U(0x1) << EP_EE_SHIFT)
#define EP_GET_EE(x) ((x) & EP_EE_MASK)
#define EP_SET_EE(x, ee) ((x) = ((x) & ~EP_EE_MASK) | (ee))
/* Enable or disable access to the secure timer from secure images. */
#if ENABLE_RME
#define EP_ST_MASK U(0x8)
#define EP_ST_SHIFT U(3)
#else
#define EP_ST_MASK U(0x4)
#define EP_ST_SHIFT U(2)
#endif
#define EP_ST_DISABLE U(0x0)
#define EP_ST_ENABLE U(0x4)
#define EP_ST_ENABLE (U(0x1) << EP_ST_SHIFT)
#define EP_GET_ST(x) ((x) & EP_ST_MASK)
#define EP_SET_ST(x, ee) ((x) = ((x) & ~EP_ST_MASK) | (ee))
/* Determine if an image is executable or not. */
#if ENABLE_RME
#define EP_EXE_MASK U(0x10)
#define EP_EXE_SHIFT U(4)
#else
#define EP_EXE_MASK U(0x8)
#define EP_EXE_SHIFT U(3)
#endif
#define EP_NON_EXECUTABLE U(0x0)
#define EP_EXECUTABLE U(0x8)
#define EP_EXECUTABLE (U(0x1) << EP_EXE_SHIFT)
#define EP_GET_EXE(x) ((x) & EP_EXE_MASK)
#define EP_SET_EXE(x, ee) ((x) = ((x) & ~EP_EXE_MASK) | (ee))
/* Flag to indicate the first image that is executed. */
#if ENABLE_RME
#define EP_FIRST_EXE_MASK U(0x20)
#define EP_FIRST_EXE_SHIFT U(5)
#else
#define EP_FIRST_EXE_MASK U(0x10)
#define EP_FIRST_EXE_SHIFT U(4)
#define EP_FIRST_EXE U(0x10)
#endif
#define EP_FIRST_EXE (U(0x1) << EP_FIRST_EXE_SHIFT)
#define EP_GET_FIRST_EXE(x) ((x) & EP_FIRST_EXE_MASK)
#define EP_SET_FIRST_EXE(x, ee) ((x) = ((x) & ~EP_FIRST_EXE_MASK) | (ee))
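The net effect of the #if blocks above is that, with ENABLE_RME, the two-bit security field pushes every later flag up by one bit position. A hypothetical helper (not part of this patch) that fills the attributes of a Realm image entry point illustrates the packing:

static void sketch_set_realm_ep_attr(entry_point_info_t *ep)
{
        ep->h.attr = EP_REALM;                      /* bits [1:0]: security state */
        EP_SET_EE(ep->h.attr, EP_EE_LITTLE);        /* bit 2 (bit 1 without RME) */
        EP_SET_ST(ep->h.attr, EP_ST_DISABLE);       /* bit 3 (bit 2 without RME) */
        EP_SET_EXE(ep->h.attr, EP_EXECUTABLE);      /* bit 4 (bit 3 without RME) */
        EP_SET_FIRST_EXE(ep->h.attr, EP_FIRST_EXE); /* bit 5 (bit 4 without RME) */
}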
......
/*
* Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -141,7 +141,8 @@
#define AP_ACCESS_UNPRIVILEGED (AP1_ACCESS_UNPRIVILEGED << 4)
#define AP_NO_ACCESS_UNPRIVILEGED (AP1_NO_ACCESS_UNPRIVILEGED << 4)
#define AP_ONE_VA_RANGE_RES1 (AP1_RES1 << 4)
#define NS (U(0x1) << 3)
#define NS (U(0x1) << 3) /* Bit[5] of the descriptor (absolute position) */
#define EL3_S1_NSE (U(0x1) << 9) /* Bit[11] of the descriptor (absolute position) */
#define ATTR_NON_CACHEABLE_INDEX ULL(0x2)
#define ATTR_DEVICE_INDEX ULL(0x1)
#define ATTR_IWBWA_OWBWA_NTR_INDEX ULL(0x0)
......
@@ -60,8 +60,6 @@
#define MT_TYPE(_attr) ((_attr) & MT_TYPE_MASK)
/* Access permissions (RO/RW) */
#define MT_PERM_SHIFT U(3)
/* Security state (SECURE/NS) */
#define MT_SEC_SHIFT U(4)
/* Access permissions for instruction execution (EXECUTE/EXECUTE_NEVER) */
#define MT_EXECUTE_SHIFT U(5)
/* In the EL1&0 translation regime, User (EL0) or Privileged (EL1). */
@@ -71,6 +69,17 @@
#define MT_SHAREABILITY_SHIFT U(7)
#define MT_SHAREABILITY_MASK (U(3) << MT_SHAREABILITY_SHIFT)
#define MT_SHAREABILITY(_attr) ((_attr) & MT_SHAREABILITY_MASK)
#if ENABLE_RME
/* Physical address space (SECURE/NS/Root/Realm) */
#define MT_PAS_SHIFT U(9)
#define MT_PAS_MASK (U(3) << MT_PAS_SHIFT)
#define MT_PAS(_attr) ((_attr) & MT_PAS_MASK)
#else
/* Security state (SECURE/NS) */
#define MT_SEC_SHIFT U(4)
#endif
/* All other bits are reserved */
/*
@@ -91,8 +100,15 @@
#define MT_RO (U(0) << MT_PERM_SHIFT)
#define MT_RW (U(1) << MT_PERM_SHIFT)
#if ENABLE_RME
#define MT_SECURE (U(0) << MT_PAS_SHIFT)
#define MT_NS (U(1) << MT_PAS_SHIFT)
#define MT_ROOT (U(2) << MT_PAS_SHIFT)
#define MT_REALM (U(3) << MT_PAS_SHIFT)
#else
#define MT_SECURE (U(0) << MT_SEC_SHIFT)
#define MT_NS (U(1) << MT_SEC_SHIFT)
#endif
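Because MT_PAS() feeds straight into the mmap attribute word, a region can select the root or realm PAS the same way it previously selected MT_NS. A brief sketch assuming the xlat_tables_v2 mmap API and hypothetical PLAT_ROOT_BASE/PLAT_ROOT_SIZE constants (not part of this patch):

static const mmap_region_t plat_rme_mmap[] = {
        /* Root-world firmware region: normal cacheable memory, read/write. */
        MAP_REGION_FLAT(PLAT_ROOT_BASE, PLAT_ROOT_SIZE,
                        MT_MEMORY | MT_RW | MT_ROOT),
        {0}
};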
/*
* Access permissions for instruction execution are only relevant for normal
......
/*
* Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -53,6 +53,34 @@ size_t xlat_arch_get_max_supported_granule_size(void)
}
}
#if ENABLE_RME
/*
* Determine the physical address space encoded in the 'attr' parameter.
*
* This function is only used when ENABLE_RME is true, in which case the
* physical address space is one of four: secure, non-secure, root, or realm.
*/
uint32_t xlat_arch_get_pas(uint32_t attr)
{
/* RME must be implemented. */
assert(get_armv9_2_feat_rme_support() !=
ID_AA64PFR0_FEAT_RME_NOT_SUPPORTED);
uint32_t pas = MT_PAS(attr);
switch (pas) {
case MT_SECURE:
return 0U;
case MT_NS:
return LOWER_ATTRS(NS);
case MT_ROOT:
return LOWER_ATTRS(EL3_S1_NSE);
default: /* MT_REALM */
return LOWER_ATTRS(EL3_S1_NSE | NS);
}
}
#endif
unsigned long long tcr_physical_addr_size_bits(unsigned long long max_addr)
{
/* Physical address can't exceed 48 bits */
......
/*
* Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -125,11 +125,19 @@ uint64_t xlat_desc(const xlat_ctx_t *ctx, uint32_t attr,
* faults aren't managed.
*/
desc |= LOWER_ATTRS(ACCESS_FLAG);
#if ENABLE_RME
/* Determine the physical address space this region belongs to. */
desc |= xlat_arch_get_pas(attr);
#else
/* Check if secure or nonsecure. */
desc |= ((attr & MT_NS) != 0U) ? LOWER_ATTRS(NS) : 0U;
#endif
/*
* Deduce other fields of the descriptor based on the MT_NS and MT_RW
* memory region attributes.
* Deduce other fields of the descriptor based on the MT_RW memory
* region attributes.
*/
desc |= ((attr & MT_NS) != 0U) ? LOWER_ATTRS(NS) : 0U;
desc |= ((attr & MT_RW) != 0U) ? LOWER_ATTRS(AP_RW) : LOWER_ATTRS(AP_RO);
/*
......
/*
* Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -40,6 +40,11 @@
extern uint64_t mmu_cfg_params[MMU_CFG_PARAM_MAX];
#if ENABLE_RME
/* Determine the physical address space encoded in the 'attr' parameter. */
uint32_t xlat_arch_get_pas(uint32_t attr);
#endif
/*
* Return the execute-never mask that will prevent instruction fetch at the
* given translation regime.
......
/*
* Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -95,7 +95,23 @@ static void xlat_desc_print(const xlat_ctx_t *ctx, uint64_t desc)
? "-USER" : "-PRIV");
}
#if ENABLE_RME
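/*
 * For the EL3 stage 1 regime with RME, descriptor bits NSE (bit 11) and
 * NS (bit 5) together select the PAS: 00 Secure, 01 Non-secure, 10 Root,
 * 11 Realm.
 */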
switch (desc & LOWER_ATTRS(EL3_S1_NSE | NS)) {
case 0ULL:
printf("-S");
break;
case LOWER_ATTRS(NS):
printf("-N");
break;
case LOWER_ATTRS(EL3_S1_NSE):
printf("-RT");
break;
default: /* LOWER_ATTRS(EL3_S1_NSE | NS) */
printf("-RL");
}
#else
printf(((LOWER_ATTRS(NS) & desc) != 0ULL) ? "-NS" : "-S");
#endif
#ifdef __aarch64__
/* Check Guarded Page bit */
......