Commit e290a8fc authored by Alexei Fedorov

AArch64: Disable Secure Cycle Counter



This patch fixes an issue where Secure world timing information
could be leaked because the Secure Cycle Counter was not disabled.
For ARMv8.5, the counter is disabled by setting the MDCR_EL3.SCCD
bit on CPU cold/warm boot.
For earlier architectures, the PMCR_EL0 register is saved/restored
on Secure world entry/exit from/to the Non-secure state, and cycle
counting is disabled by setting the PMCR_EL0.DP bit.
The 'include/aarch64/arch.h' header file was tidied up and new
ARMv8.5-PMU related definitions were added.

Change-Id: I6f56db6bc77504634a352388990ad925a69ebbfa
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
parent 5119fa7b
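Taken together, the new entry/exit hooks and the boot-time MDCR_EL3 setting implement the following flow. Below is a minimal, runnable C sketch of that logic for illustration only, not TF-A code: the fake register variables stand in for `mrs`/`msr` accesses and `ctx_pmcr_el0` stands in for the CTX_PMCR_EL0 slot in the EL3 context shown in the diff.

```c
#include <stdint.h>
#include <stdio.h>

#define MDCR_SCCD_BIT   (1ULL << 23) /* MDCR_EL3: Secure Cycle Counter Disable (ARMv8.5) */
#define SCR_NS_BIT      (1ULL << 0)  /* SCR_EL3: caller came from Non-secure state */
#define PMCR_EL0_DP_BIT (1ULL << 5)  /* PMCR_EL0: cycle counter stops when counting is prohibited */

/* Fake system registers so the sketch runs standalone. */
static uint64_t mdcr_el3, scr_el3, pmcr_el0;
static uint64_t ctx_pmcr_el0; /* stands in for the CTX_PMCR_EL0 context slot */

/* Secure world entry: mirrors save_pmcr_disable_pmu in the diff below. */
static void save_pmcr_disable_pmu(void)
{
	if (mdcr_el3 & MDCR_SCCD_BIT)
		return;                  /* ARMv8.5-PMU: hardware already stops the counter */
	if (scr_el3 & SCR_NS_BIT)
		ctx_pmcr_el0 = pmcr_el0; /* save caller's PMCR_EL0 for restore on exit */
	pmcr_el0 |= PMCR_EL0_DP_BIT;     /* stop cycle counting while in the Secure world */
}

/* Exit to Non-secure: mirrors the new code in el3_exit. */
static void restore_pmcr_on_exit(void)
{
	if ((scr_el3 & SCR_NS_BIT) && !(mdcr_el3 & MDCR_SCCD_BIT))
		pmcr_el0 = ctx_pmcr_el0;
}

int main(void)
{
	scr_el3 = SCR_NS_BIT; /* SMC arrives from the Non-secure world */
	pmcr_el0 = 0x41;      /* arbitrary caller value */
	save_pmcr_disable_pmu();
	printf("in Secure world:    PMCR_EL0 = 0x%llx\n", (unsigned long long)pmcr_el0); /* 0x61 */
	restore_pmcr_on_exit();
	printf("back in Non-secure: PMCR_EL0 = 0x%llx\n", (unsigned long long)pmcr_el0); /* 0x41 */
	return 0;
}
```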
 /*
- * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -223,6 +223,14 @@ smc_handler:
 	 */
 	bl	save_gp_registers

+	/* -----------------------------------------------------
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 * -----------------------------------------------------
+	 */
+	bl	save_pmcr_disable_pmu
+
 	/* -----------------------------------------------------
 	 * Populate the parameters for the SMC handler. We
 	 * already have x0-x4 in place. x5 will point to a
......
@@ -68,6 +68,13 @@ func enter_lower_el_sync_ea
 	/* Save GP registers */
 	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
 	/* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
 	bl	pauth_context_save
@@ -106,6 +113,13 @@ func enter_lower_el_async_ea
 	/* Save GP registers */
 	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
 	/* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
 	bl	pauth_context_save
......
@@ -67,6 +67,14 @@
 	/* Save GP registers and restore them afterwards */
 	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
 	bl	handle_lower_el_ea_esb
 	bl	restore_gp_registers
@@ -123,6 +131,13 @@
 	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
 	/* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
 	bl	pauth_context_save
@@ -335,6 +350,13 @@ smc_handler64:
 	/* Save general purpose registers */
 	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
 	/* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
 	bl	pauth_context_save
......
@@ -132,12 +132,13 @@
 #define ID_AA64PFR0_EL2_SHIFT	U(8)
 #define ID_AA64PFR0_EL3_SHIFT	U(12)
 #define ID_AA64PFR0_AMU_SHIFT	U(44)
-#define ID_AA64PFR0_AMU_LENGTH	U(4)
 #define ID_AA64PFR0_AMU_MASK	ULL(0xf)
 #define ID_AA64PFR0_ELX_MASK	ULL(0xf)
+#define ID_AA64PFR0_GIC_SHIFT	U(24)
+#define ID_AA64PFR0_GIC_WIDTH	U(4)
+#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
 #define ID_AA64PFR0_SVE_SHIFT	U(32)
 #define ID_AA64PFR0_SVE_MASK	ULL(0xf)
-#define ID_AA64PFR0_SVE_LENGTH	U(4)
 #define ID_AA64PFR0_MPAM_SHIFT	U(40)
 #define ID_AA64PFR0_MPAM_MASK	ULL(0xf)
 #define ID_AA64PFR0_DIT_SHIFT	U(48)
@@ -148,18 +149,14 @@
 #define ID_AA64PFR0_CSV2_MASK	ULL(0xf)
 #define ID_AA64PFR0_CSV2_LENGTH	U(4)

-/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
-#define ID_AA64DFR0_PMS_SHIFT	U(32)
-#define ID_AA64DFR0_PMS_LENGTH	U(4)
-#define ID_AA64DFR0_PMS_MASK	ULL(0xf)
-
+/* Exception level handling */
 #define EL_IMPL_NONE	ULL(0)
 #define EL_IMPL_A64ONLY	ULL(1)
 #define EL_IMPL_A64_A32	ULL(2)

-#define ID_AA64PFR0_GIC_SHIFT	U(24)
-#define ID_AA64PFR0_GIC_WIDTH	U(4)
-#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
+/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
+#define ID_AA64DFR0_PMS_SHIFT	U(32)
+#define ID_AA64DFR0_PMS_MASK	ULL(0xf)

 /* ID_AA64ISAR1_EL1 definitions */
 #define ID_AA64ISAR1_EL1	S3_0_C0_C6_1
@@ -304,20 +301,25 @@
 #define SCR_RESET_VAL	SCR_RES1_BITS

 /* MDCR_EL3 definitions */
+#define MDCR_SCCD_BIT	(ULL(1) << 23)
+#define MDCR_SPME_BIT	(ULL(1) << 17)
+#define MDCR_SDD_BIT	(ULL(1) << 16)
 #define MDCR_SPD32(x)	((x) << 14)
 #define MDCR_SPD32_LEGACY	ULL(0x0)
 #define MDCR_SPD32_DISABLE	ULL(0x2)
 #define MDCR_SPD32_ENABLE	ULL(0x3)
-#define MDCR_SDD_BIT	(ULL(1) << 16)
 #define MDCR_NSPB(x)	((x) << 12)
 #define MDCR_NSPB_EL1	ULL(0x3)
 #define MDCR_TDOSA_BIT	(ULL(1) << 10)
 #define MDCR_TDA_BIT	(ULL(1) << 9)
 #define MDCR_TPM_BIT	(ULL(1) << 6)
-#define MDCR_SCCD_BIT	(ULL(1) << 23)
 #define MDCR_EL3_RESET_VAL	ULL(0x0)

 /* MDCR_EL2 definitions */
+#define MDCR_EL2_HLP	(U(1) << 26)
+#define MDCR_EL2_HCCD	(U(1) << 23)
+#define MDCR_EL2_TTRF	(U(1) << 19)
+#define MDCR_EL2_HPMD	(U(1) << 17)
 #define MDCR_EL2_TPMS	(U(1) << 14)
 #define MDCR_EL2_E2PB(x)	((x) << 12)
 #define MDCR_EL2_E2PB_EL1	U(0x3)
@@ -677,10 +679,14 @@
 #define PMCR_EL0_N_SHIFT	U(11)
 #define PMCR_EL0_N_MASK	U(0x1f)
 #define PMCR_EL0_N_BITS	(PMCR_EL0_N_MASK << PMCR_EL0_N_SHIFT)
+#define PMCR_EL0_LP_BIT	(U(1) << 7)
 #define PMCR_EL0_LC_BIT	(U(1) << 6)
 #define PMCR_EL0_DP_BIT	(U(1) << 5)
 #define PMCR_EL0_X_BIT	(U(1) << 4)
 #define PMCR_EL0_D_BIT	(U(1) << 3)
+#define PMCR_EL0_C_BIT	(U(1) << 2)
+#define PMCR_EL0_P_BIT	(U(1) << 1)
+#define PMCR_EL0_E_BIT	(U(1) << 0)

 /*******************************************************************************
  * Definitions for system register interface to SVE
......
@@ -116,11 +116,41 @@
 	 * ---------------------------------------------------------------------
 	 */
 	mov_imm	x0, ((MDCR_EL3_RESET_VAL | MDCR_SDD_BIT | \
-		      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) \
-		      & ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
+		      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) & \
+		      ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
 	msr	mdcr_el3, x0

+	/* ---------------------------------------------------------------------
+	 * Initialise PMCR_EL0 setting all fields rather than relying on hw.
+	 * Some fields are architecturally UNKNOWN on reset.
+	 *
+	 * PMCR_EL0.LP: Set to one so that event counter overflow, which is
+	 * recorded in PMOVSCLR_EL0[0-30], occurs on the increment that
+	 * changes PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU is
+	 * implemented. This bit is RES0 in versions of the architecture
+	 * earlier than ARMv8.5, so setting it to 1 has no effect on them.
+	 *
+	 * PMCR_EL0.LC: Set to one so that cycle counter overflow, which is
+	 * recorded in PMOVSCLR_EL0[31], occurs on the increment that changes
+	 * PMCCNTR_EL0[63] from 1 to 0.
+	 *
+	 * PMCR_EL0.DP: Set to one so that the cycle counter, PMCCNTR_EL0,
+	 * does not count when event counting is prohibited.
+	 *
+	 * PMCR_EL0.X: Set to zero to disable export of events.
+	 *
+	 * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0 counts
+	 * on every clock cycle.
+	 * ---------------------------------------------------------------------
+	 */
+	mov_imm	x0, ((PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT | \
+		      PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) & \
+		      ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT))
+	msr	pmcr_el0, x0
+
 	/* ---------------------------------------------------------------------
 	 * Enable External Aborts and SError Interrupts now that the exception
 	 * vectors have been setup.
......
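As a quick cross-check of the two `mov_imm` immediates in the hunk above, the same expressions can be evaluated on the host. This sketch assumes `PMCR_EL0_RESET_VAL` is 0 (its definition is not shown in this diff); the MDCR_EL3 constants are taken from the arch.h hunks above.

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define MDCR_EL3_RESET_VAL 0x0ULL
#define MDCR_SCCD_BIT      (1ULL << 23)
#define MDCR_SDD_BIT       (1ULL << 16)
#define MDCR_SPD32(x)      ((x) << 14)
#define MDCR_SPD32_DISABLE 0x2ULL
#define MDCR_TDOSA_BIT     (1ULL << 10)
#define MDCR_TDA_BIT       (1ULL << 9)
#define MDCR_TPM_BIT       (1ULL << 6)

#define PMCR_EL0_RESET_VAL 0x0U /* assumed; not shown in this diff */
#define PMCR_EL0_LP_BIT    (1U << 7)
#define PMCR_EL0_LC_BIT    (1U << 6)
#define PMCR_EL0_DP_BIT    (1U << 5)
#define PMCR_EL0_X_BIT     (1U << 4)
#define PMCR_EL0_D_BIT     (1U << 3)

int main(void)
{
	/* Same expressions as the two mov_imm instructions above. */
	uint64_t mdcr = (MDCR_EL3_RESET_VAL | MDCR_SDD_BIT |
	                 MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) &
	                ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT);
	uint32_t pmcr = (PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT |
	                 PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) &
	                ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT);

	printf("MDCR_EL3 = 0x%llx\n", (unsigned long long)mdcr); /* 0x818000 */
	printf("PMCR_EL0 = 0x%x\n", pmcr);                       /* 0xe0: LP|LC|DP set */
	assert(mdcr == 0x818000ULL && pmcr == 0xe0U);
	return 0;
}
```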
@@ -59,7 +59,7 @@
 #define CTX_RUNTIME_SP	U(0x10)
 #define CTX_SPSR_EL3	U(0x18)
 #define CTX_ELR_EL3	U(0x20)
-#define CTX_UNUSED	U(0x28)
+#define CTX_PMCR_EL0	U(0x28)
 #define CTX_EL3STATE_END	U(0x30)
@@ -91,22 +91,21 @@
 #define CTX_AFSR1_EL1	U(0x98)
 #define CTX_CONTEXTIDR_EL1	U(0xa0)
 #define CTX_VBAR_EL1	U(0xa8)
-#define CTX_PMCR_EL0	U(0xb0)

 /*
  * If the platform is AArch64-only, there is no need to save and restore these
  * AArch32 registers.
  */
 #if CTX_INCLUDE_AARCH32_REGS
-#define CTX_SPSR_ABT	U(0xc0)	/* Align to the next 16 byte boundary */
-#define CTX_SPSR_UND	U(0xc8)
-#define CTX_SPSR_IRQ	U(0xd0)
-#define CTX_SPSR_FIQ	U(0xd8)
-#define CTX_DACR32_EL2	U(0xe0)
-#define CTX_IFSR32_EL2	U(0xe8)
-#define CTX_AARCH32_END	U(0xf0) /* Align to the next 16 byte boundary */
+#define CTX_SPSR_ABT	U(0xb0)	/* Align to the next 16 byte boundary */
+#define CTX_SPSR_UND	U(0xb8)
+#define CTX_SPSR_IRQ	U(0xc0)
+#define CTX_SPSR_FIQ	U(0xc8)
+#define CTX_DACR32_EL2	U(0xd0)
+#define CTX_IFSR32_EL2	U(0xd8)
+#define CTX_AARCH32_END	U(0xe0) /* Align to the next 16 byte boundary */
 #else
-#define CTX_AARCH32_END	U(0xc0) /* Align to the next 16 byte boundary */
+#define CTX_AARCH32_END	U(0xb0) /* Align to the next 16 byte boundary */
 #endif /* CTX_INCLUDE_AARCH32_REGS */

 /*
......
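The offset shuffle above reuses the previously unused 0x28 slot in 'el3state' for PMCR_EL0 and moves the optional AArch32 block down by 0x10, preserving 16-byte alignment. A standalone sanity check of the new values (a sketch using only offsets visible in the diff, for the CTX_INCLUDE_AARCH32_REGS case):

```c
#include <assert.h>

/* New offsets from the diff above (CTX_INCLUDE_AARCH32_REGS=1 case). */
#define CTX_PMCR_EL0     0x28U /* replaces CTX_UNUSED in el3state */
#define CTX_EL3STATE_END 0x30U
#define CTX_SPSR_ABT     0xb0U /* AArch32 block now starts 0x10 earlier */
#define CTX_AARCH32_END  0xe0U

static_assert(CTX_PMCR_EL0 < CTX_EL3STATE_END, "PMCR_EL0 must fit in el3state");
static_assert((CTX_SPSR_ABT % 16U) == 0U, "AArch32 block starts 16-byte aligned");
static_assert((CTX_AARCH32_END % 16U) == 0U, "context area ends 16-byte aligned");

int main(void) { return 0; }
```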
@@ -24,8 +24,44 @@
 	.global	save_gp_registers
 	.global	restore_gp_registers
 	.global	restore_gp_registers_eret
+	.global	save_pmcr_disable_pmu
 	.global	el3_exit

+	/* -----------------------------------------------------
+	 * If ARMv8.5-PMU is implemented, cycle counting at EL3
+	 * is already disabled by the boot-time setting of
+	 * MDCR_EL3.SCCD to 1. Otherwise, save PMCR_EL0 and
+	 * disable all event counters and the cycle counter.
+	 * -----------------------------------------------------
+	 */
+func save_pmcr_disable_pmu
+	/* -----------------------------------------------------
+	 * Check if the earlier initialisation of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not
+	 * implemented and PMCR_EL0 should be saved in the
+	 * non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x9, mdcr_el3
+	tst	x9, #MDCR_SCCD_BIT
+	bne	1f
+
+	/* Secure Cycle Counter is not disabled */
+	mrs	x9, pmcr_el0
+
+	/* Check the caller's security state */
+	mrs	x10, scr_el3
+	tst	x10, #SCR_NS_BIT
+	beq	2f
+
+	/* Save PMCR_EL0 if called from Non-secure state */
+	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+
+	/* Disable cycle counter when event counting is prohibited */
+2:	orr	x9, x9, #PMCR_EL0_DP_BIT
+	msr	pmcr_el0, x9
+	isb
+1:	ret
+endfunc save_pmcr_disable_pmu
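The "check if the earlier initialisation failed" idiom works because MDCR_EL3.SCCD is RES0 on cores older than ARMv8.5: the unconditional boot-time write simply does not stick, so a read-back distinguishes the two cases. A small runnable model of that detection (the writable-mask register model is an illustrative assumption, not TF-A code):

```c
#include <stdint.h>
#include <stdio.h>

#define MDCR_SCCD_BIT (1ULL << 23)

/* Simulated MDCR_EL3: on a pre-v8.5 core the SCCD bit is RES0, so
 * hardware ignores writes to it (modelled here by writable_mask). */
static uint64_t mdcr_el3;
static uint64_t writable_mask = ~MDCR_SCCD_BIT; /* flip to ~0ULL for a v8.5 core */

static void write_mdcr_el3(uint64_t val) { mdcr_el3 = val & writable_mask; }

int main(void)
{
	write_mdcr_el3(MDCR_SCCD_BIT); /* boot code sets SCCD unconditionally */
	/* save_pmcr_disable_pmu's read-back check: bit clear means no
	 * ARMv8.5-PMU, so fall back to saving PMCR_EL0 and setting DP. */
	printf("ARMv8.5-PMU %s\n",
	       (mdcr_el3 & MDCR_SCCD_BIT) ? "implemented" : "not implemented");
	return 0;
}
```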
 	/* -----------------------------------------------------
 	 * The following function strictly follows the AArch64
 	 * PCS to use x9-x17 (temporary caller-saved registers)
@@ -80,9 +116,6 @@ func el1_sysregs_context_save
 	mrs	x9, vbar_el1
 	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

-	mrs	x10, pmcr_el0
-	str	x10, [x0, #CTX_PMCR_EL0]
-
 	/* Save AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
 	mrs	x11, spsr_abt
@@ -169,9 +202,6 @@ func el1_sysregs_context_restore
 	msr	contextidr_el1, x17
 	msr	vbar_el1, x9

-	ldr	x10, [x0, #CTX_PMCR_EL0]
-	msr	pmcr_el0, x10
-
 	/* Restore AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
 	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
@@ -503,6 +533,29 @@ func el3_exit
 	msr	spsr_el3, x16
 	msr	elr_el3, x17

+	/* -----------------------------------------------------
+	 * Restore PMCR_EL0 when returning to Non-secure state
+	 * if the Secure Cycle Counter was not disabled in
+	 * MDCR_EL3 (i.e. ARMv8.5-PMU is not implemented).
+	 * -----------------------------------------------------
+	 */
+	tst	x18, #SCR_NS_BIT
+	beq	2f
+
+	/* -----------------------------------------------------
+	 * Back to Non-secure state.
+	 * Check if the earlier initialisation of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not
+	 * implemented and PMCR_EL0 should be restored from the
+	 * non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x17, mdcr_el3
+	tst	x17, #MDCR_SCCD_BIT
+	bne	2f
+	ldr	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+	msr	pmcr_el0, x17
+2:
+
 #if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
 	/* Restore mitigation state as it was on entry to EL3 */
 	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
......
@@ -66,7 +66,7 @@ void __init cm_init(void)
 void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
 {
 	unsigned int security_state;
-	uint32_t scr_el3, pmcr_el0;
+	uint32_t scr_el3;
 	el3_state_t *state;
 	gp_regs_t *gp_regs;
 	unsigned long sctlr_elx, actlr_elx;
@@ -225,31 +225,10 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
 	actlr_elx = read_actlr_el1();
 	write_ctx_reg((get_sysregs_ctx(ctx)), (CTX_ACTLR_EL1), (actlr_elx));

-	if (security_state == SECURE) {
-		/*
-		 * Initialise PMCR_EL0 for secure context only, setting all
-		 * fields rather than relying on hw. Some fields are
-		 * architecturally UNKNOWN on reset.
-		 *
-		 * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
-		 * is recorded in PMOVSCLR_EL0[31], occurs on the increment
-		 * that changes PMCCNTR_EL0[63] from 1 to 0.
-		 *
-		 * PMCR_EL0.DP: Set to one so that the cycle counter,
-		 * PMCCNTR_EL0 does not count when event counting is prohibited.
-		 *
-		 * PMCR_EL0.X: Set to zero to disable export of events.
-		 *
-		 * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
-		 * counts on every clock cycle.
-		 */
-		pmcr_el0 = ((PMCR_EL0_RESET_VAL | PMCR_EL0_LC_BIT
-				| PMCR_EL0_DP_BIT)
-				& ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT));
-		write_ctx_reg(get_sysregs_ctx(ctx), CTX_PMCR_EL0, pmcr_el0);
-	}
-
-	/* Populate EL3 state so that we've the right context before doing ERET */
+	/*
+	 * Populate EL3 state so that we've the right context
+	 * before doing ERET
+	 */
 	state = get_el3state_ctx(ctx);
 	write_ctx_reg(state, CTX_SCR_EL3, scr_el3);
 	write_ctx_reg(state, CTX_ELR_EL3, ep->pc);
@@ -441,6 +420,29 @@ void cm_prepare_el3_exit(uint32_t security_state)
 			 * relying on hw. Some fields are architecturally
 			 * UNKNOWN on reset.
 			 *
+			 * MDCR_EL2.HLP: Set to one so that event counter
+			 * overflow, which is recorded in PMOVSCLR_EL0[0-30],
+			 * occurs on the increment that changes
+			 * PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU
+			 * is implemented. This bit is RES0 in versions of
+			 * the architecture earlier than ARMv8.5, so setting
+			 * it to 1 has no effect on them.
+			 *
+			 * MDCR_EL2.TTRF: Set to zero so that access to the
+			 * Trace Filter Control register TRFCR_EL1 at EL1 is
+			 * not trapped to EL2. This bit is RES0 in versions
+			 * of the architecture earlier than ARMv8.4.
+			 *
+			 * MDCR_EL2.HPMD: Set to one so that event counting
+			 * is prohibited at EL2. This bit is RES0 in versions
+			 * of the architecture earlier than ARMv8.1, so
+			 * setting it to 1 has no effect on them.
+			 *
+			 * MDCR_EL2.TPMS: Set to zero so that accesses to
+			 * Statistical Profiling control registers from EL1
+			 * do not trap to EL2. This bit is RES0 when SPE is
+			 * not implemented.
+			 *
 			 * MDCR_EL2.TDRA: Set to zero so that Non-secure EL0
 			 * and EL1 System register accesses to the Debug ROM
 			 * registers are not trapped to EL2.
@@ -469,13 +471,15 @@
 			 * MDCR_EL2.HPMN: Set to value of PMCR_EL0.N which is
 			 * the architecturally-defined reset value.
 			 */
-			mdcr_el2 = ((MDCR_EL2_RESET_VAL |
+			mdcr_el2 = ((MDCR_EL2_RESET_VAL | MDCR_EL2_HLP |
+					MDCR_EL2_HPMD) |
 					((read_pmcr_el0() & PMCR_EL0_N_BITS)
 					>> PMCR_EL0_N_SHIFT)) &
-					~(MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT
-					| MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT
-					| MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT
-					| MDCR_EL2_TPMCR_BIT));
+					~(MDCR_EL2_TTRF | MDCR_EL2_TPMS |
+					MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT |
+					MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT |
+					MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT |
+					MDCR_EL2_TPMCR_BIT);
 			write_mdcr_el2(mdcr_el2);
......
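The resulting MDCR_EL2 value can likewise be checked on the host (a sketch assuming `MDCR_EL2_RESET_VAL` is 0 and an illustrative PMCR_EL0.N of 6 event counters; under that assumption the trap bits cleared by the `& ~(...)` term are already 0 and are omitted):

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define MDCR_EL2_HLP       (1U << 26)
#define MDCR_EL2_HPMD      (1U << 17)
#define MDCR_EL2_RESET_VAL 0x0U /* assumed */

int main(void)
{
	uint32_t pmcr_n = 6U; /* illustrative PMCR_EL0.N: six event counters */

	/* HLP and HPMD set, HPMN = PMCR_EL0.N, every trap bit clear. */
	uint32_t mdcr_el2 = MDCR_EL2_RESET_VAL | MDCR_EL2_HLP |
	                    MDCR_EL2_HPMD | pmcr_n;

	printf("MDCR_EL2 = 0x%x\n", mdcr_el2); /* 0x4020006 */
	assert(mdcr_el2 == 0x4020006U);
	return 0;
}
```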