adam.huang / Arm Trusted Firmware / Commits

Commit 30560911
Authored Aug 23, 2019 by Paul Beesley; committed by TrustedFirmware Code Review, Aug 23, 2019

Merge "AArch64: Disable Secure Cycle Counter" into integration

Parents: 44e8d5eb, e290a8fc
Changes: 8 files
bl1/aarch64/bl1_exceptions.S
/*
- * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
...
...
@@ -223,6 +223,14 @@ smc_handler:
	 */
	bl	save_gp_registers

+	/* -----------------------------------------------------
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 * -----------------------------------------------------
+	 */
+	bl	save_pmcr_disable_pmu
+
	/* -----------------------------------------------------
	 * Populate the parameters for the SMC handler. We
	 * already have x0-x4 in place. x5 will point to a
...
...
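The same hook is inserted at every EL3 entry path touched by this change (BL1 SMC handling, BL31 exception entry, and the BL31 SMC handlers below). The routine it calls, save_pmcr_disable_pmu, is defined in lib/el3_runtime/aarch64/context.S further down. As a rough, host-side C sketch of the logic the comment describes (arch.h bit constants inlined; the read_/write_ sysreg accessors are assumed, in the style TF-A generates with DEFINE_SYSREG_RW_FUNCS):

#include <stdint.h>

/* Bit positions from include/arch/aarch64/arch.h */
#define MDCR_SCCD_BIT   (1ULL << 23)
#define SCR_NS_BIT      (1ULL << 0)
#define PMCR_EL0_DP_BIT (1ULL << 5)

/* Assumed sysreg accessors; declarations only, for the sketch. */
uint64_t read_mdcr_el3(void);
uint64_t read_scr_el3(void);
uint64_t read_pmcr_el0(void);
void write_pmcr_el0(uint64_t val);

/* Sketch only: mirrors what save_pmcr_disable_pmu does on EL3 entry. */
static void save_pmcr_disable_pmu_sketch(uint64_t *ns_ctx_pmcr_slot)
{
	/* MDCR_EL3.SCCD == 1 means ARMv8.5-PMU is present and the Secure
	 * Cycle Counter is already disabled in hardware: nothing to do. */
	if ((read_mdcr_el3() & MDCR_SCCD_BIT) != 0ULL)
		return;

	uint64_t pmcr = read_pmcr_el0();

	/* Preserve the interrupted world's PMCR_EL0 only if EL3 was
	 * entered from Non-secure state (SCR_EL3.NS == 1). */
	if ((read_scr_el3() & SCR_NS_BIT) != 0ULL)
		*ns_ctx_pmcr_slot = pmcr;

	/* PMCR_EL0.DP = 1 stops the cycle counter while event counting
	 * is prohibited, which covers execution in EL3. */
	write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT);
}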
bl31/aarch64/ea_delegate.S
...
...
@@ -68,6 +68,13 @@ func enter_lower_el_sync_ea
	/* Save GP registers */
	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
...
...
@@ -106,6 +113,13 @@ func enter_lower_el_async_ea
	/* Save GP registers */
	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
...
...
bl31/aarch64/runtime_exceptions.S
...
...
@@ -67,6 +67,14 @@
	/* Save GP registers and restore them afterwards */
	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
	bl	handle_lower_el_ea_esb
	bl	restore_gp_registers
...
...
@@ -123,6 +131,13 @@
	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
...
...
@@ -335,6 +350,13 @@ smc_handler64:
	/* Save general purpose registers */
	bl	save_gp_registers

+	/*
+	 * If Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+	 * disable all event counters and cycle counter.
+	 */
+	bl	save_pmcr_disable_pmu
+
	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
...
...
include/arch/aarch64/arch.h
...
...
@@ -133,12 +133,13 @@
 #define ID_AA64PFR0_EL2_SHIFT	U(8)
 #define ID_AA64PFR0_EL3_SHIFT	U(12)
 #define ID_AA64PFR0_AMU_SHIFT	U(44)
 #define ID_AA64PFR0_AMU_LENGTH	U(4)
 #define ID_AA64PFR0_AMU_MASK	ULL(0xf)
 #define ID_AA64PFR0_ELX_MASK	ULL(0xf)
+#define ID_AA64PFR0_GIC_SHIFT	U(24)
+#define ID_AA64PFR0_GIC_WIDTH	U(4)
+#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
 #define ID_AA64PFR0_SVE_SHIFT	U(32)
 #define ID_AA64PFR0_SVE_MASK	ULL(0xf)
 #define ID_AA64PFR0_SVE_LENGTH	U(4)
 #define ID_AA64PFR0_MPAM_SHIFT	U(40)
 #define ID_AA64PFR0_MPAM_MASK	ULL(0xf)
 #define ID_AA64PFR0_DIT_SHIFT	U(48)
...
...
@@ -149,18 +150,14 @@
 #define ID_AA64PFR0_CSV2_MASK	ULL(0xf)
 #define ID_AA64PFR0_CSV2_LENGTH	U(4)
+/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
+#define ID_AA64DFR0_PMS_SHIFT	U(32)
+#define ID_AA64DFR0_PMS_LENGTH	U(4)
+#define ID_AA64DFR0_PMS_MASK	ULL(0xf)

 /* Exception level handling */
 #define EL_IMPL_NONE		ULL(0)
 #define EL_IMPL_A64ONLY		ULL(1)
 #define EL_IMPL_A64_A32		ULL(2)
-#define ID_AA64PFR0_GIC_SHIFT	U(24)
-#define ID_AA64PFR0_GIC_WIDTH	U(4)
-#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
-/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
-#define ID_AA64DFR0_PMS_SHIFT	U(32)
-#define ID_AA64DFR0_PMS_MASK	ULL(0xf)

 /* ID_AA64ISAR1_EL1 definitions */
 #define ID_AA64ISAR1_EL1	S3_0_C0_C6_1
...
...
@@ -305,20 +302,25 @@
 #define SCR_RESET_VAL		SCR_RES1_BITS

 /* MDCR_EL3 definitions */
+#define MDCR_SCCD_BIT		(ULL(1) << 23)
 #define MDCR_SPME_BIT		(ULL(1) << 17)
+#define MDCR_SDD_BIT		(ULL(1) << 16)
 #define MDCR_SPD32(x)		((x) << 14)
 #define MDCR_SPD32_LEGACY	ULL(0x0)
 #define MDCR_SPD32_DISABLE	ULL(0x2)
 #define MDCR_SPD32_ENABLE	ULL(0x3)
-#define MDCR_SDD_BIT		(ULL(1) << 16)
 #define MDCR_NSPB(x)		((x) << 12)
 #define MDCR_NSPB_EL1		ULL(0x3)
 #define MDCR_TDOSA_BIT		(ULL(1) << 10)
 #define MDCR_TDA_BIT		(ULL(1) << 9)
 #define MDCR_TPM_BIT		(ULL(1) << 6)
-#define MDCR_SCCD_BIT		(ULL(1) << 23)
 #define MDCR_EL3_RESET_VAL	ULL(0x0)

 /* MDCR_EL2 definitions */
+#define MDCR_EL2_HLP		(U(1) << 26)
+#define MDCR_EL2_HCCD		(U(1) << 23)
+#define MDCR_EL2_TTRF		(U(1) << 19)
+#define MDCR_EL2_HPMD		(U(1) << 17)
+#define MDCR_EL2_TPMS		(U(1) << 14)
 #define MDCR_EL2_E2PB(x)	((x) << 12)
 #define MDCR_EL2_E2PB_EL1	U(0x3)
...
@@ -678,10 +680,14 @@
 #define PMCR_EL0_N_SHIFT	U(11)
 #define PMCR_EL0_N_MASK		U(0x1f)
 #define PMCR_EL0_N_BITS		(PMCR_EL0_N_MASK << PMCR_EL0_N_SHIFT)
+#define PMCR_EL0_LP_BIT		(U(1) << 7)
 #define PMCR_EL0_LC_BIT		(U(1) << 6)
 #define PMCR_EL0_DP_BIT		(U(1) << 5)
 #define PMCR_EL0_X_BIT		(U(1) << 4)
 #define PMCR_EL0_D_BIT		(U(1) << 3)
+#define PMCR_EL0_C_BIT		(U(1) << 2)
+#define PMCR_EL0_P_BIT		(U(1) << 1)
+#define PMCR_EL0_E_BIT		(U(1) << 0)

 /*******************************************************************************
 * Definitions for system register interface to SVE
...
...
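These SHIFT/MASK pairs are consumed in the usual TF-A pattern: shift the ID register right, then mask the field. A minimal sketch for the relocated ID_AA64DFR0_EL1.PMS field (read_id_aa64dfr0_el1() is the conventional TF-A accessor name, assumed here):

#include <stdbool.h>
#include <stdint.h>

#define ID_AA64DFR0_PMS_SHIFT 32U
#define ID_AA64DFR0_PMS_MASK  0xfULL

uint64_t read_id_aa64dfr0_el1(void); /* assumed sysreg accessor */

/* The Statistical Profiling Extension is present when the PMS field
 * of ID_AA64DFR0_EL1 reads as non-zero. */
static bool spe_present(void)
{
	uint64_t dfr0 = read_id_aa64dfr0_el1();
	return ((dfr0 >> ID_AA64DFR0_PMS_SHIFT) & ID_AA64DFR0_PMS_MASK) != 0ULL;
}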
include/arch/aarch64/el3_common_macros.S
...
...
@@ -116,11 +116,41 @@
	 * ---------------------------------------------------------------------
	 */
	mov_imm	x0, ((MDCR_EL3_RESET_VAL | MDCR_SDD_BIT | \
-		      MDCR_SPD32(MDCR_SPD32_DISABLE)) \
-		     & ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
+		      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) & \
+		     ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
	msr	mdcr_el3, x0

+	/* ---------------------------------------------------------------------
+	 * Initialise PMCR_EL0 setting all fields rather than relying
+	 * on hw. Some fields are architecturally UNKNOWN on reset.
+	 *
+	 * PMCR_EL0.LP: Set to one so that event counter overflow, that
+	 *  is recorded in PMOVSCLR_EL0[0-30], occurs on the increment
+	 *  that changes PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU
+	 *  is implemented. This bit is RES0 in versions of the architecture
+	 *  earlier than ARMv8.5, setting it to 1 doesn't have any effect
+	 *  on them.
+	 *
+	 * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
+	 *  is recorded in PMOVSCLR_EL0[31], occurs on the increment
+	 *  that changes PMCCNTR_EL0[63] from 1 to 0.
+	 *
+	 * PMCR_EL0.DP: Set to one so that the cycle counter,
+	 *  PMCCNTR_EL0 does not count when event counting is prohibited.
+	 *
+	 * PMCR_EL0.X: Set to zero to disable export of events.
+	 *
+	 * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
+	 *  counts on every clock cycle.
+	 * ---------------------------------------------------------------------
+	 */
+	mov_imm	x0, ((PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT | \
+		      PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) & \
+		     ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT))
+	msr	pmcr_el0, x0
+
	/* ---------------------------------------------------------------------
	 * Enable External Aborts and SError Interrupts now that the exception
	 * vectors have been setup.
...
...
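The two values loaded by mov_imm reduce to fixed constants. The diff shows MDCR_EL3_RESET_VAL as ULL(0x0); assuming PMCR_EL0_RESET_VAL is likewise zero (it is not shown in this diff), a host-side check of the arithmetic:

#include <assert.h>
#include <stdio.h>

#define MDCR_EL3_RESET_VAL 0x0ULL
#define MDCR_SDD_BIT       (1ULL << 16)
#define MDCR_SPD32(x)      ((x) << 14)
#define MDCR_SPD32_DISABLE 0x2ULL
#define MDCR_SCCD_BIT      (1ULL << 23)
#define MDCR_TDOSA_BIT     (1ULL << 10)
#define MDCR_TDA_BIT       (1ULL << 9)
#define MDCR_TPM_BIT       (1ULL << 6)

#define PMCR_EL0_RESET_VAL 0x0ULL /* assumption: not shown in the diff */
#define PMCR_EL0_LP_BIT    (1ULL << 7)
#define PMCR_EL0_LC_BIT    (1ULL << 6)
#define PMCR_EL0_DP_BIT    (1ULL << 5)
#define PMCR_EL0_X_BIT     (1ULL << 4)
#define PMCR_EL0_D_BIT     (1ULL << 3)

int main(void)
{
	/* Same expressions as the two mov_imm operands above. */
	unsigned long long mdcr = (MDCR_EL3_RESET_VAL | MDCR_SDD_BIT |
				   MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) &
				  ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT);
	unsigned long long pmcr = (PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT |
				   PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) &
				  ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT);

	assert(mdcr == 0x818000ULL); /* SCCD | SDD | SPD32(DISABLE) */
	assert(pmcr == 0xe0ULL);     /* LP | LC | DP */
	printf("mdcr_el3=%#llx pmcr_el0=%#llx\n", mdcr, pmcr);
	return 0;
}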
include/lib/el3_runtime/aarch64/context.h
...
...
@@ -59,7 +59,7 @@
 #define CTX_RUNTIME_SP		U(0x10)
 #define CTX_SPSR_EL3		U(0x18)
 #define CTX_ELR_EL3		U(0x20)
-#define CTX_UNUSED		U(0x28)
+#define CTX_PMCR_EL0		U(0x28)
 #define CTX_EL3STATE_END	U(0x30)

 /*******************************************************************************
...
...
@@ -91,22 +91,21 @@
 #define CTX_AFSR1_EL1		U(0x98)
 #define CTX_CONTEXTIDR_EL1	U(0xa0)
 #define CTX_VBAR_EL1		U(0xa8)
-#define CTX_PMCR_EL0		U(0xb0)

 /*
  * If the platform is AArch64-only, there is no need to save and restore these
  * AArch32 registers.
  */
 #if CTX_INCLUDE_AARCH32_REGS
-#define CTX_SPSR_ABT		U(0xc0)	/* Align to the next 16 byte boundary */
-#define CTX_SPSR_UND		U(0xc8)
-#define CTX_SPSR_IRQ		U(0xd0)
-#define CTX_SPSR_FIQ		U(0xd8)
-#define CTX_DACR32_EL2		U(0xe0)
-#define CTX_IFSR32_EL2		U(0xe8)
-#define CTX_AARCH32_END		U(0xf0) /* Align to the next 16 byte boundary */
+#define CTX_SPSR_ABT		U(0xb0)	/* Align to the next 16 byte boundary */
+#define CTX_SPSR_UND		U(0xb8)
+#define CTX_SPSR_IRQ		U(0xc0)
+#define CTX_SPSR_FIQ		U(0xc8)
+#define CTX_DACR32_EL2		U(0xd0)
+#define CTX_IFSR32_EL2		U(0xd8)
+#define CTX_AARCH32_END		U(0xe0) /* Align to the next 16 byte boundary */
 #else
-#define CTX_AARCH32_END		U(0xc0)	/* Align to the next 16 byte boundary */
+#define CTX_AARCH32_END		U(0xb0)	/* Align to the next 16 byte boundary */
 #endif /* CTX_INCLUDE_AARCH32_REGS */

 /*
...
...
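The offset shuffle is easy to sanity-check: CTX_PMCR_EL0 takes over the formerly unused 0x28 slot inside the EL3 state, and every AArch32 offset drops by 0x10 so the block keeps its 16-byte alignment. A compile-time sketch of those invariants (values copied from the new header):

#include <assert.h>

#define CTX_PMCR_EL0     0x28U
#define CTX_EL3STATE_END 0x30U
#define CTX_SPSR_ABT     0xb0U /* with CTX_INCLUDE_AARCH32_REGS */
#define CTX_AARCH32_END  0xe0U

/* The new slot must fit inside the EL3 state region. */
static_assert(CTX_PMCR_EL0 < CTX_EL3STATE_END, "PMCR slot out of range");

/* The relocated AArch32 block stays 16-byte aligned at both ends. */
static_assert(CTX_SPSR_ABT % 16U == 0U, "block start misaligned");
static_assert(CTX_AARCH32_END % 16U == 0U, "block end misaligned");

/* Six 8-byte registers occupy 0x30 bytes. */
static_assert(CTX_AARCH32_END - CTX_SPSR_ABT == 0x30U, "unexpected size");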
lib/el3_runtime/aarch64/context.S
...
...
@@ -24,8 +24,44 @@
	.global	save_gp_registers
	.global	restore_gp_registers
	.global	restore_gp_registers_eret
+	.global	save_pmcr_disable_pmu
	.global	el3_exit

+	/* -----------------------------------------------------
+	 * If ARMv8.5-PMU is implemented, cycle counting is
+	 * disabled by setting MDCR_EL3.SCCD to 1.
+	 * -----------------------------------------------------
+	 */
+func save_pmcr_disable_pmu
+	/* -----------------------------------------------------
+	 * Check if the earlier initialization of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not
+	 * implemented and PMCR_EL0 should be saved in the
+	 * non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x9, mdcr_el3
+	tst	x9, #MDCR_SCCD_BIT
+	bne	1f
+
+	/* Secure Cycle Counter is not disabled */
+	mrs	x9, pmcr_el0
+
+	/* Check caller's security state */
+	mrs	x10, scr_el3
+	tst	x10, #SCR_NS_BIT
+	beq	2f
+
+	/* Save PMCR_EL0 if called from Non-secure state */
+	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+
+	/* Disable cycle counter when event counting is prohibited */
+2:	orr	x9, x9, #PMCR_EL0_DP_BIT
+	msr	pmcr_el0, x9
+	isb
+1:	ret
+endfunc save_pmcr_disable_pmu
+
	/* -----------------------------------------------------
	 * The following function strictly follows the AArch64
	 * PCS to use x9-x17 (temporary caller-saved registers)
...
...
@@ -80,9 +116,6 @@ func el1_sysregs_context_save
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

-	mrs	x10, pmcr_el0
-	str	x10, [x0, #CTX_PMCR_EL0]
-
	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
...
...
@@ -169,9 +202,6 @@ func el1_sysregs_context_restore
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

-	ldr	x10, [x0, #CTX_PMCR_EL0]
-	msr	pmcr_el0, x10
-
	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
...
...
@@ -503,6 +533,29 @@ func el3_exit
	msr	spsr_el3, x16
	msr	elr_el3, x17

+	/* -----------------------------------------------------
+	 * Restore PMCR_EL0 when returning to Non-secure state
+	 * if Secure Cycle Counter is not disabled in MDCR_EL3
+	 * when ARMv8.5-PMU is implemented.
+	 * -----------------------------------------------------
+	 */
+	tst	x18, #SCR_NS_BIT
+	beq	2f
+
+	/* -----------------------------------------------------
+	 * Back to Non-secure state.
+	 * Check if the earlier initialization of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not
+	 * implemented and PMCR_EL0 should be restored from the
+	 * non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x17, mdcr_el3
+	tst	x17, #MDCR_SCCD_BIT
+	bne	2f
+	ldr	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+	msr	pmcr_el0, x17
+2:
+
 #if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* Restore mitigation state as it was on entry to EL3 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
...
...
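el3_exit mirrors the entry hook: PMCR_EL0 is restored only when the ERET target is Non-secure (SCR_EL3.NS, held in x18 here) and MDCR_EL3.SCCD reads back as 0, i.e. the reset-time write of SCCD did not stick because ARMv8.5-PMU is absent. The same condition as a small C sketch (accessor names assumed, as before):

#include <stdint.h>

#define SCR_NS_BIT    (1ULL << 0)
#define MDCR_SCCD_BIT (1ULL << 23)

uint64_t read_mdcr_el3(void);      /* assumed sysreg accessors */
void write_pmcr_el0(uint64_t val);

/* scr_el3: value being installed for ERET (x18 in the assembly);
 * ctx_pmcr: PMCR_EL0 value saved on EL3 entry. */
static void restore_pmcr_on_exit_sketch(uint64_t scr_el3, uint64_t ctx_pmcr)
{
	/* Returning to Secure state: leave the PMU disabled. */
	if ((scr_el3 & SCR_NS_BIT) == 0ULL)
		return;

	/* SCCD == 1: hardware already suppresses Secure cycle counting
	 * and PMCR_EL0 was never modified on entry. */
	if ((read_mdcr_el3() & MDCR_SCCD_BIT) != 0ULL)
		return;

	write_pmcr_el0(ctx_pmcr);
}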
lib/el3_runtime/aarch64/context_mgmt.c
...
...
@@ -66,7 +66,7 @@ void __init cm_init(void)
 void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
 {
	unsigned int security_state;
-	uint32_t scr_el3, pmcr_el0;
+	uint32_t scr_el3;
	el3_state_t *state;
	gp_regs_t *gp_regs;
	unsigned long sctlr_elx, actlr_elx;
...
...
@@ -225,31 +225,10 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
	actlr_elx = read_actlr_el1();
	write_ctx_reg((get_sysregs_ctx(ctx)), (CTX_ACTLR_EL1), (actlr_elx));

-	if (security_state == SECURE) {
-		/*
-		 * Initialise PMCR_EL0 for secure context only, setting all
-		 * fields rather than relying on hw. Some fields are
-		 * architecturally UNKNOWN on reset.
-		 *
-		 * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
-		 * is recorded in PMOVSCLR_EL0[31], occurs on the increment
-		 * that changes PMCCNTR_EL0[63] from 1 to 0.
-		 *
-		 * PMCR_EL0.DP: Set to one so that the cycle counter,
-		 * PMCCNTR_EL0 does not count when event counting is prohibited.
-		 *
-		 * PMCR_EL0.X: Set to zero to disable export of events.
-		 *
-		 * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
-		 * counts on every clock cycle.
-		 */
-		pmcr_el0 = ((PMCR_EL0_RESET_VAL | PMCR_EL0_LC_BIT |
-			     PMCR_EL0_DP_BIT) &
-			    ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT));
-		write_ctx_reg(get_sysregs_ctx(ctx), CTX_PMCR_EL0, pmcr_el0);
-	}
-
-	/* Populate EL3 state so that we've the right context before doing ERET */
+	/*
+	 * Populate EL3 state so that we've the right context
+	 * before doing ERET
+	 */
	state = get_el3state_ctx(ctx);
	write_ctx_reg(state, CTX_SCR_EL3, scr_el3);
	write_ctx_reg(state, CTX_ELR_EL3, ep->pc);
...
...
@@ -441,6 +420,29 @@ void cm_prepare_el3_exit(uint32_t security_state)
			 * relying on hw. Some fields are architecturally
			 * UNKNOWN on reset.
			 *
+			 * MDCR_EL2.HLP: Set to one so that event counter
+			 * overflow, that is recorded in PMOVSCLR_EL0[0-30],
+			 * occurs on the increment that changes
+			 * PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU is
+			 * implemented. This bit is RES0 in versions of the
+			 * architecture earlier than ARMv8.5, setting it to 1
+			 * doesn't have any effect on them.
+			 *
+			 * MDCR_EL2.TTRF: Set to zero so that access to Trace
+			 * Filter Control register TRFCR_EL1 at EL1 is not
+			 * trapped to EL2. This bit is RES0 in versions of
+			 * the architecture earlier than ARMv8.4.
+			 *
+			 * MDCR_EL2.HPMD: Set to one so that event counting is
+			 * prohibited at EL2. This bit is RES0 in versions of
+			 * the architecture earlier than ARMv8.1, setting it
+			 * to 1 doesn't have any effect on them.
+			 *
+			 * MDCR_EL2.TPMS: Set to zero so that accesses to
+			 * Statistical Profiling control registers from EL1
+			 * do not trap to EL2. This bit is RES0 when SPE is
+			 * not implemented.
+			 *
			 * MDCR_EL2.TDRA: Set to zero so that Non-secure EL0 and
			 * EL1 System register accesses to the Debug ROM
			 * registers are not trapped to EL2.
...
@@ -469,13 +471,15 @@ void cm_prepare_el3_exit(uint32_t security_state)
			 * MDCR_EL2.HPMN: Set to value of PMCR_EL0.N which is the
			 * architecturally-defined reset value.
			 */
-			mdcr_el2 = ((MDCR_EL2_RESET_VAL |
-				     ((read_pmcr_el0() & PMCR_EL0_N_BITS)
-				      >> PMCR_EL0_N_SHIFT)) &
-				    ~(MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT |
-				      MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT |
-				      MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT |
-				      MDCR_EL2_TPMCR_BIT));
+			mdcr_el2 = ((MDCR_EL2_RESET_VAL | MDCR_EL2_HLP |
+				     MDCR_EL2_HPMD) |
+				    ((read_pmcr_el0() & PMCR_EL0_N_BITS)
+				     >> PMCR_EL0_N_SHIFT)) &
+				   ~(MDCR_EL2_TTRF | MDCR_EL2_TPMS |
+				     MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT |
+				     MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT |
+				     MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT |
+				     MDCR_EL2_TPMCR_BIT);

			write_mdcr_el2(mdcr_el2);
...
...
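For a concrete sense of the new MDCR_EL2 value: with the reset value of zero and a core reporting, say, six event counters in PMCR_EL0.N (a hypothetical figure for illustration), the expression reduces to HLP | HPMD | HPMN, since the cleared trap bits are already zero. A host-side check under those assumptions:

#include <assert.h>

#define MDCR_EL2_RESET_VAL 0x0ULL
#define MDCR_EL2_HLP       (1ULL << 26)
#define MDCR_EL2_HPMD      (1ULL << 17)
#define PMCR_EL0_N_SHIFT   11U
#define PMCR_EL0_N_MASK    0x1fULL
#define PMCR_EL0_N_BITS    (PMCR_EL0_N_MASK << PMCR_EL0_N_SHIFT)

int main(void)
{
	/* N = 6 is hypothetical; real hardware reports its own count. */
	unsigned long long pmcr_el0 = 6ULL << PMCR_EL0_N_SHIFT;

	/* Negative-mask terms (TTRF, TPMS, TDRA, ...) clear bits that
	 * are already zero here, so they drop out of the arithmetic. */
	unsigned long long mdcr_el2 =
		(MDCR_EL2_RESET_VAL | MDCR_EL2_HLP | MDCR_EL2_HPMD) |
		((pmcr_el0 & PMCR_EL0_N_BITS) >> PMCR_EL0_N_SHIFT);

	assert(mdcr_el2 == 0x4020006ULL); /* HLP | HPMD | HPMN=6 */
	return 0;
}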