Commit ed108b56 authored by Alexei Fedorov

Refactor ARMv8.3 Pointer Authentication support code



This patch provides the following features and makes the modifications
listed below:
- Individual APIAKey key generation for each CPU.
- New key generation on every BL31 warm boot and TSP CPU On event.
- Per-CPU storage of APIAKey added in percpu_data[]
  of cpu_data structure.
- `plat_init_apiakey()` function replaced with `plat_init_apkey()`
  which returns a 128-bit value and uses the Generic Timer physical counter
  value to increase the randomness of the generated key.
  The new function can be used for generation of all ARMv8.3-PAuth keys.
- ARMv8.3-PAuth specific code placed in `lib/extensions/pauth`.
- New `pauth_init_enable_el1()` and `pauth_init_enable_el3()` functions
  generate, program and enable APIAKey_EL1 for EL1 and EL3 respectively;
  `pauth_disable_el1()` and `pauth_disable_el3()` functions disable
  PAuth for EL1 and EL3 respectively;
  `pauth_load_bl31_apiakey()` loads the saved per-CPU APIAKey_EL1 from
  the cpu_data structure.
- Combined `save_gp_pauth_registers()` function replaces calls to
  `save_gp_registers()` and `pauth_context_save()`;
  `restore_gp_pauth_registers()` replaces `pauth_context_restore()`
  and `restore_gp_registers()` calls.
- `restore_gp_registers_eret()` function removed with corresponding
  code placed in `el3_exit()`.
- Fixed an issue where the `pauth_t pauth_ctx` structure allocated space
  for 12 uint64_t PAuth registers instead of 10, by removing the macro
  CTX_PACGAKEY_END from `include/lib/el3_runtime/aarch64/context.h`
  and assigning its value to CTX_PAUTH_REGS_END.
- Use of the MODE_SP_ELX and MODE_SP_EL0 macro definitions
  in the `msr spsel` instruction instead of hard-coded values.
- Changes in documentation related to ARMv8.3-PAuth and ARMv8.5-BTI.

Change-Id: Id18b81cc46f52a783a7e6a09b9f149b6ce803211
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
parent 2fc6ffc4
@@ -14,61 +14,16 @@
.global fpregs_context_save
.global fpregs_context_restore
#endif
#if CTX_INCLUDE_PAUTH_REGS
.global pauth_context_restore
.global pauth_context_save
#endif
#if ENABLE_PAUTH
.global pauth_load_bl_apiakey
#endif
.global save_gp_registers
.global restore_gp_registers
.global restore_gp_registers_eret
.global save_pmcr_disable_pmu
.global save_gp_pmcr_pauth_regs
.global restore_gp_pmcr_pauth_regs
.global el3_exit
/* -----------------------------------------------------
* If ARMv8.5-PMU is implemented, cycle counting is
* disabled by seting MDCR_EL3.SCCD to 1.
* -----------------------------------------------------
*/
func save_pmcr_disable_pmu
/* -----------------------------------------------------
* Check if earlier initialization MDCR_EL3.SCCD to 1
* failed, meaning that ARMv8-PMU is not implemented and
* PMCR_EL0 should be saved in non-secure context.
* -----------------------------------------------------
*/
mrs x9, mdcr_el3
tst x9, #MDCR_SCCD_BIT
bne 1f
/* Secure Cycle Counter is not disabled */
mrs x9, pmcr_el0
/* Check caller's security state */
mrs x10, scr_el3
tst x10, #SCR_NS_BIT
beq 2f
/* Save PMCR_EL0 if called from Non-secure state */
str x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
/* Disable cycle counter when event counting is prohibited */
2: orr x9, x9, #PMCR_EL0_DP_BIT
msr pmcr_el0, x9
isb
1: ret
endfunc save_pmcr_disable_pmu
/* ------------------------------------------------------------------
* The following function strictly follows the AArch64 PCS to use
* x9-x17 (temporary caller-saved registers) to save EL1 system
* register context. It assumes that 'x0' is pointing to a
* 'el1_sys_regs' structure where the register context will be saved.
* ------------------------------------------------------------------
*/
func el1_sysregs_context_save
@@ -159,13 +114,13 @@ func el1_sysregs_context_save
ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
* The following function strictly follows the AArch64 PCS to use
* x9-x17 (temporary caller-saved registers) to restore EL1 system
* register context. It assumes that 'x0' is pointing to a
* 'el1_sys_regs' structure from where the register context will be
* restored
* ------------------------------------------------------------------
*/
func el1_sysregs_context_restore
@@ -255,21 +210,19 @@ func el1_sysregs_context_restore
ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
* The following function follows the aapcs_64 strictly to use
* x9-x17 (temporary caller-saved registers according to AArch64 PCS)
* to save floating point register context. It assumes that 'x0' is
* pointing to a 'fp_regs' structure where the register context will
* be saved.
*
* Access to VFP registers will trap if CPTR_EL3.TFP is set.
* However currently we don't use VFP registers nor set traps in
* Trusted Firmware, and assume it's cleared.
*
* TODO: Revisit when VFP is used in secure world
* ------------------------------------------------------------------
*/
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
@@ -303,21 +256,19 @@ func fpregs_context_save
ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
* The following function follows the aapcs_64 strictly to use x9-x17
* (temporary caller-saved registers according to AArch64 PCS) to
* restore floating point register context. It assumes that 'x0' is
* pointing to a 'fp_regs' structure from where the register context
* will be restored.
*
* Access to VFP registers will trap if CPTR_EL3.TFP is set.
* However currently we don't use VFP registers nor set traps in
* Trusted Firmware, and assume it's cleared.
*
* TODO: Revisit when VFP is used in secure world
* ------------------------------------------------------------------
*/
func fpregs_context_restore
ldp q0, q1, [x0, #CTX_FP_Q0]
@@ -357,109 +308,23 @@ func fpregs_context_restore
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_PAUTH_REGS
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
* to save the ARMv8.3-PAuth register context. It assumes
* that 'sp' is pointing to a 'cpu_context_t' structure
* to where the register context will be saved.
* -----------------------------------------------------
*/
func pauth_context_save
add x11, sp, #CTX_PAUTH_REGS_OFFSET

mrs x9, APIAKeyLo_EL1
mrs x10, APIAKeyHi_EL1
stp x9, x10, [x11, #CTX_PACIAKEY_LO]
mrs x9, APIBKeyLo_EL1
mrs x10, APIBKeyHi_EL1
stp x9, x10, [x11, #CTX_PACIBKEY_LO]
mrs x9, APDAKeyLo_EL1
mrs x10, APDAKeyHi_EL1
stp x9, x10, [x11, #CTX_PACDAKEY_LO]
mrs x9, APDBKeyLo_EL1
mrs x10, APDBKeyHi_EL1
stp x9, x10, [x11, #CTX_PACDBKEY_LO]
mrs x9, APGAKeyLo_EL1
mrs x10, APGAKeyHi_EL1
stp x9, x10, [x11, #CTX_PACGAKEY_LO]
ret
endfunc pauth_context_save
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
* to restore the ARMv8.3-PAuth register context. It assumes
* that 'sp' is pointing to a 'cpu_context_t' structure
* from where the register context will be restored.
* -----------------------------------------------------
*/
func pauth_context_restore
add x11, sp, #CTX_PAUTH_REGS_OFFSET
ldp x9, x10, [x11, #CTX_PACIAKEY_LO]
msr APIAKeyLo_EL1, x9
msr APIAKeyHi_EL1, x10
ldp x9, x10, [x11, #CTX_PACIBKEY_LO]
msr APIBKeyLo_EL1, x9
msr APIBKeyHi_EL1, x10
ldp x9, x10, [x11, #CTX_PACDAKEY_LO]
msr APDAKeyLo_EL1, x9
msr APDAKeyHi_EL1, x10
ldp x9, x10, [x11, #CTX_PACDBKEY_LO]
msr APDBKeyLo_EL1, x9
msr APDBKeyHi_EL1, x10
ldp x9, x10, [x11, #CTX_PACGAKEY_LO]
msr APGAKeyLo_EL1, x9
msr APGAKeyHi_EL1, x10
ret
endfunc pauth_context_restore
#endif /* CTX_INCLUDE_PAUTH_REGS */
/* -----------------------------------------------------
* The following function strictly follows the AArch64
* PCS to use x9-x17 (temporary caller-saved registers)
* to load the APIA key used by the firmware.
* -----------------------------------------------------
*/
#if ENABLE_PAUTH
func pauth_load_bl_apiakey
/* Load instruction key A used by the Trusted Firmware. */
adrp x11, plat_apiakey
add x11, x11, :lo12:plat_apiakey
ldp x9, x10, [x11, #0]
msr APIAKeyLo_EL1, x9
msr APIAKeyHi_EL1, x10
ret
endfunc pauth_load_bl_apiakey
#endif /* ENABLE_PAUTH */
/* -----------------------------------------------------
* The following functions are used to save and restore
* all the general purpose registers. Ideally we would
* only save and restore the callee saved registers when
* a world switch occurs but that type of implementation
* is more complex. So currently we will always save and
* restore these registers on entry and exit of EL3.
* These are not macros to ensure their invocation fits
* within the 32 instructions per exception vector.
* clobbers: x18
* -----------------------------------------------------
*/
func save_gp_registers

/* ------------------------------------------------------------------
* The following function is used to save and restore all the general
* purpose and ARMv8.3-PAuth (if enabled) registers.
* It also checks if Secure Cycle Counter is not disabled in MDCR_EL3
* when ARMv8.5-PMU is implemented, and if called from Non-secure
* state saves PMCR_EL0 and disables Cycle Counter.
*
* Ideally we would only save and restore the callee saved registers
* when a world switch occurs but that type of implementation is more
* complex. So currently we will always save and restore these
* registers on entry and exit of EL3.
* These are not macros to ensure their invocation fits within the 32
* instructions per exception vector.
* clobbers: x18
* ------------------------------------------------------------------
*/
func save_gp_pmcr_pauth_regs
stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
@@ -477,15 +342,114 @@ func save_gp_registers
stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
mrs x18, sp_el0
str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
/* ----------------------------------------------------------
* Check if earlier initialization MDCR_EL3.SCCD to 1 failed,
* meaning that ARMv8-PMU is not implemented and PMCR_EL0
* should be saved in non-secure context.
* ----------------------------------------------------------
*/
mrs x9, mdcr_el3
tst x9, #MDCR_SCCD_BIT
bne 1f
/* Secure Cycle Counter is not disabled */
mrs x9, pmcr_el0
/* Check caller's security state */
mrs x10, scr_el3
tst x10, #SCR_NS_BIT
beq 2f
/* Save PMCR_EL0 if called from Non-secure state */
str x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
/* Disable cycle counter when event counting is prohibited */
2: orr x9, x9, #PMCR_EL0_DP_BIT
msr pmcr_el0, x9
isb
1:
#if CTX_INCLUDE_PAUTH_REGS
/* ----------------------------------------------------------
* Save the ARMv8.3-PAuth keys as they are not banked
* by exception level
* ----------------------------------------------------------
*/
add x19, sp, #CTX_PAUTH_REGS_OFFSET
mrs x20, APIAKeyLo_EL1 /* x21:x20 = APIAKey */
mrs x21, APIAKeyHi_EL1
mrs x22, APIBKeyLo_EL1 /* x23:x22 = APIBKey */
mrs x23, APIBKeyHi_EL1
mrs x24, APDAKeyLo_EL1 /* x25:x24 = APDAKey */
mrs x25, APDAKeyHi_EL1
mrs x26, APDBKeyLo_EL1 /* x27:x26 = APDBKey */
mrs x27, APDBKeyHi_EL1
mrs x28, APGAKeyLo_EL1 /* x29:x28 = APGAKey */
mrs x29, APGAKeyHi_EL1
stp x20, x21, [x19, #CTX_PACIAKEY_LO]
stp x22, x23, [x19, #CTX_PACIBKEY_LO]
stp x24, x25, [x19, #CTX_PACDAKEY_LO]
stp x26, x27, [x19, #CTX_PACDBKEY_LO]
stp x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
ret
endfunc save_gp_registers
endfunc save_gp_pmcr_pauth_regs
/* ------------------------------------------------------------------
* This function restores ARMv8.3-PAuth (if enabled) and all general
* purpose registers except x30 from the CPU context.
* x30 register must be explicitly restored by the caller.
* ------------------------------------------------------------------
*/
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
/* Restore the ARMv8.3 PAuth keys */
add x10, sp, #CTX_PAUTH_REGS_OFFSET
ldp x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
ldp x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
ldp x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
ldp x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
ldp x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */
msr APIAKeyLo_EL1, x0
msr APIAKeyHi_EL1, x1
msr APIBKeyLo_EL1, x2
msr APIBKeyHi_EL1, x3
msr APDAKeyLo_EL1, x4
msr APDAKeyHi_EL1, x5
msr APDBKeyLo_EL1, x6
msr APDBKeyHi_EL1, x7
msr APGAKeyLo_EL1, x8
msr APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */
/* -----------------------------------------------------
* This function restores all general purpose registers except x30 from the
* CPU context. x30 register must be explicitly restored by the caller.
* -----------------------------------------------------
*/
func restore_gp_registers

/* ----------------------------------------------------------
* Restore PMCR_EL0 when returning to Non-secure state if
* Secure Cycle Counter is not disabled in MDCR_EL3 when
* ARMv8.5-PMU is implemented.
* ----------------------------------------------------------
*/
mrs x0, scr_el3
tst x0, #SCR_NS_BIT
beq 2f
/* ----------------------------------------------------------
* Back to Non-secure state.
* Check if earlier initialization MDCR_EL3.SCCD to 1 failed,
* meaning that ARMv8-PMU is not implemented and PMCR_EL0
* should be restored from non-secure context.
* ----------------------------------------------------------
*/
mrs x0, mdcr_el3
tst x0, #MDCR_SCCD_BIT
bne 2f
ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
msr pmcr_el0, x0
2:
ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
@@ -504,49 +468,28 @@ func restore_gp_registers
msr sp_el0, x28
ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
ret
endfunc restore_gp_registers
endfunc restore_gp_pmcr_pauth_regs
/* -----------------------------------------------------
* Restore general purpose registers (including x30), and exit EL3 via ERET to
* a lower exception level.
* -----------------------------------------------------
*/
func restore_gp_registers_eret
bl restore_gp_registers
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
#if IMAGE_BL31 && RAS_EXTENSION
/*
* Issue Error Synchronization Barrier to synchronize SErrors before
* exiting EL3. We're running with EAs unmasked, so any synchronized
* errors would be taken immediately; therefore no need to inspect
* DISR_EL1 register.
*/
esb
#endif
eret
endfunc restore_gp_registers_eret
/* ------------------------------------------------------------------
* This routine assumes that the SP_EL3 is pointing to a valid
* context structure from where the gp regs and other special
* registers can be retrieved.
* ------------------------------------------------------------------
*/
func el3_exit
/* ----------------------------------------------------------
* Save the current SP_EL0 i.e. the EL3 runtime stack which
* will be used for handling the next SMC.
* Then switch to SP_EL3.
* ----------------------------------------------------------
*/
mov x17, sp
msr spsel, #1
msr spsel, #MODE_SP_ELX
str x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
/* ----------------------------------------------------------
* Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
* ----------------------------------------------------------
*/
ldr x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
ldp x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
@@ -554,43 +497,35 @@ func el3_exit
msr spsr_el3, x16
msr elr_el3, x17
/* -----------------------------------------------------
* Restore PMCR_EL0 when returning to Non-secure state
* if Secure Cycle Counter is not disabled in MDCR_EL3
* when ARMv8.5-PMU is implemented
* -----------------------------------------------------
*/
tst x18, #SCR_NS_BIT
beq 2f
/* -----------------------------------------------------
* Back to Non-secure state.
* Check if earlier initialization MDCR_EL3.SCCD to 1
* failed, meaning that ARMv8-PMU is not implemented and
* PMCR_EL0 should be restored from non-secure context.
* -----------------------------------------------------
*/
mrs x17, mdcr_el3
tst x17, #MDCR_SCCD_BIT
bne 2f
ldr x17, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
msr pmcr_el0, x17
2:
#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
/* ----------------------------------------------------------
* Restore mitigation state as it was on entry to EL3
* ----------------------------------------------------------
*/
ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
cmp x17, xzr
beq 1f
cbz x17, 1f
blr x17
1:
#endif
/* ----------------------------------------------------------
* Restore general purpose (including x30), PMCR_EL0 and
* ARMv8.3-PAuth registers.
* Exit EL3 via ERET to a lower exception level.
* ----------------------------------------------------------
*/
bl restore_gp_pmcr_pauth_regs
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
#if CTX_INCLUDE_PAUTH_REGS
/* Restore ARMv8.3-PAuth registers */
bl pauth_context_restore
#endif

/* Restore saved general purpose registers and return */
b restore_gp_registers_eret

#if IMAGE_BL31 && RAS_EXTENSION
/* ----------------------------------------------------------
* Issue Error Synchronization Barrier to synchronize SErrors
* before exiting EL3. We're running with EAs unmasked, so
* any synchronized errors would be taken immediately;
* therefore no need to inspect DISR_EL1 register.
* ----------------------------------------------------------
*/
esb
#endif
eret
endfunc el3_exit
/*
* Copyright (c) 2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <arch.h>
#include <asm_macros.S>
#include <lib/el3_runtime/cpu_data.h>
.global pauth_init_enable_el1
.global pauth_disable_el1
.global pauth_init_enable_el3
.global pauth_disable_el3
.globl pauth_load_bl31_apiakey
/* -------------------------------------------------------------
* Program APIAKey_EL1 and enable pointer authentication in EL1
* -------------------------------------------------------------
*/
func pauth_init_enable_el1
stp x29, x30, [sp, #-16]!
/* Initialize platform key */
bl plat_init_apkey
/* Program instruction key A used by the Trusted Firmware */
msr APIAKeyLo_EL1, x0
msr APIAKeyHi_EL1, x1
/* Enable pointer authentication */
mrs x0, sctlr_el1
orr x0, x0, #SCTLR_EnIA_BIT
#if ENABLE_BTI
/* Enable PAC branch type compatibility */
bic x0, x0, #(SCTLR_BT0_BIT | SCTLR_BT1_BIT)
#endif
msr sctlr_el1, x0
isb
ldp x29, x30, [sp], #16
ret
endfunc pauth_init_enable_el1
/* -------------------------------------------------------------
* Disable pointer authentication in EL1
* -------------------------------------------------------------
*/
func pauth_disable_el1
mrs x0, sctlr_el1
bic x0, x0, #SCTLR_EnIA_BIT
msr sctlr_el1, x0
isb
ret
endfunc pauth_disable_el1
/* -------------------------------------------------------------
* Program APIAKey_EL1 and enable pointer authentication in EL3
* -------------------------------------------------------------
*/
func pauth_init_enable_el3
stp x29, x30, [sp, #-16]!
/* Initialize platform key */
bl plat_init_apkey
/* Program instruction key A used by the Trusted Firmware */
msr APIAKeyLo_EL1, x0
msr APIAKeyHi_EL1, x1
/* Enable pointer authentication */
mrs x0, sctlr_el3
orr x0, x0, #SCTLR_EnIA_BIT
#if ENABLE_BTI
/* Enable PAC branch type compatibility */
bic x0, x0, #SCTLR_BT_BIT
#endif
msr sctlr_el3, x0
isb
ldp x29, x30, [sp], #16
ret
endfunc pauth_init_enable_el3
/* -------------------------------------------------------------
* Disable pointer authentication in EL3
* -------------------------------------------------------------
*/
func pauth_disable_el3
mrs x0, sctlr_el3
bic x0, x0, #SCTLR_EnIA_BIT
msr sctlr_el3, x0
isb
ret
endfunc pauth_disable_el3
/* -------------------------------------------------------------
* The following function strictly follows the AArch64 PCS
* to use x9-x17 (temporary caller-saved registers) to load
* the APIAKey_EL1 used by the firmware.
* -------------------------------------------------------------
*/
func pauth_load_bl31_apiakey
/* tpidr_el3 contains the address of cpu_data structure */
mrs x9, tpidr_el3
/* Load apiakey from cpu_data */
ldp x10, x11, [x9, #CPU_DATA_APIAKEY_OFFSET]
/* Program instruction key A */
msr APIAKeyLo_EL1, x10
msr APIAKeyHi_EL1, x11
isb
ret
endfunc pauth_load_bl31_apiakey
/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -280,6 +280,12 @@ void psci_arch_setup(void)
/* Having initialized cpu_ops, we can now print errata status */
print_errata_status();
#if ENABLE_PAUTH
/* Store APIAKey_EL1 key */
set_cpu_data(apiakey[0], read_apiakeylo_el1());
set_cpu_data(apiakey[1], read_apiakeyhi_el1());
#endif /* ENABLE_PAUTH */
}

/******************************************************************************
......
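For reference, the key stored into cpu_data above is what `pauth_load_bl31_apiakey` later reprograms on the warm boot path. The following is only an illustrative C counterpart of that assembly helper, assuming the matching write_apiakey*_el1() accessors from arch_helpers.h; the function name itself is made up and not part of the patch:

#include <stdint.h>

#include <arch_helpers.h>
#include <lib/el3_runtime/cpu_data.h>

/* Illustrative sketch only: C equivalent of pauth_load_bl31_apiakey. */
static void reload_apiakey_from_cpu_data(void)
{
	/* Read the per-CPU APIAKey saved by psci_arch_setup()/suspend finish */
	uint64_t key_lo = get_cpu_data(apiakey[0]);
	uint64_t key_hi = get_cpu_data(apiakey[1]);

	/* Program instruction key A and synchronize the context change */
	write_apiakeylo_el1(key_lo);
	write_apiakeyhi_el1(key_hi);
	isb();
}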
/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -304,6 +304,12 @@ void psci_cpu_suspend_finish(int cpu_idx, const psci_power_state_t *state_info)
counter_freq = plat_get_syscnt_freq2();
write_cntfrq_el0(counter_freq);
#if ENABLE_PAUTH
/* Store APIAKey_EL1 key */
set_cpu_data(apiakey[0], read_apiakeylo_el1());
set_cpu_data(apiakey[1], read_apiakeyhi_el1());
#endif /* ENABLE_PAUTH */
/*
 * Call the cpu suspend finish handler registered by the Secure Payload
 * Dispatcher to let it do any bookeeping. If the handler encounters an
......
@@ -4,27 +4,25 @@
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch_helpers.h>
#include <cdefs.h>
#include <stdint.h>

/*
 * Instruction pointer authentication key A. The low 64-bit are at [0], and the
 * high bits at [1].
 */
uint64_t plat_apiakey[2];

/*
 * This is only a toy implementation to generate a seemingly random 128-bit key
 * from sp and x30 values. A production system must re-implement this function
 * to generate keys from a reliable randomness source.
 */
uint64_t *plat_init_apiakey(void)
{
	uintptr_t return_addr = (uintptr_t)__builtin_return_address(0U);
	uintptr_t frame_addr = (uintptr_t)__builtin_frame_address(0U);

	plat_apiakey[0] = (return_addr << 13) ^ frame_addr;
	plat_apiakey[1] = (frame_addr << 15) ^ return_addr;

	return plat_apiakey;
}

/*
 * This is only a toy implementation to generate a seemingly random
 * 128-bit key from sp, x30 and cntpct_el0 values.
 * A production system must re-implement this function to generate
 * keys from a reliable randomness source.
 */
uint128_t plat_init_apkey(void)
{
	uint64_t return_addr = (uint64_t)__builtin_return_address(0U);
	uint64_t frame_addr = (uint64_t)__builtin_frame_address(0U);
	uint64_t cntpct = read_cntpct_el0();

	/* Generate 128-bit key */
	uint64_t key_lo = (return_addr << 13) ^ frame_addr ^ cntpct;
	uint64_t key_hi = (frame_addr << 15) ^ return_addr ^ cntpct;

	return ((uint128_t)(key_hi) << 64) | key_lo;
}
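The new implementation above still describes itself as a toy generator. As a hedged sketch (not part of this commit), a platform with a hardware entropy source could implement `plat_init_apkey()` along the following lines; `plat_trng_read64()` is a hypothetical placeholder for whatever TRNG driver the platform actually provides:

#include <cdefs.h>
#include <stdint.h>

/* Hypothetical platform TRNG accessor - not provided by TF-A or this patch */
uint64_t plat_trng_read64(void);

/*
 * Sketch of a production key generator: each call returns a fresh
 * 128-bit value drawn from hardware entropy, so every CPU gets an
 * unpredictable APIAKey on cold boot, warm boot and TSP CPU On.
 */
uint128_t plat_init_apkey(void)
{
	uint64_t key_lo = plat_trng_read64();
	uint64_t key_hi = plat_trng_read64();

	return ((uint128_t)key_hi << 64) | key_lo;
}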
@@ -234,7 +234,8 @@ endif
# Pointer Authentication sources
ifeq (${ENABLE_PAUTH}, 1)
PLAT_BL_COMMON_SOURCES += plat/arm/common/aarch64/arm_pauth.c \
				lib/extensions/pauth/pauth_helpers.S
endif

# SPM uses libfdt in Arm platforms
......
@@ -156,8 +156,8 @@ Functionality
The use of pointer authentication in the normal world is enabled whenever
architectural support is available, without the need for additional build
flags. Use of pointer authentication in the secure world remains an
experimental configuration at this time and requires the ``ENABLE_PAUTH``
build flag to be set.
experimental configuration at this time and requires the
``BRANCH_PROTECTION`` option to be set to non-zero.

- Position-Independent Executable (PIE) support. Initially for BL31 only, with
  further support to be added in a future release.
......