Commit 201ca5b6 authored by Dimitris Papastamos

runtime_exceptions: Save x4-x29 unconditionally



In preparation for SMCCC v1.1 support, save x4 to x29 unconditionally.
Previously we expected callers coming from AArch64 mode to preserve
x8-x17.  This is no longer the case with SMCCC v1.1 as AArch64 callers
only need to save x0-x3.

Change-Id: Ie62d620776533969ff4a02c635422f1b9208be9c
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
parent 383c8089
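
As context for the convention change described above: under SMCCC v1.0 an SMC issued from AArch64 may clobber x4-x17, so callers have to treat those registers as caller-saved; under SMCCC v1.1 only x0-x3 (the parameter/result registers) may change across the call. The sketch below shows the caller's side of that difference. It is not part of this patch; the wrapper names and the use of GCC inline assembly are illustrative assumptions only.

#include <stdint.h>

/* Illustrative only -- not part of this patch. */
static inline uint64_t smccc_v1_0_call(uint32_t fid, uint64_t a1,
				       uint64_t a2, uint64_t a3)
{
	register uint64_t x0 __asm__("x0") = fid;
	register uint64_t x1 __asm__("x1") = a1;
	register uint64_t x2 __asm__("x2") = a2;
	register uint64_t x3 __asm__("x3") = a3;

	/*
	 * SMCCC v1.0: the callee may clobber x4-x17, so the caller must
	 * list them (and spill anything live in them).
	 */
	__asm__ volatile("smc #0"
		: "+r" (x0), "+r" (x1), "+r" (x2), "+r" (x3)
		:
		: "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11",
		  "x12", "x13", "x14", "x15", "x16", "x17", "memory");
	return x0;
}

static inline uint64_t smccc_v1_1_call(uint32_t fid, uint64_t a1,
				       uint64_t a2, uint64_t a3)
{
	register uint64_t x0 __asm__("x0") = fid;
	register uint64_t x1 __asm__("x1") = a1;
	register uint64_t x2 __asm__("x2") = a2;
	register uint64_t x3 __asm__("x3") = a3;

	/*
	 * SMCCC v1.1: only x0-x3 may be modified across the call, so no
	 * general-purpose register clobbers are needed -- which is only
	 * safe because EL3 now saves and restores x4-x29 unconditionally.
	 */
	__asm__ volatile("smc #0"
		: "+r" (x0), "+r" (x1), "+r" (x2), "+r" (x3)
		:
		: "memory");
	return x0;
}
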
 /*
- * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -153,7 +153,14 @@ interrupt_exit_\label:
 	.endm

-	.macro save_x18_to_x29_sp_el0
+	.macro save_x4_to_x29_sp_el0
+	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
+	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
+	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
+	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
+	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
+	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
+	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
 	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
 	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
 	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
@@ -297,34 +304,16 @@ smc_handler32:
 	/* Check whether aarch32 issued an SMC64 */
 	tbnz	x0, #FUNCID_CC_SHIFT, smc_prohibited

-	/*
-	 * Since we're are coming from aarch32, x8-x18 need to be saved as per
-	 * SMC32 calling convention. If a lower EL in aarch64 is making an
-	 * SMC32 call then it must have saved x8-x17 already therein.
-	 */
-	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
-	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
-	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
-	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
-	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
-	/* x4-x7, x18, sp_el0 are saved below */
 smc_handler64:
 	/*
 	 * Populate the parameters for the SMC handler.
 	 * We already have x0-x4 in place. x5 will point to a cookie (not used
 	 * now). x6 will point to the context structure (SP_EL3) and x7 will
-	 * contain flags we need to pass to the handler Hence save x5-x7.
+	 * contain flags we need to pass to the handler.
 	 *
-	 * Note: x4 only needs to be preserved for AArch32 callers but we do it
-	 * for AArch64 callers as well for convenience
+	 * Save x4-x29 and sp_el0. Refer to SMCCC v1.1.
 	 */
-	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
-	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
-	/* Save rest of the gpregs and sp_el0*/
-	save_x18_to_x29_sp_el0
+	save_x4_to_x29_sp_el0
 	mov	x5, xzr
 	mov	x6, sp
...
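
For reference, the register values set up in smc_handler64 (x0-x4 carrying the SMC function ID and arguments, x5 the cookie, x6 the context pointer, x7 the flags) correspond to the runtime service handler signature, cf. the rt_svc_handle_t typedef in TF-A's include/common/runtime_svc.h. The sketch below restates that shape with plain stdint types and a placeholder typedef name rather than quoting the TF-A header verbatim.

#include <stdint.h>

/*
 * Simplified restatement of the handler shape (placeholder name; see
 * rt_svc_handle_t in runtime_svc.h for the authoritative definition):
 *
 *   x0    - smc_fid: the SMC function identifier
 *   x1-x4 - SMC call arguments
 *   x5    - cookie: unused for now, zeroed by smc_handler64
 *   x6    - handle: pointer to the saved register context (SP_EL3)
 *   x7    - flags: call properties, e.g. the caller's security state
 */
typedef uintptr_t (*smc_handler_fn_t)(uint32_t smc_fid,
				      uint64_t x1,
				      uint64_t x2,
				      uint64_t x3,
				      uint64_t x4,
				      void *cookie,
				      void *handle,
				      uint64_t flags);
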