Commit 5b567758 authored by Soby Mathew, committed by TrustedFirmware Code Review

Merge "Fix the CAS spinlock implementation" into integration

parents 81da28c2 c97cba4e
@@ -141,6 +141,15 @@ else
$(error Unknown BRANCH_PROTECTION value ${BRANCH_PROTECTION})
endif
+# USE_SPINLOCK_CAS requires AArch64 build
+ifeq (${USE_SPINLOCK_CAS},1)
+ifneq (${ARCH},aarch64)
+$(error USE_SPINLOCK_CAS requires AArch64)
+else
+$(info USE_SPINLOCK_CAS is an experimental feature)
+endif
+endif
################################################################################
# Toolchain
################################################################################
@@ -690,6 +699,7 @@ $(eval $(call assert_boolean,WARMBOOT_ENABLE_DCACHE_EARLY))
$(eval $(call assert_boolean,BL2_AT_EL3))
$(eval $(call assert_boolean,BL2_IN_XIP_MEM))
$(eval $(call assert_boolean,BL2_INV_DCACHE))
+$(eval $(call assert_boolean,USE_SPINLOCK_CAS))
$(eval $(call assert_numeric,ARM_ARCH_MAJOR))
$(eval $(call assert_numeric,ARM_ARCH_MINOR))
@@ -755,6 +765,7 @@ $(eval $(call add_define,WARMBOOT_ENABLE_DCACHE_EARLY))
$(eval $(call add_define,BL2_AT_EL3))
$(eval $(call add_define,BL2_IN_XIP_MEM))
$(eval $(call add_define,BL2_INV_DCACHE))
+$(eval $(call add_define,USE_SPINLOCK_CAS))
ifeq (${SANITIZE_UB},trap)
$(eval $(call add_define,MONITOR_TRAPS))
@@ -2540,8 +2540,11 @@ Armv8.1-A
This Architecture Extension is targeted when ``ARM_ARCH_MAJOR`` > 8, or when
``ARM_ARCH_MAJOR`` == 8 and ``ARM_ARCH_MINOR`` >= 1.
-- The Compare and Swap instruction is used to implement spinlocks. Otherwise,
-  the load-/store-exclusive instruction pair is used.
+- By default, a load-/store-exclusive instruction pair is used to implement
+  spinlocks. Setting the ``USE_SPINLOCK_CAS`` build option to 1 selects the
+  spinlock implementation based on the ARMv8.1-LSE Compare and Swap instruction.
+  Note that this instruction is only available in the AArch64 execution state,
+  so the option is only available for AArch64 builds.
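To illustrate the bullet above: a generic compare-and-swap loop written in C lowers to either the LSE instruction or an exclusive pair depending on the targeted architecture version. A minimal sketch, assuming GCC/Clang ``__atomic`` builtins; ``demo_lock_t`` and ``demo_try_lock`` are illustrative names, not TF-A interfaces:

#include <stdbool.h>
#include <stdint.h>

typedef struct { volatile uint32_t v; } demo_lock_t; /* illustrative */

/*
 * Compare for 0 with acquire semantics and swap in 1. Built with
 * -march=armv8.1-a, compilers can emit a single CASA instruction here;
 * built for plain Armv8.0, the same loop lowers to an LDAXR/STXR pair.
 */
static bool demo_try_lock(demo_lock_t *l)
{
	uint32_t expected = 0U;

	return __atomic_compare_exchange_n(&l->v, &expected, 1U, false,
					   __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}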
Armv8.2-A
~~~~~~~~~
@@ -817,6 +817,10 @@ Common build options
reduces SRAM usage. Refer to `Library at ROM`_ for further details. Default
is 0.
+- ``USE_SPINLOCK_CAS``: Setting this build flag to 1 selects the spinlock
+  implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
+  Note that this option is experimental and only available for AArch64 builds.
- ``V``: Verbose build. If assigned anything other than 0, the build commands
are printed. Default is 0.
@@ -9,56 +9,38 @@
.globl spin_lock
.globl spin_unlock
-#if ARM_ARCH_AT_LEAST(8, 1)
+#if USE_SPINLOCK_CAS
+#if !ARM_ARCH_AT_LEAST(8, 1)
+#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
+#endif
-/*
- * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
- * Swap instruction.
- */
-# define USE_CAS	1
-/*
- * Lock contenders using CAS, upon failing to acquire the lock, wait with the
- * monitor in open state. Therefore, a normal store upon unlocking won't
- * generate an SEV. Use explicit SEV instruction with CAS unlock.
- */
-# define COND_SEV()	sev
-#else
-# define USE_CAS	0
-/*
- * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
- * with the monitor in exclusive state. A normal store upon unlocking will
- * implicitly generate an event, so no explicit SEV with unlock is required.
- */
-# define COND_SEV()
-#endif
-#if USE_CAS
/*
* Acquire lock using Compare and Swap instruction.
*
- * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
- * 0.
+ * Compare for 0 with acquire semantics, and swap 1. If the lock is not
+ * acquired, use load-exclusive semantics to monitor the address and enter
+ * WFE.
*
* void spin_lock(spinlock_t *lock);
*/
func spin_lock
mov w2, #1
-	sevl
-1:
+1:	mov	w1, wzr
+2:	casa	w1, w2, [x0]
+	cbz	w1, 3f
+	ldxr	w1, [x0]
+	cbz	w1, 2b
	wfe
-	mov	w1, wzr
-	casa	w1, w2, [x0]
-	cbnz	w1, 1b
+	b	1b
+3:
ret
endfunc spin_lock
-#else /* !USE_CAS */
+#else /* !USE_SPINLOCK_CAS */
/*
* Acquire lock using load-/store-exclusive instruction pair.
@@ -76,17 +58,18 @@ l2:	ldaxr	w1, [x0]
ret
endfunc spin_lock
-#endif /* USE_CAS */
+#endif /* USE_SPINLOCK_CAS */
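Only the tail of the retained load-/store-exclusive variant is visible in the hunk above (the l2: ldaxr context line). As a reference for that branch, a C inline-assembly sketch of the exclusive-pair acquire loop; it assumes GCC extended asm on AArch64, and demo_lock_excl / demo_lock_t are illustrative names, not the TF-A API:

#include <stdint.h>

typedef struct { volatile uint32_t v; } demo_lock_t; /* illustrative */

/*
 * Exclusive-pair acquire: WFE until the lock reads as free, then try the
 * store-exclusive; retry from the load if the reservation was lost. SEVL
 * primes the event register so the first WFE falls straight through.
 */
static void demo_lock_excl(demo_lock_t *l)
{
	uint32_t tmp;

	__asm__ volatile(
		"	sevl\n"
		"1:	wfe\n"
		"2:	ldaxr	%w0, %1\n"
		"	cbnz	%w0, 1b\n"
		"	stxr	%w0, %w2, %1\n"
		"	cbnz	%w0, 2b\n"
		: "=&r" (tmp), "+Q" (l->v)
		: "r" (1U)
		: "memory");
}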
/*
* Release lock previously acquired by spin_lock.
*
- * Unconditionally write 0, and conditionally generate an event.
+ * Use store-release to unconditionally clear the spinlock variable.
+ * The store operation generates an event for all cores waiting in WFE
+ * when the address is monitored by the global monitor.
*
* void spin_unlock(spinlock_t *lock);
*/
func spin_unlock
stlr wzr, [x0]
-	COND_SEV()
ret
endfunc spin_unlock
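For the new CAS path as a whole, a hedged C rendering of the acquire loop and the store-release unlock, assuming GCC __atomic builtins. C cannot arm the exclusive monitor the way the ldxr/wfe pairing above does, so the sketch polls with a relaxed load and a yield hint instead; all demo_* names are illustrative, not TF-A interfaces:

#include <stdbool.h>
#include <stdint.h>

typedef struct { volatile uint32_t v; } demo_lock_t; /* illustrative */

static void demo_lock_cas(demo_lock_t *l)
{
	uint32_t expected;

	for (;;) {
		/* casa: compare for 0 with acquire semantics, swap in 1. */
		expected = 0U;
		if (__atomic_compare_exchange_n(&l->v, &expected, 1U, false,
						__ATOMIC_ACQUIRE,
						__ATOMIC_RELAXED))
			return;

		/*
		 * Contended: the assembly arms the exclusive monitor with
		 * LDXR and sleeps in WFE; the unlocking store then wakes the
		 * waiter. A portable stand-in is a polite polling loop.
		 */
		while (__atomic_load_n(&l->v, __ATOMIC_RELAXED) != 0U)
			__asm__ volatile("yield");
	}
}

static void demo_unlock(demo_lock_t *l)
{
	/*
	 * stlr: store-release clears the lock. With waiters parked via the
	 * armed monitor, the store itself generates the wake-up event, which
	 * is why the explicit COND_SEV() is no longer needed.
	 */
	__atomic_store_n(&l->v, 0U, __ATOMIC_RELEASE);
}

A caller pairs them exactly like the assembly API: demo_lock_cas(&lock); ...critical section...; demo_unlock(&lock);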
@@ -234,3 +234,8 @@ else
endif
SANITIZE_UB := off
+# For ARMv8.1 (AArch64) platforms, enabling this option selects the spinlock
+# implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
+# Default: disabled
+USE_SPINLOCK_CAS := 0