/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1))

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the Compare
 * and Swap (CAS) instruction.
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * monitor in open state. Therefore, a normal store upon unlocking won't
 * generate an SEV. Use explicit SEV instruction with CAS unlock.
 */
# define COND_SEV()	sev

#else

# define USE_CAS	0

/*
 * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
 * with the monitor in exclusive state. A normal store upon unlocking will
 * implicitly generate an event, so no explicit SEV with unlock is required.
 */
# define COND_SEV()

#endif

#if USE_CAS

	.arch	armv8.1-a

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
 * 0.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	/* Set a local event so the first WFE falls straight through */
	sevl
1:
	wfe
	mov	w1, wzr
	/*
	 * Compare the lock word at [x0] with 0 (w1) and, if equal, store 1
	 * (w2) with acquire semantics. w1 receives the previous lock value.
	 */
	casa	w1, w2, [x0]
	/* Retry if the lock was already held (previous value non-zero) */
	cbnz	w1, 1b
	ret
endfunc spin_lock

	.arch	armv8-a

#else /* !USE_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	sevl
l1:	wfe
	/* Load-acquire exclusive; if the lock is held, go back to waiting */
l2:	ldaxr	w1, [x0]
	cbnz	w1, l1
	/* Try to claim the lock; w1 is non-zero if the exclusive store failed */
	stxr	w1, w2, [x0]
	cbnz	w1, l2
	ret
endfunc spin_lock

#endif /* USE_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Unconditionally write 0, and conditionally generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]
	COND_SEV()
	ret
endfunc spin_unlock
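
/*
 * Illustrative usage sketch (not part of the original file): a caller is
 * assumed to declare a spinlock_t object, zero-initialised so the lock starts
 * out free (this code treats 0 as "unlocked" and 1 as "locked"), and to
 * bracket the critical section with spin_lock/spin_unlock, e.g. from C:
 *
 *	static spinlock_t lock;
 *
 *	spin_lock(&lock);
 *	... critical section ...
 *	spin_unlock(&lock);
 */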