/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_trylock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, the spin locks are implemented using
 * the Compare and Swap instruction.
 */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the Compare-And-Swap (CASA) instruction.
 * Spins until the lock is successfully acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Attempts to acquire the lock by performing a compare-and-swap of 0 -> 1.
 *   - If the lock is already held, uses LDXR to arm the exclusive monitor,
 *     then waits in WFE until the owner's release store generates an event.
 *   - Loops until the lock is acquired.
 */

func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]
	cbz	w1, 3f
	ldxr	w1, [x0]
	cbz	w1, 2b
	wfe
	b	1b
3:
	ret
endfunc spin_lock

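/*
 * Illustrative only, not part of this build: a C11 sketch of the acquire
 * loop above, assuming the lock is a 32-bit word where 0 means free and 1
 * means held (the C names are hypothetical). CASA maps to a
 * compare-and-swap with acquire semantics; the LDXR/WFE wait path has no
 * portable C equivalent and appears here as a plain retry:
 *
 *	#include <stdatomic.h>
 *	#include <stdint.h>
 *
 *	static void spin_lock_c(_Atomic uint32_t *lock)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0;	// compare against 0 (free) ...
 *		} while (!atomic_compare_exchange_weak_explicit(lock,
 *			&expected, 1,	// ... and swap in 1 (held)
 *			memory_order_acquire, memory_order_relaxed));
 *	}
 */
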
/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock using the CASA instruction without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Description:
 *   - Performs a single compare-and-swap operation.
 *   - If the lock is already held, returns failure immediately.
 */
func spin_trylock
	mov	w1, wzr
	mov	w2, #1
	casa	w1, w2, [x0]
	eor	w0, w1, #1
	ret
endfunc spin_trylock

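/*
 * Illustrative only, not part of this build: the trylock above in C11
 * terms (hypothetical names). It is a single compare-and-swap of 0 -> 1;
 * the EOR in the assembly turns the read-back value (0 on success, 1 if
 * the lock was already held) into the boolean return value:
 *
 *	static int spin_trylock_c(_Atomic uint32_t *lock)
 *	{
 *		uint32_t expected = 0;
 *
 *		// Non-zero iff the swap succeeded, mirroring "eor w0, w1, #1".
 *		return atomic_compare_exchange_strong_explicit(lock,
 *			&expected, 1, memory_order_acquire,
 *			memory_order_relaxed);
 *	}
 */
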
#else /* !USE_SPINLOCK_CAS */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the load-acquire exclusive (LDAXR) and
 * store-exclusive (STXR) instruction pair. Spins until the lock is acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Waits for the lock to be released using WFE.
 *   - Attempts to acquire it by setting the value to 1 using LDAXR/STXR.
 *   - Uses SEVL/WFE to reduce power while waiting.
 */
func spin_lock
	mov	w2, #1
	sevl
l1:	wfe
l2:	ldaxr	w1, [x0]
	cbnz	w1, l1
	stxr	w1, w2, [x0]
	cbnz	w1, l2
	ret
endfunc spin_lock

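/*
 * Illustrative only, not part of this build: on pre-LSE cores the
 * LDAXR/STXR pair is AArch64's load-linked/store-conditional idiom. The
 * loop above behaves roughly like this C11 test-and-test-and-set sketch
 * (hypothetical names; the SEVL/WFE power saving has no portable C
 * equivalent):
 *
 *	static void spin_lock_c(_Atomic uint32_t *lock)
 *	{
 *		// Only attempt the exchange once the lock reads as free,
 *		// like the CBNZ that loops back to the WFE above.
 *		while (atomic_load_explicit(lock,
 *				memory_order_relaxed) != 0 ||
 *		       atomic_exchange_explicit(lock, 1,
 *				memory_order_acquire) != 0)
 *			;	// spin (the assembly sleeps in WFE here)
 *	}
 */
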
/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock once using LDAXR/STXR without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if the lock was successfully acquired
 *        0 if the lock was already held or the exclusive store failed
 *
 * Description:
 *   - Loads the lock value.
 *   - If unlocked (0), attempts to store 1 to acquire it.
 *   - Returns success or failure based on the outcome.
 */
func spin_trylock
	mov	w2, #1
	ldaxr	w1, [x0]
	cbnz	w1, fail
	stxr	w1, w2, [x0]
	cbnz	w1, fail
	mov	w0, #1
	ret
fail:
	mov	w0, #0
	ret
endfunc spin_trylock

#endif /* USE_SPINLOCK_CAS */

/*
 * Function: spin_unlock
 * ---------------------
 * Releases the spinlock previously acquired by spin_lock or spin_trylock.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Uses a store-release (STLR) to unconditionally clear the spinlock
 *     variable.
 *   - The store generates an event for all cores waiting in WFE when the
 *     address is monitored by the global monitor.
 */
func spin_unlock
	stlr	wzr, [x0]
	ret
endfunc spin_unlock

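/*
 * Illustrative only, not part of this build: in C11 terms the unlock is a
 * single release store (hypothetical name). The release ordering keeps
 * every access inside the critical section from being reordered past the
 * store that frees the lock:
 *
 *	static void spin_unlock_c(_Atomic uint32_t *lock)
 *	{
 *		atomic_store_explicit(lock, 0, memory_order_release);
 *	}
 */
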
/*
 * Atomic bit clear and set instructions require FEAT_LSE, which is
 * mandatory from Armv8.1.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire bitlock using atomic bit set on byte. If the original read value
 * has the bit set, use load exclusive semantics to monitor the address and
 * enter WFE.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]
	tst	w2, w1
	b.eq	2f
	ldxrb	w2, [x0]
	tst	w2, w1
	b.eq	1b
	wfe
	b	1b
2:
	ret
endfunc bit_lock

/*
 * Use an atomic bit clear with store-release to unconditionally clear the
 * bitlock variable. The store generates an event for all cores waiting in
 * WFE when the address is monitored by the global monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]
	ret
endfunc bit_unlock

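/*
 * Illustrative only, not part of this build: the pair above in C11 terms
 * (hypothetical names). bit_lock is an atomic OR of the mask with acquire
 * semantics, retried while the bit was already set; bit_unlock is an
 * atomic AND-NOT with release semantics:
 *
 *	static void bit_lock_c(_Atomic uint8_t *lock, uint8_t mask)
 *	{
 *		while (atomic_fetch_or_explicit(lock, mask,
 *				memory_order_acquire) & mask)
 *			;	// bit already set: spin (WFE in the assembly)
 *	}
 *
 *	static void bit_unlock_c(_Atomic uint8_t *lock, uint8_t mask)
 *	{
 *		atomic_fetch_and_explicit(lock, (uint8_t)~mask,
 *			memory_order_release);
 *	}
 */
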
#endif /* ARM_ARCH_AT_LEAST(8, 1) */
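
/*
 * Illustrative only, not part of this build: how a (hypothetical) C caller
 * typically pairs these primitives around a critical section. The type
 * layout and prototypes below mirror TF-A's C-level spinlock interface but
 * are written here as assumptions:
 *
 *	typedef struct spinlock {
 *		volatile uint32_t lock;	// assumed layout: one 32-bit word
 *	} spinlock_t;
 *
 *	extern void spin_lock(spinlock_t *lock);
 *	extern void spin_unlock(spinlock_t *lock);
 *
 *	static spinlock_t console_lock;	// hypothetical lock variable
 *
 *	void log_locked(void (*emit)(void))
 *	{
 *		spin_lock(&console_lock);
 *		emit();			// at most one CPU here at a time
 *		spin_unlock(&console_lock);
 *	}
 */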