/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_trylock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, implement spin locks using the Compare
 * and Swap (CAS) instruction.
 */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the Compare-And-Swap (CASA) instruction.
 * Spins until the lock is successfully acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Attempts to acquire the lock by performing a compare-and-swap of 0 -> 1.
 *   - If the lock is already held, monitors the address with LDXR and waits
 *     in WFE until it is written, instead of spinning on the CAS.
 *   - Loops until the lock is acquired.
 */

func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* Compare against 0, swap in 1, acquire semantics */
	cbz	w1, 3f		/* Old value was 0: lock acquired */
	ldxr	w1, [x0]	/* Monitor the lock address with the exclusive monitor */
	cbz	w1, 2b		/* Released meanwhile: retry the CAS */
	wfe			/* Otherwise sleep until the monitored address is written */
	b	1b
3:
	ret
endfunc spin_lock

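/*
 * For reference, the loop above behaves roughly like the following C sketch
 * (illustrative only, not part of the build). It assumes the GCC/Clang
 * __atomic builtins and that spinlock_t wraps a single 32-bit word named
 * `lock`; the WFE-based wait has no portable C equivalent and is elided.
 *
 *	void spin_lock_sketch(spinlock_t *l)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0U;	// CASA compares against 0, swaps in 1
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1U,
 *				false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 */
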
/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock using the CASA instruction without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if the lock was successfully acquired
 *        0 if the lock was already held
 *
 * Description:
 *   - Performs a single compare-and-swap operation.
 *   - If the lock is already held, returns failure immediately.
 */
func spin_trylock
	mov	w1, wzr
	mov	w2, #1
	casa	w1, w2, [x0]	/* Compare against 0, swap in 1, acquire semantics */
	eor	w0, w1, #1	/* Old value 0 -> return 1 (acquired); 1 -> return 0 */
	ret
endfunc spin_trylock

#else /* !USE_SPINLOCK_CAS */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the load-acquire exclusive (LDAXR) and
 * store-exclusive (STXR) instruction pair. Spins until the lock is acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Waits for the lock to be released using WFE, entered via SEVL so that
 *     the first WFE falls straight through to the first read of the lock.
 *   - Attempts to acquire the lock by writing 1 with the LDAXR/STXR pair.
 *   - Sleeping in WFE rather than polling reduces power while waiting.
 */
func spin_lock
	mov	w2, #1
	sevl			/* Prime the event register: first WFE falls through */
l1:	wfe			/* Wait for an event (a lock release generates one) */
l2:	ldaxr	w1, [x0]	/* Load lock value with acquire, mark address exclusive */
	cbnz	w1, l1		/* Still held: go back to sleep */
	stxr	w1, w2, [x0]	/* Try to store 1; w1 == 0 on success */
	cbnz	w1, l2		/* Exclusive store failed: re-read and retry */
	ret
endfunc spin_lock

/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock once using LDAXR/STXR without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if the lock was successfully acquired
 *        0 if the lock was already held
 *
 * Description:
 *   - Loads the lock value.
 *   - If unlocked (0), attempts to store 1 to acquire it.
 *   - Returns success or failure based on the outcome.
 */
func spin_trylock
	mov	w2, #1
	ldaxr	w1, [x0]	/* Load lock value with acquire, mark address exclusive */
	cbnz	w1, fail	/* Already held: give up */
	stxr	w1, w2, [x0]	/* Try to store 1; w1 == 0 on success */
	cbnz	w1, fail	/* Lost the exclusive access: report failure */
	mov	w0, #1
	ret
fail:
	mov	w0, #0
	ret
endfunc spin_trylock

#endif /* USE_SPINLOCK_CAS */

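/*
 * Caller-side sketch for spin_trylock, applicable to both implementations
 * above (illustrative only; `console_lock` and try_log() are hypothetical,
 * and a C prototype along the lines of bool spin_trylock(spinlock_t *lock)
 * is assumed):
 *
 *	static spinlock_t console_lock;
 *
 *	void try_log(const char *msg)
 *	{
 *		if (spin_trylock(&console_lock)) {
 *			// ... emit the message ...
 *			spin_unlock(&console_lock);
 *		}
 *		// Else the lock is held elsewhere: skip rather than spin.
 *	}
 */
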
/*
 * Function: spin_unlock
 * ---------------------
 * Releases the spinlock previously acquired by spin_lock or spin_trylock.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Uses a store-release to unconditionally clear the spinlock variable.
 *   - The store generates an event for all cores waiting in WFE while the
 *     address is monitored by the global monitor.
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0: release the lock, wake WFE waiters */
	ret
endfunc spin_unlock

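/*
 * Typical caller-side usage from C (illustrative only; `counter_lock`,
 * `counter` and increment_counter() are hypothetical):
 *
 *	static spinlock_t counter_lock;
 *	static unsigned int counter;
 *
 *	void increment_counter(void)
 *	{
 *		spin_lock(&counter_lock);	// Spins until acquired
 *		counter++;			// Critical section
 *		spin_unlock(&counter_lock);	// Wakes any WFE waiters
 *	}
 */
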
/*
 * Atomic bit clear and set instructions require FEAT_LSE, which is
 * mandatory from Armv8.1.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire the bitlock using an atomic bit-set on a byte. If the value read
 * back has the bit already set, use load exclusive semantics to monitor the
 * address and enter WFE.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]	/* Atomically set mask bits; w2 = original value */
	tst	w2, w1		/* Bit was clear before the set? */
	b.eq	2f		/* Yes: lock acquired */
	ldxrb	w2, [x0]	/* Monitor the lock byte with the exclusive monitor */
	tst	w2, w1
	b.eq	1b		/* Bit cleared meanwhile: retry the atomic set */
	wfe			/* Otherwise sleep until the monitored byte is written */
	b	1b
2:
	ret
endfunc bit_lock

/*
 * Use an atomic bit-clear with store-release semantics to unconditionally
 * clear the bitlock variable. The store generates an event for all cores
 * waiting in WFE while the address is monitored by the global monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]	/* Atomically clear mask bits with release semantics */
	ret
endfunc bit_unlock

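/*
 * Caller-side sketch (illustrative only; `dev_locks`, DEV0_LOCK_BIT and
 * dev0_op() are hypothetical): a bitlock packs up to eight independent
 * locks into one byte, each selected by a single-bit mask.
 *
 *	static bitlock_t dev_locks;
 *	#define DEV0_LOCK_BIT	(1U << 0)
 *
 *	void dev0_op(void)
 *	{
 *		bit_lock(&dev_locks, DEV0_LOCK_BIT);
 *		// ... critical section guarded by bit 0 ...
 *		bit_unlock(&dev_locks, DEV0_LOCK_BIT);
 *	}
 */
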
#endif /* ARM_ARCH_AT_LEAST(8, 1) */