/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_trylock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the Compare
 * and Swap instruction.
 */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the Compare-And-Swap (CASA) instruction.
 * Spins until the lock is successfully acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Attempts to acquire the lock by performing a compare-and-swap of 0 -> 1.
 *   - If the lock is already held, uses LDAXR/WFE to efficiently wait.
 *   - Loops until the lock is acquired.
 */

func spin_lock
	mov	w2, #1			/* Value to store when acquiring */
1:	mov	w1, wzr			/* Expected (unlocked) value */
2:	casa	w1, w2, [x0]		/* Try 0 -> 1; w1 = previous value */
	cbz	w1, 3f			/* Was 0: lock acquired */
	ldxr	w1, [x0]		/* Monitor the lock address */
	cbz	w1, 2b			/* Released meanwhile: retry CAS */
	wfe				/* Wait for an unlock event */
	b	1b
3:
	ret
endfunc spin_lock

/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock using the CASA instruction without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Description:
 *   - Performs a single compare-and-swap operation.
 *   - If the lock is already held, returns failure immediately.
 */
func spin_trylock
	mov	w1, wzr			/* Expected (unlocked) value */
	mov	w2, #1			/* Value to store on success */
	casa	w1, w2, [x0]		/* w1 = previous lock value */
	eor	w0, w1, #1		/* Was 0 -> return 1, was 1 -> return 0 */
	ret
endfunc spin_trylock

#else /* !USE_SPINLOCK_CAS */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the load-acquire (LDAXR) and store-exclusive
 * (STXR) instruction pair. Spins until the lock is acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Waits for the lock to be released using WFE.
 *   - Attempts to acquire it by setting the value to 1 using LDAXR/STXR.
 *   - Uses SEVL/WFE to reduce power while waiting.
 */
func spin_lock
	mov	w2, #1			/* Value to store when acquiring */
	sevl				/* Make the first WFE fall through */
l1:	wfe				/* Wait for an unlock event */
l2:	ldaxr	w1, [x0]		/* Load-acquire current lock value */
	cbnz	w1, l1			/* Still held: keep waiting */
	stxr	w1, w2, [x0]		/* Try to claim it exclusively */
	cbnz	w1, l2			/* Store-exclusive failed: retry */
	ret
endfunc spin_lock

/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock once using LDAXR/STXR without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Description:
 *   - Loads the lock value.
 *   - If unlocked (0), attempts to store 1 to acquire it.
 *   - Returns success or failure based on the outcome.
 */
func spin_trylock
	mov	w2, #1			/* Value to store on success */
	ldaxr	w1, [x0]		/* Load-acquire current lock value */
	cbnz	w1, fail		/* Already held: give up */
	stxr	w1, w2, [x0]		/* Try to claim it exclusively */
	cbnz	w1, fail		/* Lost exclusivity: give up */
	mov	w0, #1			/* Lock acquired */
	ret
fail:
	mov	w0, #0			/* Lock not acquired */
	ret
endfunc spin_trylock

#endif /* USE_SPINLOCK_CAS */

/*
 * Function: spin_unlock
 * ---------------------
 * Releases the spinlock previously acquired by spin_lock or spin_trylock.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   Use store-release to unconditionally clear the spinlock variable.
 *   The store operation generates an event to all cores waiting in WFE
 *   when the address is monitored by the global monitor.
 */
func spin_unlock
	stlr	wzr, [x0]
	ret
endfunc spin_unlock
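
/*
 * Usage sketch (illustrative only, not part of this file): the locks above
 * are intended to be taken from C through the declarations in
 * include/lib/spinlock.h. The snippet below assumes the usual TF-A
 * prototypes void spin_lock(spinlock_t *) and void spin_unlock(spinlock_t *),
 * plus a spin_trylock() returning non-zero on success as documented above
 * (its exact C prototype is an assumption here); the remaining names are
 * hypothetical.
 *
 *   #include <lib/spinlock.h>
 *
 *   static spinlock_t example_lock;
 *
 *   void locked_update(void)
 *   {
 *       spin_lock(&example_lock);       // blocks until the lock is owned
 *       // ... critical section ...
 *       spin_unlock(&example_lock);     // release; wakes cores waiting in WFE
 *   }
 *
 *   void opportunistic_update(void)
 *   {
 *       if (spin_trylock(&example_lock) != 0) {  // single attempt, no spin
 *           // ... critical section ...
 *           spin_unlock(&example_lock);
 *       }
 *   }
 */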

/*
 * Atomic bit clear and set instructions require FEAT_LSE which is
 * mandatory from Armv8.1.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire bitlock using atomic bit set on byte. If the original read value
 * has the bit set, use load exclusive semantics to monitor the address and
 * enter WFE.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]		/* Atomically set mask bit; w2 = old byte */
	tst	w2, w1
	b.eq	2f			/* Bit was clear: lock acquired */
	ldxrb	w2, [x0]		/* Monitor the lock byte */
	tst	w2, w1
	b.eq	1b			/* Bit cleared meanwhile: retry */
	wfe				/* Wait for an unlock event */
	b	1b
2:
	ret
endfunc bit_lock

/*
 * Use atomic bit clear store-release to unconditionally clear bitlock variable.
 * Store operation generates an event to all cores waiting in WFE when address
 * is monitored by the global monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]
	ret
endfunc bit_unlock
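
/*
 * Usage sketch (illustrative only, not part of this file): bit_lock() and
 * bit_unlock() guard one bit of a one-byte lock variable, selected by the
 * mask argument, so up to eight independent locks can share a single byte.
 * The snippet below uses the C prototypes quoted above; the bitlock_t type
 * is assumed to come from include/lib/spinlock.h and the remaining names
 * are hypothetical.
 *
 *   #include <stdint.h>
 *   #include <lib/spinlock.h>
 *
 *   static bitlock_t shared_bits;           // one byte, up to 8 bit locks
 *
 *   void with_bit_lock(unsigned int idx)    // idx in the range [0, 7]
 *   {
 *       uint8_t mask = (uint8_t)(1U << idx);
 *
 *       bit_lock(&shared_bits, mask);       // spin until this bit is owned
 *       // ... critical section guarded by bit 'idx' ...
 *       bit_unlock(&shared_bits, mask);     // clear the bit; wakes waiters
 *   }
 */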

#endif /* ARM_ARCH_AT_LEAST(8, 1) */