/*
 * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_trylock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, spin locks are based on the Compare and
 * Swap (CAS) instruction.
 */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the Compare-And-Swap (CASA) instruction.
 * Spins until the lock is successfully acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Attempts to acquire the lock by performing a compare-and-swap of 0 -> 1.
 *   - If the lock is already held, monitors it with LDXR/WFE to wait
 *     efficiently.
 *   - Loops until the lock is acquired.
 */

func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* If [x0] == 0, store 1; w1 returns the old value */
	cbz	w1, 3f		/* Old value was 0: lock acquired */
	ldxr	w1, [x0]	/* Monitor the lock word while it is held */
	cbz	w1, 2b		/* Released in the meantime: retry the CAS */
	wfe			/* Otherwise wait for the owner's store */
	b	1b
3:
	ret
endfunc spin_lock

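/*
 * For reference, the acquire sequence above behaves roughly like the C sketch
 * below. It is illustrative only and not part of the build: the lock is
 * treated as a plain 32-bit word, the GCC/Clang __atomic_compare_exchange_n()
 * builtin stands in for CASA, and the LDXR/WFE wait is approximated by simply
 * retrying.
 *
 *	static inline void spin_lock_sketch(volatile uint32_t *lock)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0U;
 *			// Compare *lock with 0 and, if equal, store 1 (acquire).
 *		} while (!__atomic_compare_exchange_n(lock, &expected, 1U,
 *						      false, __ATOMIC_ACQUIRE,
 *						      __ATOMIC_RELAXED));
 *	}
 */
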
/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock using the CASA instruction without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Description:
 *   - Performs a single compare-and-swap operation.
 *   - If the lock is already held, returns failure immediately.
 */
func spin_trylock
	mov	w1, wzr
	mov	w2, #1
	casa	w1, w2, [x0]
	eor	w0, w1, #1
	ret
endfunc spin_trylock

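/*
 * For reference, a roughly equivalent C sketch (illustrative only, not part of
 * the build). The EOR above relies on the lock word only ever holding 0 or 1,
 * so inverting bit 0 of the old value yields the return code.
 *
 *	static inline uint32_t spin_trylock_sketch(volatile uint32_t *lock)
 *	{
 *		uint32_t expected = 0U;
 *
 *		// Single CAS attempt: 1 on a 0 -> 1 transition, 0 otherwise.
 *		return __atomic_compare_exchange_n(lock, &expected, 1U, false,
 *						   __ATOMIC_ACQUIRE,
 *						   __ATOMIC_RELAXED) ? 1U : 0U;
 *	}
 */
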
#else /* !USE_SPINLOCK_CAS */

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock using the load-acquire (LDAXR) and store-exclusive
 * (STXR) instruction pair. Spins until the lock is acquired.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *   - Waits for the lock to be released using WFE.
 *   - Attempts to acquire it by setting the value to 1 using LDAXR/STXR.
 *   - Uses SEVL/WFE to reduce power while waiting.
 */
func spin_lock
	mov	w2, #1
	sevl
l1:	wfe
l2:	ldaxr	w1, [x0]
	cbnz	w1, l1
	stxr	w1, w2, [x0]
	cbnz	w1, l2
	ret
endfunc spin_lock

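/*
 * For reference, a C approximation of the acquire loop (illustrative only, not
 * part of the build). A compiler typically lowers the builtin to a comparable
 * exclusive-access loop on Armv8.0, but note two differences: the sketch swaps
 * unconditionally instead of storing only after reading zero, and it has no
 * SEVL/WFE power saving.
 *
 *	static inline void spin_lock_sketch(volatile uint32_t *lock)
 *	{
 *		// Spin until a 0 -> 1 transition is observed.
 *		while (__atomic_exchange_n(lock, 1U, __ATOMIC_ACQUIRE) != 0U) {
 *		}
 *	}
 */
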
/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock once using LDAXR/STXR without spinning.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   w0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Description:
 *   - Loads the lock value.
 *   - If unlocked (0), attempts to store 1 to acquire it.
 *   - Returns success or failure based on the outcome.
 */
func spin_trylock
	mov	w2, #1
	ldaxr	w1, [x0]
	cbnz	w1, fail
	stxr	w1, w2, [x0]
	cbnz	w1, fail
	mov	w0, #1
	ret
fail:
	mov	w0, #0
	ret
endfunc spin_trylock

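/*
 * Note: with the exclusive-access sequence above, spin_trylock() can fail even
 * when the lock is free, e.g. if the store-exclusive loses the monitor to a
 * concurrent access. Callers should treat a zero return only as "not
 * acquired", as in this illustrative sketch ('lock' is a placeholder, not part
 * of the build):
 *
 *	if (spin_trylock(&lock) != 0U) {
 *		// ... critical section ...
 *		spin_unlock(&lock);
 *	} else {
 *		// Lock not taken: retry later or use a fallback path.
 *	}
 */
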
#endif /* USE_SPINLOCK_CAS */

/*
 * Function: spin_unlock
 * ---------------------
 * Releases the spinlock previously acquired by spin_lock or spin_trylock.
 *
 * Arguments:
 *   x0 - Pointer to the spinlock variable (spinlock_t *lock)
 *
 * Return:
 *   None
 *
 * Description:
 *     Use store-release to unconditionally clear the spinlock variable.
 *     The store generates an event for all cores waiting in WFE when the
 *     address is monitored by the global monitor.
 */
func spin_unlock
	stlr	wzr, [x0]
	ret
endfunc spin_unlock

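/*
 * Typical usage from C, shown as an illustrative sketch (not part of the
 * build; 'state_lock' and the critical section are placeholders, and
 * spinlock_t is the type referenced in the prototypes above):
 *
 *	static spinlock_t state_lock;
 *
 *	void update_shared_state(void)
 *	{
 *		spin_lock(&state_lock);
 *		// ... access state shared between CPUs ...
 *		spin_unlock(&state_lock);
 *	}
 */
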
/*
 * Atomic bit clear and set instructions require FEAT_LSE, which is
 * mandatory from Armv8.1.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire bitlock using atomic bit set on byte. If the original read value
 * has the bit set, use load exclusive semantics to monitor the address and
 * enter WFE.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]	/* Atomically set the mask bits; w2 returns the old value */
	tst	w2, w1
	b.eq	2f		/* Bit was clear before: lock acquired */
	ldxrb	w2, [x0]	/* Monitor the byte while the bit is held */
	tst	w2, w1
	b.eq	1b		/* Bit cleared in the meantime: retry the atomic set */
	wfe			/* Otherwise wait for the owner's store */
	b	1b
2:
	ret
endfunc bit_lock

/*
 * Use an atomic bit clear with store-release semantics to unconditionally
 * clear the lock bits selected by the mask. The store generates an event for
 * all cores waiting in WFE when the address is monitored by the global
 * monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]
	ret
endfunc bit_unlock

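/*
 * Typical usage from C, shown as an illustrative sketch (not part of the
 * build; 'entry_lock', the mask and the critical section are placeholders,
 * and bitlock_t is the type referenced in the prototypes above):
 *
 *	static bitlock_t entry_lock;
 *
 *	void update_entry(void)
 *	{
 *		bit_lock(&entry_lock, 1U << 2);		// take bit 2 of the byte-wide lock
 *		// ... access the state guarded by this bit ...
 *		bit_unlock(&entry_lock, 1U << 2);	// release the same bit
 *	}
 */
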
#endif /* ARM_ARCH_AT_LEAST(8, 1) */