xref: /rk3399_ARM-atf/lib/locks/exclusive/aarch64/spinlock.S (revision 02a85c1116369488784c5ed4d67f326c22455e94)
112ab697eSSoby Mathew/*
2*02a85c11SAlexei Fedorov * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
312ab697eSSoby Mathew *
482cb2c1aSdp-arm * SPDX-License-Identifier: BSD-3-Clause
512ab697eSSoby Mathew */
612ab697eSSoby Mathew
712ab697eSSoby Mathew#include <asm_macros.S>
812ab697eSSoby Mathew
912ab697eSSoby Mathew	.globl	spin_lock
1012ab697eSSoby Mathew	.globl	spin_unlock
1112ab697eSSoby Mathew
12f45e232aSJeenu Viswambharan#if ARM_ARCH_AT_LEAST(8, 1)
1312ab697eSSoby Mathew
14c877b414SJeenu Viswambharan/*
15c877b414SJeenu Viswambharan * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
16c877b414SJeenu Viswambharan * Swap instruction.
17c877b414SJeenu Viswambharan */
18c877b414SJeenu Viswambharan# define USE_CAS	1
19c877b414SJeenu Viswambharan
20c877b414SJeenu Viswambharan/*
21c877b414SJeenu Viswambharan * Lock contenders using CAS, upon failing to acquire the lock, wait with the
22c877b414SJeenu Viswambharan * monitor in open state. Therefore, a normal store upon unlocking won't
23c877b414SJeenu Viswambharan * generate an SEV. Use explicit SEV instruction with CAS unlock.
24c877b414SJeenu Viswambharan */
25c877b414SJeenu Viswambharan# define COND_SEV()	sev
26c877b414SJeenu Viswambharan
27c877b414SJeenu Viswambharan#else
28c877b414SJeenu Viswambharan
29c877b414SJeenu Viswambharan# define USE_CAS	0
30c877b414SJeenu Viswambharan
31c877b414SJeenu Viswambharan/*
32c877b414SJeenu Viswambharan * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
33c877b414SJeenu Viswambharan * with the monitor in exclusive state. A normal store upon unlocking will
34c877b414SJeenu Viswambharan * implicitly generate an event; so, no explicit SEV with unlock is required.
35c877b414SJeenu Viswambharan */
36c877b414SJeenu Viswambharan# define COND_SEV()
37c877b414SJeenu Viswambharan
38c877b414SJeenu Viswambharan#endif
39c877b414SJeenu Viswambharan
40c877b414SJeenu Viswambharan#if USE_CAS
41c877b414SJeenu Viswambharan
/*
 * Acquire lock using the ARMv8.1-LSE Compare and Swap instruction.
 *
 * Repeatedly compare the lock word against 0 with acquire semantics and,
 * on match, swap in 1. CASA writes the value it read from memory back
 * into w1, so w1 == 0 after the instruction means the lock was free and
 * has now been claimed by this CPU.
 *
 * void spin_lock(spinlock_t *lock);
 *
 * In:      x0 = lock (pointer to 32-bit lock word; 0 = free, 1 = held)
 * Clobber: w1, w2
 */
func spin_lock
	mov	w2, #1			/* Value stored when we take the lock */
	sevl				/* Prime the local event register so the
					 * first wfe below falls straight through */
1:
	wfe				/* Wait for the unlocker's explicit SEV;
					 * CAS leaves the monitor open, so a plain
					 * store would not wake us (see COND_SEV) */
	mov	w1, wzr			/* Expected value: 0 (lock free) */
	casa	w1, w2, [x0]		/* Acquire semantics on success; w1 = old value */
	cbnz	w1, 1b			/* Non-zero: lock was held, wait and retry */
	ret
endfunc spin_lock
60c877b414SJeenu Viswambharan
61c877b414SJeenu Viswambharan#else /* !USE_CAS */
62c877b414SJeenu Viswambharan
/*
 * Acquire lock using a load-acquire/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 *
 * In:      x0 = lock (pointer to 32-bit lock word; 0 = free, 1 = held)
 * Clobber: w1, w2
 */
func spin_lock
	mov	w2, #1			/* Value stored when we take the lock */
	sevl				/* Prime the local event register so the
					 * first wfe falls straight through */
l1:	wfe				/* On re-entry the preceding ldaxr left the
					 * monitor in exclusive state, so the owner's
					 * unlocking store generates the wake event */
l2:	ldaxr	w1, [x0]		/* Read lock word with acquire semantics */
	cbnz	w1, l1			/* Held by someone else: go wait */
	stxr	w1, w2, [x0]		/* Try to claim; w1 = 0 iff store succeeded */
	cbnz	w1, l2			/* Lost exclusivity in between: retry */
	ret
endfunc spin_lock
7812ab697eSSoby Mathew
79c877b414SJeenu Viswambharan#endif /* USE_CAS */
8012ab697eSSoby Mathew
/*
 * Release lock previously acquired by spin_lock.
 *
 * Unconditionally store 0 with release semantics, then conditionally
 * generate an event. COND_SEV() expands to "sev" only in the CAS build,
 * where contenders sleep in wfe with the exclusive monitor open and the
 * plain store alone would never wake them; in the exclusive-pair build
 * the store itself generates the event, so the macro expands to nothing.
 *
 * void spin_unlock(spinlock_t *lock);
 *
 * In:      x0 = lock
 * Clobber: none
 */
func spin_unlock
	stlr	wzr, [x0]		/* Release store: mark lock free */
	COND_SEV()			/* sev in the CAS build, empty otherwise */
	ret
endfunc spin_unlock
93