/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1))

/*
 * When compiled for ARMv8.1 or later, implement spin locks with the Compare
 * and Swap instruction.
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * monitor in open state. Therefore, a normal store upon unlocking won't
 * generate an event. Use an explicit SEV instruction with the CAS unlock.
 */
# define COND_SEV()	sev

#else

# define USE_CAS	0

/*
 * Lock contenders using exclusive pairs, upon failing to acquire the lock,
 * wait with the monitor in exclusive state. A normal store upon unlocking
 * will implicitly generate an event, so no explicit SEV is required on
 * unlock.
 */
# define COND_SEV()

#endif
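
/*
 * Note: ARM_ARCH_MAJOR and ARM_ARCH_MINOR are build options, so the lock
 * flavour is fixed at compile time. For example, a build along these lines
 * would select the CAS variant (a sketch; the platform target is a
 * placeholder):
 *
 *	make PLAT=<platform> ARM_ARCH_MAJOR=8 ARM_ARCH_MINOR=1 all
 */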

#if USE_CAS

	.arch	armv8.1-a

/*
 * Acquire lock using the Compare and Swap instruction.
 *
 * Compare the lock word against 0 with acquire semantics and, if it matches,
 * swap in 1. Wait and retry until the CAS succeeds, i.e. returns 0.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to swap in: 1 means locked */
	sevl			/* Set event register so the first wfe passes */
1:
	wfe			/* Wait for an event before (re)trying */
	mov	w1, wzr		/* Expected value: 0, i.e. unlocked */
	casa	w1, w2, [x0]	/* Acquire CAS; w1 returns the old value */
	cbnz	w1, 1b		/* Lock was held: wait and retry */
	ret
endfunc spin_lock

	.arch	armv8-a
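
/*
 * For illustration, the acquire loop above behaves roughly like the C sketch
 * below. It assumes GCC-style atomic builtins and that spinlock_t wraps a
 * single 32-bit word named lock; the WFE/SEVL event wait has no direct C
 * equivalent and is represented by the bare retry loop.
 *
 *	void spin_lock(spinlock_t *lock)
 *	{
 *		unsigned int expected;
 *
 *		do {
 *			expected = 0;
 *		} while (!__atomic_compare_exchange_n(&lock->lock, &expected,
 *				1, 0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 */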

#else /* !USE_CAS */

/*
 * Acquire lock using a load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to store: 1 means locked */
	sevl			/* Set event register so the first wfe passes */
l1:	wfe			/* Wait while the lock is held */
l2:	ldaxr	w1, [x0]	/* Load-acquire exclusive: read lock state */
	cbnz	w1, l1		/* Held: go back to waiting */
	stxr	w1, w2, [x0]	/* Attempt the claim; w1 == 0 on success */
	cbnz	w1, l2		/* Exclusive store failed: re-read and retry */
	ret
endfunc spin_lock

#endif /* USE_CAS */
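
/*
 * The exclusive-pair loop is, in effect, an acquire test-and-set. A rough C
 * analogue is sketched below (illustrative only, with the same spinlock_t
 * assumption as above; the wfe-based wait is elided):
 *
 *	void spin_lock(spinlock_t *lock)
 *	{
 *		while (__atomic_exchange_n(&lock->lock, 1,
 *				__ATOMIC_ACQUIRE) != 0)
 *			;
 *	}
 */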

/*
 * Release a lock previously acquired by spin_lock.
 *
 * Unconditionally store 0 with release semantics and, only when built for
 * CAS, generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0: mark the lock free */
	COND_SEV()		/* Explicit event needed only for the CAS variant */
	ret
endfunc spin_unlock
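
/*
 * Typical usage from C, with a hypothetical caller and lock name invented
 * for illustration. A statically allocated spinlock_t is zero-initialised,
 * i.e. unlocked.
 *
 *	#include <spinlock.h>
 *
 *	static spinlock_t state_lock;
 *
 *	void update_shared_state(void)
 *	{
 *		spin_lock(&state_lock);
 *		...critical section...
 *		spin_unlock(&state_lock);
 *	}
 */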