/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1))

/*
 * When compiled for ARMv8.1 or later, implement spin locks using the Compare
 * and Swap (CAS) instruction.
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * monitor in open state. Therefore, a normal store upon unlocking won't
 * generate a wake-up event. Use an explicit SEV instruction with the CAS
 * unlock.
 */
# define COND_SEV()	sev
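
/*
 * With this definition, spin_unlock below effectively expands to the
 * following sequence (illustrative expansion):
 *
 *	stlr	wzr, [x0]	// release the lock
 *	sev			// wake up WFE-waiting contenders
 */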

#else

# define USE_CAS	0

/*
 * Lock contenders using exclusive pairs, upon failing to acquire the lock,
 * wait with the monitor in exclusive state. A normal store upon unlocking
 * will implicitly generate an event, so no explicit SEV is required with
 * unlock.
 */
# define COND_SEV()

#endif

#if USE_CAS

	.arch	armv8.1-a

/*
 * Acquire lock using the Compare and Swap instruction.
 *
 * Compare the lock value against 0 with acquire semantics and swap in 1;
 * repeat until CAS observes 0, i.e. until the lock has been acquired.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to store: lock taken */
	sevl			/* Arm the event register so the first WFE falls through */
1:
	wfe			/* Wait for an event before (re)trying */
	mov	w1, wzr		/* Expected value: 0, i.e. lock free */
	casa	w1, w2, [x0]	/* Compare-and-swap with acquire semantics */
	cbnz	w1, 1b		/* Lock was held; wait and retry */
	ret
endfunc spin_lock
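
/*
 * Roughly equivalent C logic for the loop above, as an illustrative sketch
 * only (the `lock` field name is an assumption about spinlock_t's layout,
 * and wfe() stands in for the WFE instruction; the SEVL above arms the
 * event register so the first WFE falls through):
 *
 *	uint32_t expected;
 *
 *	do {
 *		wfe();
 *		expected = 0;
 *	} while (!__atomic_compare_exchange_n(&lock->lock, &expected, 1,
 *				false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 */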

	.arch	armv8-a

#else /* !USE_CAS */

/*
 * Acquire lock using a load-exclusive/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to store: lock taken */
	sevl			/* Arm the event register so the first WFE falls through */
l1:	wfe			/* Wait for an event before re-reading the lock */
l2:	ldaxr	w1, [x0]	/* Load-acquire exclusive the current lock value */
	cbnz	w1, l1		/* Lock is held; go back to waiting */
	stxr	w1, w2, [x0]	/* Try to claim the lock; w1 == 0 on success */
	cbnz	w1, l2		/* Store-exclusive failed; re-read and retry */
	ret
endfunc spin_lock
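
/*
 * Roughly equivalent logic for the loop above, as an illustrative sketch
 * only; load_acquire_ex() and store_ex() are hypothetical stand-ins for
 * LDAXR and STXR, and `lock` is an assumed field name:
 *
 *	for (;;) {
 *		wfe();			// first WFE falls through due to SEVL
 *		while (load_acquire_ex(&lock->lock) == 0)
 *			if (store_ex(&lock->lock, 1) == 0)
 *				return;	// exclusive store succeeded
 *	}
 */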

#endif /* USE_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Unconditionally write 0, and conditionally generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0 to free the lock */
	COND_SEV()		/* Explicit wake-up event, CAS build only */
	ret
endfunc spin_unlock
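
/*
 * Usage sketch from C (hypothetical caller; assumes a spinlock_t
 * zero-initialised to the unlocked state):
 *
 *	static spinlock_t lock;
 *
 *	spin_lock(&lock);
 *	// ... critical section, executed by one CPU at a time ...
 *	spin_unlock(&lock);
 */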