/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * When compiled for ARMv8.1 or later, implement the spin locks with the
 * Compare and Swap (CAS) instruction (see the note on ARM_ARCH_AT_LEAST
 * after this preprocessor block).
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * exclusive monitor in the open state. A normal store on unlock therefore
 * does not generate a wake-up event, so the CAS-based unlock must issue an
 * explicit SEV instruction.
 */
# define COND_SEV()	sev

#else

# define USE_CAS	0

/*
 * Lock contenders using load-/store-exclusive pairs, upon failing to acquire
 * the lock, wait with the exclusive monitor in the exclusive state. A normal
 * store on unlock implicitly generates a wake-up event, so no explicit SEV
 * is required in the unlock path.
 */
# define COND_SEV()

#endif

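/*
 * Note: ARM_ARCH_AT_LEAST() is evaluated from the ARM_ARCH_MAJOR and
 * ARM_ARCH_MINOR build options. The exact definition lives in the common
 * headers; as a sketch (an assumption for illustration, not a quote of the
 * header), it behaves like:
 *
 *   #define ARM_ARCH_AT_LEAST(_maj, _min) \
 *       ((ARM_ARCH_MAJOR > (_maj)) || \
 *        ((ARM_ARCH_MAJOR == (_maj)) && (ARM_ARCH_MINOR >= (_min))))
 *
 * so building with ARM_ARCH_MAJOR=8 and ARM_ARCH_MINOR=1 (or higher) selects
 * the CAS-based spin_lock below.
 */
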
#if USE_CAS

/*
 * Acquire the lock using the Compare and Swap instruction.
 *
 * Compare the lock word against 0 with acquire semantics and, if it is 0,
 * swap in 1. Retry until CAS returns 0.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1			/* Value to store once the lock is taken */
	sevl				/* Make the first WFE fall straight through */
1:
	wfe
	mov	w1, wzr			/* Expected value: 0 (unlocked) */
	casa	w1, w2, [x0]		/* If [x0] == 0, store 1; w1 = old value */
	cbnz	w1, 1b			/* Lock was held; wait and retry */
	ret
endfunc spin_lock

#else /* !USE_CAS */

/*
 * Acquire the lock using a load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1			/* Value to store once the lock is taken */
	sevl				/* Make the first WFE fall straight through */
l1:	wfe
l2:	ldaxr	w1, [x0]		/* Load-acquire exclusive the lock word */
	cbnz	w1, l1			/* Lock held; wait for an event, re-read */
	stxr	w1, w2, [x0]		/* Try to claim the lock; w1 == 0 on success */
	cbnz	w1, l2			/* Exclusive store failed; retry */
	ret
endfunc spin_lock

#endif /* USE_CAS */

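/*
 * Both spin_lock variants above implement the same acquire-semantics
 * compare-and-swap loop; only the wait/wake-up mechanics differ. As an
 * illustration only (the names here are hypothetical, not part of this
 * file), the loop is roughly equivalent to the following C11 atomics
 * sketch:
 *
 *   #include <stdatomic.h>
 *   #include <stdint.h>
 *
 *   typedef struct { _Atomic uint32_t val; } c11_spinlock_t;
 *
 *   static void c11_spin_lock(c11_spinlock_t *l)
 *   {
 *       uint32_t expected;
 *
 *       do {
 *           expected = 0;   // acquire only if currently unlocked
 *       } while (!atomic_compare_exchange_weak_explicit(&l->val, &expected,
 *                    1, memory_order_acquire, memory_order_relaxed));
 *   }
 *
 * The assembly additionally parks waiting PEs in WFE rather than spinning
 * at full speed.
 */
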
/*
 * Release a lock previously acquired by spin_lock.
 *
 * Unconditionally write 0, and conditionally generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]		/* Store-release 0 to free the lock */
	COND_SEV()			/* Wake CAS-based waiters; no-op otherwise */
	ret
endfunc spin_unlock
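
/*
 * Typical usage from C, assuming the spinlock_t definition and prototypes
 * exported by this tree's spinlock header (a 32-bit lock word that must be
 * initialised to 0, i.e. unlocked). The header path and the names below are
 * illustrative assumptions:
 *
 *   #include <lib/spinlock.h>
 *
 *   static spinlock_t counter_lock;      // zero-initialised: unlocked
 *   static unsigned int shared_counter;  // state protected by the lock
 *
 *   void increment_shared_counter(void)
 *   {
 *       spin_lock(&counter_lock);
 *       shared_counter++;                // critical section
 *       spin_unlock(&counter_lock);
 *   }
 *
 * spin_lock() gives acquire semantics and spin_unlock() release semantics,
 * so accesses inside the critical section cannot be reordered outside the
 * lock/unlock pair.
 */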