/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif
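
/*
 * USE_SPINLOCK_CAS is a TF-A build option. As an illustration only (the
 * platform name is a placeholder), it would typically be enabled for an
 * ARMv8.1+ platform with something like:
 *
 *	make PLAT=<platform> ARM_ARCH_MAJOR=8 ARM_ARCH_MINOR=1 \
 *		USE_SPINLOCK_CAS=1
 */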

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
 * Swap instruction.
 */

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare the lock value against 0 with acquire semantics and, if it is 0,
 * swap in 1. If the lock was not acquired, use load-exclusive semantics to
 * monitor the lock address and enter WFE.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
	/* Try to change the lock from 0 to 1 with acquire semantics */
2:	casa	w1, w2, [x0]
	cbz	w1, 3f
	/*
	 * The lock is held. Set the exclusive monitor on the lock address so
	 * that a write to it generates a wake-up event, then wait in WFE and
	 * retry the CAS.
	 */
	ldxr	w1, [x0]
	cbz	w1, 2b
	wfe
	b	1b
3:
	ret
endfunc spin_lock
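
/*
 * For reference only, a rough C-level sketch of the acquire path above,
 * assuming the spinlock_t type wraps a single 32-bit word named 'lock'
 * (an assumption about the struct layout). The WFE-based wait has no
 * portable C equivalent and is shown as a plain retry loop:
 *
 *	void spin_lock(spinlock_t *l)
 *	{
 *		unsigned int expected;
 *
 *		do {
 *			expected = 0U;
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1U,
 *						      false, __ATOMIC_ACQUIRE,
 *						      __ATOMIC_RELAXED));
 *	}
 */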

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	/* Set the local event register so that the first WFE falls through */
	sevl
l1:	wfe
	/* Wait, with acquire semantics, until the lock reads as free */
l2:	ldaxr	w1, [x0]
	cbnz	w1, l1
	/* Try to claim the lock; retry if the exclusive store fails */
	stxr	w1, w2, [x0]
	cbnz	w1, l2
	ret
endfunc spin_lock
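
/*
 * For reference only, a rough functional C equivalent of the loop above,
 * again assuming spinlock_t wraps a 32-bit word named 'lock'. This sketch
 * is a simple test-and-set: unlike the assembly, it does not park the core
 * in WFE and it writes the lock word even while contended.
 *
 *	void spin_lock(spinlock_t *l)
 *	{
 *		while (__atomic_exchange_n(&l->lock, 1U, __ATOMIC_ACQUIRE) != 0U)
 *			;
 *	}
 */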

#endif /* USE_SPINLOCK_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Use a store-release to unconditionally clear the spinlock variable.
 * The store generates an event that wakes all cores waiting in WFE
 * while the lock address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]
	ret
endfunc spin_unlock
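
/*
 * For reference only, an equivalent C form of the release above, under the
 * same assumption that spinlock_t wraps a 32-bit word named 'lock':
 *
 *	void spin_unlock(spinlock_t *l)
 *	{
 *		__atomic_store_n(&l->lock, 0U, __ATOMIC_RELEASE);
 *	}
 */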