xref: /rk3399_ARM-atf/lib/locks/exclusive/aarch32/spinlock.S (revision 0147bef523e27e26c0240fef4b47deca6720566c)
1e33b78a6SSoby Mathew/*
2e33b78a6SSoby Mathew * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3e33b78a6SSoby Mathew *
482cb2c1aSdp-arm * SPDX-License-Identifier: BSD-3-Clause
5e33b78a6SSoby Mathew */
6e33b78a6SSoby Mathew
7e33b78a6SSoby Mathew#include <asm_macros.S>
8e33b78a6SSoby Mathew
9e33b78a6SSoby Mathew	.globl	spin_lock
10e33b78a6SSoby Mathew	.globl	spin_unlock
11e33b78a6SSoby Mathew
#if ARM_ARCH_AT_LEAST(8, 0)
/*
 * According to the ARMv8-A Architecture Reference Manual, "when the global
 * monitor for a PE changes from Exclusive Access state to Open Access state,
 * an event is generated.". This applies to both AArch32 and AArch64 modes of
 * ARMv8-A. As a result, no explicit SEV with unlock is required.
 */
#define COND_SEV()
#else
/*
 * Before ARMv8, releasing the exclusive monitor is not guaranteed to
 * generate an event, so spin_unlock must issue an explicit SEV to wake
 * any PEs parked in WFE inside spin_lock.
 */
#define COND_SEV()	sev
#endif
23e33b78a6SSoby Mathew
/*
 * void spin_lock(spinlock_t *lock)
 *
 * Acquire a spinlock using the AArch32 exclusive monitor.
 * In:      r0 = address of the lock word (0 = free, 1 = held)
 * Clobbers: r1, r2, flags
 *
 * Note: strexeq/cmpeq are skipped when the earlier cmp left NE (lock
 * held), in which case the stale NE flags drive the retry branch.
 */
func spin_lock
	mov	r2, #1		/* r2 = "locked" value to store */
1:
	ldrex	r1, [r0]	/* read lock word, open an exclusive access */
	cmp	r1, #0		/* is the lock free? */
	wfene			/* held: sleep until an event (SEV/monitor clear) */
	strexeq	r1, r2, [r0]	/* free: try to claim it; r1 = 0 on success */
	cmpeq	r1, #0		/* did the exclusive store succeed? */
	bne	1b		/* held, woken, or lost the race: retry */
	dmb			/* acquire barrier: critical section stays after lock */
	bx	lr
endfunc spin_lock
36e33b78a6SSoby Mathew
37e33b78a6SSoby Mathew
/*
 * void spin_unlock(spinlock_t *lock)
 *
 * Release a spinlock: store 0 to the lock word with release semantics,
 * then (pre-v8 only) wake waiters parked in WFE via COND_SEV().
 * In:      r0 = address of the lock word
 * Clobbers: r1
 */
func spin_unlock
	mov	r1, #0
#if ARM_ARCH_AT_LEAST(8, 0)
	/*
	 * STL (ARMv8 AArch32 store-release) orders the critical section
	 * before the store and, by clearing the exclusive monitor,
	 * generates the wake-up event itself (COND_SEV() is empty here).
	 */
	stl	r1, [r0]
#else
	/*
	 * STL does not exist before ARMv8, so build the release sequence
	 * manually: DMB orders the critical-section accesses before the
	 * store, and DSB ensures the store is observable before the
	 * explicit SEV below wakes any PEs waiting in WFE.
	 */
	dmb
	str	r1, [r0]
	dsb
#endif
	COND_SEV()
	bx	lr
endfunc spin_unlock
44