/*
 * Copyright (c) 2025, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <stdbool.h>
#include <stdint.h>

#include <lib/spinlock.h>

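/*
 * Acquire the lock, spinning until it becomes available. Rather than
 * busy-polling, waiters park in WFE and are woken by the event generated
 * when the holder releases the lock.
 */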
void __attribute__((target("arm"))) spin_lock(spinlock_t *lock)
{
	volatile uint32_t *dst = &(lock->lock);
	uint32_t src = 1;
	uint32_t tmp;

	__asm__ volatile (
	"1:\n"
	"	ldrex	%[tmp], [%[dst]]\n"	    /* read the lock word, marking it exclusive */
	"	cmp	%[tmp], #0\n"		    /* is the lock free? */
	"	wfene\n"			    /* held: wait for an event before retrying */
	"	strexeq	%[tmp], %[src], [%[dst]]\n" /* free: try to claim it by storing 1 */
	"	cmpeq	%[tmp], #0\n"		    /* did the exclusive store succeed? */
	"	bne	1b\n"			    /* held or lost the race: start over */
	"	dmb\n"				    /* keep the critical section after the acquire */
	: "+m" (*dst), [tmp] "=&r" (tmp), [src] "+r" (src)
	: [dst] "r" (dst)
	: "memory", "cc");
}

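/*
 * Release the lock and wake up any PEs waiting on it in WFE.
 */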
void __attribute__((target("arm"))) spin_unlock(spinlock_t *lock)
{
	volatile uint32_t *dst = &(lock->lock);
	uint32_t val = 0;

	/*
	 * According to the ARMv8-A Architecture Reference Manual, "when the
	 * global monitor for a PE changes from Exclusive Access state to Open
	 * Access state, an event is generated". This applies to both the
	 * AArch32 and AArch64 states of ARMv8-A, so on ARMv8 no explicit SEV
	 * is required on unlock. ARMv7 does not support the stl instruction,
	 * so it falls back to a plain store bracketed by barriers, followed
	 * by an explicit SEV to wake any waiters.
	 */
	__asm__ volatile (
#if ARM_ARCH_MAJOR == 7
	"dmb\n"				/* complete critical-section accesses first */
	"str	%[val], [%[dst]]\n"	/* release the lock */
	"dsb\n"				/* make the store visible before the event */
	"sev\n"				/* wake up PEs waiting in wfe */
#else
	"stl	%[val], [%[dst]]\n"	/* store-release clears the lock word */
#endif
	: "=m" (*dst)
	: [val] "r" (val), [dst] "r" (dst)
	: "memory");
}

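/*
 * Make a single attempt to acquire the lock. Returns true if the lock was
 * taken and false if it was already held; never spins or blocks.
 */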
bool __attribute__((target("arm"))) spin_trylock(spinlock_t *lock)
{
	volatile uint32_t *dst = &(lock->lock);
	uint32_t src = 1;
	uint32_t tmp;
	bool out;

	__asm__ volatile (
	"ldrex	%[tmp], [%[dst]]\n"	/* read the lock word, marking it exclusive */
	"cmp	%[tmp], #0\n"		/* is the lock free? */
	"strexeq %[tmp], %[src], [%[dst]]\n" /* free: try to claim it by storing 1 */
	"cmpeq	%[tmp], #0\n"		/* did the exclusive store succeed? */
	"dmb\n"				/* acquire barrier (harmless if not taken) */
	"moveq	%[out], #1\n"		/* acquired */
	"movne	%[out], #0\n"		/* already held or lost the race */
	: "+m" (*dst), [tmp] "=&r" (tmp), [out] "=r" (out)
	: [src] "r" (src), [dst] "r" (dst)
	: "memory", "cc");

	return out;
}

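/*
 * Illustrative usage sketch (the lock name is hypothetical, not part of
 * this file): a zero-initialised spinlock_t starts out unlocked, since
 * spin_lock() claims the lock word by writing 1 and spin_unlock() clears
 * it back to 0.
 *
 *	static spinlock_t demo_lock;
 *
 *	spin_lock(&demo_lock);
 *	// ...critical section...
 *	spin_unlock(&demo_lock);
 *
 *	if (spin_trylock(&demo_lock)) {
 *		// ...critical section...
 *		spin_unlock(&demo_lock);
 *	}
 */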