/*
 * Copyright (c) 2016-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_trylock
	.globl	spin_unlock

#if ARM_ARCH_AT_LEAST(8, 0)
/*
 * According to the ARMv8-A Architecture Reference Manual, "when the global
 * monitor for a PE changes from Exclusive Access state to Open Access state,
 * an event is generated". This applies to both AArch32 and AArch64 modes of
 * ARMv8-A. As a result, no explicit SEV with unlock is required.
 */
#define COND_SEV()
#else
#define COND_SEV()	sev
#endif

/*
 * Function: spin_lock
 * -------------------
 * Acquires a spinlock by continuously attempting to set it.
 * Will block (spin) until the lock is successfully acquired.
 *
 * Arguments:
 *   r0 - Pointer to the spinlock variable (uint32_t *lock)
 *
 * Return:
 *   None
 *
 * Clobbers: r1, r2, flags
 *
 * Description:
 *   Blocks until the lock is acquired using LDREX/STREX with WFE for wait.
 *   NOTE: the LDREX..STREX window must stay free of other memory accesses,
 *   and the conditional (.eq/.ne) chain relies on the flags set by each
 *   preceding compare — do not reorder these instructions.
 */
func spin_lock
	mov	r2, #1			/* r2 = locked value to store */
1:
	ldrex	r1, [r0]		/* r1 = *lock; marks exclusive access */
	cmp	r1, #0			/* is the lock free (== 0)? */
	wfene				/* held: sleep until an event; the monitor
					 * clearing on unlock generates one (v8),
					 * or the explicit SEV does (pre-v8) */
	strexeq	r1, r2, [r0]		/* free: try *lock = 1; r1 = 0 on success */
	cmpeq	r1, #0			/* (only if free) did the store succeed? */
	bne	1b			/* lock held or STREX lost the race: retry */
	dmb				/* barrier: critical-section accesses must
					 * not be observed before the lock is taken */
	bx	lr
endfunc spin_lock

/*
 * Function: spin_trylock
 * ----------------------
 * Attempts to acquire the spinlock once without blocking.
 *
 * Arguments:
 *   r0 - Pointer to the spinlock variable (uint32_t *lock)
 *
 * Return:
 *   r0 - 1 if lock was successfully acquired
 *        0 if lock was already held
 *
 * Clobbers: r1, r2, flags
 *
 * Description:
 *   Tries once to acquire the lock using LDREX/STREX. A failed STREX
 *   (exclusive monitor lost to another PE) also reports failure; the
 *   caller may simply retry.
 */
func spin_trylock
	mov	r2, #1			/* r2 = locked value to store */
	ldrex	r1, [r0]		/* r1 = *lock; marks exclusive access */
	cmp	r1, #0			/* is the lock free (== 0)? */
	strexeq	r1, r2, [r0]		/* free: try *lock = 1; r1 = 0 on success */
	cmpeq	r1, #0			/* (only if free) did the store succeed? */
	dmb				/* barrier before entering the critical
					 * section (executed on both outcomes) */
	moveq	r0, #1			/* acquired */
	movne	r0, #0			/* already held, or STREX lost the race */
	bx	lr
endfunc spin_trylock

/*
 * Function: spin_unlock
 * ---------------------
 * Releases the spinlock by clearing its value.
 *
 * Arguments:
 *   r0 - Pointer to the spinlock variable (uint32_t *lock)
 *
 * Return:
 *   None
 *
 * Clobbers: r1
 *
 * Description:
 *   Releases the lock using store-release and sends SEV.
 *   STL's release semantics order all prior critical-section accesses
 *   before the lock is seen as free; clearing the global monitor wakes
 *   WFE waiters on ARMv8, so COND_SEV() expands to SEV only pre-v8.
 */
func spin_unlock
	mov	r1, #0			/* r1 = unlocked value */
	stl	r1, [r0]		/* store-release: *lock = 0 */
	COND_SEV()			/* pre-v8 only: explicitly wake waiters */
	bx	lr
endfunc spin_unlock