xref: /OK3568_Linux_fs/kernel/arch/arm/common/vlock.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * vlock.S - simple voting lock implementation for ARM
 *
 * Created by:	Dave Martin, 2012-08-16
 * Copyright:	(C) 2012-2013  Linaro Limited
 *
 * This algorithm is described in more detail in
 * Documentation/arm/vlocks.rst.
 */
11*4882a593Smuzhiyun
12*4882a593Smuzhiyun#include <linux/linkage.h>
13*4882a593Smuzhiyun#include "vlock.h"
14*4882a593Smuzhiyun
/*
 * Select different code if the voting flags can fit in a single word:
 * when VLOCK_VOTING_SIZE <= 4 the FEW() lines are assembled (one 32-bit
 * load covers all flags); otherwise the MANY() lines are assembled
 * (a word-by-word scan loop over the voting array).
 */
#if VLOCK_VOTING_SIZE > 4
#define FEW(x...)
#define MANY(x...) x
#else
#define FEW(x...) x
#define MANY(x...)
#endif
23*4882a593Smuzhiyun
@ voting lock for first-man coordination
25*4882a593Smuzhiyun
@ voting_begin: announce that this CPU is participating in the current
@ round of voting by setting its per-CPU voting flag byte.
@   \rbase:    lock structure base address
@   \rcpu:     byte offset of this CPU's voting flag within the structure
@   \rscratch: scratch register (clobbered)
.macro voting_begin rbase:req, rcpu:req, rscratch:req
	mov	\rscratch, #1
	strb	\rscratch, [\rbase, \rcpu]
	dmb	@ flag store must be observable before later lock accesses
.endm
31*4882a593Smuzhiyun
@ voting_end: withdraw this CPU from the current round of voting by
@ clearing its per-CPU voting flag, then wake up any CPUs waiting in WFE.
@ The leading DMB orders earlier lock accesses before the flag clear;
@ DSB ST guarantees the store has completed before SEV signals waiters.
@   \rbase:    lock structure base address
@   \rcpu:     byte offset of this CPU's voting flag within the structure
@   \rscratch: scratch register (clobbered)
.macro voting_end rbase:req, rcpu:req, rscratch:req
	dmb
	mov	\rscratch, #0
	strb	\rscratch, [\rbase, \rcpu]
	dsb	st
	sev
.endm
39*4882a593Smuzhiyun
/*
 * The vlock structure must reside in Strongly-Ordered or Device memory.
 * This implementation deliberately eliminates most of the barriers which
 * would be required for other memory types, and assumes that independent
 * writes to neighbouring locations within a cacheline do not interfere
 * with one another.
 */
47*4882a593Smuzhiyun
@ int vlock_trylock(struct vlock *lock, unsigned int cpu)
@
@ Attempt to acquire the voting lock on behalf of this CPU.
@
@ r0: lock structure base
@ r1: CPU ID (0-based index within cluster)
@
@ Returns in r0: 0 if this CPU won the lock, nonzero if it lost.
@ Clobbers: r1-r3, flags.
@
@ Note: the value stored in the owner byte is the CPU's voting-flag
@ offset (VLOCK_VOTING_OFFSET + cpu), not the raw CPU ID; the same
@ adjusted value in r1 is compared against the owner at the end.
ENTRY(vlock_trylock)
	add	r1, r1, #VLOCK_VOTING_OFFSET	@ r1 = offset of our voting flag

	voting_begin	r0, r1, r2		@ raise our flag; implies DMB

	ldrb	r2, [r0, #VLOCK_OWNER_OFFSET]	@ check whether lock is held
	cmp	r2, #VLOCK_OWNER_NONE
	bne	trylock_fail			@ fail if so

	@ Control dependency implies strb not observable before previous ldrb.

	strb	r1, [r0, #VLOCK_OWNER_OFFSET]	@ submit my vote

	voting_end	r0, r1, r2		@ implies DMB

	@ Wait for the current round of voting to finish:
	@ spin (with WFE) until every CPU's voting flag has been cleared.

 MANY(	mov	r3, #VLOCK_VOTING_OFFSET			)
0:
 MANY(	ldr	r2, [r0, r3]					)
 FEW(	ldr	r2, [r0, #VLOCK_VOTING_OFFSET]			)
	cmp	r2, #0				@ any votes still pending in this word?
	wfene					@ if so, sleep until an event (SEV)
	bne	0b
 MANY(	add	r3, r3, #4					)
 MANY(	cmp	r3, #VLOCK_VOTING_OFFSET + VLOCK_VOTING_SIZE	)
 MANY(	bne	0b						)

	@ Check who won:

	dmb					@ order flag reads before the owner read
	ldrb	r2, [r0, #VLOCK_OWNER_OFFSET]
	eor	r0, r1, r2			@ zero if I won, else nonzero
	bx	lr

trylock_fail:
	voting_end	r0, r1, r2		@ withdraw our vote before failing
	mov	r0, #1				@ nonzero indicates that I lost
	bx	lr
ENDPROC(vlock_trylock)
90*4882a593Smuzhiyun
@ void vlock_unlock(struct vlock *lock)
@
@ Release the voting lock: mark it unowned and wake waiters.
@
@ r0: lock structure base
@ Clobbers: r1.
ENTRY(vlock_unlock)
	dmb					@ order critical-section accesses before release
	mov	r1, #VLOCK_OWNER_NONE
	strb	r1, [r0, #VLOCK_OWNER_OFFSET]
	dsb	st				@ ensure the owner clear has completed...
	sev					@ ...before waking CPUs waiting in WFE
	bx	lr
ENDPROC(vlock_unlock)
100