xref: /OK3568_Linux_fs/kernel/arch/arm/lib/bitops.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun #include <asm/assembler.h>
3*4882a593Smuzhiyun #include <asm/unwind.h>
4*4882a593Smuzhiyun 
5*4882a593Smuzhiyun #if __LINUX_ARM_ARCH__ >= 6
6*4882a593Smuzhiyun 	.macro	bitop, name, instr
@
@ bitop - emit a set_bit/clear_bit/change_bit style function (ARMv6+,
@ lock-free via ldrex/strex exclusives).
@ In:	r0 = bit number, r1 = base of word-aligned bitmap
@ \instr = ALU op (orr/bic/eor) combining the word with the bit mask
@ Out:	nothing; clobbers r0-r3, ip, flags
@
7*4882a593Smuzhiyun ENTRY(	\name		)
8*4882a593Smuzhiyun UNWIND(	.fnstart	)
9*4882a593Smuzhiyun 	ands	ip, r1, #3		@ ip = low two bits of pointer
10*4882a593Smuzhiyun 	strbne	r1, [ip]		@ assert word-aligned
11*4882a593Smuzhiyun 	mov	r2, #1			@ seed for the bit mask
12*4882a593Smuzhiyun 	and	r3, r0, #31		@ Get bit offset
13*4882a593Smuzhiyun 	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
14*4882a593Smuzhiyun 	add	r1, r1, r0, lsl #2	@ Get word offset
15*4882a593Smuzhiyun #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
16*4882a593Smuzhiyun 	.arch_extension	mp
17*4882a593Smuzhiyun 	ALT_SMP(W(pldw)	[r1])		@ preload-for-write on SMP (v7 MP ext)
18*4882a593Smuzhiyun 	ALT_UP(W(nop))			@ patched to nop on uniprocessor
19*4882a593Smuzhiyun #endif
20*4882a593Smuzhiyun 	mov	r3, r2, lsl r3		@ r3 = 1 << bit offset (mask)
21*4882a593Smuzhiyun 1:	ldrex	r2, [r1]		@ load-exclusive the containing word
22*4882a593Smuzhiyun 	\instr	r2, r2, r3		@ set/clear/change the bit
23*4882a593Smuzhiyun 	strex	r0, r2, [r1]		@ store-exclusive; r0 = 0 on success
24*4882a593Smuzhiyun 	cmp	r0, #0
25*4882a593Smuzhiyun 	bne	1b			@ lost the exclusive monitor - retry
26*4882a593Smuzhiyun 	bx	lr
27*4882a593Smuzhiyun UNWIND(	.fnend		)
28*4882a593Smuzhiyun ENDPROC(\name		)
29*4882a593Smuzhiyun 	.endm
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun 	.macro	testop, name, instr, store
@
@ testop - emit a test_and_{set,clear,change}_bit style function (ARMv6+).
@ In:	r0 = bit number, r1 = base of word-aligned bitmap
@ \instr = ALU op combining the word with the bit mask
@ \store is not referenced in this (exclusives) variant
@ Out:	r0 = old value of the bit, normalized to 0 or 1
@ Fully ordered: smp_dmb is issued before and after the exclusive loop.
@
32*4882a593Smuzhiyun ENTRY(	\name		)
33*4882a593Smuzhiyun UNWIND(	.fnstart	)
34*4882a593Smuzhiyun 	ands	ip, r1, #3		@ ip = low two bits of pointer
35*4882a593Smuzhiyun 	strbne	r1, [ip]		@ assert word-aligned
36*4882a593Smuzhiyun 	mov	r2, #1			@ seed for the bit mask
37*4882a593Smuzhiyun 	and	r3, r0, #31		@ Get bit offset
38*4882a593Smuzhiyun 	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
39*4882a593Smuzhiyun 	add	r1, r1, r0, lsl #2	@ Get word offset
40*4882a593Smuzhiyun 	mov	r3, r2, lsl r3		@ create mask
41*4882a593Smuzhiyun 	smp_dmb				@ order prior accesses before the RMW
42*4882a593Smuzhiyun #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
43*4882a593Smuzhiyun 	.arch_extension	mp
44*4882a593Smuzhiyun 	ALT_SMP(W(pldw)	[r1])		@ preload-for-write on SMP (v7 MP ext)
45*4882a593Smuzhiyun 	ALT_UP(W(nop))			@ patched to nop on uniprocessor
46*4882a593Smuzhiyun #endif
47*4882a593Smuzhiyun 1:	ldrex	r2, [r1]		@ load-exclusive the containing word
48*4882a593Smuzhiyun 	ands	r0, r2, r3		@ save old value of bit
49*4882a593Smuzhiyun 	\instr	r2, r2, r3		@ toggle bit
50*4882a593Smuzhiyun 	strex	ip, r2, [r1]		@ store-exclusive; ip = 0 on success
51*4882a593Smuzhiyun 	cmp	ip, #0
52*4882a593Smuzhiyun 	bne	1b			@ lost the exclusive monitor - retry
53*4882a593Smuzhiyun 	smp_dmb				@ order the RMW before later accesses
54*4882a593Smuzhiyun 	cmp	r0, #0			@ normalize saved mask bits...
55*4882a593Smuzhiyun 	movne	r0, #1			@ ...to a 0/1 return value
56*4882a593Smuzhiyun 2:	bx	lr			@ (label 2 is unreferenced here)
57*4882a593Smuzhiyun UNWIND(	.fnend		)
58*4882a593Smuzhiyun ENDPROC(\name		)
59*4882a593Smuzhiyun 	.endm
60*4882a593Smuzhiyun #else
61*4882a593Smuzhiyun 	.macro	bitop, name, instr
@
@ bitop - emit a set_bit/clear_bit/change_bit style function (pre-ARMv6).
@ No exclusives available: atomicity comes from disabling IRQs around a
@ plain load/modify/store (sufficient on these uniprocessor cores).
@ In:	r0 = bit number, r1 = base of word-aligned bitmap
@ \instr = ALU op (orr/bic/eor) combining the word with the bit mask
@ Out:	nothing; clobbers r0, r2, r3, ip, flags
@
62*4882a593Smuzhiyun ENTRY(	\name		)
63*4882a593Smuzhiyun UNWIND(	.fnstart	)
64*4882a593Smuzhiyun 	ands	ip, r1, #3		@ ip = low two bits of pointer
65*4882a593Smuzhiyun 	strbne	r1, [ip]		@ assert word-aligned
66*4882a593Smuzhiyun 	and	r2, r0, #31		@ r2 = bit offset within word
67*4882a593Smuzhiyun 	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
68*4882a593Smuzhiyun 	mov	r3, #1
69*4882a593Smuzhiyun 	mov	r3, r3, lsl r2		@ r3 = 1 << bit offset (mask)
70*4882a593Smuzhiyun 	save_and_disable_irqs ip	@ enter critical section; old state in ip
71*4882a593Smuzhiyun 	ldr	r2, [r1, r0, lsl #2]	@ load the containing word
72*4882a593Smuzhiyun 	\instr	r2, r2, r3		@ set/clear/change the bit
73*4882a593Smuzhiyun 	str	r2, [r1, r0, lsl #2]	@ store it back
74*4882a593Smuzhiyun 	restore_irqs ip			@ leave critical section
75*4882a593Smuzhiyun 	ret	lr
76*4882a593Smuzhiyun UNWIND(	.fnend		)
77*4882a593Smuzhiyun ENDPROC(\name		)
78*4882a593Smuzhiyun 	.endm
79*4882a593Smuzhiyun 
80*4882a593Smuzhiyun /**
81*4882a593Smuzhiyun  * testop - implement a test_and_xxx_bit operation.
82*4882a593Smuzhiyun  * @instr: operational instruction
83*4882a593Smuzhiyun  * @store: store instruction
84*4882a593Smuzhiyun  *
85*4882a593Smuzhiyun  * Note: we can trivially conditionalise the store instruction
86*4882a593Smuzhiyun  * to avoid dirtying the data cache.
87*4882a593Smuzhiyun  */
@
@ Pre-ARMv6 variant: atomicity via IRQ disable, not exclusives.
@ In:	r0 = bit number, r1 = base of word-aligned bitmap
@ Out:	r0 = old value of the bit (0 or 1)
@ Note: flags set by the tst below stay live through \instr/\store so
@ that moveq (and a conditional \store) can use them - do not reorder.
@
88*4882a593Smuzhiyun 	.macro	testop, name, instr, store
89*4882a593Smuzhiyun ENTRY(	\name		)
90*4882a593Smuzhiyun UNWIND(	.fnstart	)
91*4882a593Smuzhiyun 	ands	ip, r1, #3		@ ip = low two bits of pointer
92*4882a593Smuzhiyun 	strbne	r1, [ip]		@ assert word-aligned
93*4882a593Smuzhiyun 	and	r3, r0, #31		@ r3 = bit offset within word
94*4882a593Smuzhiyun 	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
95*4882a593Smuzhiyun 	save_and_disable_irqs ip	@ enter critical section; old state in ip
96*4882a593Smuzhiyun 	ldr	r2, [r1, r0, lsl #2]!	@ load word; writeback leaves r1 -> word
97*4882a593Smuzhiyun 	mov	r0, #1			@ provisional return value (bit was set)
98*4882a593Smuzhiyun 	tst	r2, r0, lsl r3		@ Z = old bit clear; flags used below
99*4882a593Smuzhiyun 	\instr	r2, r2, r0, lsl r3	@ set/clear/change the bit
100*4882a593Smuzhiyun 	\store	r2, [r1]		@ store back (may be conditional, e.g. strne)
101*4882a593Smuzhiyun 	moveq	r0, #0			@ old bit was clear -> return 0
102*4882a593Smuzhiyun 	restore_irqs ip			@ leave critical section
103*4882a593Smuzhiyun 	ret	lr
104*4882a593Smuzhiyun UNWIND(	.fnend		)
105*4882a593Smuzhiyun ENDPROC(\name		)
106*4882a593Smuzhiyun 	.endm
107*4882a593Smuzhiyun #endif
108