/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_IRQ_H */
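
/*
 * Illustration only, not part of the original header: with the macros
 * above, ATOMIC_OP_RETURN(add, +=) expands to roughly the following
 * (assuming the usual atomic_t { int counter; } layout), i.e. the whole
 * read-modify-write is made atomic on UP simply by disabling interrupts:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		unsigned long temp, flags;
 *
 *		raw_local_irq_save(flags);
 *		temp = v->counter;
 *		temp += i;
 *		v->counter = temp;
 *		raw_local_irq_restore(flags);
 *
 *		return temp;
 *	}
 */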