/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H

/*
 * SH atomic ops implemented with gUSA-style ("gRB") rollback regions:
 * r0 holds the end address of the critical region, r1 the saved stack
 * pointer, and r15 the negative byte size of the region (the "LOGIN").
 * If the region is interrupted before the final store, the kernel
 * restarts it from its beginning, i.e. from (r0 + r15).
 */

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
}									\

#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return tmp;							\
}

#define ATOMIC_FETCH_OP(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int res, tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-8,   r15     \n\t" /* LOGIN: r15 = size (4 insns = 8 bytes) */ \
		"   mov.l  @%2,   %0      \n\t" /* load old value */	\
		"   mov     %0,   %1      \n\t" /* save old value */	\
		" " #op "   %3,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%2     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp), "=&r" (res), "+r"  (v)			\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return res;							\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_GRB_H */
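/*
 * Illustrative usage sketch, appended for exposition; it is not part of
 * the original header. It shows the caller-visible behavior of the three
 * macro families above, assuming normal kernel context where
 * <linux/atomic.h> and <linux/printk.h> are available. The variable
 * "refcount" and function example_user() are hypothetical names.
 */
#include <linux/atomic.h>
#include <linux/printk.h>

static atomic_t refcount = ATOMIC_INIT(1);

static void example_user(void)
{
	int old;

	/* ATOMIC_OP(add): modifies *v atomically, returns nothing */
	atomic_add(2, &refcount);

	/* ATOMIC_OP_RETURN(sub): returns the value *after* the operation */
	if (atomic_sub_return(3, &refcount) == 0)
		pr_info("refcount hit zero\n");

	/* ATOMIC_FETCH_OP(or): returns the value *before* the operation */
	old = atomic_fetch_or(0x4, &refcount);
	(void)old;
}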