Lines matching refs: s64

(Cross-reference listing: the leading number on each hit is the line number in the searched file, and "in func()" names the enclosing function. Every hit below falls in the arch_atomic64_*() fallback definitions, apparently the kernel's generated atomic fallback header.)
1188 static __always_inline s64
1198 arch_atomic64_set_release(atomic64_t *v, s64 i) in arch_atomic64_set_release()
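The two hits above (1188, 1198) are the acquire/release ordered read/set pair. The listing shows only the matching lines; a sketch of what the surrounding fallbacks plausibly look like, built on the kernel's smp_load_acquire()/smp_store_release() helpers. The function at 1188 is assumed to be arch_atomic64_read_acquire(); its name is not visible in the hits.

	static __always_inline s64
	arch_atomic64_read_acquire(const atomic64_t *v)
	{
		return smp_load_acquire(&(v)->counter);
	}

	static __always_inline void
	arch_atomic64_set_release(atomic64_t *v, s64 i)
	{
		/* pairs with arch_atomic64_read_acquire() */
		smp_store_release(&(v)->counter, i);
	}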
1212 static __always_inline s64
1213 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_add_return_acquire()
1215 s64 ret = arch_atomic64_add_return_relaxed(i, v); in arch_atomic64_add_return_acquire()
1223 static __always_inline s64
1224 arch_atomic64_add_return_release(s64 i, atomic64_t *v) in arch_atomic64_add_return_release()
1233 static __always_inline s64
1234 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
1236 s64 ret; in arch_atomic64_add_return()
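The add_return hits (1212-1236) show the ordered variants being composed from the _relaxed primitive. Reconstructed from the fragments at 1213-1215, the _acquire variant runs the relaxed op and then an acquire fence; __atomic_acquire_fence() is the fallback header's own helper, recalled from memory rather than taken from the listing:

	static __always_inline s64
	arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
	{
		s64 ret = arch_atomic64_add_return_relaxed(i, v);
		__atomic_acquire_fence();	/* order the op before later accesses */
		return ret;
	}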
1254 static __always_inline s64
1255 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_acquire()
1257 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); in arch_atomic64_fetch_add_acquire()
1265 static __always_inline s64
1266 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_release()
1275 static __always_inline s64
1276 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
1278 s64 ret; in arch_atomic64_fetch_add()
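The lone "s64 ret;" at 1278 belongs to the fully ordered variant, which brackets the relaxed op with full fences. A sketch, again assuming the fallback header's __atomic_pre_full_fence()/__atomic_post_full_fence() helpers:

	static __always_inline s64
	arch_atomic64_fetch_add(s64 i, atomic64_t *v)
	{
		s64 ret;
		__atomic_pre_full_fence();
		ret = arch_atomic64_fetch_add_relaxed(i, v);
		__atomic_post_full_fence();
		return ret;
	}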
1296 static __always_inline s64
1297 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_sub_return_acquire()
1299 s64 ret = arch_atomic64_sub_return_relaxed(i, v); in arch_atomic64_sub_return_acquire()
1307 static __always_inline s64
1308 arch_atomic64_sub_return_release(s64 i, atomic64_t *v) in arch_atomic64_sub_return_release()
1317 static __always_inline s64
1318 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
1320 s64 ret; in arch_atomic64_sub_return()
1338 static __always_inline s64
1339 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_acquire()
1341 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); in arch_atomic64_fetch_sub_acquire()
1349 static __always_inline s64
1350 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_release()
1359 static __always_inline s64
1360 arch_atomic64_fetch_sub(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub()
1362 s64 ret; in arch_atomic64_fetch_sub()
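The sub_return/fetch_sub hits (1296-1362) instantiate the same template around arch_atomic64_sub_return_relaxed()/arch_atomic64_fetch_sub_relaxed(). The _release shape, which the hits above never spell out, puts the fence before the relaxed op; a sketch:

	static __always_inline s64
	arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
	{
		__atomic_release_fence();	/* order earlier accesses before the op */
		return arch_atomic64_fetch_sub_relaxed(i, v);
	}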
1390 static __always_inline s64
1399 static __always_inline s64
1408 static __always_inline s64
1417 static __always_inline s64
1428 static __always_inline s64
1431 s64 ret = arch_atomic64_inc_return_relaxed(v); in arch_atomic64_inc_return_acquire()
1439 static __always_inline s64
1449 static __always_inline s64
1452 s64 ret; in arch_atomic64_inc_return()
1471 static __always_inline s64
1480 static __always_inline s64
1489 static __always_inline s64
1498 static __always_inline s64
1509 static __always_inline s64
1512 s64 ret = arch_atomic64_fetch_inc_relaxed(v); in arch_atomic64_fetch_inc_acquire()
1520 static __always_inline s64
1530 static __always_inline s64
1533 s64 ret; in arch_atomic64_fetch_inc()
1561 static __always_inline s64
1570 static __always_inline s64
1579 static __always_inline s64
1588 static __always_inline s64
1599 static __always_inline s64
1602 s64 ret = arch_atomic64_dec_return_relaxed(v); in arch_atomic64_dec_return_acquire()
1610 static __always_inline s64
1620 static __always_inline s64
1623 s64 ret; in arch_atomic64_dec_return()
1642 static __always_inline s64
1651 static __always_inline s64
1660 static __always_inline s64
1669 static __always_inline s64
1680 static __always_inline s64
1683 s64 ret = arch_atomic64_fetch_dec_relaxed(v); in arch_atomic64_fetch_dec_acquire()
1691 static __always_inline s64
1701 static __always_inline s64
1704 s64 ret; in arch_atomic64_fetch_dec()
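The inc/dec hits (1390-1704) likely come in two layers: the bare declarations (1390-1417 and the matching fetch_inc/dec clusters) define the whole family as constant-1 wrappers over add/sub when the arch provides none, while the fence-composed variants (e.g. 1431, 1512, 1602, 1683) rebuild ordering from _relaxed exactly as above. A sketch of the wrapper layer, reconstructed from memory:

	static __always_inline s64
	arch_atomic64_inc_return(atomic64_t *v)
	{
		return arch_atomic64_add_return(1, v);
	}

	static __always_inline s64
	arch_atomic64_fetch_dec(atomic64_t *v)
	{
		return arch_atomic64_fetch_sub(1, v);
	}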
1722 static __always_inline s64
1723 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_acquire()
1725 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); in arch_atomic64_fetch_and_acquire()
1733 static __always_inline s64
1734 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_release()
1743 static __always_inline s64
1744 arch_atomic64_fetch_and(s64 i, atomic64_t *v) in arch_atomic64_fetch_and()
1746 s64 ret; in arch_atomic64_fetch_and()
1759 arch_atomic64_andnot(s64 i, atomic64_t *v) in arch_atomic64_andnot()
1774 static __always_inline s64
1775 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
1783 static __always_inline s64
1784 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1792 static __always_inline s64
1793 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
1801 static __always_inline s64
1802 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_relaxed()
1812 static __always_inline s64
1813 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1815 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); in arch_atomic64_fetch_andnot_acquire()
1823 static __always_inline s64
1824 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
1833 static __always_inline s64
1834 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
1836 s64 ret; in arch_atomic64_fetch_andnot()
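andnot is the one bitwise op without a direct primitive to fall back on: when the arch lacks it, the header plausibly defines it by complementing the mask and reusing and (hits 1759-1802); the definitions at 1812-1836 then fence-compose ordering as for the arithmetic ops, and the fetch_or/fetch_xor hits that follow reuse the same template. A sketch of the complement-based layer:

	static __always_inline void
	arch_atomic64_andnot(s64 i, atomic64_t *v)
	{
		arch_atomic64_and(~i, v);	/* clear the bits set in i */
	}

	static __always_inline s64
	arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
	{
		return arch_atomic64_fetch_and(~i, v);
	}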
1854 static __always_inline s64
1855 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_acquire()
1857 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); in arch_atomic64_fetch_or_acquire()
1865 static __always_inline s64
1866 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_release()
1875 static __always_inline s64
1876 arch_atomic64_fetch_or(s64 i, atomic64_t *v) in arch_atomic64_fetch_or()
1878 s64 ret; in arch_atomic64_fetch_or()
1896 static __always_inline s64
1897 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_acquire()
1899 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); in arch_atomic64_fetch_xor_acquire()
1907 static __always_inline s64
1908 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_release()
1917 static __always_inline s64
1918 arch_atomic64_fetch_xor(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor()
1920 s64 ret; in arch_atomic64_fetch_xor()
1938 static __always_inline s64
1939 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i) in arch_atomic64_xchg_acquire()
1941 s64 ret = arch_atomic64_xchg_relaxed(v, i); in arch_atomic64_xchg_acquire()
1949 static __always_inline s64
1950 arch_atomic64_xchg_release(atomic64_t *v, s64 i) in arch_atomic64_xchg_release()
1959 static __always_inline s64
1960 arch_atomic64_xchg(atomic64_t *v, s64 i) in arch_atomic64_xchg()
1962 s64 ret; in arch_atomic64_xchg()
1980 static __always_inline s64
1981 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_acquire()
1983 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); in arch_atomic64_cmpxchg_acquire()
1991 static __always_inline s64
1992 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_release()
2001 static __always_inline s64
2002 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg()
2004 s64 ret; in arch_atomic64_cmpxchg()
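The xchg/cmpxchg hits (1938-2004) are, once more, the acquire/release/full fence template wrapped around arch_atomic64_xchg_relaxed() and arch_atomic64_cmpxchg_relaxed(); only the operand order differs, with the atomic64_t pointer coming first.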
2024 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
2026 s64 r, o = *old; in arch_atomic64_try_cmpxchg()
2037 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2039 s64 r, o = *old; in arch_atomic64_try_cmpxchg_acquire()
2050 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2052 s64 r, o = *old; in arch_atomic64_try_cmpxchg_release()
2063 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_relaxed()
2065 s64 r, o = *old; in arch_atomic64_try_cmpxchg_relaxed()
2078 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2089 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2099 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
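try_cmpxchg also has two fallback layers: 2024-2065 synthesize it from plain cmpxchg when the arch has none, and 2078-2099 fence-compose it from _relaxed. The first layer is worth spelling out, since it is what gives try_cmpxchg its by-reference update of *old. A sketch reconstructed around the visible "s64 r, o = *old;" fragments; likely()/unlikely() are the usual kernel branch hints:

	static __always_inline bool
	arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
	{
		s64 r, o = *old;
		r = arch_atomic64_cmpxchg(v, o, new);
		if (unlikely(r != o))
			*old = r;	/* failed: report the value actually seen */
		return likely(r == o);
	}

On failure *old already holds the fresh value, so callers can retry in a loop without re-reading v themselves; the conditional ops below all rely on this.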
2123 arch_atomic64_sub_and_test(s64 i, atomic64_t *v) in arch_atomic64_sub_and_test()
2175 arch_atomic64_add_negative(s64 i, atomic64_t *v) in arch_atomic64_add_negative()
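sub_and_test (2123) and add_negative (2175) are boolean predicates over the fully ordered return value; their fallbacks are plausibly one-liners:

	static __always_inline bool
	arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
	{
		return arch_atomic64_sub_return(i, v) == 0;
	}

	static __always_inline bool
	arch_atomic64_add_negative(s64 i, atomic64_t *v)
	{
		return arch_atomic64_add_return(i, v) < 0;
	}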
2192 static __always_inline s64
2193 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
2195 s64 c = arch_atomic64_read(v); in arch_atomic64_fetch_add_unless()
2218 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_add_unless()
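fetch_add_unless (2192-2195) is the first of the conditional ops: a plain read followed by a try_cmpxchg loop that gives up once the excluded value u is seen, with add_unless (2218) as its boolean wrapper. Reconstructed around the visible read at 2195:

	static __always_inline s64
	arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
	{
		s64 c = arch_atomic64_read(v);

		do {
			if (unlikely(c == u))
				break;	/* hit the excluded value: don't add */
		} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

		return c;
	}

	static __always_inline bool
	arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
	{
		return arch_atomic64_fetch_add_unless(v, a, u) != u;
	}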
2245 s64 c = arch_atomic64_read(v); in arch_atomic64_inc_unless_negative()
2261 s64 c = arch_atomic64_read(v); in arch_atomic64_dec_unless_positive()
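inc_unless_negative (2245) and dec_unless_positive (2261) use the same read-then-try_cmpxchg loop with a sign check as the guard; dec_unless_positive mirrors the sketch below with "c > 0" and "c - 1":

	static __always_inline bool
	arch_atomic64_inc_unless_negative(atomic64_t *v)
	{
		s64 c = arch_atomic64_read(v);

		do {
			if (unlikely(c < 0))
				return false;	/* already negative: refuse */
		} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

		return true;
	}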
2274 static __always_inline s64
2277 s64 dec, c = arch_atomic64_read(v); in arch_atomic64_dec_if_positive()
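dec_if_positive (2274-2277) closes the section. Unlike the _unless ops it returns the decremented value, negative when the decrement was refused, which fits the visible "s64 dec, c = arch_atomic64_read(v);" at 2277; a sketch of the likely body:

	static __always_inline s64
	arch_atomic64_dec_if_positive(atomic64_t *v)
	{
		s64 dec, c = arch_atomic64_read(v);

		do {
			dec = c - 1;
			if (unlikely(dec < 0))
				break;	/* refuse to go below zero */
		} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

		return dec;
	}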