Lines matching refs: s64

1258 static __always_inline s64
1268 arch_atomic64_set_release(atomic64_t *v, s64 i) in arch_atomic64_set_release()
1282 static __always_inline s64
1283 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_add_return_acquire()
1285 s64 ret = arch_atomic64_add_return_relaxed(i, v); in arch_atomic64_add_return_acquire()
1293 static __always_inline s64
1294 arch_atomic64_add_return_release(s64 i, atomic64_t *v) in arch_atomic64_add_return_release()
1303 static __always_inline s64
1304 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
1306 s64 ret; in arch_atomic64_add_return()
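
The add_return hits above follow the generated-fallback pattern that repeats through the rest of this listing (fetch_add, sub_return, fetch_sub, the inc/dec families, and the bitwise fetch ops): when an architecture supplies only the _relaxed form, the acquire, release, and fully ordered variants are synthesized around it with explicit fences. Assuming these matches come from the generated header include/linux/atomic/atomic-arch-fallback.h (the symbol names, the "s64 ret" locals, and the line layout all fit that file), the arch_atomic64_add_return family reconstructs roughly as the sketch below; treat it as representative, not verbatim.

    static __always_inline s64
    arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
    {
            /* relaxed op first, then a fence so later accesses stay after it */
            s64 ret = arch_atomic64_add_return_relaxed(i, v);
            __atomic_acquire_fence();
            return ret;
    }

    static __always_inline s64
    arch_atomic64_add_return_release(s64 i, atomic64_t *v)
    {
            /* fence first, so earlier accesses complete before the op */
            __atomic_release_fence();
            return arch_atomic64_add_return_relaxed(i, v);
    }

    static __always_inline s64
    arch_atomic64_add_return(s64 i, atomic64_t *v)
    {
            /* fully ordered: fences on both sides of the relaxed op */
            s64 ret;
            __atomic_pre_full_fence();
            ret = arch_atomic64_add_return_relaxed(i, v);
            __atomic_post_full_fence();
            return ret;
    }

This is why each s64-returning operation appears three or four times in the listing: every ordering flavor is a distinct generated function.
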
1324 static __always_inline s64
1325 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_acquire()
1327 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); in arch_atomic64_fetch_add_acquire()
1335 static __always_inline s64
1336 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_release()
1345 static __always_inline s64
1346 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
1348 s64 ret; in arch_atomic64_fetch_add()
1366 static __always_inline s64
1367 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_sub_return_acquire()
1369 s64 ret = arch_atomic64_sub_return_relaxed(i, v); in arch_atomic64_sub_return_acquire()
1377 static __always_inline s64
1378 arch_atomic64_sub_return_release(s64 i, atomic64_t *v) in arch_atomic64_sub_return_release()
1387 static __always_inline s64
1388 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
1390 s64 ret; in arch_atomic64_sub_return()
1408 static __always_inline s64
1409 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_acquire()
1411 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); in arch_atomic64_fetch_sub_acquire()
1419 static __always_inline s64
1420 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_release()
1429 static __always_inline s64
1430 arch_atomic64_fetch_sub(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub()
1432 s64 ret; in arch_atomic64_fetch_sub()
1460 static __always_inline s64
1469 static __always_inline s64
1478 static __always_inline s64
1487 static __always_inline s64
1498 static __always_inline s64
1501 s64 ret = arch_atomic64_inc_return_relaxed(v); in arch_atomic64_inc_return_acquire()
1509 static __always_inline s64
1519 static __always_inline s64
1522 s64 ret; in arch_atomic64_inc_return()
1541 static __always_inline s64
1550 static __always_inline s64
1559 static __always_inline s64
1568 static __always_inline s64
1579 static __always_inline s64
1582 s64 ret = arch_atomic64_fetch_inc_relaxed(v); in arch_atomic64_fetch_inc_acquire()
1590 static __always_inline s64
1600 static __always_inline s64
1603 s64 ret; in arch_atomic64_fetch_inc()
1631 static __always_inline s64
1640 static __always_inline s64
1649 static __always_inline s64
1658 static __always_inline s64
1669 static __always_inline s64
1672 s64 ret = arch_atomic64_dec_return_relaxed(v); in arch_atomic64_dec_return_acquire()
1680 static __always_inline s64
1690 static __always_inline s64
1693 s64 ret; in arch_atomic64_dec_return()
1712 static __always_inline s64
1721 static __always_inline s64
1730 static __always_inline s64
1739 static __always_inline s64
1750 static __always_inline s64
1753 s64 ret = arch_atomic64_fetch_dec_relaxed(v); in arch_atomic64_fetch_dec_acquire()
1761 static __always_inline s64
1771 static __always_inline s64
1774 s64 ret; in arch_atomic64_fetch_dec()
1792 static __always_inline s64
1793 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_acquire()
1795 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); in arch_atomic64_fetch_and_acquire()
1803 static __always_inline s64
1804 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_release()
1813 static __always_inline s64
1814 arch_atomic64_fetch_and(s64 i, atomic64_t *v) in arch_atomic64_fetch_and()
1816 s64 ret; in arch_atomic64_fetch_and()
1829 arch_atomic64_andnot(s64 i, atomic64_t *v) in arch_atomic64_andnot()
1844 static __always_inline s64
1845 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
1853 static __always_inline s64
1854 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1862 static __always_inline s64
1863 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
1871 static __always_inline s64
1872 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_relaxed()
1882 static __always_inline s64
1883 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1885 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); in arch_atomic64_fetch_andnot_acquire()
1893 static __always_inline s64
1894 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
1903 static __always_inline s64
1904 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
1906 s64 ret; in arch_atomic64_fetch_andnot()
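
arch_atomic64_andnot() at 1829 is the lone void-returning entry in this stretch; its s64 hit is just the parameter. Where an architecture lacks a native andnot, the fallback is defined via the and ops with a complemented mask; a sketch under the same file assumption as above:

    static __always_inline void
    arch_atomic64_andnot(s64 i, atomic64_t *v)
    {
            /* andnot(i, v) == and with the complemented mask */
            arch_atomic64_and(~i, v);
    }

    static __always_inline s64
    arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
    {
            return arch_atomic64_fetch_and(~i, v);
    }

The doubled fetch_andnot_acquire/_release entries (1854/1863 versus 1883/1894) are not listing noise: the header carries two preprocessor branches, one taken when the architecture provides arch_atomic64_fetch_andnot_relaxed and one when it does not, and both branches match.
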
1924 static __always_inline s64
1925 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_acquire()
1927 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); in arch_atomic64_fetch_or_acquire()
1935 static __always_inline s64
1936 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_release()
1945 static __always_inline s64
1946 arch_atomic64_fetch_or(s64 i, atomic64_t *v) in arch_atomic64_fetch_or()
1948 s64 ret; in arch_atomic64_fetch_or()
1966 static __always_inline s64
1967 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_acquire()
1969 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); in arch_atomic64_fetch_xor_acquire()
1977 static __always_inline s64
1978 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_release()
1987 static __always_inline s64
1988 arch_atomic64_fetch_xor(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor()
1990 s64 ret; in arch_atomic64_fetch_xor()
2008 static __always_inline s64
2009 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i) in arch_atomic64_xchg_acquire()
2011 s64 ret = arch_atomic64_xchg_relaxed(v, i); in arch_atomic64_xchg_acquire()
2019 static __always_inline s64
2020 arch_atomic64_xchg_release(atomic64_t *v, s64 i) in arch_atomic64_xchg_release()
2029 static __always_inline s64
2030 arch_atomic64_xchg(atomic64_t *v, s64 i) in arch_atomic64_xchg()
2032 s64 ret; in arch_atomic64_xchg()
2050 static __always_inline s64
2051 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_acquire()
2053 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); in arch_atomic64_cmpxchg_acquire()
2061 static __always_inline s64
2062 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_release()
2071 static __always_inline s64
2072 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg()
2074 s64 ret; in arch_atomic64_cmpxchg()
2094 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
2096 s64 r, o = *old; in arch_atomic64_try_cmpxchg()
2107 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2109 s64 r, o = *old; in arch_atomic64_try_cmpxchg_acquire()
2120 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2122 s64 r, o = *old; in arch_atomic64_try_cmpxchg_release()
2133 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_relaxed()
2135 s64 r, o = *old; in arch_atomic64_try_cmpxchg_relaxed()
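
The try_cmpxchg hits at 2094-2135 all open with the same "s64 r, o = *old;" idiom. This is the fallback that layers the boolean try_ form over plain cmpxchg, writing the observed value back through *old on failure so callers can retry without re-reading; reconstructed:

    static __always_inline bool
    arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
    {
            s64 r, o = *old;

            r = arch_atomic64_cmpxchg(v, o, new);
            if (unlikely(r != o))
                    *old = r;       /* report what the variable actually held */
            return likely(r == o);
    }

The _acquire/_release/_relaxed variants at 2107/2120/2133 differ only in which cmpxchg flavor they call, and the second group at 2148-2169 below is the fence-based synthesis from the _relaxed form, mirroring the add_return pattern shown earlier.
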
2148 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2159 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2169 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
2193 arch_atomic64_sub_and_test(s64 i, atomic64_t *v) in arch_atomic64_sub_and_test()
2245 arch_atomic64_add_negative(s64 i, atomic64_t *v) in arch_atomic64_add_negative()
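
sub_and_test at 2193 and add_negative at 2245 take an s64 operand but return bool; in the fallback header they are thin predicates over the _return ops, roughly:

    static __always_inline bool
    arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
    {
            /* true iff the subtraction left the counter at exactly zero */
            return arch_atomic64_sub_return(i, v) == 0;
    }

    static __always_inline bool
    arch_atomic64_add_negative(s64 i, atomic64_t *v)
    {
            /* true iff the addition drove the counter negative */
            return arch_atomic64_add_return(i, v) < 0;
    }
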
2262 static __always_inline s64
2263 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
2265 s64 c = arch_atomic64_read(v); in arch_atomic64_fetch_add_unless()
2288 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_add_unless()
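
fetch_add_unless at 2262-2265 is the read-then-try_cmpxchg loop behind the conditional-add helpers, and add_unless at 2288 merely compares its result against the forbidden value u. A sketch, again assuming the generated fallback header:

    static __always_inline s64
    arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
    {
            s64 c = arch_atomic64_read(v);

            do {
                    if (unlikely(c == u))
                            break;  /* hit the forbidden value: don't add */
            } while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

            return c;       /* value observed before any addition */
    }

    static __always_inline bool
    arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
    {
            return arch_atomic64_fetch_add_unless(v, a, u) != u;
    }
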
2315 s64 c = arch_atomic64_read(v); in arch_atomic64_inc_unless_negative()
2331 s64 c = arch_atomic64_read(v); in arch_atomic64_dec_unless_positive()
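
inc_unless_negative (2315) and dec_unless_positive (2331) both start from the same "s64 c = arch_atomic64_read(v);" and run mirror-image try_cmpxchg loops; a sketch of the first, with the second obtained by flipping the test and the increment:

    static __always_inline bool
    arch_atomic64_inc_unless_negative(atomic64_t *v)
    {
            s64 c = arch_atomic64_read(v);

            do {
                    if (unlikely(c < 0))
                            return false;   /* already negative: refuse */
            } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

            return true;
    }
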
2344 static __always_inline s64
2347 s64 dec, c = arch_atomic64_read(v); in arch_atomic64_dec_if_positive()
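
dec_if_positive at 2344-2347 needs two s64 locals because it returns the decremented value (which is allowed to be negative when the decrement was refused) rather than the old one; reconstructed:

    static __always_inline s64
    arch_atomic64_dec_if_positive(atomic64_t *v)
    {
            s64 dec, c = arch_atomic64_read(v);

            do {
                    dec = c - 1;
                    if (unlikely(dec < 0))
                            break;  /* would drop below zero: leave v untouched */
            } while (!arch_atomic64_try_cmpxchg(v, &c, dec));

            return dec;
    }
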