Lines matching refs:s64 (identifier search; the excerpts appear to come from the Linux kernel's generated atomic64 fallbacks, include/linux/atomic/atomic-arch-fallback.h; each entry gives the source line number, the matching text, and the enclosing function after "in")
1342 static __always_inline s64
1345 s64 ret; in arch_atomic64_read_acquire()
1361 arch_atomic64_set_release(atomic64_t *v, s64 i) in arch_atomic64_set_release()
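The read_acquire/set_release pair (1342-1361) wraps plain read/set in acquire/release semantics. A sketch of the likely bodies, reconstructed from the excerpts; the __native_word() test matches kernels from roughly v5.18 onward, while earlier versions call smp_load_acquire()/smp_store_release() unconditionally:

	static __always_inline s64
	arch_atomic64_read_acquire(const atomic64_t *v)
	{
		s64 ret;

		if (__native_word(atomic64_t)) {
			/* counter fits a machine word: one acquire load */
			ret = smp_load_acquire(&(v)->counter);
		} else {
			/* otherwise: relaxed read, then an acquire fence */
			ret = arch_atomic64_read(v);
			__atomic_acquire_fence();
		}

		return ret;
	}

	static __always_inline void
	arch_atomic64_set_release(atomic64_t *v, s64 i)
	{
		if (__native_word(atomic64_t)) {
			smp_store_release(&(v)->counter, i);
		} else {
			__atomic_release_fence();
			arch_atomic64_set(v, i);
		}
	}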
1380 static __always_inline s64
1381 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_add_return_acquire()
1383 s64 ret = arch_atomic64_add_return_relaxed(i, v); in arch_atomic64_add_return_acquire()
1391 static __always_inline s64
1392 arch_atomic64_add_return_release(s64 i, atomic64_t *v) in arch_atomic64_add_return_release()
1401 static __always_inline s64
1402 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
1404 s64 ret; in arch_atomic64_add_return()
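Lines 1380-1404 show the ordering template that dominates this file: when an architecture supplies only arch_atomic64_add_return_relaxed(), the acquire, release, and fully ordered forms are generated around it with explicit fences. Reconstructed from the excerpts (the same shape repeats for every *_return and fetch_* family below):

	static __always_inline s64
	arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
	{
		s64 ret = arch_atomic64_add_return_relaxed(i, v);
		__atomic_acquire_fence();	/* order the RMW before later accesses */
		return ret;
	}

	static __always_inline s64
	arch_atomic64_add_return_release(s64 i, atomic64_t *v)
	{
		__atomic_release_fence();	/* order earlier accesses before the RMW */
		return arch_atomic64_add_return_relaxed(i, v);
	}

	static __always_inline s64
	arch_atomic64_add_return(s64 i, atomic64_t *v)
	{
		s64 ret;
		__atomic_pre_full_fence();
		ret = arch_atomic64_add_return_relaxed(i, v);
		__atomic_post_full_fence();
		return ret;
	}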
1422 static __always_inline s64
1423 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_acquire()
1425 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); in arch_atomic64_fetch_add_acquire()
1433 static __always_inline s64
1434 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_add_release()
1443 static __always_inline s64
1444 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
1446 s64 ret; in arch_atomic64_fetch_add()
1464 static __always_inline s64
1465 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v) in arch_atomic64_sub_return_acquire()
1467 s64 ret = arch_atomic64_sub_return_relaxed(i, v); in arch_atomic64_sub_return_acquire()
1475 static __always_inline s64
1476 arch_atomic64_sub_return_release(s64 i, atomic64_t *v) in arch_atomic64_sub_return_release()
1485 static __always_inline s64
1486 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
1488 s64 ret; in arch_atomic64_sub_return()
1506 static __always_inline s64
1507 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_acquire()
1509 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); in arch_atomic64_fetch_sub_acquire()
1517 static __always_inline s64
1518 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub_release()
1527 static __always_inline s64
1528 arch_atomic64_fetch_sub(s64 i, atomic64_t *v) in arch_atomic64_fetch_sub()
1530 s64 ret; in arch_atomic64_fetch_sub()
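fetch_add (1422-1446), sub_return (1464-1488), and fetch_sub (1506-1530) instantiate the same template; only the relaxed primitive changes. The fence helpers are not defined in this file: include/linux/atomic.h supplies barrier defaults, which architectures whose relaxed RMWs already order more strongly can override:

	/* Default fence definitions (include/linux/atomic.h). */
	#ifndef __atomic_acquire_fence
	#define __atomic_acquire_fence		smp_mb__after_atomic
	#endif

	#ifndef __atomic_release_fence
	#define __atomic_release_fence		smp_mb__before_atomic
	#endif

	#ifndef __atomic_pre_full_fence
	#define __atomic_pre_full_fence		smp_mb__before_atomic
	#endif

	#ifndef __atomic_post_full_fence
	#define __atomic_post_full_fence	smp_mb__after_atomic
	#endif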
1558 static __always_inline s64
1567 static __always_inline s64
1576 static __always_inline s64
1585 static __always_inline s64
1596 static __always_inline s64
1599 s64 ret = arch_atomic64_inc_return_relaxed(v); in arch_atomic64_inc_return_acquire()
1607 static __always_inline s64
1617 static __always_inline s64
1620 s64 ret; in arch_atomic64_inc_return()
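The inc_return block is two-layered: 1558-1585 cover architectures with no inc_return at all by delegating to add_return with a constant 1, and 1596-1620 apply the fence template to arch_atomic64_inc_return_relaxed() (see 1599). A sketch of the delegation layer:

	static __always_inline s64
	arch_atomic64_inc_return(atomic64_t *v)
	{
		return arch_atomic64_add_return(1, v);
	}
	/* _acquire/_release/_relaxed are generated the same way, each
	 * delegating to the matching add_return variant. */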
1639 static __always_inline s64
1648 static __always_inline s64
1657 static __always_inline s64
1666 static __always_inline s64
1677 static __always_inline s64
1680 s64 ret = arch_atomic64_fetch_inc_relaxed(v); in arch_atomic64_fetch_inc_acquire()
1688 static __always_inline s64
1698 static __always_inline s64
1701 s64 ret; in arch_atomic64_fetch_inc()
1729 static __always_inline s64
1738 static __always_inline s64
1747 static __always_inline s64
1756 static __always_inline s64
1767 static __always_inline s64
1770 s64 ret = arch_atomic64_dec_return_relaxed(v); in arch_atomic64_dec_return_acquire()
1778 static __always_inline s64
1788 static __always_inline s64
1791 s64 ret; in arch_atomic64_dec_return()
1810 static __always_inline s64
1819 static __always_inline s64
1828 static __always_inline s64
1837 static __always_inline s64
1848 static __always_inline s64
1851 s64 ret = arch_atomic64_fetch_dec_relaxed(v); in arch_atomic64_fetch_dec_acquire()
1859 static __always_inline s64
1869 static __always_inline s64
1872 s64 ret; in arch_atomic64_fetch_dec()
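fetch_inc (1639-1701), dec_return (1729-1791), and fetch_dec (1810-1872) repeat those same two layers around the fetch_add/sub primitives. The delegation layer, sketched:

	static __always_inline s64
	arch_atomic64_fetch_inc(atomic64_t *v)
	{
		return arch_atomic64_fetch_add(1, v);
	}

	static __always_inline s64
	arch_atomic64_dec_return(atomic64_t *v)
	{
		return arch_atomic64_sub_return(1, v);
	}

	static __always_inline s64
	arch_atomic64_fetch_dec(atomic64_t *v)
	{
		return arch_atomic64_fetch_sub(1, v);
	}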
1890 static __always_inline s64
1891 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_acquire()
1893 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); in arch_atomic64_fetch_and_acquire()
1901 static __always_inline s64
1902 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_and_release()
1911 static __always_inline s64
1912 arch_atomic64_fetch_and(s64 i, atomic64_t *v) in arch_atomic64_fetch_and()
1914 s64 ret; in arch_atomic64_fetch_and()
1927 arch_atomic64_andnot(s64 i, atomic64_t *v) in arch_atomic64_andnot()
1942 static __always_inline s64
1943 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
1951 static __always_inline s64
1952 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1960 static __always_inline s64
1961 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
1969 static __always_inline s64
1970 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_relaxed()
1980 static __always_inline s64
1981 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_acquire()
1983 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); in arch_atomic64_fetch_andnot_acquire()
1991 static __always_inline s64
1992 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot_release()
2001 static __always_inline s64
2002 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) in arch_atomic64_fetch_andnot()
2004 s64 ret; in arch_atomic64_fetch_andnot()
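andnot (1927) and fetch_andnot (1942-2004) are the one group defined by operand rewriting rather than by a same-named relaxed op: and with the complement of i. fetch_andnot_acquire/_release appear twice in the listing (1952/1961 and 1981/1992) because the file carries two preprocessor branches, one deriving all four variants from fetch_and (1942-1970) and one applying the fence template when only fetch_andnot_relaxed exists (1980-2004, see 1983). The rewriting branch, sketched:

	static __always_inline void
	arch_atomic64_andnot(s64 i, atomic64_t *v)
	{
		arch_atomic64_and(~i, v);	/* clear the bits set in i */
	}

	static __always_inline s64
	arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
	{
		return arch_atomic64_fetch_and(~i, v);
	}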
2022 static __always_inline s64
2023 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_acquire()
2025 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); in arch_atomic64_fetch_or_acquire()
2033 static __always_inline s64
2034 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_or_release()
2043 static __always_inline s64
2044 arch_atomic64_fetch_or(s64 i, atomic64_t *v) in arch_atomic64_fetch_or()
2046 s64 ret; in arch_atomic64_fetch_or()
2064 static __always_inline s64
2065 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_acquire()
2067 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); in arch_atomic64_fetch_xor_acquire()
2075 static __always_inline s64
2076 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor_release()
2085 static __always_inline s64
2086 arch_atomic64_fetch_xor(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor()
2088 s64 ret; in arch_atomic64_fetch_xor()
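fetch_and (1890-1914), fetch_or (2022-2046), and fetch_xor (2064-2088) use the fence template unchanged. The fetch_ forms return the prior value, which is what makes them useful for flag words. An illustrative caller, not from the source (FLAG_PENDING and mark_pending() are hypothetical, and ordinary kernel code would call the atomic64_fetch_or() wrapper rather than the arch_ form):

	#define FLAG_PENDING	BIT(0)		/* hypothetical flag bit */

	static bool mark_pending(atomic64_t *flags)
	{
		s64 old = arch_atomic64_fetch_or(FLAG_PENDING, flags);

		/* true only for the caller that actually set the bit */
		return !(old & FLAG_PENDING);
	}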
2106 static __always_inline s64
2107 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i) in arch_atomic64_xchg_acquire()
2109 s64 ret = arch_atomic64_xchg_relaxed(v, i); in arch_atomic64_xchg_acquire()
2117 static __always_inline s64
2118 arch_atomic64_xchg_release(atomic64_t *v, s64 i) in arch_atomic64_xchg_release()
2127 static __always_inline s64
2128 arch_atomic64_xchg(atomic64_t *v, s64 i) in arch_atomic64_xchg()
2130 s64 ret; in arch_atomic64_xchg()
2148 static __always_inline s64
2149 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_acquire()
2151 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); in arch_atomic64_cmpxchg_acquire()
2159 static __always_inline s64
2160 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg_release()
2169 static __always_inline s64
2170 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg()
2172 s64 ret; in arch_atomic64_cmpxchg()
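xchg (2106-2130) and cmpxchg (2148-2172) again follow the fence template. cmpxchg returns the value it observed, so open-coded callers loop on it; an illustrative sketch, not from the source (add_saturate() is hypothetical), shown mainly to contrast with try_cmpxchg below:

	/* Hypothetical: add delta (assumed > 0), saturating at S64_MAX. */
	static s64 add_saturate(atomic64_t *v, s64 delta)
	{
		s64 old = arch_atomic64_read(v);
		s64 new, prev;

		for (;;) {
			new = (old > S64_MAX - delta) ? S64_MAX : old + delta;
			prev = arch_atomic64_cmpxchg(v, old, new);
			if (prev == old)
				return new;
			old = prev;	/* raced: retry with the value we saw */
		}
	}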
2192 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
2194 s64 r, o = *old; in arch_atomic64_try_cmpxchg()
2205 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2207 s64 r, o = *old; in arch_atomic64_try_cmpxchg_acquire()
2218 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2220 s64 r, o = *old; in arch_atomic64_try_cmpxchg_release()
2231 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_relaxed()
2233 s64 r, o = *old; in arch_atomic64_try_cmpxchg_relaxed()
2246 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_acquire()
2257 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg_release()
2267 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
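try_cmpxchg (2192-2233, four orderings) is generated from cmpxchg when the architecture lacks a native form: on failure it writes the observed value back through *old, so callers can retry without a separate re-read. 2246-2267 then add the usual fence-derived orderings. Reconstructed from the excerpts (see 2194):

	static __always_inline bool
	arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
	{
		s64 r, o = *old;

		r = arch_atomic64_cmpxchg(v, o, new);
		if (unlikely(r != o))
			*old = r;	/* tell the caller what we actually saw */
		return likely(r == o);
	}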
2291 arch_atomic64_sub_and_test(s64 i, atomic64_t *v) in arch_atomic64_sub_and_test()
2343 arch_atomic64_add_negative(s64 i, atomic64_t *v) in arch_atomic64_add_negative()
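sub_and_test (2291) and add_negative (2343) are predicate wrappers over the fully ordered RMWs. Sketch:

	static __always_inline bool
	arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
	{
		return arch_atomic64_sub_return(i, v) == 0;
	}

	static __always_inline bool
	arch_atomic64_add_negative(s64 i, atomic64_t *v)
	{
		return arch_atomic64_add_return(i, v) < 0;
	}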
2360 static __always_inline s64
2361 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
2363 s64 c = arch_atomic64_read(v); in arch_atomic64_fetch_add_unless()
2386 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_add_unless()
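fetch_add_unless (2360-2363) is a try_cmpxchg loop that refuses to add when the counter holds the excluded value u; add_unless (2386) reduces it to a boolean. Reconstructed from the excerpts (see 2363):

	static __always_inline s64
	arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
	{
		s64 c = arch_atomic64_read(v);

		do {
			if (unlikely(c == u))
				break;		/* hit the excluded value: do nothing */
		} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

		return c;			/* old value; == u means no add happened */
	}

	static __always_inline bool
	arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
	{
		return arch_atomic64_fetch_add_unless(v, a, u) != u;
	}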
2413 s64 c = arch_atomic64_read(v); in arch_atomic64_inc_unless_negative()
2429 s64 c = arch_atomic64_read(v); in arch_atomic64_dec_unless_positive()
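inc_unless_negative (2413) and dec_unless_positive (2429) specialize the same loop to a fixed step of 1 and a sign test. Sketch:

	static __always_inline bool
	arch_atomic64_inc_unless_negative(atomic64_t *v)
	{
		s64 c = arch_atomic64_read(v);

		do {
			if (unlikely(c < 0))
				return false;
		} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

		return true;
	}

	static __always_inline bool
	arch_atomic64_dec_unless_positive(atomic64_t *v)
	{
		s64 c = arch_atomic64_read(v);

		do {
			if (unlikely(c > 0))
				return false;
		} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

		return true;
	}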
2442 static __always_inline s64
2445 s64 dec, c = arch_atomic64_read(v); in arch_atomic64_dec_if_positive()
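dec_if_positive (2442-2445) closes the file: decrement only if the result stays non-negative and return the would-be new value, so a negative return means v was left untouched. Reconstructed from the excerpt at 2445:

	static __always_inline s64
	arch_atomic64_dec_if_positive(atomic64_t *v)
	{
		s64 dec, c = arch_atomic64_read(v);

		do {
			dec = c - 1;
			if (unlikely(dec < 0))
				break;		/* would go negative: leave v alone */
		} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

		return dec;
	}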