Lines Matching +full:adc +full:- +full:use +full:- +full:res
1 /* SPDX-License-Identifier: GPL-2.0-only */
22 * strex/ldrex monitor on some implementations. The reason we can use it for
25 #define arch_atomic_read(v) READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
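(All of these hits come from the ARM atomic implementation, arch/arm/include/asm/atomic.h in the Linux kernel.) The comment fragment at line 22 explains why plain loads and stores are acceptable here: an ordinary str does not clear the local exclusive monitor on some implementations, but the clrex (or dummy strex) done on every exception return does, so arch_atomic_read()/arch_atomic_set() can be plain READ_ONCE()/WRITE_ONCE(). A minimal usage sketch (hypothetical variable, assuming the usual kernel context):

	atomic_t refcnt = ATOMIC_INIT(1);	/* hypothetical counter */
	int val = arch_atomic_read(&refcnt);	/* plain load of ->counter */
	arch_atomic_set(&refcnt, val + 1);	/* plain store; monitor-safe */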
31 * ARMv6 UP and SMP safe atomic ops. We use load exclusive and
42 prefetchw(&v->counter); \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
60 prefetchw(&v->counter); \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
81 prefetchw(&v->counter); \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
109 unsigned long res; in arch_atomic_cmpxchg_relaxed() local
111 prefetchw(&ptr->counter); in arch_atomic_cmpxchg_relaxed()
119 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic_cmpxchg_relaxed()
120 : "r" (&ptr->counter), "Ir" (old), "r" (new) in arch_atomic_cmpxchg_relaxed()
122 } while (res); in arch_atomic_cmpxchg_relaxed()
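Lines 109-122 are the body of arch_atomic_cmpxchg_relaxed(). Filled out around the matched lines, the whole function reads roughly like this (sketch; asm comments mine):

static inline int arch_atomic_cmpxchg_relaxed(atomic_t *ptr, int old, int new)
{
	int oldval;
	unsigned long res;

	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%3]\n"		/* oldval = *ptr */
		"mov	%0, #0\n"		/* assume done */
		"teq	%1, %4\n"		/* matches 'old'? */
		"strexeq %0, %5, [%3]\n"	/* then try to store 'new' */
		    : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
		    : "cc");
	} while (res);	/* res != 0: strexeq lost the reservation */

	return oldval;
}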
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic_fetch_add_unless()
146 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic_fetch_add_unless()
159 #error SMP not supported on pre-ARMv6 CPUs
168 v->counter c_op i; \
179 v->counter c_op i; \
180 val = v->counter; \
193 val = v->counter; \
194 v->counter c_op i; \
206 ret = v->counter; in arch_atomic_cmpxchg()
208 v->counter = new; in arch_atomic_cmpxchg()
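Everything from line 159 down is the pre-ARMv6 uniprocessor fallback: with no SMP and no ldrex/strex, atomicity only has to hold against interrupts, so the generators bracket a plain C update with raw_local_irq_save()/restore(). The v->counter c_op i bodies above come from these; lines 179-180 (update, then read) are ATOMIC_OP_RETURN, while lines 193-194 (read, then update) are ATOMIC_FETCH_OP, which returns the old value. A sketch of the simplest generator and of the cmpxchg at lines 206-208:

#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);	/* UP: IRQs off is enough */	\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	raw_local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	raw_local_irq_restore(flags);

	return ret;
}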
224 ATOMIC_OPS(sub, -=, sub)
243 #define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
260 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
270 : "=Qo" (v->counter) in arch_atomic64_set()
271 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
282 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
292 prefetchw(&v->counter); in arch_atomic64_set()
298 : "=&r" (tmp), "=Qo" (v->counter) in arch_atomic64_set()
299 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
310 prefetchw(&v->counter); \
318 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
319 : "r" (&v->counter), "r" (i) \
330 prefetchw(&v->counter); \
339 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
340 : "r" (&v->counter), "r" (i) \
353 prefetchw(&v->counter); \
362 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
363 : "r" (&v->counter), "r" (i) \
374 ATOMIC64_OPS(add, adds, adc) in ATOMIC64_OPS() argument
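ATOMIC64_OPS(add, adds, adc) is what the adc query term hit: a 64-bit add is synthesized from adds on the low word (setting the carry flag) plus adc on the high word (consuming it). The generator behind the operand lists at lines 318-319 reads roughly as follows (sketch; %Q/%R select the low/high register of a 64-bit pair):

#define ATOMIC64_OP(op, op1, op2)					\
static inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
{									\
	s64 result;							\
	unsigned long tmp;						\
									\
	prefetchw(&v->counter);						\
	__asm__ __volatile__("@ atomic64_" #op "\n"			\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	" #op1 " %Q0, %Q0, %Q4\n"	/* adds: low word, sets C */	\
"	" #op2 " %R0, %R0, %R4\n"	/* adc: high word + carry */	\
"	strexd	%1, %0, %H0, [%3]\n"					\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
}

The matching ATOMIC64_OPS(sub, subs, sbc) instantiation builds 64-bit subtraction from subtract-with-borrow the same way.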
407 unsigned long res; in ATOMIC64_OPS() local
409 prefetchw(&ptr->counter); in ATOMIC64_OPS()
418 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in ATOMIC64_OPS()
419 : "r" (&ptr->counter), "r" (old), "r" (new) in ATOMIC64_OPS()
421 } while (res); in ATOMIC64_OPS()
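Lines 407-421 are arch_atomic64_cmpxchg_relaxed(). Against the 32-bit version at lines 109-122, the structural change is that both the compare and the store need pair-aware sequences (teq/teqeq, strexdeq). Sketch:

static inline s64 arch_atomic64_cmpxchg_relaxed(atomic64_t *ptr, s64 old, s64 new)
{
	s64 oldval;
	unsigned long res;

	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic64_cmpxchg\n"
		"ldrexd		%1, %H1, [%3]\n"
		"mov		%0, #0\n"
		"teq		%1, %4\n"	/* low words equal... */
		"teqeq		%H1, %H4\n"	/* ...and high words too? */
		"strexdeq	%0, %5, %H5, [%3]"
		: "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		: "r" (&ptr->counter), "r" (old), "r" (new)
		: "cc");
	} while (res);

	return oldval;
}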
432 prefetchw(&ptr->counter); in arch_atomic64_xchg_relaxed()
439 : "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter) in arch_atomic64_xchg_relaxed()
440 : "r" (&ptr->counter), "r" (new) in arch_atomic64_xchg_relaxed()
453 prefetchw(&v->counter); in arch_atomic64_dec_if_positive()
465 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_dec_if_positive()
466 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
481 prefetchw(&v->counter); in arch_atomic64_fetch_add_unless()
489 " adc %R1, %R0, %R6\n" in arch_atomic64_fetch_add_unless()
494 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_fetch_add_unless()
495 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic64_fetch_add_unless()