Lines Matching +full:counter +full:- +full:0
1 /* SPDX-License-Identifier: GPL-2.0-only */
25 #define arch_atomic_read(v) READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
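
The matched lines above and below come from the 32-bit ARM implementation of the kernel's atomic operations (arch_atomic_* built on ldrex/strex). These first two accessors are the simplest case: a plain once-only load and store of the 32-bit counter word. A minimal userspace analogue, assuming only that READ_ONCE()/WRITE_ONCE() reduce to volatile accesses here (the names below are invented for the sketch):

    #include <stdio.h>

    typedef struct { int counter; } my_atomic_t;        /* stand-in for atomic_t */

    #define my_atomic_read(v)   (*(volatile int *)&(v)->counter)
    #define my_atomic_set(v, i) (*(volatile int *)&(v)->counter = (i))

    int main(void)
    {
        my_atomic_t v = { 0 };

        my_atomic_set(&v, 42);
        printf("%d\n", my_atomic_read(&v));   /* prints 42 */
        return 0;
    }
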
42 prefetchw(&v->counter); \
44 "1: ldrex %0, [%3]\n" \
45 " " #asm_op " %0, %0, %4\n" \
46 " strex %1, %0, [%3]\n" \
47 " teq %1, #0\n" \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
60 prefetchw(&v->counter); \
63 "1: ldrex %0, [%3]\n" \
64 " " #asm_op " %0, %0, %4\n" \
65 " strex %1, %0, [%3]\n" \
66 " teq %1, #0\n" \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
81 prefetchw(&v->counter); \
84 "1: ldrex %0, [%4]\n" \
85 " " #asm_op " %1, %0, %5\n" \
87 " teq %2, #0\n" \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
111 prefetchw(&ptr->counter); in arch_atomic_cmpxchg_relaxed()
116 "mov %0, #0\n" in arch_atomic_cmpxchg_relaxed()
118 "strexeq %0, %5, [%3]\n" in arch_atomic_cmpxchg_relaxed()
119 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic_cmpxchg_relaxed()
120 : "r" (&ptr->counter), "Ir" (old), "r" (new) in arch_atomic_cmpxchg_relaxed()
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
137 "1: ldrex %0, [%4]\n" in arch_atomic_fetch_add_unless()
138 " teq %0, %5\n" in arch_atomic_fetch_add_unless()
140 " add %1, %0, %6\n" in arch_atomic_fetch_add_unless()
142 " teq %2, #0\n" in arch_atomic_fetch_add_unless()
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic_fetch_add_unless()
146 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic_fetch_add_unless()
159 #error SMP not supported on pre-ARMv6 CPUs
168 v->counter c_op i; \
179 v->counter c_op i; \
180 val = v->counter; \
193 val = v->counter; \
194 v->counter c_op i; \
206 ret = v->counter; in arch_atomic_cmpxchg()
208 v->counter = new; in arch_atomic_cmpxchg()
224 ATOMIC_OPS(sub, -=, sub)
243 #define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
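
When the CPU predates ARMv6 there is no ldrex/strex, so SMP is refused outright (line 159) and the uniprocessor fallbacks on lines 168-208 perform the update as plain C with interrupts masked; line 224 then instantiates the whole macro family for subtraction, and line 243 maps arch_atomic_xchg() straight onto the generic arch_xchg() helper. A rough reconstruction of one UP expansion, assuming the matched statement is wrapped in the kernel's raw_local_irq_save()/raw_local_irq_restore() pair (the function name is invented):

    /* sketch of the UP ATOMIC_OP() expansion around line 168 */
    static inline void sketch_atomic_add_up(int i, atomic_t *v)
    {
        unsigned long flags;

        raw_local_irq_save(flags);      /* no other CPU exists, masking IRQs is enough */
        v->counter += i;                /* the "v->counter c_op i;" statement above */
        raw_local_irq_restore(flags);
    }
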
247 s64 counter; member of atomic64_t
258 " ldrd %0, %H0, [%1]" in arch_atomic64_read()
260 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
270 : "=Qo" (v->counter) in arch_atomic64_set()
271 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
280 " ldrexd %0, %H0, [%1]" in arch_atomic64_read()
282 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
292 prefetchw(&v->counter); in arch_atomic64_set()
294 "1: ldrexd %0, %H0, [%2]\n" in arch_atomic64_set()
295 " strexd %0, %3, %H3, [%2]\n" in arch_atomic64_set()
296 " teq %0, #0\n" in arch_atomic64_set()
298 : "=&r" (tmp), "=Qo" (v->counter) in arch_atomic64_set()
299 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
310 prefetchw(&v->counter); \
312 "1: ldrexd %0, %H0, [%3]\n" \
315 " strexd %1, %0, %H0, [%3]\n" \
316 " teq %1, #0\n" \
318 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
319 : "r" (&v->counter), "r" (i) \
330 prefetchw(&v->counter); \
333 "1: ldrexd %0, %H0, [%3]\n" \
336 " strexd %1, %0, %H0, [%3]\n" \
337 " teq %1, #0\n" \
339 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
340 : "r" (&v->counter), "r" (i) \
353 prefetchw(&v->counter); \
356 "1: ldrexd %0, %H0, [%4]\n" \
360 " teq %2, #0\n" \
362 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
363 : "r" (&v->counter), "r" (i) \
409 prefetchw(&ptr->counter); in ATOMIC64_OPS()
414 "mov %0, #0\n" in ATOMIC64_OPS()
417 "strexdeq %0, %5, %H5, [%3]" in ATOMIC64_OPS()
418 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in ATOMIC64_OPS()
419 : "r" (&ptr->counter), "r" (old), "r" (new) in ATOMIC64_OPS()
432 prefetchw(&ptr->counter); in arch_atomic64_xchg_relaxed()
435 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_xchg_relaxed()
437 " teq %1, #0\n" in arch_atomic64_xchg_relaxed()
439 : "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter) in arch_atomic64_xchg_relaxed()
440 : "r" (&ptr->counter), "r" (new) in arch_atomic64_xchg_relaxed()
453 prefetchw(&v->counter); in arch_atomic64_dec_if_positive()
456 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_dec_if_positive()
458 " sbc %R0, %R0, #0\n" in arch_atomic64_dec_if_positive()
459 " teq %R0, #0\n" in arch_atomic64_dec_if_positive()
461 " strexd %1, %0, %H0, [%3]\n" in arch_atomic64_dec_if_positive()
462 " teq %1, #0\n" in arch_atomic64_dec_if_positive()
465 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_dec_if_positive()
466 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
481 prefetchw(&v->counter); in arch_atomic64_fetch_add_unless()
484 "1: ldrexd %0, %H0, [%4]\n" in arch_atomic64_fetch_add_unless()
485 " teq %0, %5\n" in arch_atomic64_fetch_add_unless()
491 " teq %2, #0\n" in arch_atomic64_fetch_add_unless()
494 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_fetch_add_unless()
495 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic64_fetch_add_unless()