Lines matching +full:1 +full:v — excerpts from the 32-bit ARM atomic_t/atomic64_t implementation

#define atomic_read(v)	READ_ONCE((v)->counter)
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
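
atomic_read()/atomic_set() need no special instructions: a naturally aligned 32-bit load or store is already single-copy atomic, so READ_ONCE()/WRITE_ONCE() only have to stop the compiler from tearing, fusing, or reordering the access. A minimal user-space model of that idea (the my_* names are illustrative, not the kernel's):

#include <stdio.h>

typedef struct { int counter; } my_atomic_t;

/* volatile access forbids the compiler from caching, tearing, or duplicating
 * the load/store — the core of what READ_ONCE()/WRITE_ONCE() provide */
#define my_read_once(x)		(*(const volatile int *)&(x))
#define my_write_once(x, val)	(*(volatile int *)&(x) = (val))

static int my_atomic_read(const my_atomic_t *v)  { return my_read_once(v->counter); }
static void my_atomic_set(my_atomic_t *v, int i) { my_write_once(v->counter, i); }

int main(void)
{
	my_atomic_t v;

	my_atomic_set(&v, 42);
	printf("%d\n", my_atomic_read(&v));
	return 0;
}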

/* ARMv6+ ATOMIC_OP(): atomic_##op() is an LDREX/STREX retry loop — reload and
 * retry whenever the exclusive store is beaten by another observer */
static inline void atomic_##op(int i, atomic_t *v)		\
	prefetchw(&v->counter);					\
"1:	ldrex	%0, [%3]\n"					\
"	" #asm_op "	%0, %0, %4\n"				\
"	strex	%1, %0, [%3]\n"					\
"	teq	%1, #0\n"					\
"	bne	1b"						\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "Ir" (i)				\

/* ATOMIC_OP_RETURN(): the same loop, but it hands the new value back in
 * 'result'; only the _relaxed form is provided at the arch level */
static inline int atomic_##op##_return_relaxed(int i, atomic_t *v)	\
	prefetchw(&v->counter);						\
"1:	ldrex	%0, [%3]\n"						\
"	" #asm_op "	%0, %0, %4\n"					\
"	strex	%1, %0, [%3]\n"						\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\

/* ATOMIC_FETCH_OP(): computes into a separate register ('val') so it can
 * return the old value ('result') rather than the new one */
static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v)	\
	prefetchw(&v->counter);						\
"1:	ldrex	%0, [%4]\n"						\
"	" #asm_op "	%1, %0, %5\n"					\
"	strex	%2, %1, [%4]\n"						\
"	teq	%2, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "Ir" (i)					\
115 "ldrex %1, [%3]\n" in atomic_cmpxchg_relaxed()
117 "teq %1, %4\n" in atomic_cmpxchg_relaxed()

/* atomic_fetch_add_unless(): add 'a' to *v unless it currently holds 'u';
 * the early exit to label 2: skips the store entirely */
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
	prefetchw(&v->counter);
"1:	ldrex	%0, [%4]\n"
"	teq	%0, %5\n"
"	beq	2f\n"
"	add	%1, %0, %6\n"
"	strex	%2, %1, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)

/* Pre-ARMv6 fallbacks (SMP is not supported there): plain C bodies, made
 * atomic by bracketing them with raw_local_irq_save()/raw_local_irq_restore() */

static inline void atomic_##op(int i, atomic_t *v)		\
	v->counter c_op i;					\

static inline int atomic_##op##_return(int i, atomic_t *v)	\
	v->counter c_op i;					\
	val = v->counter;	/* new value */			\

static inline int atomic_fetch_##op(int i, atomic_t *v)	\
	val = v->counter;	/* old value, sampled first */	\
	v->counter c_op i;					\

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
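
On a uniprocessor the only thing that can interleave with these bodies is an interrupt, so masking IRQs is enough. A sketch of what ATOMIC_OP(add, +=, add) expands to under this scheme (kernel-context code, not standalone):

static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);	/* no other context can run on this CPU */
	v->counter += i;
	raw_local_irq_restore(flags);
}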

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
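
xchg() swaps in a new value unconditionally and returns the old one; the C11 equivalent (user-space sketch):

#include <stdatomic.h>

static int my_xchg(atomic_int *v, int new_val)
{
	return atomic_exchange_explicit(v, new_val, memory_order_relaxed);
}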

/* With LPAE, LDRD/STRD on a 64-bit aligned location are single-copy atomic,
 * so atomic64_read()/atomic64_set() are plain doubleword accesses */
static inline s64 atomic64_read(const atomic64_t *v)
"	ldrd	%0, %H0, [%1]"
	: "r" (&v->counter), "Qo" (v->counter)

static inline void atomic64_set(atomic64_t *v, s64 i)
"	strd	%2, %H2, [%1]"
	: "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)

/* Without LPAE, a tear-free 64-bit read needs LDREXD, and a 64-bit store
 * needs a full LDREXD/STREXD loop (a plain STRD could be observed torn) */
static inline s64 atomic64_read(const atomic64_t *v)
"	ldrexd	%0, %H0, [%1]"
	: "r" (&v->counter), "Qo" (v->counter)

static inline void atomic64_set(atomic64_t *v, s64 i)
	prefetchw(&v->counter);
"1:	ldrexd	%0, %H0, [%2]\n"
"	strexd	%0, %3, %H3, [%2]\n"
"	teq	%0, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)

/* ATOMIC64_OP(): 64-bit LDREXD/STREXD retry loop; the op is a two-instruction
 * pair (op1 on the low word %Q0, op2 with carry on the high word %R0) */
static inline void atomic64_##op(s64 i, atomic64_t *v)		\
	prefetchw(&v->counter);					\
"1:	ldrexd	%0, %H0, [%3]\n"				\
"	" #op1 " %Q0, %Q0, %Q4\n"				\
"	" #op2 " %R0, %R0, %R4\n"				\
"	strexd	%1, %0, %H0, [%3]\n"				\
"	teq	%1, #0\n"					\
"	bne	1b"						\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "r" (i)				\

/* ATOMIC64_OP_RETURN(): as above, returning the new 64-bit value */
static inline s64						\
atomic64_##op##_return_relaxed(s64 i, atomic64_t *v)		\
	prefetchw(&v->counter);					\
"1:	ldrexd	%0, %H0, [%3]\n"				\
"	" #op1 " %Q0, %Q0, %Q4\n"				\
"	" #op2 " %R0, %R0, %R4\n"				\
"	strexd	%1, %0, %H0, [%3]\n"				\
"	teq	%1, #0\n"					\
"	bne	1b"						\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "r" (i)				\

/* ATOMIC64_FETCH_OP(): computes into 'val' and returns the old 'result' */
static inline s64							\
atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v)			\
	prefetchw(&v->counter);						\
"1:	ldrexd	%0, %H0, [%4]\n"					\
"	" #op1 " %Q1, %Q0, %Q5\n"					\
"	" #op2 " %R1, %R0, %R5\n"					\
"	strexd	%2, %1, %H1, [%4]\n"					\
"	teq	%2, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "r" (i)					\
413 "ldrexd %1, %H1, [%3]\n" in ATOMIC64_OPS()
415 "teq %1, %4\n" in ATOMIC64_OPS()
435 "1: ldrexd %0, %H0, [%3]\n" in atomic64_xchg_relaxed()
436 " strexd %1, %4, %H4, [%3]\n" in atomic64_xchg_relaxed()
437 " teq %1, #0\n" in atomic64_xchg_relaxed()
438 " bne 1b" in atomic64_xchg_relaxed()

/* atomic64_dec_if_positive(): decrement with a 64-bit borrow (SUBS/SBC), but
 * bail out to label 2: without storing if the result went negative */
static inline s64 atomic64_dec_if_positive(atomic64_t *v)
	prefetchw(&v->counter);
"1:	ldrexd	%0, %H0, [%3]\n"
"	subs	%Q0, %Q0, #1\n"
"	sbc	%R0, %R0, #0\n"
"	teq	%R0, #0\n"
"	bmi	2f\n"
"	strexd	%1, %0, %H0, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter)

/* atomic64_fetch_add_unless(): the 64-bit compare against 'u' takes two TEQs,
 * and the add is again an ADDS/ADC pair */
static inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
	prefetchw(&v->counter);
"1:	ldrexd	%0, %H0, [%4]\n"
"	teq	%0, %5\n"
"	teqeq	%H0, %H5\n"
"	beq	2f\n"
"	adds	%Q1, %Q0, %Q6\n"
"	adc	%R1, %R0, %R6\n"
"	strexd	%2, %1, %H1, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)