Lines matching +full:2 +full:v (arch/arm/include/asm/atomic.h, 32-bit ARM atomic operations)

25 #define arch_atomic_read(v)	READ_ONCE((v)->counter)  argument
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i)) argument
37 static inline void arch_atomic_##op(int i, atomic_t *v) \
42 prefetchw(&v->counter); \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
55 static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
60 prefetchw(&v->counter); \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
76 static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
81 prefetchw(&v->counter); \
86 " strex %2, %1, [%4]\n" \
87 " teq %2, #0\n" \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
128 static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) in arch_atomic_fetch_add_unless() argument
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
139 " beq 2f\n" in arch_atomic_fetch_add_unless()
141 " strex %2, %1, [%4]\n" in arch_atomic_fetch_add_unless()
142 " teq %2, #0\n" in arch_atomic_fetch_add_unless()
144 "2:" in arch_atomic_fetch_add_unless()
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic_fetch_add_unless()
146 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic_fetch_add_unless()
163 static inline void arch_atomic_##op(int i, atomic_t *v) \
168 v->counter c_op i; \
173 static inline int arch_atomic_##op##_return(int i, atomic_t *v) \
179 v->counter c_op i; \
180 val = v->counter; \
187 static inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
193 val = v->counter; \
194 v->counter c_op i; \
200 static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new) in arch_atomic_cmpxchg() argument
206 ret = v->counter; in arch_atomic_cmpxchg()
208 v->counter = new; in arch_atomic_cmpxchg()
243 #define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new)) argument
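
Lines 163-243 appear to be from the fallback path for CPUs without LDREX/STREX (pre-ARMv6, necessarily uniprocessor): each operation is plain C on v->counter, made atomic with respect to interrupts by disabling them around the access. The irq save/restore lines are not in the match set, so treat the flags handling below as an assumption. A sketch of the cmpxchg shape suggested by lines 200-208, assuming kernel context (atomic_t, raw_local_irq_save/restore):

/* Sketch of a uniprocessor cmpxchg: with one CPU and interrupts off, the
 * read-compare-write sequence cannot be interleaved with another update. */
static inline int sketch_up_cmpxchg(atomic_t *v, int old, int new)
{
	unsigned long flags;
	int ret;

	raw_local_irq_save(flags);	/* assumed: not shown in the listing */
	ret = v->counter;
	if (ret == old)
		v->counter = new;
	raw_local_irq_restore(flags);

	return ret;
}
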
253 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read() argument
260 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
266 static inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set() argument
269 " strd %2, %H2, [%1]" in arch_atomic64_set()
270 : "=Qo" (v->counter) in arch_atomic64_set()
271 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
275 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read() argument
282 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
288 static inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set() argument
292 prefetchw(&v->counter); in arch_atomic64_set()
294 "1: ldrexd %0, %H0, [%2]\n" in arch_atomic64_set()
295 " strexd %0, %3, %H3, [%2]\n" in arch_atomic64_set()
298 : "=&r" (tmp), "=Qo" (v->counter) in arch_atomic64_set()
299 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
305 static inline void arch_atomic64_##op(s64 i, atomic64_t *v) \
310 prefetchw(&v->counter); \
318 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
319 : "r" (&v->counter), "r" (i) \
325 arch_atomic64_##op##_return_relaxed(s64 i, atomic64_t *v) \
330 prefetchw(&v->counter); \
339 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
340 : "r" (&v->counter), "r" (i) \
348 arch_atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v) \
353 prefetchw(&v->counter); \
359 " strexd %2, %1, %H1, [%4]\n" \
360 " teq %2, #0\n" \
362 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
363 : "r" (&v->counter), "r" (i) \
447 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive() argument
453 prefetchw(&v->counter); in arch_atomic64_dec_if_positive()
460 " bmi 2f\n" in arch_atomic64_dec_if_positive()
464 "2:" in arch_atomic64_dec_if_positive()
465 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_dec_if_positive()
466 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
475 static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless() argument
481 prefetchw(&v->counter); in arch_atomic64_fetch_add_unless()
487 " beq 2f\n" in arch_atomic64_fetch_add_unless()
490 " strexd %2, %1, %H1, [%4]\n" in arch_atomic64_fetch_add_unless()
491 " teq %2, #0\n" in arch_atomic64_fetch_add_unless()
493 "2:" in arch_atomic64_fetch_add_unless()
494 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_fetch_add_unless()
495 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic64_fetch_add_unless()