Lines Matching +full:3 +full:v (the matches below are from arch/powerpc/include/asm/atomic.h in the Linux kernel; the leading number on each line is its line number in that file)

26 static __inline__ int arch_atomic_read(const atomic_t *v) in arch_atomic_read() argument
32 __asm__ __volatile__("lwz %0,0(%1)" : "=r"(t) : "b"(&v->counter)); in arch_atomic_read()
34 __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m<>"(v->counter)); in arch_atomic_read()
39 static __inline__ void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set() argument
43 __asm__ __volatile__("stw %1,0(%2)" : "=m"(v->counter) : "r"(i), "b"(&v->counter)); in arch_atomic_set()
45 __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m<>"(v->counter) : "r"(i)); in arch_atomic_set()
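The read/set fragments above (lines 26-45) are plain aligned loads and stores of v->counter: aligned word-sized accesses are single-copy atomic on powerpc, so no lwarx/stwcx. sequence is needed. One asm variant addresses the counter through an explicit base register ("b"(&v->counter)); the other lets GCC choose update/indexed addressing via the "m<>" constraint and the %U/%X operand modifiers, and the choice between the two is made by a configuration check that is not part of the matched lines. A minimal userspace sketch of the contract, with made-up names (my_atomic_t, sketch_read, sketch_set) rather than the kernel implementation:

    typedef struct { volatile int counter; } my_atomic_t;   /* illustrative type */

    static inline int sketch_read(const my_atomic_t *v)
    {
            /* a single aligned 32-bit load, like the lwz above */
            return v->counter;
    }

    static inline void sketch_set(my_atomic_t *v, int i)
    {
            /* a single aligned 32-bit store, like the stw above */
            v->counter = i;
    }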
49 static __inline__ void arch_atomic_##op(int a, atomic_t *v) \
54 "1: lwarx %0,0,%3 # atomic_" #op "\n" \
56 " stwcx. %0,0,%3 \n" \
58 : "=&r" (t), "+m" (v->counter) \
59 : "r"#sign (a), "r" (&v->counter) \
64 static inline int arch_atomic_##op##_return_relaxed(int a, atomic_t *v) \
69 "1: lwarx %0,0,%3 # atomic_" #op "_return_relaxed\n" \
71 " stwcx. %0,0,%3\n" \
73 : "=&r" (t), "+m" (v->counter) \
74 : "r"#sign (a), "r" (&v->counter) \
81 static inline int arch_atomic_fetch_##op##_relaxed(int a, atomic_t *v) \
87 #asm_op "%I3" suffix " %1,%0,%3\n" \
90 : "=&r" (res), "=&r" (t), "+m" (v->counter) \
91 : "r"#sign (a), "r" (&v->counter) \
131 * @v: pointer of type atomic_t
132 * @a: the amount to add to v...
133 * @u: ...unless v is equal to u.
135 * Atomically adds @a to @v, so long as it was not @u.
136 * Returns the old value of @v.
138 static __inline__ int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) in arch_atomic_fetch_add_unless() argument
145 cmpw 0,%0,%3 \n\ in arch_atomic_fetch_add_unless()
154 : "r" (&v->counter), "rI" (a), "r" (u) in arch_atomic_fetch_add_unless()
162 * Atomically test *v and decrement if it is greater than 0.
163 * The function returns the old value of *v minus 1, even if
164 * the atomic variable, v, was not decremented.
166 static __inline__ int arch_atomic_dec_if_positive(atomic_t *v) in arch_atomic_dec_if_positive() argument
181 : "r" (&v->counter) in arch_atomic_dec_if_positive()
192 static __inline__ s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read() argument
198 __asm__ __volatile__("ld %0,0(%1)" : "=r"(t) : "b"(&v->counter)); in arch_atomic64_read()
200 __asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m<>"(v->counter)); in arch_atomic64_read()
205 static __inline__ void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set() argument
209 __asm__ __volatile__("std %1,0(%2)" : "=m"(v->counter) : "r"(i), "b"(&v->counter)); in arch_atomic64_set()
211 __asm__ __volatile__("std%U0%X0 %1,%0" : "=m<>"(v->counter) : "r"(i)); in arch_atomic64_set()
215 static __inline__ void arch_atomic64_##op(s64 a, atomic64_t *v) \
220 "1: ldarx %0,0,%3 # atomic64_" #op "\n" \
222 " stdcx. %0,0,%3 \n" \
224 : "=&r" (t), "+m" (v->counter) \
225 : "r" (a), "r" (&v->counter) \
231 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
236 "1: ldarx %0,0,%3 # atomic64_" #op "_return_relaxed\n" \
238 " stdcx. %0,0,%3\n" \
240 : "=&r" (t), "+m" (v->counter) \
241 : "r" (a), "r" (&v->counter) \
249 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
255 #asm_op " %1,%3,%0\n" \
258 : "=&r" (res), "=&r" (t), "+m" (v->counter) \
259 : "r" (a), "r" (&v->counter) \
297 static __inline__ void arch_atomic64_inc(atomic64_t *v) in ATOMIC64_OPS()
306 : "=&r" (t), "+m" (v->counter) in ATOMIC64_OPS()
307 : "r" (&v->counter) in ATOMIC64_OPS()
312 static __inline__ s64 arch_atomic64_inc_return_relaxed(atomic64_t *v) in arch_atomic64_inc_return_relaxed() argument
321 : "=&r" (t), "+m" (v->counter) in arch_atomic64_inc_return_relaxed()
322 : "r" (&v->counter) in arch_atomic64_inc_return_relaxed()
328 static __inline__ void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec() argument
337 : "=&r" (t), "+m" (v->counter) in arch_atomic64_dec()
338 : "r" (&v->counter) in arch_atomic64_dec()
343 static __inline__ s64 arch_atomic64_dec_return_relaxed(atomic64_t *v) in arch_atomic64_dec_return_relaxed() argument
352 : "=&r" (t), "+m" (v->counter) in arch_atomic64_dec_return_relaxed()
353 : "r" (&v->counter) in arch_atomic64_dec_return_relaxed()
363 * Atomically test *v and decrement if it is greater than 0.
364 * The function returns the old value of *v minus 1.
366 static __inline__ s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive() argument
380 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
389 * @v: pointer of type atomic64_t
390 * @a: the amount to add to v...
391 * @u: ...unless v is equal to u.
393 * Atomically adds @a to @v, so long as it was not @u.
394 * Returns the old value of @v.
396 static __inline__ s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless() argument
403 cmpd 0,%0,%3 \n\ in arch_atomic64_fetch_add_unless()
412 : "r" (&v->counter), "r" (a), "r" (u) in arch_atomic64_fetch_add_unless()
421 * @v: pointer of type atomic64_t
423 * Atomically increments @v by 1, so long as @v is non-zero.
424 * Returns non-zero if @v was non-zero, and zero otherwise.
426 static __inline__ int arch_atomic64_inc_not_zero(atomic64_t *v) in arch_atomic64_inc_not_zero() argument
442 : "r" (&v->counter) in arch_atomic64_inc_not_zero()
447 #define arch_atomic64_inc_not_zero(v) arch_atomic64_inc_not_zero((v)) argument
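The kernel-doc at lines 421-424 describes arch_atomic64_inc_not_zero(), the hand-coded form of "increment only if the counter has not already reached zero"; the self-referential #define at line 447 is the usual way an architecture tells the generic fallback layer that it supplies its own version. A kernel-context usage sketch of the pattern this helper exists for, taking a reference to an object only while it is still live; struct obj and obj_tryget() are illustrative, not kernel API:

    #include <linux/types.h>
    #include <linux/atomic.h>

    struct obj {
            atomic64_t refs;        /* illustrative object with a 64-bit refcount */
    };

    /* Succeeds (and takes a reference) only if @o's refcount was non-zero,
     * i.e. the object is not already being torn down by the last put. */
    static inline bool obj_tryget(struct obj *o)
    {
            return atomic64_inc_not_zero(&o->refs);
    }

A typical caller looks the object up (for example under RCU), calls obj_tryget(), and treats a false return as if the lookup had failed, since the object is already on its way to being freed.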