Lines Matching +full:1 +full:v
29 #define atomic_read(v) READ_ONCE((v)->counter)
30 #define atomic64_read(v) READ_ONCE((v)->counter)
32 #define atomic_set(v,i) WRITE_ONCE((v)->counter, (i))
33 #define atomic64_set(v,i) WRITE_ONCE((v)->counter, (i))
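
The read/set wrappers above are plain loads and stores of ->counter; READ_ONCE()/WRITE_ONCE() only keep the compiler from tearing or caching the access and add no memory barrier. A minimal, hypothetical usage sketch (the counter name is made up, not from the matched file), assuming <linux/atomic.h>:

static atomic_t nr_events = ATOMIC_INIT(0);	/* hypothetical counter */

static void reset_events(void)
{
	atomic_set(&nr_events, 0);		/* single store to ->counter */
}

static int snapshot_events(void)
{
	return atomic_read(&nr_events);		/* single load of ->counter */
}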
42 static __inline__ void atomic_##op(int i, atomic_t * v) \
46 "1: ldl_l %0,%1\n" \
48 " stl_c %0,%1\n" \
51 "2: br 1b\n" \
53 :"=&r" (temp), "=m" (v->counter) \
54 :"Ir" (i), "m" (v->counter)); \
58 static inline int atomic_##op##_return_relaxed(int i, atomic_t *v) \
62 "1: ldl_l %0,%1\n" \
65 " stl_c %0,%1\n" \
68 "2: br 1b\n" \
70 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
71 :"Ir" (i), "m" (v->counter) : "memory"); \
77 static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
81 "1: ldl_l %2,%1\n" \
83 " stl_c %0,%1\n" \
86 "2: br 1b\n" \
88 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
89 :"Ir" (i), "m" (v->counter) : "memory"); \
95 static __inline__ void atomic64_##op(s64 i, atomic64_t * v) \
99 "1: ldq_l %0,%1\n" \
101 " stq_c %0,%1\n" \
104 "2: br 1b\n" \
106 :"=&r" (temp), "=m" (v->counter) \
107 :"Ir" (i), "m" (v->counter)); \
111 static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
115 "1: ldq_l %0,%1\n" \
118 " stq_c %0,%1\n" \
121 "2: br 1b\n" \
123 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
124 :"Ir" (i), "m" (v->counter) : "memory"); \
130 static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
134 "1: ldq_l %2,%1\n" \
136 " stq_c %0,%1\n" \
139 "2: br 1b\n" \
141 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
142 :"Ir" (i), "m" (v->counter) : "memory"); \
201 #define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
202 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
204 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
205 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
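
A hypothetical usage sketch for the cmpxchg()/xchg() wrappers above (the flag and helper names are made up): atomic_cmpxchg() returns the value it actually found in ->counter, so only the caller that observed the expected old value wins a race.

static atomic_t claimed = ATOMIC_INIT(0);	/* hypothetical 0/1 flag */

static bool try_claim(void)
{
	/* Move 0 -> 1 exactly once; exactly one caller sees the old 0. */
	return atomic_cmpxchg(&claimed, 0, 1) == 0;
}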
209  * @v: pointer of type atomic_t
210  * @a: the amount to add to v...
211  * @u: ...unless v is equal to u.
213  * Atomically adds @a to @v, so long as it was not @u.
214  * Returns the old value of @v.
216 static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
221 "1: ldl_l %[old],%[mem]\n"
229 "3: br 1b\n"
232 : [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
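
A hypothetical sketch of the documented semantics as a cmpxchg() loop rather than the ldl_l/stl_c assembly above: add @a to @v unless the current value is @u, returning the old value. The 64-bit variant below follows the same pattern with ldq_l/stq_c.

static inline int sketch_fetch_add_unless(atomic_t *v, int a, int u)
{
	int old = atomic_read(v);

	while (old != u) {
		int prev = atomic_cmpxchg(v, old, old + a);
		if (prev == old)
			break;		/* the add landed */
		old = prev;		/* lost a race; retry with the new value */
	}
	return old;			/* old value, unchanged if it was @u */
}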
241 * @v: pointer of type atomic64_t
242 * @a: the amount to add to v...
243 * @u: ...unless v is equal to u.
245 * Atomically adds @a to @v, so long as it was not @u.
246 * Returns the old value of @v.
248 static __inline__ s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
253 "1: ldq_l %[old],%[mem]\n"
261 "3: br 1b\n"
264 : [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
272 * atomic64_dec_if_positive - decrement by 1 if old value positive
273  * @v: pointer of type atomic64_t
275 * The function returns the old value of *v minus 1, even if
276 * the atomic variable, v, was not decremented.
278 static inline s64 atomic64_dec_if_positive(atomic64_t *v)
283 "1: ldq_l %[old],%[mem]\n"
284 " subq %[old],1,%[tmp]\n"
290 "3: br 1b\n"
293 : [mem] "m"(*v)
296 return old - 1;
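
A hypothetical sketch of atomic64_dec_if_positive() as a cmpxchg() loop: the decrement is only stored while the old value is positive (an unmatched branch in the assembly skips the stq_c otherwise), but old - 1 is returned either way, so a negative result tells the caller that nothing was written back.

static inline s64 sketch_dec_if_positive(atomic64_t *v)
{
	s64 old = atomic64_read(v);

	while (old > 0) {
		s64 prev = atomic64_cmpxchg(v, old, old - 1);
		if (prev == old)
			break;		/* stored old - 1 */
		old = prev;		/* lost a race; re-check the new value */
	}
	return old - 1;			/* negative => not decremented */
}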