Lines Matching refs:v
24 #define arch_atomic_read(v) READ_ONCE((v)->counter)
25 #define arch_atomic64_read(v) READ_ONCE((v)->counter)
27 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
28 #define arch_atomic64_set(v,i) WRITE_ONCE(((v)->counter), (i))
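The read/set accessors map straight onto READ_ONCE()/WRITE_ONCE(): tear-free loads and stores with no memory-ordering guarantees beyond that. A minimal usage sketch (the variable name and values are illustrative, assuming the usual ATOMIC_INIT() initializer):

    static atomic_t refs = ATOMIC_INIT(1);

    arch_atomic_set(&refs, 0);                  /* plain store through WRITE_ONCE() */
    int snapshot = arch_atomic_read(&refs);     /* plain load through READ_ONCE()   */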
32 ia64_atomic_##op (int i, atomic_t *v) \
38 CMPXCHG_BUGCHECK(v); \
39 old = arch_atomic_read(v); \
41 } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
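Only the lines that reference v appear above; the generator behind them is a compare-and-swap retry loop. A plausible expansion for op = add, reconstructed from the visible fragments (the local declarations and the CMPXCHG_BUGCHECK_DECL line are assumptions, not quoted from the header):

    static __inline__ int
    ia64_atomic_add(int i, atomic_t *v)
    {
            __s32 old, new;
            CMPXCHG_BUGCHECK_DECL

            do {
                    CMPXCHG_BUGCHECK(v);
                    old = arch_atomic_read(v);      /* sample the current value  */
                    new = old + i;                  /* compute the desired value */
                    /* retry if another CPU changed *v between the read and the cmpxchg */
            } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
            return new;
    }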
47 ia64_atomic_fetch_##op (int i, atomic_t *v) \
53 CMPXCHG_BUGCHECK(v); \
54 old = arch_atomic_read(v); \
56 } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
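ia64_atomic_fetch_##op runs the same retry loop; the two generators differ only in which value they hand back once the cmpxchg succeeds. Sketch of the two tails (a reconstruction, not a verbatim quote):

    } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
    return new;     /* ia64_atomic_##op:       value after the operation  */

    } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
    return old;     /* ia64_atomic_fetch_##op: value before the operation */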
77 #define arch_atomic_add_return(i,v) \
81 ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
82 : ia64_atomic_add(__ia64_aar_i, v); \
85 #define arch_atomic_sub_return(i,v) \
89 ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
90 : ia64_atomic_sub(__ia64_asr_i, v); \
93 #define arch_atomic_fetch_add(i,v) \
97 ? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq) \
98 : ia64_atomic_fetch_add(__ia64_aar_i, v); \
101 #define arch_atomic_fetch_sub(i,v) \
105 ? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq) \
106 : ia64_atomic_fetch_sub(__ia64_asr_i, v); \
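The lines elided between each #define and its ?: arms carry the selection logic: ia64's fetchadd instruction only encodes the immediates +/-1, +/-4, +/-8 and +/-16, so when the increment is a compile-time constant from that set the macro takes the single-instruction ia64_fetch_and_add()/ia64_fetchadd() path, and otherwise falls back to the cmpxchg loop generated above. A sketch of how arch_atomic_add_return() plausibly reads in full (the constant test is reconstructed; the header's exact spelling of that check may differ):

    #define arch_atomic_add_return(i,v)                                 \
    ({                                                                  \
            int __ia64_aar_i = (i);                                     \
            (__builtin_constant_p(i) &&                                 \
             ((i) ==  1 || (i) ==  4 || (i) ==  8 || (i) ==  16 ||      \
              (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16))       \
                    ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)   \
                    : ia64_atomic_add(__ia64_aar_i, v);                 \
    })

The fetch_add/fetch_sub variants make the same choice, but pass the acq completer to ia64_fetchadd() and return the old value rather than the new one.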
113 #define arch_atomic_and(i,v) (void)ia64_atomic_fetch_and(i,v)
114 #define arch_atomic_or(i,v) (void)ia64_atomic_fetch_or(i,v)
115 #define arch_atomic_xor(i,v) (void)ia64_atomic_fetch_xor(i,v)
117 #define arch_atomic_fetch_and(i,v) ia64_atomic_fetch_and(i,v)
118 #define arch_atomic_fetch_or(i,v) ia64_atomic_fetch_or(i,v)
119 #define arch_atomic_fetch_xor(i,v) ia64_atomic_fetch_xor(i,v)
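There is no fetchadd-style shortcut for the bitwise operations, so both forms go through the cmpxchg generators; the void variants are simply the fetch variants with the result cast away. For example (values illustrative):

    atomic_t flags = ATOMIC_INIT(0xff);

    arch_atomic_and(0x0f, &flags);                  /* counter becomes 0x0f, result discarded */
    int old = arch_atomic_fetch_or(0x80, &flags);   /* old == 0x0f, counter becomes 0x8f      */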
127 ia64_atomic64_##op (s64 i, atomic64_t *v) \
133 CMPXCHG_BUGCHECK(v); \
134 old = arch_atomic64_read(v); \
136 } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
142 ia64_atomic64_fetch_##op (s64 i, atomic64_t *v) \
148 CMPXCHG_BUGCHECK(v); \
149 old = arch_atomic64_read(v); \
151 } while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
162 #define arch_atomic64_add_return(i,v) \
166 ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
167 : ia64_atomic64_add(__ia64_aar_i, v); \
170 #define arch_atomic64_sub_return(i,v) \
174 ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
175 : ia64_atomic64_sub(__ia64_asr_i, v); \
178 #define arch_atomic64_fetch_add(i,v) \
182 ? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq) \
183 : ia64_atomic64_fetch_add(__ia64_aar_i, v); \
186 #define arch_atomic64_fetch_sub(i,v) \
190 ? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq) \
191 : ia64_atomic64_fetch_sub(__ia64_asr_i, v); \
198 #define arch_atomic64_and(i,v) (void)ia64_atomic64_fetch_and(i,v)
199 #define arch_atomic64_or(i,v) (void)ia64_atomic64_fetch_or(i,v)
200 #define arch_atomic64_xor(i,v) (void)ia64_atomic64_fetch_xor(i,v)
202 #define arch_atomic64_fetch_and(i,v) ia64_atomic64_fetch_and(i,v)
203 #define arch_atomic64_fetch_or(i,v) ia64_atomic64_fetch_or(i,v)
204 #define arch_atomic64_fetch_xor(i,v) ia64_atomic64_fetch_xor(i,v)
210 #define arch_atomic_add(i,v) (void)arch_atomic_add_return((i), (v))
211 #define arch_atomic_sub(i,v) (void)arch_atomic_sub_return((i), (v))
213 #define arch_atomic64_add(i,v) (void)arch_atomic64_add_return((i), (v))
214 #define arch_atomic64_sub(i,v) (void)arch_atomic64_sub_return((i), (v))
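The plain add/sub interfaces, 32- and 64-bit alike, are just the *_return forms with the result discarded. A short usage sketch pulling the listed interfaces together (the function and variable names are illustrative, not from the header):

    static atomic_t   hits  = ATOMIC_INIT(0);
    static atomic64_t bytes = ATOMIC64_INIT(0);

    static s64 account_hit(void)
    {
            arch_atomic_add(1, &hits);                   /* add_return with its result discarded */
            arch_atomic64_add(4096, &bytes);

            if (arch_atomic_sub_return(1, &hits) == 0)   /* returns the new value                */
                    arch_atomic64_sub(4096, &bytes);     /* roll the byte count back when unused */

            return arch_atomic64_fetch_add(512, &bytes); /* returns the value before this add    */
    }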