/* atomic operations */

/*
 * Copyright (c) 1997-2015, Wind River Systems, Inc.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_
#define ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_

#ifdef __cplusplus
extern "C" {
#endif

/* Included from <atomic.h> */

/**
 * @addtogroup atomic_apis Atomic Services APIs
 * @ingroup kernel_apis
 * @{
 */

/**
 * @brief Atomic compare-and-set.
 *
 * This routine performs an atomic compare-and-set on @a target. If the current
 * value of @a target equals @a old_value, @a target is set to @a new_value.
 * If the current value of @a target does not equal @a old_value, @a target
 * is left unchanged.
 *
 * @param target Address of atomic variable.
 * @param old_value Original value to compare against.
 * @param new_value New value to store.
 * @return true if @a new_value is written, false otherwise.
 */
static inline bool atomic_cas(atomic_t *target, atomic_val_t old_value,
			      atomic_val_t new_value)
{
	return __atomic_compare_exchange_n(target, &old_value, new_value,
					   0, __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
}
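
/*
 * Illustrative usage sketch (not part of this header's API): a common
 * atomic_cas() pattern is claiming a one-shot action exactly once. The
 * function and parameter names below are invented for the example.
 */
static inline bool atomic_example_claim_once(atomic_t *flag)
{
	/* Only the caller that observes 0 and swaps in 1 gets true; every
	 * later caller finds 1 and gets false, so guarded work runs once.
	 */
	return atomic_cas(flag, 0, 1);
}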

/**
 * @brief Atomic compare-and-set with pointer values
 *
 * This routine performs an atomic compare-and-set on @a target. If the current
 * value of @a target equals @a old_value, @a target is set to @a new_value.
 * If the current value of @a target does not equal @a old_value, @a target
 * is left unchanged.
 *
 * @param target Address of atomic variable.
 * @param old_value Original value to compare against.
 * @param new_value New value to store.
 * @return true if @a new_value is written, false otherwise.
 */
static inline bool atomic_ptr_cas(atomic_ptr_t *target, atomic_ptr_val_t old_value,
				  atomic_ptr_val_t new_value)
{
	return __atomic_compare_exchange_n(target, &old_value, new_value,
					   0, __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
}
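
/*
 * Illustrative usage sketch (not part of this header's API): atomic_ptr_cas()
 * lets a pointer slot be published exactly once. The names below are invented
 * for the example.
 */
static inline bool atomic_example_publish_once(atomic_ptr_t *slot, void *candidate)
{
	/* Installs candidate only while the slot is still NULL; returns true
	 * when this caller's pointer was published, false if another was.
	 */
	return atomic_ptr_cas(slot, NULL, candidate);
}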

/**
 *
 * @brief Atomic addition.
 *
 * This routine performs an atomic addition on @a target.
 *
 * @param target Address of atomic variable.
 * @param value Value to add.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_add(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_add(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic subtraction.
 *
 * This routine performs an atomic subtraction on @a target.
 *
 * @param target Address of atomic variable.
 * @param value Value to subtract.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_sub(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_sub(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic increment.
 *
 * This routine performs an atomic increment by 1 on @a target.
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_inc(atomic_t *target)
{
	return atomic_add(target, 1);
}

/**
 *
 * @brief Atomic decrement.
 *
 * This routine performs an atomic decrement by 1 on @a target.
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_dec(atomic_t *target)
{
	return atomic_sub(target, 1);
}
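
/*
 * Illustrative usage sketch (not part of this header's API): because
 * atomic_dec() returns the value held before the decrement, a simple
 * reference-count release can detect the final drop; the matching acquire
 * would be atomic_inc(). Names below are invented for the example.
 */
static inline bool atomic_example_ref_release(atomic_t *refcount)
{
	/* A previous value of 1 means this caller dropped the last reference. */
	return atomic_dec(refcount) == 1;
}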

/**
 *
 * @brief Atomic get.
 *
 * This routine performs an atomic read on @a target.
 *
 * @param target Address of atomic variable.
 *
 * @return Value of @a target.
 */
static inline atomic_val_t atomic_get(const atomic_t *target)
{
	return __atomic_load_n(target, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic get of a pointer value
 *
 * This routine performs an atomic read on @a target.
 *
 * @param target Address of pointer variable.
 *
 * @return Value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_get(const atomic_ptr_t *target)
{
	return __atomic_load_n(target, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic get-and-set.
 *
 * This routine atomically sets @a target to @a value and returns
 * the previous value of @a target.
 *
 * @param target Address of atomic variable.
 * @param value Value to write to @a target.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_set(atomic_t *target, atomic_val_t value)
{
	/* This builtin, as described by Intel, is not a traditional
	 * test-and-set operation, but rather an atomic exchange operation. It
	 * writes value into *ptr, and returns the previous contents of *ptr.
	 */
	return __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);
}
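
/*
 * Illustrative usage sketch (not part of this header's API): because
 * atomic_set() returns the previous value, it can raise a state flag and
 * report whether it was already raised in a single step. Names below are
 * invented for the example.
 */
static inline bool atomic_example_mark_busy(atomic_t *state)
{
	/* Returns true if the state was already non-zero before this call. */
	return atomic_set(state, 1) != 0;
}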

/**
 *
 * @brief Atomic get-and-set for pointer values
 *
 * This routine atomically sets @a target to @a value and returns
 * the previous value of @a target.
 *
 * @param target Address of atomic variable.
 * @param value Value to write to @a target.
 *
 * @return Previous value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_set(atomic_ptr_t *target, atomic_ptr_val_t value)
{
	return __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic clear.
 *
 * This routine atomically sets @a target to zero and returns its previous
 * value. (Hence, it is equivalent to atomic_set(target, 0).)
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_clear(atomic_t *target)
{
	return atomic_set(target, 0);
}

/**
 *
 * @brief Atomic clear of a pointer value
 *
 * This routine atomically sets @a target to NULL and returns its previous
 * value. (Hence, it is equivalent to atomic_ptr_set(target, NULL).)
 *
 * @param target Address of atomic pointer variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_clear(atomic_ptr_t *target)
{
	return atomic_ptr_set(target, NULL);
}

/**
 *
 * @brief Atomic bitwise inclusive OR.
 *
 * This routine atomically sets @a target to the bitwise inclusive OR of
 * @a target and @a value.
 *
 * @param target Address of atomic variable.
 * @param value Value to OR.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_or(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_or(target, value, __ATOMIC_SEQ_CST);
}
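
/*
 * Illustrative usage sketch (not part of this header's API): atomic_or() is
 * a natural fit for event/flag words, and its return value shows which bits
 * were already set. The names below are invented for the example.
 */
static inline bool atomic_example_post_event(atomic_t *events, atomic_val_t event_bit)
{
	/* Set the bit and report whether it was already pending beforehand. */
	return (atomic_or(events, event_bit) & event_bit) != 0;
}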

/**
 *
 * @brief Atomic bitwise exclusive OR (XOR).
 *
 * This routine atomically sets @a target to the bitwise exclusive OR (XOR) of
 * @a target and @a value.
 *
 * @param target Address of atomic variable.
 * @param value Value to XOR.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_xor(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_xor(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic bitwise AND.
 *
 * This routine atomically sets @a target to the bitwise AND of @a target
 * and @a value.
 *
 * @param target Address of atomic variable.
 * @param value Value to AND.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_and(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_and(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic bitwise NAND.
 *
 * This routine atomically sets @a target to the bitwise NAND of @a target
 * and @a value. (This operation is equivalent to target = ~(target & value).)
 *
 * @param target Address of atomic variable.
 * @param value Value to NAND.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_nand(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_nand(target, value, __ATOMIC_SEQ_CST);
}
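
/*
 * Illustrative usage sketch (not part of this header's API): NAND with an
 * all-ones mask stores ~(old & ~0) == ~old, so a single atomic_nand() call
 * complements every bit of the target. The name below is invented for the
 * example, and it assumes the two's complement integer representation
 * Zephyr requires.
 */
static inline atomic_val_t atomic_example_invert(atomic_t *target)
{
	/* Atomically flip all bits; the previous value is returned. */
	return atomic_nand(target, ~(atomic_val_t)0);
}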

/** @} */


#ifdef __cplusplus
}
#endif

#endif /* ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_ */