/* atomic operations */

/*
 * Copyright (c) 1997-2015, Wind River Systems, Inc.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_
#define ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_

#ifdef __cplusplus
extern "C" {
#endif

/* Included from <atomic.h> */

/**
 * @addtogroup atomic_apis Atomic Services APIs
 * @ingroup kernel_apis
 * @{
 */

/**
 * @brief Atomic compare-and-set.
 *
 * This routine performs an atomic compare-and-set on @a target. If the current
 * value of @a target equals @a old_value, @a target is set to @a new_value.
 * If the current value of @a target does not equal @a old_value, @a target
 * is left unchanged.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param old_value Original value to compare against.
 * @param new_value New value to store.
 * @return true if @a new_value is written, false otherwise.
 */
static inline bool atomic_cas(atomic_t *target, atomic_val_t old_value,
			      atomic_val_t new_value)
{
	return __atomic_compare_exchange_n(target, &old_value, new_value,
					   0, __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
}
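/*
 * Illustrative usage sketch (not part of the original header): a typical
 * compare-and-set retry loop. The helper name and the bound are hypothetical;
 * the loop re-reads the value and retries until atomic_cas() commits the
 * update against an unchanged @a target.
 *
 *	static bool counter_inc_bounded(atomic_t *counter, atomic_val_t max)
 *	{
 *		atomic_val_t old;
 *
 *		do {
 *			old = atomic_get(counter);
 *			if (old >= max) {
 *				return false;
 *			}
 *		} while (!atomic_cas(counter, old, old + 1));
 *
 *		return true;
 *	}
 */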

/**
 * @brief Atomic compare-and-set with pointer values
 *
 * This routine performs an atomic compare-and-set on @a target. If the current
 * value of @a target equals @a old_value, @a target is set to @a new_value.
 * If the current value of @a target does not equal @a old_value, @a target
 * is left unchanged.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param old_value Original value to compare against.
 * @param new_value New value to store.
 * @return true if @a new_value is written, false otherwise.
 */
static inline bool atomic_ptr_cas(atomic_ptr_t *target, atomic_ptr_val_t old_value,
				  atomic_ptr_val_t new_value)
{
	return __atomic_compare_exchange_n(target, &old_value, new_value,
					   0, __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
}
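/*
 * Illustrative usage sketch (not part of the original header): one-shot
 * publication of a pointer. The type and helper names are hypothetical;
 * ATOMIC_PTR_INIT() is assumed to come from <zephyr/sys/atomic.h>. Only the
 * caller whose compare-and-set succeeds installs its object; every other
 * caller reuses the pointer that is already published.
 *
 *	static atomic_ptr_t instance = ATOMIC_PTR_INIT(NULL);
 *
 *	static struct foo *foo_get_instance(struct foo *candidate)
 *	{
 *		if (atomic_ptr_cas(&instance, NULL, candidate)) {
 *			return candidate;
 *		}
 *
 *		return atomic_ptr_get(&instance);
 *	}
 */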

/**
 *
 * @brief Atomic addition.
 *
 * This routine performs an atomic addition on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to add.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_add(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_add(target, value, __ATOMIC_SEQ_CST);
}
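/*
 * Illustrative usage sketch (not part of the original header): because
 * atomic_add() returns the value of @a target *before* the addition, it can
 * hand out unique, monotonically increasing sequence numbers. The variable
 * and helper names are hypothetical; ATOMIC_INIT() is assumed to come from
 * <zephyr/sys/atomic.h>.
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	static atomic_val_t alloc_seq(void)
 *	{
 *		return atomic_add(&next_seq, 1);
 *	}
 */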

/**
 *
 * @brief Atomic subtraction.
 *
 * This routine performs an atomic subtraction on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to subtract.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_sub(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_sub(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic increment.
 *
 * This routine performs an atomic increment by 1 on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_inc(atomic_t *target)
{
	return atomic_add(target, 1);
}

/**
 *
 * @brief Atomic decrement.
 *
 * This routine performs an atomic decrement by 1 on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_dec(atomic_t *target)
{
	return atomic_sub(target, 1);
}
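/*
 * Illustrative usage sketch (not part of the original header): a simple
 * reference count. Since atomic_inc() and atomic_dec() return the *previous*
 * value, the releaser that observes a previous value of 1 knows it dropped
 * the last reference. The structure and helper names (including
 * object_destroy()) are hypothetical.
 *
 *	struct object {
 *		atomic_t refcount;
 *	};
 *
 *	static void object_get(struct object *obj)
 *	{
 *		atomic_inc(&obj->refcount);
 *	}
 *
 *	static void object_put(struct object *obj)
 *	{
 *		if (atomic_dec(&obj->refcount) == 1) {
 *			object_destroy(obj);
 *		}
 *	}
 */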

/**
 *
 * @brief Atomic get.
 *
 * This routine performs an atomic read on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 *
 * @return Value of @a target.
 */
static inline atomic_val_t atomic_get(const atomic_t *target)
{
	return __atomic_load_n(target, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic get a pointer value
 *
 * This routine performs an atomic read on @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of pointer variable.
 *
 * @return Value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_get(const atomic_ptr_t *target)
{
	return __atomic_load_n(target, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic get-and-set.
 *
 * This routine atomically sets @a target to @a value and returns
 * the previous value of @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to write to @a target.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_set(atomic_t *target, atomic_val_t value)
{
	/* This builtin, as described by Intel, is not a traditional
	 * test-and-set operation, but rather an atomic exchange operation. It
	 * writes value into *ptr, and returns the previous contents of *ptr.
	 */
	return __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);
}
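/*
 * Illustrative usage sketch (not part of the original header): using the
 * previous value returned by atomic_set() to avoid submitting the same piece
 * of deferred work twice. The variable and helper names (including
 * submit_work()) are hypothetical; ATOMIC_INIT() is assumed to come from
 * <zephyr/sys/atomic.h>.
 *
 *	static atomic_t work_pending = ATOMIC_INIT(0);
 *
 *	static void request_work(void)
 *	{
 *		if (atomic_set(&work_pending, 1) == 0) {
 *			submit_work();
 *		}
 *	}
 *
 * The work handler would call atomic_clear(&work_pending) before processing,
 * so a request made while the handler is running is not lost.
 */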

/**
 *
 * @brief Atomic get-and-set for pointer values
 *
 * This routine atomically sets @a target to @a value and returns
 * the previous value of @a target.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to write to @a target.
 *
 * @return Previous value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_set(atomic_ptr_t *target, atomic_ptr_val_t value)
{
	return __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);
}
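/*
 * Illustrative usage sketch (not part of the original header): handing the
 * most recent sample from a producer thread to a consumer through a single
 * atomic pointer slot. The type and helper names (including sample_free())
 * are hypothetical; ATOMIC_PTR_INIT() is assumed to come from
 * <zephyr/sys/atomic.h>.
 *
 *	static atomic_ptr_t latest_sample = ATOMIC_PTR_INIT(NULL);
 *
 *	void producer_publish(struct sample *s)
 *	{
 *		struct sample *old = atomic_ptr_set(&latest_sample, s);
 *
 *		if (old != NULL) {
 *			sample_free(old);
 *		}
 *	}
 *
 *	struct sample *consumer_take(void)
 *	{
 *		return atomic_ptr_set(&latest_sample, NULL);
 *	}
 */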

/**
 *
 * @brief Atomic clear.
 *
 * This routine atomically sets @a target to zero and returns its previous
 * value. (Hence, it is equivalent to atomic_set(target, 0).)
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_clear(atomic_t *target)
{
	return atomic_set(target, 0);
}

/**
 *
 * @brief Atomic clear of a pointer value
 *
 * This routine atomically sets @a target to NULL and returns its previous
 * value. (Hence, it is equivalent to atomic_ptr_set(target, NULL).)
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of pointer variable.
 *
 * @return Previous value of @a target.
 */
static inline atomic_ptr_val_t atomic_ptr_clear(atomic_ptr_t *target)
{
	return atomic_ptr_set(target, NULL);
}

/**
 *
 * @brief Atomic bitwise inclusive OR.
 *
 * This routine atomically sets @a target to the bitwise inclusive OR of
 * @a target and @a value.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to OR.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_or(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_or(target, value, __ATOMIC_SEQ_CST);
}

/**
 *
 * @brief Atomic bitwise exclusive OR (XOR).
 *
 * This routine atomically sets @a target to the bitwise exclusive OR (XOR) of
 * @a target and @a value.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to XOR.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_xor(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_xor(target, value, __ATOMIC_SEQ_CST);
}
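/*
 * Illustrative usage sketch (not part of the original header): toggling a
 * single flag bit with atomic_xor(), for example to flip a "tracing enabled"
 * bit from any context. The variable and bit choice are hypothetical; BIT()
 * and ATOMIC_INIT() are assumed to come from Zephyr's common headers.
 *
 *	static atomic_t control_flags = ATOMIC_INIT(0);
 *
 *	static void toggle_tracing(void)
 *	{
 *		atomic_xor(&control_flags, BIT(0));
 *	}
 */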

/**
 *
 * @brief Atomic bitwise AND.
 *
 * This routine atomically sets @a target to the bitwise AND of @a target
 * and @a value.
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to AND.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_and(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_and(target, value, __ATOMIC_SEQ_CST);
}
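/*
 * Illustrative usage sketch (not part of the original header): maintaining a
 * word of status flags, where atomic_or() sets bits and atomic_and() with the
 * complement of a mask clears them. The returned previous value tells the
 * caller whether the bit was already set. The variable and flag names are
 * hypothetical; ATOMIC_INIT() is assumed to come from <zephyr/sys/atomic.h>.
 *
 *	#define FLAG_READY	0x01
 *
 *	static atomic_t status = ATOMIC_INIT(0);
 *
 *	static bool set_ready(void)
 *	{
 *		return (atomic_or(&status, FLAG_READY) & FLAG_READY) == 0;
 *	}
 *
 *	static void clear_ready(void)
 *	{
 *		atomic_and(&status, ~FLAG_READY);
 *	}
 */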

/**
 *
 * @brief Atomic bitwise NAND.
 *
 * This routine atomically sets @a target to the bitwise NAND of @a target
 * and @a value. (This operation is equivalent to target = ~(target & value).)
 *
 * @note As for all atomic APIs, includes a
 * full/sequentially-consistent memory barrier (where applicable).
 *
 * @param target Address of atomic variable.
 * @param value Value to NAND.
 *
 * @return Previous value of @a target.
 */
static inline atomic_val_t atomic_nand(atomic_t *target, atomic_val_t value)
{
	return __atomic_fetch_nand(target, value, __ATOMIC_SEQ_CST);
}

/** @} */


#ifdef __cplusplus
}
#endif

#endif /* ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_ */