1 /* atomic operations */
2
3 /*
4 * Copyright (c) 1997-2015, Wind River Systems, Inc.
5 * Copyright (c) 2023 Nordic Semiconductor ASA
6 *
7 * SPDX-License-Identifier: Apache-2.0
8 */
9
10 #ifndef ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_
11 #define ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_
12
13 #include <stdbool.h>
14 #include <zephyr/sys/atomic_types.h>
15
16 #ifdef __cplusplus
17 extern "C" {
18 #endif
19
20 /* Included from <atomic.h> */
21
/**
 * @brief Atomic compare-and-set.
 *
 * Atomically replaces the value of @a target with @a new_value, but only
 * if its current value equals @a old_value.
 *
 * @param target Address of the atomic variable.
 * @param old_value Value the caller expects @a target to hold.
 * @param new_value Value stored on a successful comparison.
 *
 * @return true if the exchange was performed, false otherwise.
 */
static inline bool atomic_cas(atomic_t *target, atomic_val_t old_value,
			      atomic_val_t new_value)
{
	/* The builtin writes the observed value back through its second
	 * argument on failure; a named local makes that scratch role explicit.
	 */
	atomic_val_t expected = old_value;

	return __atomic_compare_exchange_n(target, &expected, new_value, false,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
29
/**
 * @brief Atomic compare-and-set on a pointer value.
 *
 * Atomically replaces the value of @a target with @a new_value, but only
 * if its current value equals @a old_value.
 *
 * @param target Address of the atomic pointer variable.
 * @param old_value Pointer value the caller expects @a target to hold.
 * @param new_value Pointer value stored on a successful comparison.
 *
 * @return true if the exchange was performed, false otherwise.
 */
static inline bool atomic_ptr_cas(atomic_ptr_t *target, atomic_ptr_val_t old_value,
				  atomic_ptr_val_t new_value)
{
	/* Scratch copy: the builtin overwrites its "expected" argument on
	 * a failed comparison.
	 */
	atomic_ptr_val_t expected = old_value;

	return __atomic_compare_exchange_n(target, &expected, new_value, false,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
37
/**
 * @brief Atomically add @a value to @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Amount to add.
 *
 * @return The value of @a target before the addition.
 */
static inline atomic_val_t atomic_add(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_add(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
42
/**
 * @brief Atomically subtract @a value from @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Amount to subtract.
 *
 * @return The value of @a target before the subtraction.
 */
static inline atomic_val_t atomic_sub(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_sub(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
47
atomic_inc(atomic_t * target)48 static inline atomic_val_t atomic_inc(atomic_t *target)
49 {
50 return atomic_add(target, 1);
51 }
52
atomic_dec(atomic_t * target)53 static inline atomic_val_t atomic_dec(atomic_t *target)
54 {
55 return atomic_sub(target, 1);
56 }
57
atomic_get(const atomic_t * target)58 static inline atomic_val_t atomic_get(const atomic_t *target)
59 {
60 return __atomic_load_n(target, __ATOMIC_SEQ_CST);
61 }
62
atomic_ptr_get(const atomic_ptr_t * target)63 static inline atomic_ptr_val_t atomic_ptr_get(const atomic_ptr_t *target)
64 {
65 return __atomic_load_n(target, __ATOMIC_SEQ_CST);
66 }
67
/**
 * @brief Atomically store @a value into @a target.
 *
 * Despite the "set" name this is an atomic exchange, not a test-and-set:
 * the builtin writes @a value into *target and hands back whatever was
 * there beforehand.
 *
 * @param target Address of the atomic variable.
 * @param value New value to store.
 *
 * @return The value of @a target before the store.
 */
static inline atomic_val_t atomic_set(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
76
/**
 * @brief Atomically store @a value into @a target.
 *
 * @param target Address of the atomic pointer variable.
 * @param value New pointer value to store.
 *
 * @return The pointer value held by @a target before the store.
 */
static inline atomic_ptr_val_t atomic_ptr_set(atomic_ptr_t *target, atomic_ptr_val_t value)
{
	atomic_ptr_val_t previous = __atomic_exchange_n(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
81
atomic_clear(atomic_t * target)82 static inline atomic_val_t atomic_clear(atomic_t *target)
83 {
84 return atomic_set(target, 0);
85 }
86
atomic_ptr_clear(atomic_ptr_t * target)87 static inline atomic_ptr_val_t atomic_ptr_clear(atomic_ptr_t *target)
88 {
89 return atomic_ptr_set(target, NULL);
90 }
91
/**
 * @brief Atomically bitwise-OR @a value into @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Bits to OR in.
 *
 * @return The value of @a target before the operation.
 */
static inline atomic_val_t atomic_or(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_or(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
96
/**
 * @brief Atomically bitwise-XOR @a value into @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Bits to XOR in.
 *
 * @return The value of @a target before the operation.
 */
static inline atomic_val_t atomic_xor(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_xor(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
101
/**
 * @brief Atomically bitwise-AND @a value into @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Mask to AND with.
 *
 * @return The value of @a target before the operation.
 */
static inline atomic_val_t atomic_and(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_and(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
106
/**
 * @brief Atomically bitwise-NAND @a value with @a target.
 *
 * Stores ~(*target & value) into @a target.
 *
 * @param target Address of the atomic variable.
 * @param value Operand of the NAND.
 *
 * @return The value of @a target before the operation.
 */
static inline atomic_val_t atomic_nand(atomic_t *target, atomic_val_t value)
{
	atomic_val_t previous = __atomic_fetch_nand(target, value, __ATOMIC_SEQ_CST);

	return previous;
}
111
112 #ifdef __cplusplus
113 }
114 #endif
115
116 #endif /* ZEPHYR_INCLUDE_SYS_ATOMIC_BUILTIN_H_ */
117