/*
 * Copyright (c) 2021 Intel Corporation
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_ATOMIC_XTENSA_H_
#define ZEPHYR_INCLUDE_ATOMIC_XTENSA_H_

/* Included from <zephyr/sys/atomic.h> */

/* Recent GCC versions actually do have working atomics support on
 * Xtensa (and so should work with CONFIG_ATOMIC_OPERATIONS_BUILTIN),
 * but existing versions of Xtensa's XCC do not.  So we define an
 * inline implementation here that is more or less identical.
 */

/** Implementation of @ref atomic_get. */
static ALWAYS_INLINE atomic_val_t atomic_get(const atomic_t *target)
{
	atomic_val_t ret;

	/* Actual Xtensa hardware seems to have only in-order
	 * pipelines, but the architecture does define a barrier load,
	 * so use it.  There is a matching s32ri instruction, but
	 * nothing in the Zephyr API requires a barrier store (all the
	 * atomic write ops have exchange semantics).
	 */
	__asm__ volatile("l32ai %0, %1, 0"
			 : "=r"(ret) : "r"(target) : "memory");
	return ret;
}

/**
 * @brief Xtensa specific atomic compare-and-set (CAS).
 *
 * @param addr Address of atomic variable.
 * @param oldval Original value to compare against.
 * @param newval New value to store.
 *
 * This uses the SCOMPARE1 register and the s32c1i instruction to
 * perform an atomic compare-and-set operation. It unconditionally
 * reads from the atomic variable at @p addr before the comparison,
 * and that value is returned from the function.
 *
 * @return The value at the memory location before CAS.
 *
 * @see atomic_cas.
 */
static ALWAYS_INLINE
atomic_val_t xtensa_cas(atomic_t *addr, atomic_val_t oldval,
			atomic_val_t newval)
{
	__asm__ volatile("wsr %1, SCOMPARE1; s32c1i %0, %2, 0"
			 : "+r"(newval), "+r"(oldval) : "r"(addr) : "memory");

	return newval; /* got swapped with the old memory by s32c1i */
}

/** Implementation of @ref atomic_cas. */
static ALWAYS_INLINE
bool atomic_cas(atomic_t *target, atomic_val_t oldval, atomic_val_t newval)
{
	return oldval == xtensa_cas(target, oldval, newval);
}
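
/* Illustrative usage sketch (not part of this header): a spinlock-style
 * acquire built on atomic_cas(), assuming a hypothetical lock word
 * "lock" initialized to 0:
 *
 *	while (!atomic_cas(&lock, 0, 1)) {
 *		; // spin until the CAS observes 0 and stores 1
 *	}
 *
 * atomic_cas() returns true only when the value xtensa_cas() read back
 * matched oldval, i.e. when this context's store actually happened.
 */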

/** Implementation of @ref atomic_ptr_cas. */
static ALWAYS_INLINE
bool atomic_ptr_cas(atomic_ptr_t *target, void *oldval, void *newval)
{
	return (atomic_val_t) oldval
		== xtensa_cas((atomic_t *) target, (atomic_val_t) oldval,
			      (atomic_val_t) newval);
}

/* Generates an atomic exchange sequence that swaps the value at
 * address "target" (whose old value is read into "cur") with the
 * specified expression.  Evaluates to the old value which was
 * atomically replaced.
 */
#define Z__GEN_ATOMXCHG(expr) ({			\
	atomic_val_t res, cur;				\
	do {						\
		cur = *target;				\
		res = xtensa_cas(target, cur, (expr));	\
	} while (res != cur);				\
	res; })
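
/* For example, atomic_add() below expands to roughly:
 *
 *	atomic_val_t res, cur;
 *	do {
 *		cur = *target;
 *		res = xtensa_cas(target, cur, cur + value);
 *	} while (res != cur);
 *	return res;
 *
 * i.e. it retries until no other context modified *target between the
 * plain load and the s32c1i conditional store, at which point "res"
 * holds the pre-update value.
 */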

/** Implementation of @ref atomic_set. */
static ALWAYS_INLINE
atomic_val_t atomic_set(atomic_t *target, atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(value);
}

/** Implementation of @ref atomic_add. */
static ALWAYS_INLINE
atomic_val_t atomic_add(atomic_t *target, atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(cur + value);
}

/** Implementation of @ref atomic_sub. */
static ALWAYS_INLINE
atomic_val_t atomic_sub(atomic_t *target, atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(cur - value);
}

/** Implementation of @ref atomic_inc. */
static ALWAYS_INLINE
atomic_val_t atomic_inc(atomic_t *target)
{
	return Z__GEN_ATOMXCHG(cur + 1);
}

/** Implementation of @ref atomic_dec. */
static ALWAYS_INLINE
atomic_val_t atomic_dec(atomic_t *target)
{
	return Z__GEN_ATOMXCHG(cur - 1);
}

/** Implementation of @ref atomic_or. */
static ALWAYS_INLINE atomic_val_t atomic_or(atomic_t *target,
					    atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(cur | value);
}

/** Implementation of @ref atomic_xor. */
static ALWAYS_INLINE atomic_val_t atomic_xor(atomic_t *target,
					     atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(cur ^ value);
}

/** Implementation of @ref atomic_and. */
static ALWAYS_INLINE atomic_val_t atomic_and(atomic_t *target,
					     atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(cur & value);
}

/** Implementation of @ref atomic_nand. */
static ALWAYS_INLINE atomic_val_t atomic_nand(atomic_t *target,
					      atomic_val_t value)
{
	return Z__GEN_ATOMXCHG(~(cur & value));
}

/** Implementation of @ref atomic_ptr_get. */
static ALWAYS_INLINE void *atomic_ptr_get(const atomic_ptr_t *target)
{
	return (void *) atomic_get((atomic_t *)target);
}

/** Implementation of @ref atomic_ptr_set. */
static ALWAYS_INLINE void *atomic_ptr_set(atomic_ptr_t *target, void *value)
{
	return (void *) atomic_set((atomic_t *) target, (atomic_val_t) value);
}

/** Implementation of @ref atomic_clear. */
static ALWAYS_INLINE atomic_val_t atomic_clear(atomic_t *target)
{
	return atomic_set(target, 0);
}

/** Implementation of @ref atomic_ptr_clear. */
static ALWAYS_INLINE void *atomic_ptr_clear(atomic_ptr_t *target)
{
	return (void *) atomic_set((atomic_t *) target, 0);
}

#endif /* ZEPHYR_INCLUDE_ATOMIC_XTENSA_H_ */