Lines matching full:target in kernel/atomic_c.c: Zephyr's pure-C fallback
implementation of the atomic operations API, used on CPUs without native
atomic instructions.
/* Global spinlock guarding the fallback atomic ops (not performance
 * sensitive; this code only runs on CPUs without native atomics).
 */
static struct k_spinlock lock;

/* User-mode syscall verification handlers: each checks that the caller
 * may write <target>, then forwards to the z_impl_ implementation.
 */
#define ATOMIC_SYSCALL_HANDLER_TARGET(name) \
	static inline atomic_val_t z_vrfy_##name(atomic_t *target) \
	{ \
		K_OOPS(K_SYSCALL_MEMORY_WRITE(target, sizeof(atomic_t))); \
		return z_impl_##name((atomic_t *)target); \
	}

#define ATOMIC_SYSCALL_HANDLER_TARGET_VALUE(name) \
	static inline atomic_val_t z_vrfy_##name(atomic_t *target, \
						 atomic_val_t value) \
	{ \
		K_OOPS(K_SYSCALL_MEMORY_WRITE(target, sizeof(atomic_t))); \
		return z_impl_##name((atomic_t *)target, value); \
	}
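These handlers are then instantiated once per exported operation. A sketch of
the invocation pattern, following this file's convention; the exact call
sites shown here are assumptions:

ATOMIC_SYSCALL_HANDLER_TARGET(atomic_inc);        /* one-argument ops (assumed) */
ATOMIC_SYSCALL_HANDLER_TARGET_VALUE(atomic_add);  /* (target, value) ops */
ATOMIC_SYSCALL_HANDLER_TARGET_VALUE(atomic_or);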
/**
 * @brief Atomic compare-and-set primitive
 *
 * This routine provides the compare-and-set operator. If the original value
 * at <target> equals <old_value>, then <new_value> is stored at <target> and
 * the function returns true. If the original value at <target> does not
 * equal <old_value>, then the store is not done and the function returns
 * false.
 *
 * The reading of the original value at <target>, the comparison, and the
 * write of the new value (if it occurs) all happen atomically with respect
 * to both interrupts and accesses of other processors to <target>.
 *
 * @param target address to be tested
 * @param old_value value to compare against
 * @param new_value value to store on a successful comparison
 *
 * @return true if <new_value> was written, false otherwise
 */
bool z_impl_atomic_cas(atomic_t *target, atomic_val_t old_value,
		       atomic_val_t new_value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	bool ret = false;

	if (*target == old_value) {
		*target = new_value;
		ret = true;
	}

	k_spin_unlock(&lock, key);
	return ret;
}

#ifdef CONFIG_USERSPACE
bool z_vrfy_atomic_cas(atomic_t *target, atomic_val_t old_value,
		       atomic_val_t new_value)
{
	K_OOPS(K_SYSCALL_MEMORY_WRITE(target, sizeof(atomic_t)));

	return z_impl_atomic_cas((atomic_t *)target, old_value, new_value);
}
#endif
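For context, a minimal usage sketch of the public atomic_cas() wrapper that
this z_impl_/z_vrfy_ pair backs; the flag and helper below are hypothetical,
not part of this file:

#include <zephyr/sys/atomic.h>

static atomic_t claimed = ATOMIC_INIT(0);

/* Hypothetical one-shot claim: only one compare-and-set can observe 0
 * and swap in 1, so exactly one caller gets true.
 */
static bool try_claim(void)
{
	return atomic_cas(&claimed, 0, 1);
}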
bool z_impl_atomic_ptr_cas(atomic_ptr_t *target, atomic_ptr_val_t old_value,
			   atomic_ptr_val_t new_value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	bool ret = false;

	if (*target == old_value) {
		*target = new_value;
		ret = true;
	}

	k_spin_unlock(&lock, key);
	return ret;
}

#ifdef CONFIG_USERSPACE
static inline bool z_vrfy_atomic_ptr_cas(atomic_ptr_t *target,
					 atomic_ptr_val_t old_value,
					 atomic_ptr_val_t new_value)
{
	K_OOPS(K_SYSCALL_MEMORY_WRITE(target, sizeof(atomic_ptr_t)));

	return z_impl_atomic_ptr_cas(target, old_value, new_value);
}
#endif
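A hedged sketch of one-time pointer publication built on the public
atomic_ptr_cas(); the names are hypothetical, and ATOMIC_PTR_INIT is assumed
to be available alongside ATOMIC_INIT:

#include <zephyr/sys/atomic.h>

static atomic_ptr_t instance = ATOMIC_PTR_INIT(NULL);

/* Hypothetical once-only publication: install obj only if no pointer
 * has been published yet; every caller ends up with the pointer that
 * won the race.
 */
static void *publish_once(void *obj)
{
	if (atomic_ptr_cas(&instance, NULL, obj)) {
		return obj;
	}
	return atomic_ptr_get(&instance);
}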
/**
 * @brief Atomic addition primitive
 *
 * This routine provides the atomic addition operator. The <value> is
 * atomically added to the value at <target>, placing the result at <target>,
 * and the old value from <target> is returned.
 *
 * @param target memory location to add to
 * @param value the value to add
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_add(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target += value;
	k_spin_unlock(&lock, key);
	return ret;
}
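Because the pre-addition value is returned, the public atomic_add() acts as a
fetch-and-add. A hypothetical ticket-counter sketch:

#include <zephyr/sys/atomic.h>

static atomic_t next_ticket = ATOMIC_INIT(0);

/* Hypothetical ticket dispenser: concurrent callers each receive a
 * unique number because the returned value precedes the addition.
 */
static atomic_val_t take_ticket(void)
{
	return atomic_add(&next_ticket, 1);
}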
/**
 * @brief Atomic subtraction primitive
 *
 * This routine provides the atomic subtraction operator. The <value> is
 * atomically subtracted from the value at <target>, placing the result at
 * <target>, and the old value from <target> is returned.
 *
 * @param target the memory location to subtract from
 * @param value the value to subtract
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_sub(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target -= value;
	k_spin_unlock(&lock, key);
	return ret;
}
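The returned pre-subtraction value lets a caller detect the transition to
zero. A hypothetical reference-count sketch using the public atomic_sub():

#include <zephyr/sys/atomic.h>

/* Hypothetical reference drop: a previous value of 1 means this call
 * released the last reference, so the caller should free the object.
 */
static bool ref_put(atomic_t *refcount)
{
	return atomic_sub(refcount, 1) == 1;
}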
/**
 * @brief Atomic get primitive
 *
 * This routine provides an atomic get primitive to atomically read a value
 * from <target>. It simply does an ordinary load. Note that <target> is
 * expected to be naturally aligned.
 *
 * @param target memory location to read from
 *
 * @return The value read from <target>
 */
atomic_val_t atomic_get(const atomic_t *target)
{
	return *target;
}

/* Pointer flavor of the atomic get; likewise a plain load. */
atomic_ptr_val_t atomic_ptr_get(const atomic_ptr_t *target)
{
	return *target;
}
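A hedged polling sketch using atomic_get(); the flag and the writer protocol
are hypothetical, and k_yield() assumes a Zephyr thread context:

#include <zephyr/kernel.h>
#include <zephyr/sys/atomic.h>

static atomic_t ready = ATOMIC_INIT(0);

/* Hypothetical busy-wait: an ordinary atomic load in a loop; a writer
 * elsewhere would perform atomic_set(&ready, 1).
 */
static void wait_until_ready(void)
{
	while (atomic_get(&ready) == 0) {
		k_yield();
	}
}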
/**
 * @brief Atomic get-and-set primitive
 *
 * This routine provides the atomic set operator. The <value> is atomically
 * written at <target> and the previous value at <target> is returned.
 *
 * @param target the memory location to write to
 * @param value the value to write
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_set(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target = value;
	k_spin_unlock(&lock, key);
	return ret;
}
atomic_ptr_val_t z_impl_atomic_ptr_set(atomic_ptr_t *target,
				       atomic_ptr_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_ptr_val_t ret = *target;

	*target = value;
	k_spin_unlock(&lock, key);
	return ret;
}

#ifdef CONFIG_USERSPACE
static inline atomic_ptr_val_t z_vrfy_atomic_ptr_set(atomic_ptr_t *target,
						     atomic_ptr_val_t value)
{
	K_OOPS(K_SYSCALL_MEMORY_WRITE(target, sizeof(atomic_ptr_t)));

	return z_impl_atomic_ptr_set(target, value);
}
#endif
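Since atomic_set() and atomic_ptr_set() behave as exchanges, the displaced
value is never silently lost. A hypothetical error-slot sketch:

#include <zephyr/sys/atomic.h>

static atomic_t last_error = ATOMIC_INIT(0);

/* Hypothetical error slot: store the new code and hand back whatever
 * code it displaced.
 */
static atomic_val_t swap_error(atomic_val_t code)
{
	return atomic_set(&last_error, code);
}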
/**
 * @brief Atomic bitwise inclusive OR primitive
 *
 * This routine provides the atomic bitwise inclusive OR operator. The <value>
 * is atomically bitwise OR'ed with the value at <target>, placing the result
 * at <target>, and the previous value at <target> is returned.
 *
 * @param target the memory location to be modified
 * @param value the value to OR
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_or(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target |= value;
	k_spin_unlock(&lock, key);
	return ret;
}
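A hedged event-bits sketch using the public atomic_or(); the returned
previous value reveals whether a bit was already set. Names are hypothetical;
BIT() is assumed from Zephyr's sys/util.h:

#include <zephyr/sys/atomic.h>
#include <zephyr/sys/util.h>

static atomic_t events = ATOMIC_INIT(0);

/* Hypothetical event post: OR in one bit and report whether it was
 * already pending before this call.
 */
static bool post_event(int bit_idx)
{
	return (atomic_or(&events, BIT(bit_idx)) & BIT(bit_idx)) != 0;
}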
/**
 * @brief Atomic bitwise exclusive OR (XOR) primitive
 *
 * This routine provides the atomic bitwise XOR operator. The <value>
 * is atomically bitwise XOR'ed with the value at <target>, placing the result
 * at <target>, and the previous value at <target> is returned.
 *
 * @param target the memory location to be modified
 * @param value the value to XOR
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_xor(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target ^= value;
	k_spin_unlock(&lock, key);
	return ret;
}
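XOR toggles bits, so the returned previous value tells whether a toggled bit
ended up set or clear. A hypothetical sketch:

#include <zephyr/sys/atomic.h>
#include <zephyr/sys/util.h>

static atomic_t led_state = ATOMIC_INIT(0);

/* Hypothetical toggle: returns true if bit idx is set after the toggle,
 * i.e. it was clear in the previous value atomic_xor() returned.
 */
static bool toggle_led(int idx)
{
	return (atomic_xor(&led_state, BIT(idx)) & BIT(idx)) == 0;
}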
/**
 * @brief Atomic bitwise AND primitive
 *
 * This routine provides the atomic bitwise AND operator. The <value> is
 * atomically bitwise AND'ed with the value at <target>, placing the result
 * at <target>, and the previous value at <target> is returned.
 *
 * @param target the memory location to be modified
 * @param value the value to AND
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_and(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target &= value;
	k_spin_unlock(&lock, key);
	return ret;
}
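AND'ing with the complement of a mask clears those bits, and the returned
previous value shows which of them were set, so a caller can atomically
consume pending bits. A hedged sketch:

#include <zephyr/sys/atomic.h>

/* Hypothetical helper: atomically clear the mask bits in *events and
 * return which of them were actually pending before the clear.
 */
static atomic_val_t consume_events(atomic_t *events, atomic_val_t mask)
{
	return atomic_and(events, ~mask) & mask;
}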
/**
 * @brief Atomic bitwise NAND primitive
 *
 * This routine provides the atomic bitwise NAND operator. The <value> is
 * atomically bitwise NAND'ed with the value at <target>, placing the result
 * at <target>, and the previous value at <target> is returned. That is,
 * <target> is atomically set to ~(<target> & <value>).
 *
 * @param target the memory location to be modified
 * @param value the value to NAND
 *
 * @return The previous value from <target>
 */
atomic_val_t z_impl_atomic_nand(atomic_t *target, atomic_val_t value)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	atomic_val_t ret = *target;

	*target = ~(*target & value);
	k_spin_unlock(&lock, key);
	return ret;
}
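NAND with an all-ones operand inverts the word, since ~(x & ~0) == ~x. A
small hedged sketch of the public atomic_nand(), assuming it is exported like
the other operations:

#include <zephyr/sys/atomic.h>

/* Hypothetical word inversion via NAND: the stored result becomes
 * ~*word, and the pre-NAND value is returned.
 */
static atomic_val_t invert_word(atomic_t *word)
{
	return atomic_nand(word, ~(atomic_val_t)0);
}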