Lines Matching refs:thread

52 struct k_thread *thread; in k_thread_foreach() local
67 for (thread = _kernel.threads; thread; thread = thread->next_thread) { in k_thread_foreach()
68 user_cb(thread, user_data); in k_thread_foreach()
80 struct k_thread *thread; in k_thread_foreach_unlocked() local
89 for (thread = _kernel.threads; thread; thread = thread->next_thread) { in k_thread_foreach_unlocked()
91 user_cb(thread, user_data); in k_thread_foreach_unlocked()
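The two iterators above walk the kernel's thread list and invoke the callback once per thread; k_thread_foreach() keeps the list locked for the whole walk (so the callback must not block), while the _unlocked variant drops the lock around each callback. A minimal usage sketch, assuming CONFIG_THREAD_MONITOR (and CONFIG_THREAD_NAME for readable names) and the <zephyr/kernel.h> include path of recent trees; dump_thread and list_threads are hypothetical names:

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

/* Callback invoked once per thread; user_data carries a running count. */
static void dump_thread(const struct k_thread *thread, void *user_data)
{
	int *count = user_data;
	/* The name getter takes a non-const pointer, hence the cast. */
	const char *name = k_thread_name_get((struct k_thread *)thread);

	printk("thread %p: %s\n", (void *)thread,
	       (name != NULL) ? name : "<unnamed>");
	(*count)++;
}

void list_threads(void)
{
	int count = 0;

	k_thread_foreach(dump_thread, &count);
	printk("%d threads\n", count);
}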
168 void z_thread_monitor_exit(struct k_thread *thread) in z_thread_monitor_exit() argument
172 if (thread == _kernel.threads) { in z_thread_monitor_exit()
179 (thread != prev_thread->next_thread)) { in z_thread_monitor_exit()
183 prev_thread->next_thread = thread->next_thread; in z_thread_monitor_exit()
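z_thread_monitor_exit() removes a dying thread from the singly linked monitor list headed by _kernel.threads. A simplified paraphrase of that unlink step, shown without the locking the real code takes and only meaningful inside the kernel with CONFIG_THREAD_MONITOR enabled:

static void monitor_unlink(struct k_thread *thread)
{
	if (thread == _kernel.threads) {
		/* Removing the list head: advance the head pointer. */
		_kernel.threads = thread->next_thread;
	} else {
		struct k_thread *prev = _kernel.threads;

		/* Walk until prev is the node directly before "thread". */
		while ((prev != NULL) && (thread != prev->next_thread)) {
			prev = prev->next_thread;
		}
		if (prev != NULL) {
			prev->next_thread = thread->next_thread;
		}
	}
}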
191 int z_impl_k_thread_name_set(struct k_thread *thread, const char *value) in z_impl_k_thread_name_set() argument
194 if (thread == NULL) { in z_impl_k_thread_name_set()
195 thread = _current; in z_impl_k_thread_name_set()
198 strncpy(thread->name, value, CONFIG_THREAD_MAX_NAME_LEN); in z_impl_k_thread_name_set()
199 thread->name[CONFIG_THREAD_MAX_NAME_LEN - 1] = '\0'; in z_impl_k_thread_name_set()
201 SYS_PORT_TRACING_OBJ_FUNC(k_thread, name_set, thread, 0); in z_impl_k_thread_name_set()
205 ARG_UNUSED(thread); in z_impl_k_thread_name_set()
208 SYS_PORT_TRACING_OBJ_FUNC(k_thread, name_set, thread, -ENOSYS); in z_impl_k_thread_name_set()
215 static inline int z_vrfy_k_thread_name_set(struct k_thread *thread, const char *str) in z_vrfy_k_thread_name_set() argument
220 if (thread != NULL) { in z_vrfy_k_thread_name_set()
221 if (Z_SYSCALL_OBJ(thread, K_OBJ_THREAD) != 0) { in z_vrfy_k_thread_name_set()
234 return z_impl_k_thread_name_set(thread, name); in z_vrfy_k_thread_name_set()
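k_thread_name_set() copies the string into the thread object, truncating it to CONFIG_THREAD_MAX_NAME_LEN, and accepts NULL to mean the calling thread; without CONFIG_THREAD_NAME it returns -ENOSYS. A minimal usage sketch (name_self is a hypothetical function):

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void name_self(void)
{
	/* NULL selects the calling thread, per the implementation above. */
	int ret = k_thread_name_set(NULL, "worker");

	if (ret == 0) {
		printk("renamed to %s\n", k_thread_name_get(k_current_get()));
	}
}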
242 const char *k_thread_name_get(struct k_thread *thread) in k_thread_name_get() argument
245 return (const char *)thread->name; in k_thread_name_get()
247 ARG_UNUSED(thread); in k_thread_name_get()
252 int z_impl_k_thread_name_copy(k_tid_t thread, char *buf, size_t size) in z_impl_k_thread_name_copy() argument
255 strncpy(buf, thread->name, size); in z_impl_k_thread_name_copy()
258 ARG_UNUSED(thread); in z_impl_k_thread_name_copy()
294 static inline int z_vrfy_k_thread_name_copy(k_tid_t thread, in z_vrfy_k_thread_name_copy() argument
299 struct z_object *ko = z_object_find(thread); in z_vrfy_k_thread_name_copy()
304 if (thread == NULL || ko->type != K_OBJ_THREAD || in z_vrfy_k_thread_name_copy()
311 len = strlen(thread->name); in z_vrfy_k_thread_name_copy()
316 return z_user_to_copy((void *)buf, thread->name, len + 1); in z_vrfy_k_thread_name_copy()
318 ARG_UNUSED(thread); in z_vrfy_k_thread_name_copy()
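From user mode the copy variant is the safe way to read another thread's name: the verifier validates the thread object and the destination buffer before copying out at most size bytes. A sketch, where worker_tid is a hypothetical thread id and CONFIG_THREAD_NAME / CONFIG_THREAD_MAX_NAME_LEN are assumed to be configured:

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void show_name(k_tid_t worker_tid)
{
	char name[CONFIG_THREAD_MAX_NAME_LEN];

	if (k_thread_name_copy(worker_tid, name, sizeof(name)) == 0) {
		printk("worker thread name: %s\n", name);
	}
}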
362 void z_impl_k_thread_start(struct k_thread *thread) in z_impl_k_thread_start() argument
364 SYS_PORT_TRACING_OBJ_FUNC(k_thread, start, thread); in z_impl_k_thread_start()
366 z_sched_start(thread); in z_impl_k_thread_start()
370 static inline void z_vrfy_k_thread_start(struct k_thread *thread) in z_vrfy_k_thread_start() argument
372 Z_OOPS(Z_SYSCALL_OBJ(thread, K_OBJ_THREAD)); in z_vrfy_k_thread_start()
373 return z_impl_k_thread_start(thread); in z_vrfy_k_thread_start()
380 static void schedule_new_thread(struct k_thread *thread, k_timeout_t delay) in schedule_new_thread() argument
384 k_thread_start(thread); in schedule_new_thread()
386 z_add_thread_timeout(thread, delay); in schedule_new_thread()
390 k_thread_start(thread); in schedule_new_thread()
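schedule_new_thread() is where the k_thread_create() delay parameter is acted on: K_NO_WAIT starts the thread immediately, any other timeout arms a thread timeout, and the K_FOREVER case is filtered out earlier in the creation path so the thread stays dormant until an explicit k_thread_start(). A sketch of that K_FOREVER + k_thread_start() pattern; the stack, entry function and priority below are hypothetical:

#include <zephyr/kernel.h>

K_THREAD_STACK_DEFINE(worker_stack, 1024);
static struct k_thread worker;

static void worker_entry(void *p1, void *p2, void *p3)
{
	/* ... thread body ... */
}

void create_then_start(void)
{
	k_tid_t tid = k_thread_create(&worker, worker_stack,
				      K_THREAD_STACK_SIZEOF(worker_stack),
				      worker_entry, NULL, NULL, NULL,
				      K_PRIO_PREEMPT(5), 0, K_FOREVER);

	/* ... configure the thread (name, permissions, ...) ... */

	k_thread_start(tid); /* now it becomes runnable */
}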
717 pos->thread); in grant_static_access()
858 int z_impl_k_float_disable(struct k_thread *thread) in z_impl_k_float_disable() argument
861 return arch_float_disable(thread); in z_impl_k_float_disable()
867 int z_impl_k_float_enable(struct k_thread *thread, unsigned int options) in z_impl_k_float_enable() argument
870 return arch_float_enable(thread, options); in z_impl_k_float_enable()
877 static inline int z_vrfy_k_float_disable(struct k_thread *thread) in z_vrfy_k_float_disable() argument
879 Z_OOPS(Z_SYSCALL_OBJ(thread, K_OBJ_THREAD)); in z_vrfy_k_float_disable()
880 return z_impl_k_float_disable(thread); in z_vrfy_k_float_disable()
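k_float_enable() and k_float_disable() simply forward to the architecture hooks, so they only succeed where arch_float_enable()/arch_float_disable() are implemented; otherwise they return an error such as -ENOTSUP or -ENOSYS. A usage sketch for the current thread; fpu_section is a hypothetical function and K_FP_REGS is the usual option on architectures that distinguish FP register sets:

#include <zephyr/kernel.h>

void fpu_section(void)
{
	/* Tag the calling thread as an FPU user so its FP registers are
	 * saved and restored across context switches.
	 */
	int ret = k_float_enable(k_current_get(), K_FP_REGS);

	if (ret == 0) {
		/* ... floating point work ... */

		/* Done with FP: drop the context to cheapen future switches. */
		(void)k_float_disable(k_current_get());
	}
}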
904 int z_impl_k_thread_stack_space_get(const struct k_thread *thread, in z_impl_k_thread_stack_space_get() argument
907 const uint8_t *start = (uint8_t *)thread->stack_info.start; in z_impl_k_thread_stack_space_get()
908 size_t size = thread->stack_info.size; in z_impl_k_thread_stack_space_get()
959 int z_vrfy_k_thread_stack_space_get(const struct k_thread *thread, in z_vrfy_k_thread_stack_space_get() argument
965 ret = Z_SYSCALL_OBJ(thread, K_OBJ_THREAD); in z_vrfy_k_thread_stack_space_get()
970 ret = z_impl_k_thread_stack_space_get(thread, &unused); in z_vrfy_k_thread_stack_space_get()
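The stack-space query scans the thread's stack for bytes still holding the initialization pattern, so it needs CONFIG_INIT_STACKS and CONFIG_THREAD_STACK_INFO. A sketch that reports the current thread's never-touched headroom (report_stack_headroom is a hypothetical name):

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void report_stack_headroom(void)
{
	size_t unused = 0;

	if (k_thread_stack_space_get(k_current_get(), &unused) == 0) {
		printk("never-used stack: %u bytes\n", (unsigned int)unused);
	}
}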
1012 struct k_thread *thread; in z_thread_mark_switched_in() local
1014 thread = z_current_get(); in z_thread_mark_switched_in()
1016 thread->rt_stats.last_switched_in = timing_counter_get(); in z_thread_mark_switched_in()
1018 thread->rt_stats.last_switched_in = k_cycle_get_32(); in z_thread_mark_switched_in()
1034 struct k_thread *thread; in z_thread_mark_switched_out() local
1036 thread = z_current_get(); in z_thread_mark_switched_out()
1038 if (unlikely(thread->rt_stats.last_switched_in == 0)) { in z_thread_mark_switched_out()
1043 if (unlikely(thread->base.thread_state == _THREAD_DUMMY)) { in z_thread_mark_switched_out()
1050 diff = timing_cycles_get(&thread->rt_stats.last_switched_in, &now); in z_thread_mark_switched_out()
1053 diff = (uint64_t)(now - thread->rt_stats.last_switched_in); in z_thread_mark_switched_out()
1054 thread->rt_stats.last_switched_in = 0; in z_thread_mark_switched_out()
1057 thread->rt_stats.stats.execution_cycles += diff; in z_thread_mark_switched_out()
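Together these two hooks implement per-thread cycle accounting: switched-in stamps the cycle counter, switched-out adds the elapsed cycles into rt_stats.stats.execution_cycles and clears the stamp. A simplified paraphrase of the non-timing-functions path, using kernel-internal fields that exist only with CONFIG_THREAD_RUNTIME_STATS:

/* Stamp the counter when "thread" starts running. */
static void mark_in(struct k_thread *thread)
{
	thread->rt_stats.last_switched_in = k_cycle_get_32();
}

/* Credit the elapsed cycles when "thread" stops running. */
static void mark_out(struct k_thread *thread)
{
	uint32_t now = k_cycle_get_32();
	uint64_t diff = (uint64_t)(now - thread->rt_stats.last_switched_in);

	thread->rt_stats.last_switched_in = 0;
	thread->rt_stats.stats.execution_cycles += diff;
}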
1068 int k_thread_runtime_stats_get(k_tid_t thread, in k_thread_runtime_stats_get() argument
1071 if ((thread == NULL) || (stats == NULL)) { in k_thread_runtime_stats_get()
1075 (void)memcpy(stats, &thread->rt_stats.stats, in k_thread_runtime_stats_get()
1076 sizeof(thread->rt_stats.stats)); in k_thread_runtime_stats_get()
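The accumulated counters can then be read with k_thread_runtime_stats_get(), which validates its arguments and memcpy()s the stats out. A usage sketch, with tid a hypothetical thread id and CONFIG_THREAD_RUNTIME_STATS assumed:

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void report_runtime(k_tid_t tid)
{
	k_thread_runtime_stats_t stats;

	if (k_thread_runtime_stats_get(tid, &stats) == 0) {
		/* Truncated to 32 bits here purely for easy printing. */
		printk("execution cycles: %u\n",
		       (uint32_t)stats.execution_cycles);
	}
}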