/Zephyr-latest/kernel/include/

kthread.h
 38 void z_thread_monitor_exit(struct k_thread *thread);
 46 static inline void thread_schedule_new(struct k_thread *thread, k_timeout_t delay)  in thread_schedule_new()
 60 static inline int thread_is_preemptible(struct k_thread *thread)  in thread_is_preemptible()
 67 static inline int thread_is_metairq(struct k_thread *thread)  in thread_is_metairq()
 79 static inline bool is_thread_dummy(struct k_thread *thread)  in is_thread_dummy()
 86 static inline bool z_is_thread_suspended(struct k_thread *thread)  in z_is_thread_suspended()
 91 static inline bool z_is_thread_pending(struct k_thread *thread)  in z_is_thread_pending()
 96 static inline bool z_is_thread_prevented_from_running(struct k_thread *thread)  in z_is_thread_prevented_from_running()
 104 static inline bool z_is_thread_timeout_active(struct k_thread *thread)  in z_is_thread_timeout_active()
 109 static inline bool z_is_thread_ready(struct k_thread *thread)  in z_is_thread_ready()
 [all …]
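These helpers are kernel-internal, but the pattern they follow is simple: each predicate tests one or more flag bits in the thread's base state word. A minimal sketch of that pattern, assuming the kernel's `_THREAD_SUSPENDED` flag and the `base.thread_state` field; the authoritative definitions live in kthread.h and kernel_structs.h:

```c
/* Sketch only: mirrors the style of the predicates listed above.
 * _THREAD_SUSPENDED and base.thread_state are assumed from the kernel's
 * internal headers; see kernel_structs.h for the real state flags. */
static inline bool z_is_thread_suspended_sketch(struct k_thread *thread)
{
	return (thread->base.thread_state & _THREAD_SUSPENDED) != 0U;
}
```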
 
ksched.h
 49 extern struct k_thread _thread_dummy;
 52 void z_move_thread_to_end_of_prio_q(struct k_thread *thread);
 53 void z_unpend_thread_no_timeout(struct k_thread *thread);
 54 struct k_thread *z_unpend1_no_timeout(_wait_q_t *wait_q);
 57 void z_pend_thread(struct k_thread *thread, _wait_q_t *wait_q,
 61 void z_unpend_thread(struct k_thread *thread);
 63 bool z_thread_prio_set(struct k_thread *thread, int prio);
 67 void z_reset_time_slice(struct k_thread *curr);
 69 void z_sched_start(struct k_thread *thread);
 70 void z_ready_thread(struct k_thread *thread);
 [all …]
 
kernel_arch_interface.h
 76 void arch_new_thread(struct k_thread *thread, k_thread_stack_t *stack,
 160 arch_thread_return_value_set(struct k_thread *thread, unsigned int value);
 174 void arch_switch_to_main_thread(struct k_thread *main_thread, char *stack_ptr,
 192 int arch_float_disable(struct k_thread *thread);
 212 int arch_float_enable(struct k_thread *thread, unsigned int options);
 239 int arch_thread_priv_stack_space_get(const struct k_thread *thread, size_t *stack_size,
 605 int arch_thread_name_set(struct k_thread *thread, const char *str);
 650 void arch_coredump_priv_stack_dump(struct k_thread *thread);
 672 size_t arch_tls_stack_setup(struct k_thread *new_thread, char *stack_ptr);
 
priority_q.h
 73 static ALWAYS_INLINE int32_t z_sched_prio_cmp(struct k_thread *thread_1, struct k_thread *thread_2)  in z_sched_prio_cmp()
 106 static ALWAYS_INLINE void z_priq_simple_add(sys_dlist_t *pq, struct k_thread *thread)  in z_priq_simple_add()
 108 	struct k_thread *t;  in z_priq_simple_add()
 120 static ALWAYS_INLINE void z_priq_simple_remove(sys_dlist_t *pq, struct k_thread *thread)  in z_priq_simple_remove()
 136 	struct k_thread *t;  in z_priq_simple_yield()
 145 		t = CONTAINER_OF(n, struct k_thread, base.qnode_dlist);  in z_priq_simple_yield()
 158 static ALWAYS_INLINE struct k_thread *z_priq_simple_best(sys_dlist_t *pq)  in z_priq_simple_best()
 160 	struct k_thread *thread = NULL;  in z_priq_simple_best()
 164 		thread = CONTAINER_OF(n, struct k_thread, base.qnode_dlist);  in z_priq_simple_best()
 170 static ALWAYS_INLINE struct k_thread *z_priq_simple_mask_best(sys_dlist_t *pq)  in z_priq_simple_mask_best()
 [all …]
 
kernel_internal.h
 73 extern char *z_setup_new_thread(struct k_thread *new_thread,
 119 arch_thread_return_value_set(struct k_thread *thread, unsigned int value)  in arch_thread_return_value_set()
 126 z_thread_return_value_set_with_data(struct k_thread *thread,  in z_thread_return_value_set_with_data()
 147 extern struct k_thread z_main_thread;
 151 extern struct k_thread z_idle_threads[CONFIG_MP_MAX_NUM_CPUS];
 168 void z_mem_domain_init_thread(struct k_thread *thread);
 171 void z_mem_domain_exit_thread(struct k_thread *thread);
 295 void k_thread_abort_cleanup(struct k_thread *thread);
 307 void k_thread_abort_cleanup_check_reuse(struct k_thread *thread);
 
wait_q.h
 36 static inline struct k_thread *z_waitq_head(_wait_q_t *w)  in z_waitq_head()
 38 	return (struct k_thread *)rb_get_min(&w->waitq.tree);  in z_waitq_head()
 52 static inline struct k_thread *z_waitq_head(_wait_q_t *w)
 54 	return (struct k_thread *)sys_dlist_peek_head(&w->waitq);
 
/Zephyr-latest/kernel/

thread_monitor.c
 15 void z_thread_monitor_exit(struct k_thread *thread)  in z_thread_monitor_exit()
 22 		struct k_thread *prev_thread;  in z_thread_monitor_exit()
 40 	struct k_thread *thread;  in k_thread_foreach()
 53 	SYS_PORT_TRACING_FUNC_ENTER(k_thread, foreach);  in k_thread_foreach()
 59 	SYS_PORT_TRACING_FUNC_EXIT(k_thread, foreach);  in k_thread_foreach()
 66 	struct k_thread *thread;  in k_thread_foreach_unlocked()
 73 	SYS_PORT_TRACING_FUNC_ENTER(k_thread, foreach_unlocked);  in k_thread_foreach_unlocked()
 81 	SYS_PORT_TRACING_FUNC_EXIT(k_thread, foreach_unlocked);  in k_thread_foreach_unlocked()
 91 	struct k_thread *thread;  in k_thread_foreach_filter_by_cpu()
 105 	SYS_PORT_TRACING_FUNC_ENTER(k_thread, foreach);  in k_thread_foreach_filter_by_cpu()
 [all …]
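Applications normally reach this machinery through the public k_thread_foreach()/k_thread_foreach_unlocked() calls (available with CONFIG_THREAD_MONITOR). A small usage sketch; dump_thread() and the printk output are illustrative, and thread names require CONFIG_THREAD_NAME:

```c
#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

/* Invoked once per thread known to the kernel's thread monitor list. */
static void dump_thread(const struct k_thread *thread, void *user_data)
{
	int *count = user_data;
	const char *name = k_thread_name_get((k_tid_t)thread);

	printk("thread %p name=%s\n", (void *)thread, name ? name : "?");
	(*count)++;
}

void dump_all_threads(void)
{
	int count = 0;

	k_thread_foreach(dump_thread, &count);
	printk("%d threads\n", count);
}
```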
 
sched.c
 28 extern struct k_thread *pending_current;
 36 __incoherent struct k_thread _thread_dummy;
 39 static ALWAYS_INLINE void halt_thread(struct k_thread *thread, uint8_t new_state);
 40 static void add_to_waitq_locked(struct k_thread *thread, _wait_q_t *wait_q);
 48 static ALWAYS_INLINE void *thread_runq(struct k_thread *thread)  in thread_runq()
 77 static ALWAYS_INLINE void runq_add(struct k_thread *thread)  in runq_add()
 84 static ALWAYS_INLINE void runq_remove(struct k_thread *thread)  in runq_remove()
 96 static ALWAYS_INLINE struct k_thread *runq_best(void)  in runq_best()
 104 static inline bool should_queue_thread(struct k_thread *thread)  in should_queue_thread()
 109 static ALWAYS_INLINE void queue_thread(struct k_thread *thread)  in queue_thread()
 [all …]
 
float.c
 11 int z_impl_k_float_disable(struct k_thread *thread)  in z_impl_k_float_disable()
 21 int z_impl_k_float_enable(struct k_thread *thread, unsigned int options)  in z_impl_k_float_enable()
 33 static inline int z_vrfy_k_float_disable(struct k_thread *thread)  in z_vrfy_k_float_disable()
 40 static inline int z_vrfy_k_float_enable(struct k_thread *thread, unsigned int options)  in z_vrfy_k_float_enable()
 
priority_queues.c
 14 	struct k_thread *thread_a, *thread_b;  in z_priq_rb_lessthan()
 17 	thread_a = CONTAINER_OF(a, struct k_thread, base.qnode_rb);  in z_priq_rb_lessthan()
 18 	thread_b = CONTAINER_OF(b, struct k_thread, base.qnode_rb);  in z_priq_rb_lessthan()
 
timeslicing.c
 22 struct k_thread *pending_current;
 25 static inline int slice_time(struct k_thread *thread)  in slice_time()
 39 bool thread_is_sliceable(struct k_thread *thread)  in thread_is_sliceable()
 68 void z_reset_time_slice(struct k_thread *thread)  in z_reset_time_slice()
 90 void k_thread_time_slice_set(struct k_thread *thread, int32_t thread_slice_ticks,  in k_thread_time_slice_set()
 106 	struct k_thread *curr = _current;  in z_time_slice()
 
/Zephyr-latest/arch/riscv/include/

pmp.h
 11 void z_riscv_pmp_stackguard_prepare(struct k_thread *thread);
 12 void z_riscv_pmp_stackguard_enable(struct k_thread *thread);
 14 void z_riscv_pmp_usermode_init(struct k_thread *thread);
 15 void z_riscv_pmp_usermode_prepare(struct k_thread *thread);
 16 void z_riscv_pmp_usermode_enable(struct k_thread *thread);
 
/Zephyr-latest/subsys/debug/

thread_info.c
 48 	[THREAD_INFO_OFFSET_T_ENTRY] = offsetof(struct k_thread, entry),
 49 	[THREAD_INFO_OFFSET_T_NEXT_THREAD] = offsetof(struct k_thread,
 58 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 61 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 64 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 68 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 71 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 75 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 78 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 81 	[THREAD_INFO_OFFSET_T_STACK_PTR] = offsetof(struct k_thread,
 [all …]
 
/Zephyr-latest/arch/sparc/include/

kernel_arch_func.h
 34 void z_sparc_context_switch(struct k_thread *newt, struct k_thread *oldt);
 43 	struct k_thread *newt = switch_to;  in arch_switch()
 44 	struct k_thread *oldt = CONTAINER_OF(switched_from, struct k_thread,  in arch_switch()
 
/Zephyr-latest/arch/arm64/include/

kernel_arch_func.h
 43 	extern void z_arm64_context_switch(struct k_thread *new,  in arch_switch()
 44 					   struct k_thread *old);  in arch_switch()
 45 	struct k_thread *new = switch_to;  in arch_switch()
 46 	struct k_thread *old = CONTAINER_OF(switched_from, struct k_thread,  in arch_switch()
 
/Zephyr-latest/arch/arm/include/cortex_a_r/

kernel_arch_func.h
 56 arch_thread_return_value_set(struct k_thread *thread, unsigned int value)  in arch_thread_return_value_set()
 65 	extern void z_arm_context_switch(struct k_thread *new,  in arch_switch()
 66 					struct k_thread *old);  in arch_switch()
 68 	struct k_thread *new = switch_to;  in arch_switch()
 69 	struct k_thread *old = CONTAINER_OF(switched_from, struct k_thread,  in arch_switch()
 
/Zephyr-latest/arch/x86/core/ia32/

float.c
 131 static void FpCtxSave(struct k_thread *thread)  in FpCtxSave()
 148 static inline void FpCtxInit(struct k_thread *thread)  in FpCtxInit()
 167 void z_float_enable(struct k_thread *thread, unsigned int options)  in z_float_enable()
 170 	struct k_thread *fp_owner;  in z_float_enable()
 269 int z_float_disable(struct k_thread *thread)  in z_float_disable()
 283 		_kernel.cpus[0].arch.fpu_owner = (struct k_thread *)0;  in z_float_disable()
 286 			_kernel.cpus[0].arch.fpu_owner = (struct k_thread *)0;  in z_float_disable()
 
/Zephyr-latest/subsys/tracing/test/

tracing_string_format_test.c
 16 	struct k_thread *thread;  in sys_trace_k_thread_switched_out()
 24 	struct k_thread *thread;  in sys_trace_k_thread_switched_in()
 30 void sys_trace_k_thread_priority_set(struct k_thread *thread)  in sys_trace_k_thread_priority_set()
 35 void sys_trace_k_thread_create(struct k_thread *thread, size_t stack_size,  in sys_trace_k_thread_create()
 41 void sys_trace_k_thread_start(struct k_thread *thread)  in sys_trace_k_thread_start()
 46 void sys_trace_k_thread_abort(struct k_thread *thread)  in sys_trace_k_thread_abort()
 51 void sys_trace_k_thread_suspend(struct k_thread *thread)  in sys_trace_k_thread_suspend()
 56 void sys_trace_k_thread_resume(struct k_thread *thread)  in sys_trace_k_thread_resume()
 61 void sys_trace_k_thread_resume_exit(struct k_thread *thread)  in sys_trace_k_thread_resume_exit()
 66 void sys_trace_k_thread_ready(struct k_thread *thread)  in sys_trace_k_thread_ready()
 [all …]
 
/Zephyr-latest/include/zephyr/arch/arm64/

mm.h
 28 struct k_thread;
 29 void z_arm64_thread_mem_domains_init(struct k_thread *thread);
 30 void z_arm64_swap_mem_domains(struct k_thread *thread);
 
/Zephyr-latest/subsys/tracing/user/

tracing_user.c
 13 void __weak sys_trace_thread_create_user(struct k_thread *thread) {}  in sys_trace_thread_create_user()
 14 void __weak sys_trace_thread_abort_user(struct k_thread *thread) {}  in sys_trace_thread_abort_user()
 15 void __weak sys_trace_thread_suspend_user(struct k_thread *thread) {}  in sys_trace_thread_suspend_user()
 16 void __weak sys_trace_thread_resume_user(struct k_thread *thread) {}  in sys_trace_thread_resume_user()
 17 void __weak sys_trace_thread_name_set_user(struct k_thread *thread) {}  in sys_trace_thread_name_set_user()
 20 void __weak sys_trace_thread_info_user(struct k_thread *thread) {}  in sys_trace_thread_info_user()
 21 void __weak sys_trace_thread_sched_ready_user(struct k_thread *thread) {}  in sys_trace_thread_sched_ready_user()
 22 void __weak sys_trace_thread_pend_user(struct k_thread *thread) {}  in sys_trace_thread_pend_user()
 23 void __weak sys_trace_thread_priority_set_user(struct k_thread *thread, int prio) {}  in sys_trace_thread_priority_set_user()
 79 void sys_trace_thread_create(struct k_thread *thread)  in sys_trace_thread_create()
 [all …]
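Because these stubs are declared __weak, an application built with CONFIG_TRACING_USER can override only the hooks it needs. A minimal sketch of such an override; the printk output is illustrative, and the header that declares the hook prototypes may differ:

```c
#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

/* Strong definition: replaces the __weak stub in tracing_user.c. */
void sys_trace_thread_create_user(struct k_thread *thread)
{
	printk("created thread %p\n", (void *)thread);
}
```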
 
/Zephyr-latest/arch/posix/core/

thread.c
 32 void arch_new_thread(struct k_thread *thread, k_thread_stack_t *stack,  in arch_new_thread()
 58 int arch_thread_name_set(struct k_thread *thread, const char *str)  in arch_thread_name_set()
 100 int arch_float_disable(struct k_thread *thread)  in arch_float_disable()
 108 int arch_float_enable(struct k_thread *thread, unsigned int options)  in arch_float_enable()
 124 	SYS_PORT_TRACING_OBJ_FUNC_ENTER(k_thread, abort, thread);  in z_impl_k_thread_abort()
 168 	SYS_PORT_TRACING_OBJ_FUNC_EXIT(k_thread, abort, thread);  in z_impl_k_thread_abort()
 
/Zephyr-latest/subsys/shell/modules/kernel_service/thread/

thread.c
 16 	const struct k_thread *const thread;
 20 static void thread_valid_cb(const struct k_thread *cthread, void *user_data)  in thread_valid_cb()
 29 bool z_thread_is_valid(const struct k_thread *thread)  in z_thread_is_valid()
 
/Zephyr-latest/include/zephyr/kernel/

thread.h
 43 struct k_thread;
 54 	struct k_thread *thread;         /* Back pointer to pended thread */
 261 struct k_thread {  struct
 279 	struct k_thread *next_event_link;  argument
 293 	struct k_thread *next_thread;  argument
 381 typedef struct k_thread _thread_t;  argument
 382 typedef struct k_thread *k_tid_t;
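Since k_tid_t is simply a pointer to struct k_thread, the object passed to k_thread_create() is the same one every other thread API takes. A standard creation sketch; stack size, priority, names, and the entry loop are illustrative:

```c
#include <zephyr/kernel.h>

#define MY_STACK_SIZE 1024
#define MY_PRIORITY   5

K_THREAD_STACK_DEFINE(my_stack, MY_STACK_SIZE);
static struct k_thread my_thread;   /* the struct k_thread defined above */

static void my_entry(void *p1, void *p2, void *p3)
{
	ARG_UNUSED(p1); ARG_UNUSED(p2); ARG_UNUSED(p3);

	while (1) {
		k_sleep(K_MSEC(100));
	}
}

void start_worker(void)
{
	k_tid_t tid = k_thread_create(&my_thread, my_stack,
				      K_THREAD_STACK_SIZEOF(my_stack),
				      my_entry, NULL, NULL, NULL,
				      MY_PRIORITY, 0, K_NO_WAIT);

	k_thread_name_set(tid, "worker");
}
```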
 
/Zephyr-latest/include/zephyr/

kernel_structs.h
 133 	struct k_thread *cache;
 155 	struct k_thread *current;
 158 	struct k_thread *idle_thread;
 167 	struct k_thread *metairq_preempted;
 221 	struct k_thread *threads; /* singly linked list of ALL threads */
 252 __attribute_const__ struct k_thread *z_smp_current_get(void);
 308 typedef void (*k_thread_timeslice_fn_t)(struct k_thread *thread, void *data);
 
/Zephyr-latest/include/zephyr/sys/

kobject.h
 19 struct k_thread;
 93 				     struct k_thread *thread);
 105 void k_object_access_revoke(const void *object, struct k_thread *thread);
 158 						struct k_thread *thread)  in z_impl_k_object_access_grant()
 168 					  struct k_thread *thread)  in k_object_access_revoke()
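k_object_access_grant()/k_object_access_revoke() control which user-mode threads may reference a given kernel object when CONFIG_USERSPACE is enabled. A usage sketch; the semaphore and function names are illustrative:

```c
#include <zephyr/kernel.h>

K_SEM_DEFINE(shared_sem, 0, 1);

/* Let an already-created user thread take/give shared_sem, then revoke
 * the permission again once it is no longer needed. */
void share_sem_with(struct k_thread *user_thread)
{
	k_object_access_grant(&shared_sem, user_thread);

	/* ... user_thread may now call k_sem_take()/k_sem_give() ... */

	k_object_access_revoke(&shared_sem, user_thread);
}
```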
 