| /Zephyr-4.3.0/include/zephyr/arch/rx/ |
| D | arch.h |
|    68  _kernel.cpus[0].nested++; \
|    74  _kernel.cpus[0].nested++; \
|    86  if (check_reschedule && _kernel.cpus[0].nested == 1) { \
|    87  if (_kernel.cpus->current->base.prio >= 0 || \
|    89  if (_kernel.ready_q.cache != _kernel.cpus->current) { \
|    94  _kernel.cpus[0].nested--; \
|   103  if (check_reschedule && _kernel.cpus[0].nested == 1) { \
|   104  if (_kernel.cpus->current->base.prio >= 0 || \
|   106  if (_kernel.ready_q.cache != _kernel.cpus->current) { \
|   111  _kernel.cpus[0].nested--; \
|   [all …]
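This RX entry shows the interrupt prologue/epilogue bookkeeping that recurs throughout this listing: `nested` counts ISR depth on CPU 0, and only the outermost ISR may reschedule, and only when the interrupted thread is preemptible and `_kernel.ready_q.cache` (the scheduler's cached pick for the next thread) no longer points at it. Below is a minimal C sketch of that pattern, assuming the kernel-internal definitions from include/zephyr/kernel_structs.h are in scope; `do_context_switch()` is a hypothetical stand-in for the arch-specific swap, and the second operand of the `||` on lines 87/104, truncated by this listing, is left out rather than guessed:

    extern void do_context_switch(void);   /* hypothetical arch hook */

    static inline void isr_enter(void)
    {
        _kernel.cpus[0].nested++;           /* one interrupt level deeper */
    }

    static inline void isr_exit(bool check_reschedule)
    {
        /* Reschedule only from the outermost ISR. */
        if (check_reschedule && _kernel.cpus[0].nested == 1) {
            /* Preemptible thread interrupted (prio >= 0; the listing
             * truncates a further || condition here) and the scheduler's
             * cached next thread differs from it: switch.
             */
            if (_kernel.cpus->current->base.prio >= 0 &&
                _kernel.ready_q.cache != _kernel.cpus->current) {
                do_context_switch();
            }
        }
        _kernel.cpus[0].nested--;
    }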
|
| /Zephyr-4.3.0/arch/posix/core/ |
| D | swap.c |
|    46  _kernel.ready_q.cache->callee_saved.thread_status;  in arch_swap()
|    53  z_current_thread_set(_kernel.ready_q.cache);  in arch_swap()
|    91  _kernel.ready_q.cache->callee_saved.thread_status;  in arch_switch_to_main_thread()
|    97  z_current_thread_set(_kernel.ready_q.cache);  in arch_switch_to_main_thread()
|   113  if (_kernel.idle) {  in posix_irq_check_idle_exit()
|   114  _kernel.idle = 0;  in posix_irq_check_idle_exit()
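Both functions in this file pick up the scheduler's decision from `_kernel.ready_q.cache` and install it as the current thread. A sketch of the core of that handoff; `run_thread()` is a hypothetical stand-in for the POSIX-arch CPU-thread transfer, while `z_current_thread_set()` and the `callee_saved.thread_status` field appear in the matches above:

    extern void run_thread(void *thread_status);   /* hypothetical */

    static void swap_to_next(void)
    {
        /* The next thread to run is whatever the scheduler cached. */
        struct k_thread *next = _kernel.ready_q.cache;

        z_current_thread_set(next);
        run_thread(next->callee_saved.thread_status);
    }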
|
| /Zephyr-4.3.0/kernel/ |
| D | init.c |
|    48  struct z_kernel _kernel;  variable
|   323  __ASSERT_NO_MSG(sys_cache_is_mem_coherent(&_kernel));  in bg_thread_main()
|   383  stack_size, idle, &_kernel.cpus[i],  in init_idle_thread()
|   396  _kernel.cpus[id].idle_thread = &z_idle_threads[id];  in z_init_cpu()
|   397  _kernel.cpus[id].id = id;  in z_init_cpu()
|   398  _kernel.cpus[id].irq_stack =  in z_init_cpu()
|   402  _kernel.cpus[id].usage = &_kernel.usage[id];  in z_init_cpu()
|   403  _kernel.cpus[id].usage->track_usage =  in z_init_cpu()
|   416  k_obj_core_init_and_link(K_OBJ_CORE(&_kernel.cpus[id]), &obj_type_cpu);  in z_init_cpu()
|   418  k_obj_core_stats_register(K_OBJ_CORE(&_kernel.cpus[id]),  in z_init_cpu()
|   [all …]
|
| D | idle.c |
|    54  _kernel.idle = z_get_next_timeout_expiry();  in idle()
|    71  if (k_is_pre_kernel() || !pm_system_suspend(_kernel.idle)) {  in idle()
|    89  if (_kernel.ready_q.cache != _current) {  in idle()
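idle() publishes the time until the next kernel timeout in `_kernel.idle` so the power-management subsystem can size its sleep state. A sketch of the loop implied by the three matches, with error handling and SMP details omitted; all APIs shown are either quoted above or the standard `k_cpu_idle()`:

    static void idle_sketch(void)
    {
        while (true) {
            /* Record the sleep budget for PM and ISR-exit code. */
            _kernel.idle = z_get_next_timeout_expiry();

            /* Offer the budget to PM; fall back to a plain
             * architecture idle if PM is unavailable or declines.
             */
            if (k_is_pre_kernel() || !pm_system_suspend(_kernel.idle)) {
                k_cpu_idle();
            }
        }
    }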
|
| D | thread_monitor.c |
|    19  if (thread == _kernel.threads) {  in z_thread_monitor_exit()
|    20  _kernel.threads = _kernel.threads->next_thread;  in z_thread_monitor_exit()
|    24  prev_thread = _kernel.threads;  in z_thread_monitor_exit()
|    54  for (thread = _kernel.threads; thread; thread = thread->next_thread) {  in thread_foreach_helper()
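The thread monitor keeps every thread on a singly linked list rooted at `_kernel.threads`; exit unlinks by special-casing the head and otherwise walking to the predecessor. A sketch of that unlink, following the head/predecessor logic quoted above (the real code additionally holds a lock while traversing):

    static void monitor_unlink_sketch(struct k_thread *thread)
    {
        struct k_thread *prev_thread;

        if (thread == _kernel.threads) {
            /* Removing the head: advance the root pointer. */
            _kernel.threads = _kernel.threads->next_thread;
        } else {
            /* Walk to the node whose next pointer is the victim. */
            prev_thread = _kernel.threads;
            while ((prev_thread != NULL) &&
                   (prev_thread->next_thread != thread)) {
                prev_thread = prev_thread->next_thread;
            }
            if (prev_thread != NULL) {
                prev_thread->next_thread = thread->next_thread;
            }
        }
    }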
|
| D | ipi.c |
|    23  atomic_or(&_kernel.pending_ipi, (atomic_val_t)ipi_mask);  in flag_ipi()
|    60  cpu_thread = _kernel.cpus[i].current;  in ipi_mask_create()
|    86  cpu_bitmap = (uint32_t)atomic_clear(&_kernel.pending_ipi);  in signal_pending_ipi()
|   139  sys_dlist_append(&_kernel.cpus[id].ipi_workq, &work->node[id]);  in k_ipi_work_add()
|   204  ipi_work_process(&_kernel.cpus[_current_cpu->id].ipi_workq);  in z_sched_ipi()
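`pending_ipi` is a lock-free accumulator: senders OR a CPU bitmask in, and the drain side atomically swaps the word to zero, so flags raised concurrently are never lost. A sketch of that pattern, pairing the `atomic_or()`/`atomic_clear()` calls quoted above with the `arch_sched_directed_ipi()` hook that appears later in this listing:

    static void flag_ipi_sketch(uint32_t ipi_mask)
    {
        /* Accumulate target CPUs; safe against concurrent senders. */
        atomic_or(&_kernel.pending_ipi, (atomic_val_t)ipi_mask);
    }

    static void signal_pending_ipi_sketch(void)
    {
        /* Atomically take ownership of all pending flags at once. */
        uint32_t cpu_bitmap = (uint32_t)atomic_clear(&_kernel.pending_ipi);

        if (cpu_bitmap != 0U) {
            arch_sched_directed_ipi(cpu_bitmap);
        }
    }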
|
| /Zephyr-4.3.0/arch/riscv/include/ |
| D | kernel_arch_func.h |
|    35  csr_write(mscratch, &_kernel.cpus[0]);  in arch_kernel_init()
|    38  _kernel.cpus[0].arch.hartid = csr_read(mhartid);  in arch_kernel_init()
|    39  _kernel.cpus[0].arch.online = true;  in arch_kernel_init()
|    48  if (cpu_node_list[hart_x] == _kernel.cpus[0].arch.hartid) {  in arch_kernel_init()
|    51  _kernel.cpus[cpu_num].arch.hartid = cpu_node_list[hart_x];  in arch_kernel_init()
|    89  return _kernel.cpus[0].nested != 0U;  in arch_is_in_isr()
|
| /Zephyr-4.3.0/arch/x86/core/ia32/ |
| D | float.c |
|   197  fp_owner = _kernel.cpus[0].arch.fpu_owner;  in z_float_enable()
|   218  _kernel.cpus[0].arch.fpu_owner = thread;  in z_float_enable()
|   233  _kernel.cpus[0].arch.fpu_owner = thread;  in z_float_enable()
|   283  _kernel.cpus[0].arch.fpu_owner = (struct k_thread *)0;  in z_float_disable()
|   285  if (_kernel.cpus[0].arch.fpu_owner == thread) {  in z_float_disable()
|   286  _kernel.cpus[0].arch.fpu_owner = (struct k_thread *)0;  in z_float_disable()
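`fpu_owner` implements lazy floating-point context handling on IA-32: the per-CPU struct remembers which thread's FP registers are live, so a save is only needed when ownership changes hands. A sketch under that assumption; `save_fp_context()` is a hypothetical helper, while the `_kernel` field accesses mirror the matches above:

    extern void save_fp_context(struct k_thread *owner);  /* hypothetical */

    static void fp_enable_sketch(struct k_thread *thread)
    {
        struct k_thread *fp_owner = _kernel.cpus[0].arch.fpu_owner;

        /* Evict the previous owner's registers only if someone else
         * currently holds the FPU.
         */
        if ((fp_owner != NULL) && (fp_owner != thread)) {
            save_fp_context(fp_owner);
        }
        _kernel.cpus[0].arch.fpu_owner = thread;
    }

    static void fp_disable_sketch(struct k_thread *thread)
    {
        /* Clear ownership only if this thread actually holds it. */
        if (_kernel.cpus[0].arch.fpu_owner == thread) {
            _kernel.cpus[0].arch.fpu_owner = (struct k_thread *)0;
        }
    }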
|
| D | irq_manage.c |
|    57  _kernel.cpus[0].arch.shstk_addr = stack +  in z_x86_set_irq_shadow_stack()
|    59  _kernel.cpus[0].arch.shstk_size = stack_size;  in z_x86_set_irq_shadow_stack()
|    60  _kernel.cpus[0].arch.shstk_base = stack;  in z_x86_set_irq_shadow_stack()
|
| /Zephyr-4.3.0/arch/riscv/core/ |
| D | smp.c |
|    48  riscv_cpu_wake_flag = _kernel.cpus[cpu_num].arch.hartid;  in arch_cpu_start()
|    58  if (_kernel.cpus[i].arch.hartid == hartid) {  in arch_secondary_cpu_init()
|    62  csr_write(mscratch, &_kernel.cpus[cpu_num]);  in arch_secondary_cpu_init()
|    64  _kernel.cpus[cpu_num].arch.online = true;  in arch_secondary_cpu_init()
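RISC-V hart IDs need not be contiguous or start at zero, so a secondary hart locates its logical CPU index by scanning `_kernel.cpus[]` for its own `hartid`, then points `mscratch` at its per-CPU struct and marks itself online. A sketch of that lookup, assuming `CONFIG_MP_MAX_NUM_CPUS` and the RISC-V `csr_write()` helper used in the matches above:

    void secondary_cpu_init_sketch(unsigned long hartid)
    {
        unsigned int cpu_num = 0;

        /* Map this hart's ID back to its logical CPU number. */
        for (unsigned int i = 0; i < CONFIG_MP_MAX_NUM_CPUS; i++) {
            if (_kernel.cpus[i].arch.hartid == hartid) {
                cpu_num = i;
                break;
            }
        }

        /* mscratch lets ISR entry find the per-CPU struct quickly. */
        csr_write(mscratch, &_kernel.cpus[cpu_num]);
        _kernel.cpus[cpu_num].arch.online = true;
    }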
|
| D | ipi_clint.c |
|    29  if ((i != id) && _kernel.cpus[i].arch.online && ((cpu_bitmap & BIT(i)) != 0)) {  in arch_sched_directed_ipi()
|    31  MSIP(_kernel.cpus[i].arch.hartid) = 1;  in arch_sched_directed_ipi()
|    42  MSIP(_kernel.cpus[cpu].arch.hartid) = 1;  in arch_flush_fpu_ipi()
|
| D | irq_manage.c |
|    87  if (_kernel.idle) {  in arch_isr_direct_pm()
|    88  _kernel.idle = 0;  in arch_isr_direct_pm()
|
| /Zephyr-4.3.0/arch/arm/core/cortex_m/ |
| D | exc_exit.c |
|    58  if (_kernel.ready_q.cache != _kernel.cpus->current) {  in z_arm_exc_exit()
|
| D | isr_wrapper.c |
|    59  if (_kernel.idle != 0) {  in _isr_wrapper()
|    61  _kernel.idle = 0;  in _isr_wrapper()
|
| /Zephyr-4.3.0/include/zephyr/arch/x86/ia32/ |
| D | arch.h |
|   254  if (_kernel.idle) {  in arch_irq_direct_pm()
|   255  _kernel.idle = 0;  in arch_irq_direct_pm()
|   285  ++_kernel.cpus[0].nested;  in arch_isr_direct_header()
|   301  --_kernel.cpus[0].nested;  in arch_isr_direct_footer()
|   309  if (swap != 0 && _kernel.cpus[0].nested == 0 &&  in arch_isr_direct_footer()
|   310  _kernel.ready_q.cache != _current) {  in arch_isr_direct_footer()
|
| /Zephyr-4.3.0/arch/rx/core/ |
| D | isr_exit.S |
|    13  mov #__kernel, r1    ; Load the base address of _kernel into r1
|    14  mov r1, r3           ; Copy the base address of _kernel into r3
|    17  mov [r1], r2         ; Load the value of _kernel.cpus[0].current into r2
|
| /Zephyr-4.3.0/include/zephyr/arch/arc/ |
| D | arch_inlines.h |
|    24  return &_kernel.cpus[core];  in arch_curr_cpu()
|    26  return &_kernel.cpus[0];  in arch_curr_cpu()
|
| /Zephyr-4.3.0/tests/kernel/ipi_cascade/src/ |
| D | main.c |
|    80  str, _kernel.cpus[0].current,  in show_executing_threads()
|    81  _kernel.cpus[0].current->name,  in show_executing_threads()
|    82  _kernel.cpus[0].current->base.prio);  in show_executing_threads()
|    84  str, _kernel.cpus[1].current,  in show_executing_threads()
|    85  _kernel.cpus[1].current->name,  in show_executing_threads()
|    86  _kernel.cpus[1].current->base.prio);  in show_executing_threads()
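The test reads each CPU's `current` thread pointer directly for a debug dump. A loop-form sketch of the same output, assuming a 2-CPU build, `printk()`, and `CONFIG_THREAD_NAME` enabled so the `->name` field exists:

    static void show_executing_threads_sketch(const char *str)
    {
        for (unsigned int i = 0; i < 2; i++) {
            printk("%s: CPU%u: %p '%s' prio %d\n", str, i,
                   _kernel.cpus[i].current,
                   _kernel.cpus[i].current->name,
                   _kernel.cpus[i].current->base.prio);
        }
    }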
|
| /Zephyr-4.3.0/boards/native/native_sim/ |
| D | irq_handler.c |
|    87  if (_kernel.cpus[0].nested == 0) {  in posix_irq_handler()
|    91  _kernel.cpus[0].nested++;  in posix_irq_handler()
|   107  _kernel.cpus[0].nested--;  in posix_irq_handler()
|   116  && (_kernel.ready_q.cache) && (_kernel.ready_q.cache != _current)) {  in posix_irq_handler()
|
| /Zephyr-4.3.0/boards/native/nrf_bsim/ |
| D | irq_handler.c |
|   105  if (_kernel.cpus[0].nested == 0) {  in posix_irq_handler()
|   109  _kernel.cpus[0].nested++;  in posix_irq_handler()
|   127  _kernel.cpus[0].nested--;  in posix_irq_handler()
|   138  && (_kernel.ready_q.cache) && (_kernel.ready_q.cache != _current)) {  in posix_irq_handler()
|
| /Zephyr-4.3.0/arch/arm/core/cortex_a_r/ |
| D | irq_manage.c |
|   108  if (_kernel.idle) {  in _arch_isr_direct_pm()
|   109  _kernel.idle = 0;  in _arch_isr_direct_pm()
|
| /Zephyr-4.3.0/arch/arm/core/ |
| D | userspace.S |
|    30  GDATA(_kernel)
|    60  ldr r0, =_kernel
|   124  ldr r0, =_kernel
|   223  ldr r0, =_kernel
|   237  ldr r0, =_kernel
|   318  ldr ip, =_kernel
|   444  ldr r3, =_kernel
|   474  ldr r0, =_kernel
|   490  ldr r0, =_kernel
|
| /Zephyr-4.3.0/include/zephyr/debug/ |
| D | object_tracing.h |
|    19  #define SYS_THREAD_MONITOR_HEAD ((struct k_thread *)(_kernel.threads))
|
| /Zephyr-4.3.0/subsys/debug/coredump/ |
| D | coredump_core.c |
|   179  for (thread = _kernel.threads; thread; thread = thread->next_thread) {  in process_memory_region_list()
|   184  char *irq_stack = _kernel.cpus[0].irq_stack;  in process_memory_region_list()
|   205  hdr.num_bytes += sizeof(_kernel);  in dump_threads_metadata()
|   208  coredump_buffer_output((uint8_t *)&_kernel, sizeof(_kernel));  in dump_threads_metadata()
|
| /Zephyr-4.3.0/include/zephyr/ |
| D | kernel_structs.h |
|   242  extern struct z_kernel _kernel;
|   259  #define _current_cpu (&_kernel.cpus[0])
|   260  #define _current _kernel.cpus[0].current
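On uniprocessor builds the `_current_cpu` and `_current` conveniences shown here collapse to a direct `_kernel.cpus[0]` access, which is why so many single-core arch files in this listing index CPU 0 explicitly; SMP builds instead go through the per-arch `arch_curr_cpu()` path (see the ARC arch_inlines.h entry above). A usage sketch of the equivalence under the UP definitions quoted here:

    /* With the UP macros above, these two functions perform the
     * same load.
     */
    bool current_is_preemptible(void)
    {
        return _current->base.prio >= 0;                   /* via macro */
    }

    bool current_is_preemptible_expanded(void)
    {
        return _kernel.cpus[0].current->base.prio >= 0;    /* expanded  */
    }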
|