
Searched refs: _current_cpu (Results 1 – 25 of 40), sorted by relevance


/Zephyr-latest/arch/riscv/core/fpu.c
    38: buf[3] = '0' + _current_cpu->id;   in DBG()
    73: _current_cpu->arch.fpu_state = (status & MSTATUS_FS);   in z_riscv_fpu_disable()
    85: atomic_ptr_set(&_current_cpu->arch.fpu_owner, arch_current_thread());   in z_riscv_fpu_load()
   108: struct k_thread *owner = atomic_ptr_get(&_current_cpu->arch.fpu_owner);   in arch_flush_local_fpu()
   111: bool dirty = (_current_cpu->arch.fpu_state == MSTATUS_FS_DIRTY);   in arch_flush_local_fpu()
   127: atomic_ptr_clear(&_current_cpu->arch.fpu_owner);   in arch_flush_local_fpu()
   150: if (i == _current_cpu->id) {   in flush_owned_fpu()
   261: if (_current_cpu->arch.fpu_owner == arch_current_thread()) {   in fpu_access_allowed()
   279: _current_cpu->arch.fpu_state = MSTATUS_FS_CLEAN;   in fpu_access_allowed()
   300: esf->mstatus |= _current_cpu->arch.fpu_state;   in z_riscv_fpu_exit_exc()
   [all …]
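
The fpu.c hits above all implement lazy FPU context switching: each CPU records which thread currently owns its FPU in an atomic per-CPU pointer (_current_cpu->arch.fpu_owner), so register save/restore is deferred until a different thread actually touches the FPU. The flush_owned_fpu() hits suggest other CPUs may inspect or clear that owner concurrently, hence the atomics. A minimal standalone sketch of the ownership protocol using C11 atomics; struct cpu and struct thread here are simplified stand-ins, not Zephyr's real types:

#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

struct thread { const char *name; };

/* Simplified stand-in for the per-CPU struct: one atomic owner slot. */
struct cpu {
	_Atomic(struct thread *) fpu_owner;
};

static struct cpu cpu0;

/* On an FPU-access trap: flush the previous owner, claim the FPU. */
static void fpu_trap(struct cpu *c, struct thread *curr)
{
	struct thread *owner = atomic_load(&c->fpu_owner);

	if (owner == curr) {
		return;                        /* already ours, nothing to do */
	}
	if (owner != NULL) {
		/* save_fpu_context(owner) would go here */
		atomic_store(&c->fpu_owner, NULL);
	}
	/* load_fpu_context(curr) would go here */
	atomic_store(&c->fpu_owner, curr);
}

int main(void)
{
	struct thread a = { "A" }, b = { "B" };

	fpu_trap(&cpu0, &a);
	fpu_trap(&cpu0, &b);            /* forces a save of A, a load of B */
	printf("owner: %s\n", atomic_load(&cpu0.fpu_owner)->name);
	return 0;
}
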
/Zephyr-latest/arch/riscv/core/ipi_clint.c
    26: unsigned int id = _current_cpu->id;   in arch_sched_directed_ipi()
    53: atomic_val_t pending_ipi = atomic_clear(&cpu_pending_ipi[_current_cpu->id]);   in sched_ipi_handler()
    82: atomic_val_t *pending_ipi = &cpu_pending_ipi[_current_cpu->id];   in arch_spin_relax()
    89: arch_float_disable(_current_cpu->arch.fpu_owner);   in arch_spin_relax()
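
ipi_clint.c is the receiving half of IPIs: each CPU owns one atomic word of pending-IPI bits, and the handler drains the whole word with a single atomic swap so no concurrently-set bit is lost. A sketch of that consume-all-pending idiom; the cpu_pending_ipi name mirrors the hits, everything else is a simplified assumption:

#include <stdatomic.h>
#include <stdio.h>

#define IPI_SCHED (1u << 0)
#define IPI_FPU   (1u << 1)

static _Atomic unsigned int cpu_pending_ipi[4];   /* one word per CPU */

/* Sender: set a bit for the target CPU, then ring its doorbell. */
static void send_ipi(int target, unsigned int bit)
{
	atomic_fetch_or(&cpu_pending_ipi[target], bit);
	/* the arch-specific doorbell write (e.g. CLINT MSIP) would go here */
}

/* Receiver: swap to zero so every pending bit is seen exactly once. */
static void ipi_handler(int self)
{
	unsigned int pending = atomic_exchange(&cpu_pending_ipi[self], 0);

	if (pending & IPI_SCHED) { puts("reschedule"); }
	if (pending & IPI_FPU)   { puts("flush FPU");  }
}

int main(void)
{
	send_ipi(1, IPI_SCHED);
	send_ipi(1, IPI_FPU);
	ipi_handler(1);
	return 0;
}
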
/Zephyr-latest/arch/arm64/core/fpu.c
    37: buf[3] = '0' + _current_cpu->id;   in DBG()
    71: struct k_thread *owner = atomic_ptr_get(&_current_cpu->arch.fpu_owner);   in arch_flush_local_fpu()
    85: atomic_ptr_clear(&_current_cpu->arch.fpu_owner);   in arch_flush_local_fpu()
   109: if (i == _current_cpu->id) {   in flush_owned_fpu()
   238: struct k_thread *owner = atomic_ptr_get(&_current_cpu->arch.fpu_owner);   in z_arm64_fpu_trap()
   243: atomic_ptr_clear(&_current_cpu->arch.fpu_owner);   in z_arm64_fpu_trap()
   267: atomic_ptr_set(&_current_cpu->arch.fpu_owner, arch_current_thread());   in z_arm64_fpu_trap()
   290: if (atomic_ptr_get(&_current_cpu->arch.fpu_owner) == arch_current_thread()) {   in fpu_access_update()
   336: if (thread == atomic_ptr_get(&_current_cpu->arch.fpu_owner)) {   in arch_float_disable()
/Zephyr-latest/kernel/spinlock_validate.c
    14: if ((thread_cpu & 3U) == _current_cpu->id) {   in z_spin_lock_valid()
    31: if (tcpu != (_current_cpu->id | (uintptr_t)arch_current_thread())) {   in z_spin_unlock_valid()
    39: l->thread_cpu = _current_cpu->id | (uintptr_t)arch_current_thread();   in z_spin_lock_set_owner()
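
spinlock_validate.c packs the lock owner into a single word: a thread pointer is at least 4-byte aligned, so its two low bits are free to carry a CPU id, which is what the & 3U mask on line 14 and the id | (uintptr_t)thread OR on line 39 rely on. A sketch of the pack/unpack trick, assuming 4-byte-aligned thread structs and at most 4 CPUs:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct thread { int dummy; };   /* 4-byte aligned, so bits 0-1 are free */

/* Pack a CPU id (0-3) into the low bits of an aligned pointer. */
static uintptr_t pack_owner(struct thread *t, unsigned int cpu_id)
{
	assert(((uintptr_t)t & 3U) == 0 && cpu_id < 4);
	return (uintptr_t)t | cpu_id;
}

int main(void)
{
	static struct thread t;
	uintptr_t owner = pack_owner(&t, 2);

	printf("cpu=%u thread=%p\n",
	       (unsigned int)(owner & 3U),          /* unpack the CPU id  */
	       (void *)(owner & ~(uintptr_t)3U));   /* unpack the pointer */
	return 0;
}
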
/Zephyr-latest/kernel/usage.c
    81: _current_cpu->usage0 = usage_now(); /* Always update */   in z_sched_usage_start()
    95: _current_cpu->usage0 = usage_now();   in z_sched_usage_start()
   103: struct _cpu *cpu = _current_cpu;   in z_sched_usage_stop()
   128: cpu = _current_cpu;   in z_sched_cpu_usage()
   180: cpu = _current_cpu;   in z_sched_thread_usage()
   258: struct _cpu *cpu = _current_cpu;   in k_thread_runtime_stats_disable()
   284: if (_current_cpu->usage->track_usage) {   in k_sys_runtime_stats_enable()
   318: if (!_current_cpu->usage->track_usage) {   in k_sys_runtime_stats_disable()
   386: if (thread != _current_cpu->current) {   in z_thread_stats_reset()
   404: uint32_t cycles = now - _current_cpu->usage0;   in z_thread_stats_reset()
   [all …]
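
usage.c charges CPU time by timestamp deltas: usage0 holds the cycle stamp at which the current thread began running on this CPU, and every stop or query point accumulates now - usage0 (line 404). A minimal sketch of that delta accounting with the cycle counter stubbed out; unsigned 32-bit subtraction keeps the delta correct across one counter wraparound:

#include <stdint.h>
#include <stdio.h>

struct cpu_stats {
	uint32_t usage0;         /* cycle stamp when the current thread started */
	uint64_t total_cycles;   /* accumulated execution time */
};

static uint32_t fake_cycles;
static uint32_t usage_now(void) { return fake_cycles; }   /* stand-in timer */

static void sched_usage_start(struct cpu_stats *c) { c->usage0 = usage_now(); }

static void sched_usage_stop(struct cpu_stats *c)
{
	c->total_cycles += (uint32_t)(usage_now() - c->usage0);
}

int main(void)
{
	struct cpu_stats c = { 0 };

	sched_usage_start(&c);
	fake_cycles += 1000;     /* pretend the thread ran for 1000 cycles */
	sched_usage_stop(&c);
	printf("total: %llu cycles\n", (unsigned long long)c.total_cycles);
	return 0;
}
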
/Zephyr-latest/kernel/timeslicing.c
    63: if (cpu != _current_cpu->id) {   in slice_timeout()
    70: int cpu = _current_cpu->id;   in z_reset_time_slice()
   117: if (slice_expired[_current_cpu->id] && thread_is_sliceable(curr)) {   in z_time_slice()
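
timeslicing.c keeps a per-CPU slice_expired flag because the slice timeout can fire while executing on a different CPU than the one it tracks (the line 63 check): the callback only marks the flag, and the owning CPU reacts on its next pass through z_time_slice(). A sketch of that mark-then-poll split; apart from slice_expired, the names are simplified:

#include <stdbool.h>
#include <stdio.h>

#define NUM_CPUS 2

static bool slice_expired[NUM_CPUS];

/* Timer callback: may run on any CPU, so it only marks the flag. */
static void slice_timeout(int cpu)
{
	slice_expired[cpu] = true;
}

/* Called by CPU `self` in its own scheduling path. */
static void time_slice(int self)
{
	if (slice_expired[self]) {
		slice_expired[self] = false;
		puts("rotate equal-priority ready queue");
	}
}

int main(void)
{
	slice_timeout(1);   /* e.g. the timeout fired while running on CPU 0 */
	time_slice(1);
	return 0;
}
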
/Zephyr-latest/kernel/sched.c
   155: _current_cpu->swap_ok = true;   in queue_thread()
   220: struct k_thread *mirqp = _current_cpu->metairq_preempted;   in next_up()
   226: _current_cpu->metairq_preempted = NULL;   in next_up()
   241: return (thread != NULL) ? thread : _current_cpu->idle_thread;   in next_up()
   258: thread = _current_cpu->idle_thread;   in next_up()
   265: if ((cmp > 0) || ((cmp == 0) && !_current_cpu->swap_ok)) {   in next_up()
   269: if (!should_preempt(thread, _current_cpu->swap_ok)) {   in next_up()
   285: _current_cpu->swap_ok = false;   in next_up()
   310: _current_cpu->metairq_preempted = arch_current_thread();   in update_metairq_preempt()
   313: _current_cpu->metairq_preempted = NULL;   in update_metairq_preempt()
   [all …]
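
In sched.c, _current_cpu carries per-CPU scheduler bookkeeping: swap_ok, the always-runnable idle_thread fallback, and metairq_preempted for restoring whatever thread a meta-IRQ thread displaced. The idle fallback visible on lines 241 and 258 reduces to a pattern like this sketch (ready queue and types simplified):

#include <stddef.h>
#include <stdio.h>

struct thread { const char *name; };

struct cpu {
	struct thread *idle_thread;   /* per-CPU, always runnable */
};

/* Stand-in ready queue: returns NULL when nothing is runnable. */
static struct thread *ready_queue_best(void)
{
	return NULL;
}

/* Core of a next_up()-style decision: best ready thread, else idle. */
static struct thread *next_up(struct cpu *c)
{
	struct thread *thread = ready_queue_best();

	return (thread != NULL) ? thread : c->idle_thread;
}

int main(void)
{
	static struct thread idle = { "idle" };
	struct cpu cpu0 = { .idle_thread = &idle };

	printf("next: %s\n", next_up(&cpu0)->name);
	return 0;
}
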
/Zephyr-latest/kernel/ipi.c
    34: uint32_t id = _current_cpu->id;   in ipi_mask_create()
/Zephyr-latest/samples/subsys/tracing/src/tracing_user.c
    17: __ASSERT_NO_MSG(nested_interrupts[_current_cpu->id] == 0);   in sys_trace_thread_switched_in_user()
    28: __ASSERT_NO_MSG(nested_interrupts[_current_cpu->id] == 0);   in sys_trace_thread_switched_out_user()
    38: _cpu_t *curr_cpu = _current_cpu;   in sys_trace_isr_enter_user()
    49: _cpu_t *curr_cpu = _current_cpu;   in sys_trace_isr_exit_user()
/Zephyr-latest/include/zephyr/arch/common/arch_inlines.h
    28: struct k_thread *ret = _current_cpu->current;   in arch_current_thread()
    39: _current_cpu->current = thread;   in arch_current_thread_set()
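
The common arch_inlines.h fallback shows that arch_current_thread() is nothing more than a read of _current_cpu->current, with the setter as its mirror image; the RISC-V variant further down additionally caches the value in __arch_current_thread. A sketch of the generic getter/setter pair over a stand-in per-CPU struct:

#include <stdio.h>

struct thread { const char *name; };

struct cpu {
	struct thread *current;   /* thread now running on this CPU */
};

static struct cpu cpu0;
#define current_cpu (&cpu0)   /* uniprocessor: always CPU 0 */

static inline struct thread *current_thread_get(void)
{
	return current_cpu->current;
}

static inline void current_thread_set(struct thread *thread)
{
	current_cpu->current = thread;
}

int main(void)
{
	static struct thread main_thread = { "main" };

	current_thread_set(&main_thread);
	printf("running: %s\n", current_thread_get()->name);
	return 0;
}
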
/Zephyr-latest/arch/sparc/core/irq_manage.c
    31: _current_cpu->nested++;   in z_sparc_enter_irq()
    48: _current_cpu->nested--;   in z_sparc_enter_irq()
/Zephyr-latest/arch/x86/core/intel64/irq_offload.c
    26: uint8_t cpu_id = _current_cpu->id;   in dispatcher()
    36: uint8_t cpu_id = _current_cpu->id;   in arch_irq_offload()
/Zephyr-latest/subsys/pm/pm_stats.c
    56: time_start[_current_cpu->id] = k_cycle_get_32();   in pm_stats_start()
    61: time_stop[_current_cpu->id] = k_cycle_get_32();   in pm_stats_stop()
    66: uint8_t cpu = _current_cpu->id;   in pm_stats_update()
/Zephyr-latest/subsys/pm/pm.c
    65: callback(z_cpus_pm_state[_current_cpu->id].state);   in pm_state_notify()
    96: uint8_t id = _current_cpu->id;   in pm_system_resume()
   145: uint8_t id = _current_cpu->id;   in pm_system_suspend()
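
pm.c and pm_stats.c use _current_cpu->id purely as an array index: each core suspends and resumes independently, so its timestamps and its z_cpus_pm_state record live in per-CPU slots. A sketch of that indexing pattern; the state enum and array name here are simplified assumptions:

#include <stdio.h>

#define NUM_CPUS 2

enum pm_state { PM_ACTIVE, PM_SUSPENDED };

static enum pm_state cpus_pm_state[NUM_CPUS];   /* one record per core */

static void pm_suspend(int cpu_id)
{
	cpus_pm_state[cpu_id] = PM_SUSPENDED;   /* touches only this core */
}

static void pm_resume(int cpu_id)
{
	cpus_pm_state[cpu_id] = PM_ACTIVE;
}

int main(void)
{
	pm_suspend(1);   /* CPU 1 sleeps; CPU 0's record is untouched */
	printf("cpu0=%d cpu1=%d\n", cpus_pm_state[0], cpus_pm_state[1]);
	pm_resume(1);
	return 0;
}
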
/Zephyr-latest/arch/xtensa/core/irq_offload.c
    19: uint8_t cpu_id = _current_cpu->id;   in irq_offload_isr()
    29: uint8_t cpu_id = _current_cpu->id;   in arch_irq_offload()
/Zephyr-latest/arch/arm/core/cortex_a_r/fault.c
   156: if (_current_cpu->nested > 1) {   in z_arm_fault_undef_instruction_fp()
   164: (struct __fpu_sf *)_current_cpu->fp_ctx;   in z_arm_fault_undef_instruction_fp()
   170: _current_cpu->fp_ctx = NULL;   in z_arm_fault_undef_instruction_fp()
   180: if (((_current_cpu->nested == 2)   in z_arm_fault_undef_instruction_fp()
   182: || ((_current_cpu->nested > 2)   in z_arm_fault_undef_instruction_fp()
/Zephyr-latest/arch/mips/core/irq_manage.c
    65: _current_cpu->nested++;   in z_mips_enter_irq()
    91: _current_cpu->nested--;   in z_mips_enter_irq()
/Zephyr-latest/tests/subsys/pm/power_mgmt_multicore/src/main.c
    36: switch (state_testing[_current_cpu->id]) {   in pm_state_set()
    44: zassert_equal(_current_cpu->id, 1U);   in pm_state_set()
/Zephyr-latest/arch/mips/include/kernel_arch_func.h
    47: return _current_cpu->nested != 0U;   in arch_is_in_isr()
/Zephyr-latest/include/zephyr/arch/riscv/arch_inlines.h
    36: __arch_current_thread = _current_cpu->current = (thread); \
/Zephyr-latest/arch/sparc/include/kernel_arch_func.h
    55: return _current_cpu->nested != 0U;   in arch_is_in_isr()
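
The sparc and mips hits are two halves of one mechanism: the IRQ entry path increments _current_cpu->nested, the exit path decrements it, and arch_is_in_isr() just tests the counter against zero, which handles nested interrupts for free. A sketch:

#include <stdbool.h>
#include <stdio.h>

struct cpu {
	unsigned int nested;   /* interrupt nesting depth on this CPU */
};

static struct cpu cpu0;

static void enter_irq(struct cpu *c) { c->nested++; }
static void exit_irq(struct cpu *c)  { c->nested--; }

/* True whenever at least one interrupt frame is active on this CPU. */
static bool is_in_isr(const struct cpu *c)
{
	return c->nested != 0U;
}

int main(void)
{
	enter_irq(&cpu0);
	enter_irq(&cpu0);   /* a nested interrupt */
	printf("in ISR: %d (depth %u)\n", is_in_isr(&cpu0), cpu0.nested);
	exit_irq(&cpu0);
	exit_irq(&cpu0);
	printf("in ISR: %d\n", is_in_isr(&cpu0));
	return 0;
}
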
/Zephyr-latest/include/zephyr/kernel_structs.h
   261: #define _current_cpu ({ __ASSERT_NO_MSG(!z_smp_cpu_mobile()); \   (macro definition)
   265: #define _current_cpu (&_kernel.cpus[0])   (macro definition)
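
These two kernel_structs.h hits are the definition itself: under SMP, _current_cpu asserts that the caller cannot migrate between CPUs before indexing _kernel.cpus, while uniprocessor builds collapse the macro to &_kernel.cpus[0] at no runtime cost. A compilable sketch of the same two-way definition; arch_curr_cpu_id(), cpu_mobile(), and the SMP switch are stand-ins, and the statement expression is a GNU extension, as in the original:

#include <assert.h>
#include <stdio.h>

#define SMP 1        /* flip to 0 for the uniprocessor variant */
#define NUM_CPUS 2

struct cpu { int id; };
static struct { struct cpu cpus[NUM_CPUS]; } kernel;

static int arch_curr_cpu_id(void) { return 0; }   /* stand-in for the HW id */
static int cpu_mobile(void) { return 0; }         /* 0 = migration locked out */

#if SMP
/* Valid only while the caller cannot be migrated to another CPU. */
#define current_cpu ({ assert(!cpu_mobile()); \
		       &kernel.cpus[arch_curr_cpu_id()]; })
#else
#define current_cpu (&kernel.cpus[0])
#endif

int main(void)
{
	printf("on cpu %d\n", current_cpu->id);
	return 0;
}
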
/Zephyr-latest/subsys/profiling/perf/backends/perf_x86.c
    46: *((struct isf **)(((void **)_current_cpu->irq_stack)-1));   in arch_perf_current_stack_trace()
/Zephyr-latest/subsys/profiling/perf/backends/perf_riscv.c
    45: *((struct arch_esf **)(((uintptr_t)_current_cpu->irq_stack) - 16));   in arch_perf_current_stack_trace()
/Zephyr-latest/tests/kernel/ipi_optimize/src/main.c
    40: ipi_count[_current_cpu->id]++;   in z_trace_sched_ipi()
    68: id = _current_cpu->id;   in busy_thread_entry()
   141: id = _current_cpu->id;   in busy_threads_create()
