Home
last modified time | relevance | path

Searched refs: cpu_id (Results 1 – 25 of 26) sorted by relevance

12

/Zephyr-latest/arch/x86/core/intel64/
Dirq_offload.c26 uint8_t cpu_id = _current_cpu->id; in dispatcher() local
28 if (irq_offload_funcs[cpu_id] != NULL) { in dispatcher()
29 irq_offload_funcs[cpu_id](irq_offload_args[cpu_id]); in dispatcher()
36 uint8_t cpu_id = _current_cpu->id; in arch_irq_offload() local
38 irq_offload_funcs[cpu_id] = routine; in arch_irq_offload()
39 irq_offload_args[cpu_id] = parameter; in arch_irq_offload()
Dcpu.c78 x86_cpuboot[cpu_num].cpu_id = cpu_num; in arch_cpu_start()
102 x86_cpu_loapics[cpuboot->cpu_id], "APIC ID miss match!"); in z_x86_cpu_init()
106 if (cpuboot->cpu_id == 0U) { in z_x86_cpu_init()
112 z_loapic_enable(cpuboot->cpu_id); in z_x86_cpu_init()
/Zephyr-latest/arch/xtensa/core/
Dirq_offload.c19 uint8_t cpu_id = _current_cpu->id; in irq_offload_isr() local
21 offload_params[cpu_id].fn(offload_params[cpu_id].arg); in irq_offload_isr()
29 uint8_t cpu_id = _current_cpu->id; in arch_irq_offload() local
31 offload_params[cpu_id].fn = routine; in arch_irq_offload()
32 offload_params[cpu_id].arg = parameter; in arch_irq_offload()
Dvector_handlers.c61 int cpu_id; in xtensa_is_outside_stack_bounds() local
64 cpu_id = arch_curr_cpu()->id; in xtensa_is_outside_stack_bounds()
66 cpu_id = 0; in xtensa_is_outside_stack_bounds()
69 start = (uintptr_t)K_KERNEL_STACK_BUFFER(z_interrupt_stacks[cpu_id]); in xtensa_is_outside_stack_bounds()
/Zephyr-latest/drivers/pm_cpu_ops/
Dpsci_shell.c47 long cpu_id; in cmd_psci_cpuon() local
51 cpu_id = strtol(argv[1], NULL, 10); in cmd_psci_cpuon()
52 if (cpu_id == 0 || cpu_id == LONG_MIN || cpu_id == LONG_MAX) { in cmd_psci_cpuon()
54 shell_error(shctx, "psci: invalid input:%ld", cpu_id); in cmd_psci_cpuon()
59 result = pm_cpu_on((unsigned long)cpu_id, (uintptr_t)&__start); in cmd_psci_cpuon()
/Zephyr-latest/tests/arch/arc/arc_vpx_lock/src/
Dmain.c51 unsigned int cpu_id; in arc_vpx_lock_unlock_timed_payload() local
54 cpu_id = (unsigned int)(uintptr_t)(p1); in arc_vpx_lock_unlock_timed_payload()
66 isr_vpx_lock_id = cpu_id; in arc_vpx_lock_unlock_timed_payload()
78 int cpu_id; in ZTEST() local
81 cpu_id = current_cpu_id_get(); in ZTEST()
85 (void *)(uintptr_t)cpu_id, NULL, NULL, in ZTEST()
89 k_thread_cpu_pin(&payload_thread, cpu_id); in ZTEST()
131 int cpu_id; in ZTEST() local
134 cpu_id = current_cpu_id_get(); in ZTEST()
141 k_thread_cpu_pin(&payload_thread, cpu_id); in ZTEST()
/Zephyr-latest/drivers/ipm/
Dipm_mhu.c34 enum ipm_mhu_cpu_id_t cpu_id, in ipm_mhu_get_status() argument
45 switch (cpu_id) { in ipm_mhu_get_status()
58 static int ipm_mhu_send(const struct device *d, int wait, uint32_t cpu_id, in ipm_mhu_send() argument
67 if (cpu_id >= IPM_MHU_CPU_MAX) { in ipm_mhu_send()
77 switch (cpu_id) { in ipm_mhu_send()
91 enum ipm_mhu_cpu_id_t cpu_id, in ipm_mhu_clear_val() argument
98 switch (cpu_id) { in ipm_mhu_clear_val()
128 enum ipm_mhu_cpu_id_t cpu_id; in ipm_mhu_isr() local
131 cpu_id = ipm_mhu_get_cpu_id(d); in ipm_mhu_isr()
133 ipm_mhu_get_status(d, cpu_id, &ipm_mhu_status); in ipm_mhu_isr()
[all …]
Dipm_mcux.c58 mailbox_cpu_id_t cpu_id; in mcux_mailbox_isr() local
60 cpu_id = MAILBOX_ID_THIS_CPU; in mcux_mailbox_isr()
62 volatile uint32_t value = MAILBOX_GetValue(config->base, cpu_id); in mcux_mailbox_isr()
67 MAILBOX_ClearValueBits(config->base, cpu_id, value); in mcux_mailbox_isr()
/Zephyr-latest/samples/drivers/ipm/ipm_mhu_dual_core/src/
Dmain.c36 uint32_t cpu_id, volatile void *data) in mhu_isr_callback() argument
40 printk("MHU ISR on CPU %d\n", cpu_id); in mhu_isr_callback()
41 if (cpu_id == MHU_CPU0) { in mhu_isr_callback()
43 } else if (cpu_id == MHU_CPU1) { in mhu_isr_callback()
/Zephyr-latest/kernel/include/
Dipi.h16 #define IPI_CPU_MASK(cpu_id) \ argument
17 (IS_ENABLED(CONFIG_IPI_OPTIMIZE) ? BIT(cpu_id) : IPI_ALL_CPUS_MASK)
/Zephyr-latest/tests/kernel/mp/src/
Dmain.c39 int cpu_id = (*(int *)arg) / 12345; in cpu_fn() local
44 cpu_running[cpu_id] = 1; in cpu_fn()
/Zephyr-latest/subsys/testsuite/include/zephyr/
Dinterrupt_util.h144 int cpu_id = arch_curr_cpu()->id; in trigger_irq() local
146 int cpu_id = 0; in trigger_irq() local
148 z_loapic_ipi(cpu_id, LOAPIC_ICR_IPI_TEST, vector); in trigger_irq()
/Zephyr-latest/soc/intel/intel_adsp/ace/
Dmultiprocessing.c42 uint32_t cpu_id = arch_proc_id(); in ipc_isr() local
45 uint32_t msg = IDC[cpu_id].agents[0].ipc.tdr & ~INTEL_ADSP_IPC_BUSY; in ipc_isr()
61 IDC[cpu_id].agents[0].ipc.tdr = INTEL_ADSP_IPC_BUSY; in ipc_isr()
62 IDC[cpu_id].agents[0].ipc.tda = 0; in ipc_isr()
/Zephyr-latest/include/zephyr/arch/arc/v2/vpx/
Darc_vpx.h52 void arc_vpx_unlock_force(unsigned int cpu_id);
/Zephyr-latest/drivers/mbox/
Dmbox_nxp_mailbox.c70 mailbox_cpu_id_t cpu_id; in mailbox_isr() local
72 cpu_id = MAILBOX_ID_THIS_CPU; in mailbox_isr()
74 volatile uint32_t mailbox_value = MAILBOX_GetValue(config->base, cpu_id); in mailbox_isr()
78 MAILBOX_ClearValueBits(config->base, cpu_id, mailbox_value); in mailbox_isr()
/Zephyr-latest/drivers/interrupt_controller/
Dintc_plic.c501 uint32_t cpu_id = arch_curr_cpu()->id; in plic_irq_handler() local
506 uint16_t *cpu_count = get_irq_hit_count_cpu(dev, cpu_id, local_irq); in plic_irq_handler()
536 save_irq[cpu_id] = local_irq; in plic_irq_handler()
537 save_dev[cpu_id] = dev; in plic_irq_handler()
653 for (int cpu_id = 0; cpu_id < arch_num_cpus(); cpu_id++) { in cmd_stats_get() local
654 shell_fprintf(sh, SHELL_NORMAL, " CPU%2d", cpu_id); in cmd_stats_get()
671 for (int cpu_id = 0; cpu_id < arch_num_cpus(); cpu_id++) { in cmd_stats_get() local
672 uint16_t *cpu_count = get_irq_hit_count_cpu(dev, cpu_id, i); in cmd_stats_get()
/Zephyr-latest/drivers/clock_control/
Dclock_control_agilex5_ll.c158 uint8_t cpu_id = arch_curr_cpu()->id; in get_mpu_clk() local
162 if (cpu_id > CLKCTRL_CPU_ID_CORE1) { in get_mpu_clk()
172 switch (cpu_id) { in get_mpu_clk()
/Zephyr-latest/arch/riscv/core/
Dstacktrace.c28 static inline bool in_irq_stack_bound(uintptr_t addr, uint8_t cpu_id) in in_irq_stack_bound() argument
32 start = (uintptr_t)K_KERNEL_STACK_BUFFER(z_interrupt_stacks[cpu_id]); in in_irq_stack_bound()
238 uint8_t cpu_id = IS_ENABLED(CONFIG_SMP) ? arch_curr_cpu()->id : 0U; in in_fatal_stack_bound() local
240 return in_irq_stack_bound(addr, cpu_id); in in_fatal_stack_bound()
/Zephyr-latest/arch/x86/core/
Dfatal.c55 int cpu_id; in z_x86_check_stack_bounds() local
58 cpu_id = arch_curr_cpu()->id; in z_x86_check_stack_bounds()
60 cpu_id = 0; in z_x86_check_stack_bounds()
63 z_interrupt_stacks[cpu_id]); in z_x86_check_stack_bounds()
/Zephyr-latest/arch/arm64/core/
Dfatal.c35 int cpu_id; in z_arm64_safe_exception_stack_init() local
38 cpu_id = arch_curr_cpu()->id; in z_arm64_safe_exception_stack_init()
39 safe_exc_sp = K_KERNEL_STACK_BUFFER(z_arm64_safe_exception_stacks[cpu_id]) + in z_arm64_safe_exception_stack_init()
/Zephyr-latest/arch/x86/include/intel64/
Dkernel_arch_data.h29 uint8_t cpu_id; /* CPU ID */ member
/Zephyr-latest/tests/kernel/smp/src/
Dmain.c49 int cpu_id; member
209 tinfo[thread_num].cpu_id = curr_cpu(); in thread_entry_fn()
584 int cpu_id = -1; in thread_get_cpu_entry() local
593 cpu_id = curr_cpu->id; in thread_get_cpu_entry()
595 zassert_true(bsp_id != cpu_id, in thread_get_cpu_entry()
/Zephyr-latest/kernel/
Dusage.c122 void z_sched_cpu_usage(uint8_t cpu_id, struct k_thread_runtime_stats *stats) in z_sched_cpu_usage() argument
131 if (&_kernel.cpus[cpu_id] == cpu) { in z_sched_cpu_usage()
165 _kernel.cpus[cpu_id].idle_thread->base.usage.total; in z_sched_cpu_usage()
/Zephyr-latest/tests/lib/lockfree/src/
Dtest_mpsc.c80 int cpu_id; member
Dtest_spsc.c198 int cpu_id; member

12