/*
 * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
 * Copyright 2025 Arm Limited and/or its affiliates <open-source-office@arm.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */
/**
 * @file
 * @brief Private kernel definitions (ARM)
 *
 * This file contains private kernel function definitions and various
 * other definitions for the 32-bit ARM Cortex-M processor architecture
 * family.
 *
 * This file is also included by assembly language files, which must
 * #define _ASMLANGUAGE before including this header file. Note that kernel
 * assembly source files obtain structure offset values via "absolute symbols"
 * in the offsets.o module.
 */

#ifndef ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_
#define ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_

#include <zephyr/platform/hooks.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef _ASMLANGUAGE
extern void z_arm_fault_init(void);
extern void z_arm_cpu_idle_init(void);
#ifdef CONFIG_ARM_MPU
extern void z_arm_configure_static_mpu_regions(void);
extern void z_arm_configure_dynamic_mpu_regions(struct k_thread *thread);
extern int z_arm_mpu_init(void);
#endif /* CONFIG_ARM_MPU */
#ifdef CONFIG_ARM_AARCH32_MMU
extern int z_arm_mmu_init(void);
#endif /* CONFIG_ARM_AARCH32_MMU */

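/* Architecture-specific kernel initialization, invoked during early kernel
 * startup: it sets up the interrupt stack and exception state, initializes
 * fault handling and CPU idle support, clears any stale faults, and, when
 * CONFIG_ARM_MPU is enabled, initializes the MPU and programs the static
 * memory regions before running the optional per-core SoC init hook.
 */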
static ALWAYS_INLINE void arch_kernel_init(void)
{
	z_arm_interrupt_stack_setup();
	z_arm_exc_setup();
	z_arm_fault_init();
	z_arm_cpu_idle_init();
	z_arm_clear_faults();
#if defined(CONFIG_ARM_MPU)
	z_arm_mpu_init();
	/* Configure the static memory map. This programs MPU regions to set
	 * up access permissions for fixed memory sections, such as the
	 * Application Memory or the No-Cacheable SRAM area.
	 *
	 * This function is invoked once, upon system initialization.
	 */
	z_arm_configure_static_mpu_regions();
#endif /* CONFIG_ARM_MPU */

#ifdef CONFIG_SOC_PER_CORE_INIT_HOOK
	soc_per_core_init_hook();
#endif /* CONFIG_SOC_PER_CORE_INIT_HOOK */
}

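/* Record the value that arch_swap() will return for the given thread the
 * next time that thread is scheduled back in (see the swap_return_value
 * handling in arch_swap() below).
 */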
static ALWAYS_INLINE void arch_thread_return_value_set(struct k_thread *thread, unsigned int value)
{
	thread->arch.swap_return_value = value;
}

#if !defined(CONFIG_MULTITHREADING)
extern FUNC_NORETURN void z_arm_switch_to_main_no_multithreading(k_thread_entry_t main_func,
								 void *p1, void *p2, void *p3);

#define ARCH_SWITCH_TO_MAIN_NO_MULTITHREADING z_arm_switch_to_main_no_multithreading

#endif /* !CONFIG_MULTITHREADING */

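/* Drop the current thread to user (unprivileged) mode and enter
 * user_entry(p1, p2, p3); this routine does not return. The remaining
 * arguments describe the thread's stack layout and whether the active stack
 * pointer currently refers to the privileged stack. (Brief summary only;
 * the precise stack-argument semantics are defined by the Cortex-M
 * userspace assembly implementation.)
 */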
extern FUNC_NORETURN void z_arm_userspace_enter(k_thread_entry_t user_entry, void *p1, void *p2,
						void *p3, uint32_t stack_end, uint32_t stack_start,
						uint32_t sp_is_priv);

extern void z_arm_fatal_error(unsigned int reason, const struct arch_esf *esf);

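/* Give up the CPU to another thread. The caller's interrupt lock key is
 * saved in the outgoing thread's BASEPRI slot and the swap return value is
 * primed to -EAGAIN; the PendSV exception is then pended and interrupts are
 * unlocked so the PendSV handler can perform the actual context switch.
 * Execution resumes here once the thread is switched back in, returning
 * whatever value was set via arch_thread_return_value_set() in the interim.
 */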
static ALWAYS_INLINE int arch_swap(unsigned int key)
{
	/* store off key and return value */
	_current->arch.basepri = key;
	_current->arch.swap_return_value = -EAGAIN;

	/* set pending bit to make sure we will take a PendSV exception */
	SCB->ICSR |= SCB_ICSR_PENDSVSET_Msk;

	/* clear mask or enable all irqs to take a pendsv */
	irq_unlock(0);

	/* Context switch is performed here. Returning implies the
	 * thread has been context-switched-in again.
	 */
	return _current->arch.swap_return_value;
}
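
/* A minimal usage sketch (illustrative only): callers take an interrupt
 * lock, mark the current thread as not runnable, and pass the lock key to
 * arch_swap(), which releases the lock as part of pending the switch. The
 * wrapper name below is an assumption for illustration, not necessarily
 * the kernel's actual helper:
 *
 *	static inline int example_swap_irqlock(unsigned int key)
 *	{
 *		return arch_swap(key);
 *	}
 *
 *	unsigned int key = irq_lock();
 *	// ...remove the current thread from the ready queue...
 *	int ret = example_swap_irqlock(key);
 */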

#endif /* _ASMLANGUAGE */

#ifdef __cplusplus
}
#endif

#endif /* ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_ */