1 /*
2  * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 /**
7  * @file
8  * @brief Private kernel definitions (ARM)
9  *
10  * This file contains private kernel function definitions and various
11  * other definitions for the 32-bit ARM Cortex-M processor architecture
12  * family.
13  *
14  * This file is also included by assembly language files which must #define
15  * _ASMLANGUAGE before including this header file.  Note that kernel
 * assembly source files obtain structure offset values via "absolute symbols"
17  * in the offsets.o module.
18  */
19 
20 #ifndef ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_
21 #define ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_
22 
23 #include <zephyr/platform/hooks.h>
24 
25 #ifdef __cplusplus
26 extern "C" {
27 #endif
28 
29 #ifndef _ASMLANGUAGE
30 extern void z_arm_fault_init(void);
31 extern void z_arm_cpu_idle_init(void);
32 #ifdef CONFIG_ARM_MPU
33 extern void z_arm_configure_static_mpu_regions(void);
34 extern void z_arm_configure_dynamic_mpu_regions(struct k_thread *thread);
35 extern int z_arm_mpu_init(void);
36 #endif /* CONFIG_ARM_MPU */
37 #ifdef CONFIG_ARM_AARCH32_MMU
38 extern int z_arm_mmu_init(void);
39 #endif /* CONFIG_ARM_AARCH32_MMU */
40 
/**
 * @brief Architecture-specific kernel initialization for Cortex-M.
 *
 * Runs once per core early during kernel startup, before threads are
 * scheduled. The call order below is deliberate: the interrupt stack and
 * exception configuration are set up first, faults are initialized and
 * cleared, and only then is the MPU (if enabled) initialized and programmed
 * with the static memory map.
 */
static ALWAYS_INLINE void arch_kernel_init(void)
{
	/* Set up the dedicated interrupt stack before anything can fault
	 * or take an exception on this core.
	 */
	z_arm_interrupt_stack_setup();
	z_arm_exc_setup();
	z_arm_fault_init();
	z_arm_cpu_idle_init();
	/* Start from a clean slate: discard any stale fault state left
	 * over from before the kernel took control (e.g. by a bootloader).
	 */
	z_arm_clear_faults();
#if defined(CONFIG_ARM_MPU)
	/* MPU hardware must be initialized before any regions are
	 * programmed into it.
	 */
	z_arm_mpu_init();
	/* Configure static memory map. This will program MPU regions,
	 * to set up access permissions for fixed memory sections, such
	 * as Application Memory or No-Cacheable SRAM area.
	 *
	 * This function is invoked once, upon system initialization.
	 */
	z_arm_configure_static_mpu_regions();
#endif /* CONFIG_ARM_MPU */

#ifdef CONFIG_SOC_PER_CORE_INIT_HOOK
	/* Give the SoC layer a chance to run its own per-core setup. */
	soc_per_core_init_hook();
#endif /* CONFIG_SOC_PER_CORE_INIT_HOOK */
}
63 
64 static ALWAYS_INLINE void
arch_thread_return_value_set(struct k_thread * thread,unsigned int value)65 arch_thread_return_value_set(struct k_thread *thread, unsigned int value)
66 {
67 	thread->arch.swap_return_value = value;
68 }
69 
70 #if !defined(CONFIG_MULTITHREADING)
71 extern FUNC_NORETURN void z_arm_switch_to_main_no_multithreading(
72 	k_thread_entry_t main_func,
73 	void *p1, void *p2, void *p3);
74 
75 #define ARCH_SWITCH_TO_MAIN_NO_MULTITHREADING \
76 	z_arm_switch_to_main_no_multithreading
77 
78 #endif /* !CONFIG_MULTITHREADING */
79 
80 extern FUNC_NORETURN void z_arm_userspace_enter(k_thread_entry_t user_entry,
81 					       void *p1, void *p2, void *p3,
82 					       uint32_t stack_end,
83 					       uint32_t stack_start);
84 
85 extern void z_arm_fatal_error(unsigned int reason, const struct arch_esf *esf);
86 
/**
 * @brief Initiate a context switch via PendSV.
 *
 * Saves the caller's interrupt lock-out @a key in the current thread's
 * arch area, pends the PendSV exception, and unlocks interrupts so the
 * exception can be taken. Execution resumes here only once this thread
 * has been switched back in.
 *
 * @param key Interrupt lock-out key from a prior irq_lock(), restored by
 *            the context-switch code when this thread resumes.
 *
 * @return -EAGAIN by default, unless another thread overwrote this
 *         thread's swap_return_value via arch_thread_return_value_set()
 *         while it was swapped out.
 */
static ALWAYS_INLINE int arch_swap(unsigned int key)
{
	/* store off key and return value */
	arch_current_thread()->arch.basepri = key;
	arch_current_thread()->arch.swap_return_value = -EAGAIN;

	/* set pending bit to make sure we will take a PendSV exception */
	SCB->ICSR |= SCB_ICSR_PENDSVSET_Msk;

	/* clear mask or enable all irqs to take a pendsv */
	irq_unlock(0);

	/* Context switch is performed here. Returning implies the
	 * thread has been context-switched-in again.
	 */
	return arch_current_thread()->arch.swap_return_value;
}
104 
105 
106 #endif /* _ASMLANGUAGE */
107 
108 #ifdef __cplusplus
109 }
110 #endif
111 
112 #endif /* ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_ */
113