1 /***************************************************************************//**
2  * @file
3  * @brief Core API
4  *******************************************************************************
5  * # License
6  * <b>Copyright 2023 Silicon Laboratories Inc. www.silabs.com</b>
7  *******************************************************************************
8  *
9  * SPDX-License-Identifier: Zlib
10  *
11  * The licensor of this software is Silicon Laboratories Inc.
12  *
13  * This software is provided 'as-is', without any express or implied
14  * warranty. In no event will the authors be held liable for any damages
15  * arising from the use of this software.
16  *
17  * Permission is granted to anyone to use this software for any purpose,
18  * including commercial applications, and to alter it and redistribute it
19  * freely, subject to the following restrictions:
20  *
21  * 1. The origin of this software must not be misrepresented; you must not
22  *    claim that you wrote the original software. If you use this software
23  *    in a product, an acknowledgment in the product documentation would be
24  *    appreciated but is not required.
25  * 2. Altered source versions must be plainly marked as such, and must not be
26  *    misrepresented as being the original software.
27  * 3. This notice may not be removed or altered from any source distribution.
28  *
29  ******************************************************************************/
30 
31 #ifndef SL_CORE_H
32 #define SL_CORE_H
33 
34 #include <stdint.h>
35 #include <stdbool.h>
36 #include "sl_code_classification.h"
37 
38 #ifdef __cplusplus
39 extern "C" {
40 #endif
41 
42 /***************************************************************************//**
43  * @addtogroup sl_core Core
44  *
45  * @section sl_core_intro Introduction
46  *
 * The core abstraction API provides high-level, device-agnostic control of
48  * core peripherals, most notably the ability to execute code in sections with
49  * varying levels of interrupt masking.
50  *
51  * This module provides support for two types of critical sections, each
52  * with different interrupt masking capabilities.
53  *
54  * @li <b>CRITICAL section</b>: Inside a critical section, all interrupts are
55  *     masked (except for core exception handlers).
 * @li <b>ATOMIC section</b>: Inside an atomic section, interrupts with a
 *     priority level equal to or lower than the configurable
 *     @ref CORE_ATOMIC_BASE_PRIORITY_LEVEL value will be masked.
59  *
60  * @section sl_core_conf Compile-time Configuration
61  *
62  * The following #define is used to configure sl_core:
63  * @code{.c}
64  * // Enables debug methods to measure the time spent in critical sections.
65  * #define SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING   0
66  * @endcode
67  *
68  * @section sl_core_macro_api Macro API
69  *
70  * The core abstraction API has macros to facilitate executing code in
71  * ATOMIC and CRITICAL sections.
72  *
73  * @ref CORE_DECLARE_IRQ_STATE, @ref CORE_ENTER_ATOMIC() and
74  * @ref CORE_EXIT_ATOMIC() can be used together to implement an ATOMIC section.
75  * @code{.c}
76  * {
77  *   CORE_DECLARE_IRQ_STATE;           // Storage for saving IRQ state prior to
78  *                                     // atomic section entry.
79  *
80  *   CORE_ENTER_ATOMIC();              // Enter atomic section.
81  *
82  *   ...
83  *   ... your code goes here ...
84  *   ...
85  *
86  *   CORE_EXIT_ATOMIC();               // Exit atomic section, IRQ state is restored.
87  * }
88  * @endcode
89  *
 * @ref CORE_ATOMIC_SECTION(yourcode) is a concatenation of all three of the
 * macros above.
92  * @code{.c}
93  * {
94  *   CORE_ATOMIC_SECTION(
95  *     ...
96  *     ... your code goes here ...
97  *     ...
98  *   )
99  * }
100  * @endcode
101  *
 * The following macros implement CRITICAL sections in a fashion similar to
 * that described above for ATOMIC sections:
 * @li @ref CORE_DECLARE_IRQ_STATE
 * @li @ref CORE_ENTER_CRITICAL()
 * @li @ref CORE_EXIT_CRITICAL()
 * @li @ref CORE_CRITICAL_SECTION(yourcode)
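 *
 * A CRITICAL section follows the same pattern as the ATOMIC example above.
 * The following is a minimal sketch using the macros listed here:
 * @code{.c}
 * {
 *   CORE_DECLARE_IRQ_STATE;           // Storage for saving IRQ state prior to
 *                                     // critical section entry.
 *
 *   CORE_ENTER_CRITICAL();            // Enter critical section.
 *
 *   ...
 *   ... your code goes here ...
 *   ...
 *
 *   CORE_EXIT_CRITICAL();             // Exit critical section, IRQ state is restored.
 * }
 * @endcode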
108  *
109  * @section sl_core_reimplementation API Reimplementation
110  *
 * Most of the functions in the API are implemented as weak functions, which
 * makes them easy to reimplement when special needs arise. Shown below is a
 * reimplementation of CRITICAL sections suitable when FreeRTOS is used:
114  * @code{.c}
115  * CORE_irqState_t CORE_EnterCritical(void)
116  * {
117  *   vPortEnterCritical();
118  *   return 0;
119  * }
120  *
121  * void CORE_ExitCritical(CORE_irqState_t irqState)
122  * {
123  *   (void)irqState;
124  *   vPortExitCritical();
125  * }
126  * @endcode
127  * Also note that CORE_Enter/ExitCritical() are not implemented as inline
128  * functions. As a result, reimplementations will be possible even when original
129  * implementations are inside a linked library.
130  *
 * Some RTOSes must be notified on interrupt handler entry and exit. The
 * @ref CORE_INTERRUPT_ENTRY() and @ref CORE_INTERRUPT_EXIT() macros are
 * suitable placeholders for inserting such code. Insert these macros in all
 * your interrupt handlers and then override the default macro implementations.
 * Here is an example for uC/OS:
136  * @code{.c}
137  * // In emlib_config.h:
138  *
139  * #define CORE_INTERRUPT_ENTRY()   OSIntEnter()
140  * #define CORE_INTERRUPT_EXIT()    OSIntExit()
141  * @endcode
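 *
 * As an illustrative sketch (the handler name and its body are placeholders),
 * the macros are typically placed at the very start and very end of each
 * interrupt handler:
 * @code{.c}
 * void TIMER0_IRQHandler(void)     // Example handler name; use your device's handlers.
 * {
 *   CORE_INTERRUPT_ENTRY();        // Notify the RTOS of interrupt entry.
 *
 *   // ... clear interrupt flags and handle the interrupt here ...
 *
 *   CORE_INTERRUPT_EXIT();         // Notify the RTOS of interrupt exit.
 * }
 * @endcode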
142  *
143  * @section sl_core_max_timing Maximum Interrupt Disabled Time
144  *
145  * The maximum time spent (in cycles) in critical and atomic sections can be
146  * measured for performance and interrupt latency analysis.
 * To enable the timings, use the SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING
148  * configuration option. When enabled, the functions
149  * @ref CORE_get_max_time_critical_section() and
150  * @ref CORE_get_max_time_atomic_section()
151  * can be used to get the max timings since startup.
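 *
 * A minimal sketch of reading and resetting the measurements, assuming the
 * timing configuration above is enabled:
 * @code{.c}
 * uint32_t max_critical_cycles = CORE_get_max_time_critical_section();
 * uint32_t max_atomic_cycles   = CORE_get_max_time_atomic_section();
 *
 * // Optionally reset the measurements, e.g. before a new test run.
 * CORE_clear_max_time_critical_section();
 * CORE_clear_max_time_atomic_section();
 * @endcode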
152  *
153  * @section sl_core_porting Porting from em_int
154  *
155  * Existing code using INT_Enable() and INT_Disable() must be ported to the
 * sl_core API. While em_int used a global counter to store the interrupt state,
157  * sl_core uses a local variable. Any usage of INT_Disable(), therefore, needs to
158  * be replaced with a declaration of the interrupt state variable before entering
159  * the critical section.
160  *
161  * Since the state variable is in local scope, the critical section exit
162  * needs to occur within the scope of the variable. If multiple nested critical
163  * sections are used, each needs to have its own state variable in its own scope.
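 *
 * For example, a sketch of two nested sections, each with its own state
 * variable in its own scope:
 * @code{.c}
 * {
 *   CORE_DECLARE_IRQ_STATE;         // State for the outer section.
 *   CORE_ENTER_ATOMIC();
 *   {
 *     CORE_DECLARE_IRQ_STATE;       // Separate state for the inner section.
 *     CORE_ENTER_CRITICAL();
 *     // ...
 *     CORE_EXIT_CRITICAL();
 *   }
 *   // ...
 *   CORE_EXIT_ATOMIC();
 * }
 * @endcode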
164  *
165  * In many cases, completely disabling all interrupts using CRITICAL sections
166  * might be more heavy-handed than needed. When porting, consider whether
167  * an ATOMIC section can be used to only disable a subset of the interrupts.
168  *
169  * Replacing em_int calls with sl_core function calls:
170  * @code{.c}
171  * void func(void)
172  * {
173  *   // INT_Disable();
174  *   CORE_DECLARE_IRQ_STATE;
175  *   CORE_ENTER_ATOMIC();
176  *     .
177  *     .
178  *     .
179  *   // INT_Enable();
180  *   CORE_EXIT_ATOMIC();
181  * }
182  * @endcode
183  * @{
184  ******************************************************************************/
185 
186 /*******************************************************************************
187  *****************************   DEFINES   *************************************
188  ******************************************************************************/
189 
190 #if !defined(CORE_ATOMIC_BASE_PRIORITY_LEVEL)
191 /** The interrupt priority level disabled within ATOMIC regions. Interrupts
192  *  with priority level equal to or lower than this definition will be disabled
193  *  within ATOMIC regions. */
194 #define CORE_ATOMIC_BASE_PRIORITY_LEVEL 3
195 #else
196 #ifndef SL_SUPPRESS_DEPRECATION_WARNINGS_SDK_2024_6
197 #warning "The CORE_ATOMIC_BASE_PRIORITY_LEVEL configuration is DEPRECATED. In \
198   later releases, the base priority of atomic sections will be hardcoded to 3 \
199   and will no longer be configurable. Please consider updating the priorities \
200   of interrupts in your application to account for this new hardcoded value."
201 #endif
202 #endif
203 
204 /*******************************************************************************
205  ************************   MACRO API   ***************************************
206  ******************************************************************************/
207 
208 /// Allocate storage for PRIMASK or BASEPRI value for use by
209 /// CORE_ENTER/EXIT_ATOMIC() and CORE_ENTER/EXIT_CRITICAL() macros.
210 #define CORE_DECLARE_IRQ_STATE      CORE_irqState_t irqState
211 
212 /// CRITICAL style interrupt disable.
213 #define CORE_CRITICAL_IRQ_DISABLE() CORE_CriticalDisableIrq()
214 
215 /// CRITICAL style interrupt enable.
216 #define CORE_CRITICAL_IRQ_ENABLE()  CORE_CriticalEnableIrq()
217 
218 /// Convenience macro for implementing a CRITICAL section.
219 #define CORE_CRITICAL_SECTION(yourcode) \
220   {                                     \
221     CORE_DECLARE_IRQ_STATE;             \
222     CORE_ENTER_CRITICAL();              \
223     {                                   \
224       yourcode                          \
225     }                                   \
226     CORE_EXIT_CRITICAL();               \
227   }
228 
/// Enter CRITICAL section. Assumes that a @ref CORE_DECLARE_IRQ_STATE exists
/// in scope.
231 #define CORE_ENTER_CRITICAL() irqState = CORE_EnterCritical()
232 
/// Exit CRITICAL section. Assumes that a @ref CORE_DECLARE_IRQ_STATE exists
/// in scope.
235 #define CORE_EXIT_CRITICAL()  CORE_ExitCritical(irqState)
236 
237 /// CRITICAL style yield.
238 #define CORE_YIELD_CRITICAL() CORE_YieldCritical()
239 
240 /// ATOMIC style interrupt disable.
241 #define CORE_ATOMIC_IRQ_DISABLE() CORE_AtomicDisableIrq()
242 
243 /// ATOMIC style interrupt enable.
244 #define CORE_ATOMIC_IRQ_ENABLE()  CORE_AtomicEnableIrq()
245 
246 /// Convenience macro for implementing an ATOMIC section.
247 #define CORE_ATOMIC_SECTION(yourcode) \
248   {                                   \
249     CORE_DECLARE_IRQ_STATE;           \
250     CORE_ENTER_ATOMIC();              \
251     {                                 \
252       yourcode                        \
253     }                                 \
254     CORE_EXIT_ATOMIC();               \
255   }
256 
/// Enter ATOMIC section. Assumes that a @ref CORE_DECLARE_IRQ_STATE exists
/// in scope.
259 #define CORE_ENTER_ATOMIC()   irqState = CORE_EnterAtomic()
260 
/// Exit ATOMIC section. Assumes that a @ref CORE_DECLARE_IRQ_STATE exists
/// in scope.
263 #define CORE_EXIT_ATOMIC()    CORE_ExitAtomic(irqState)
264 
265 /// ATOMIC style yield.
266 #define CORE_YIELD_ATOMIC()   CORE_YieldAtomic()
267 
268 /// Check if IRQ is disabled.
269 #define CORE_IRQ_DISABLED()   CORE_IrqIsDisabled()
270 
271 /// Check if inside an IRQ handler.
272 #define CORE_IN_IRQ_CONTEXT() CORE_InIrqContext()
273 
/// Reset System.
275 #define CORE_RESET_SYSTEM()   CORE_ResetSystem()
276 
277 /*******************************************************************************
278  *************************   TYPEDEFS   ****************************************
279  ******************************************************************************/
280 
281 /// Storage for PRIMASK or BASEPRI value.
282 typedef uint32_t CORE_irqState_t;
283 
284 /*******************************************************************************
285  *****************************   PROTOTYPES   **********************************
286  ******************************************************************************/
287 
288 /***************************************************************************//**
289  * @brief
290  *   Disable interrupts.
291  *
292  *   Disable all interrupts by setting PRIMASK.
293  *   (Fault exception handlers will still be enabled).
294  ******************************************************************************/
295 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
296 void CORE_CriticalDisableIrq(void);
297 
298 /***************************************************************************//**
299  * @brief
300  *   Enable interrupts.
301  *
302  *   Enable interrupts by clearing PRIMASK.
303  ******************************************************************************/
304 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
305 void CORE_CriticalEnableIrq(void);
306 
307 /***************************************************************************//**
308  * @brief
309  *   Exit a CRITICAL section.
310  *
311  * @param[in] irqState
312  *   The interrupt priority blocking level to restore to PRIMASK when exiting
313  *   the CRITICAL section. This value is usually the one returned by a prior
314  *   call to @ref CORE_EnterCritical().
315  ******************************************************************************/
316 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
317 void CORE_ExitCritical(CORE_irqState_t irqState);
318 
319 /***************************************************************************//**
320  * @brief
321  *   Brief interrupt enable/disable sequence to allow handling of
322  *   pending interrupts.
323  *
324  * @note
325  *   Usually used within a CRITICAL section.
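 *
 * @note
 *   As an illustrative sketch (work_remaining() and do_chunk() are
 *   placeholders), this can be called periodically from a long-running
 *   CRITICAL section so that pending interrupts get a chance to be serviced:
 * @code{.c}
 *   CORE_DECLARE_IRQ_STATE;
 *   CORE_ENTER_CRITICAL();
 *   while (work_remaining()) {
 *     do_chunk();
 *     CORE_YIELD_CRITICAL();     // Briefly re-enable interrupts to service pending IRQs.
 *   }
 *   CORE_EXIT_CRITICAL();
 * @endcode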
326  ******************************************************************************/
327 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
328 void CORE_YieldCritical(void);
329 
330 /***************************************************************************//**
331  * @brief
332  *   Enter a CRITICAL section.
333  *
334  *   When a CRITICAL section is entered, all interrupts (except fault handlers)
335  *   are disabled.
336  *
337  * @return
338  *   The value of PRIMASK register prior to the CRITICAL section entry.
339  ******************************************************************************/
340 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
341 CORE_irqState_t CORE_EnterCritical(void);
342 
343 /***************************************************************************//**
344  * @brief
345  *   Disable interrupts.
346  *
347  *   Disable interrupts with a priority lower or equal to
348  *   @ref CORE_ATOMIC_BASE_PRIORITY_LEVEL. Sets core BASEPRI register
349  *   to CORE_ATOMIC_BASE_PRIORITY_LEVEL.
350  *
351  * @note
352  *   If @ref CORE_ATOMIC_METHOD is @ref CORE_ATOMIC_METHOD_PRIMASK, this
353  *   function is identical to @ref CORE_CriticalDisableIrq().
354  ******************************************************************************/
355 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
356 void  CORE_AtomicDisableIrq(void);
357 
358 /***************************************************************************//**
359  * @brief
360  *   Enable interrupts.
361  *
362  *   Enable interrupts by setting core BASEPRI register to 0.
363  *
364  * @note
365  *   If @ref CORE_ATOMIC_METHOD is @ref CORE_ATOMIC_METHOD_BASEPRI and PRIMASK
366  *   is set (CPU is inside a CRITICAL section), interrupts will still be
367  *   disabled after calling this function.
368  *
369  * @note
370  *   If @ref CORE_ATOMIC_METHOD is @ref CORE_ATOMIC_METHOD_PRIMASK, this
371  *   function is identical to @ref CORE_CriticalEnableIrq().
372  ******************************************************************************/
373 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
374 void  CORE_AtomicEnableIrq(void);
375 
376 /***************************************************************************//**
377  * @brief
378  *   Exit an ATOMIC section.
379  *
380  * @param[in] irqState
381  *   The interrupt priority blocking level to restore to BASEPRI when exiting
382  *   the ATOMIC section. This value is usually the one returned by a prior
383  *   call to @ref CORE_EnterAtomic().
384  *
385  * @note
386  *   If @ref CORE_ATOMIC_METHOD is set to @ref CORE_ATOMIC_METHOD_PRIMASK, this
387  *   function is identical to @ref CORE_ExitCritical().
388  ******************************************************************************/
389 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
390 void  CORE_ExitAtomic(CORE_irqState_t irqState);
391 
392 /***************************************************************************//**
393  * @brief
394  *   Brief interrupt enable/disable sequence to allow handling of
395  *   pending interrupts.
396  *
397  * @note
398  *   Usually used within an ATOMIC section.
399  *
400  * @note
401  *   If @ref CORE_ATOMIC_METHOD is @ref CORE_ATOMIC_METHOD_PRIMASK, this
402  *   function is identical to @ref CORE_YieldCritical().
403  ******************************************************************************/
404 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
405 void  CORE_YieldAtomic(void);
406 
407 /***************************************************************************//**
408  * @brief
409  *   Enter an ATOMIC section.
410  *
411  *   When an ATOMIC section is entered, interrupts with priority lower or equal
412  *   to @ref CORE_ATOMIC_BASE_PRIORITY_LEVEL are disabled.
413  *
414  * @note
415  *   If @ref CORE_ATOMIC_METHOD is @ref CORE_ATOMIC_METHOD_PRIMASK, this
416  *   function is identical to @ref CORE_EnterCritical().
417  *
418  * @return
419  *   The value of BASEPRI register prior to ATOMIC section entry.
420  ******************************************************************************/
421 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
422 CORE_irqState_t CORE_EnterAtomic(void);
423 
424 /***************************************************************************//**
425  * @brief
426  *   Check whether the current CPU operation mode is handler mode.
427  *
428  * @return
429  *   True if the CPU is in handler mode (currently executing an interrupt handler).
430  *   @n False if the CPU is in thread mode.
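 *
 * @note
 *   An illustrative use (sketch) is selecting a non-blocking path when the
 *   caller may run in either context:
 * @code{.c}
 *   if (CORE_InIrqContext()) {
 *     // Called from an interrupt handler: defer work, do not block.
 *   } else {
 *     // Thread mode: blocking is allowed.
 *   }
 * @endcode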
431  ******************************************************************************/
432 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
433 bool  CORE_InIrqContext(void);
434 
435 /***************************************************************************//**
436  * @brief
437  *   Check if interrupts are disabled.
438  *
439  * @return
440  *   True if interrupts are disabled.
441  ******************************************************************************/
442 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
443 bool  CORE_IrqIsDisabled(void);
444 
445 /***************************************************************************//**
446  * @brief
 *   Returns the max time spent in a critical section.
448  *
449  * @return
 *   The max time spent in a critical section.
451  *
452  * @note SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING must be enabled.
453  ******************************************************************************/
454 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
455 uint32_t CORE_get_max_time_critical_section(void);
456 
457 /***************************************************************************//**
458  * @brief
 *   Returns the max time spent in an atomic section.
460  *
461  * @return
 *   The max time spent in an atomic section.
463  *
464  * @note SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING must be enabled.
465  ******************************************************************************/
466 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
467 uint32_t CORE_get_max_time_atomic_section(void);
468 
469 /***************************************************************************//**
470  * @brief
 *   Clears the max time spent in a critical section.
472  *
473  * @note SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING must be enabled.
474  ******************************************************************************/
475 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
476 void CORE_clear_max_time_critical_section(void);
477 
478 /***************************************************************************//**
479  * @brief
 *   Clears the max time spent in an atomic section.
481  *
482  * @note SL_CORE_DEBUG_INTERRUPTS_MASKED_TIMING must be enabled.
483  ******************************************************************************/
484 SL_CODE_CLASSIFY(SL_CODE_COMPONENT_CORE, SL_CODE_CLASS_TIME_CRITICAL)
485 void CORE_clear_max_time_atomic_section(void);
486 
487 /***************************************************************************//**
488  * @brief
489  *   Reset chip routine.
490  ******************************************************************************/
491 void CORE_ResetSystem(void);
492 
493 /** @} (end addtogroup sl_core) */
494 
495 #ifdef __cplusplus
496 }
497 #endif
498 
499 #endif /* SL_CORE_H */
500