/*
 * Copyright (c) 2013-2014 Wind River Systems, Inc.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief ARM AArch32 public error handling
 *
 * ARM AArch32-specific kernel error handling interface. Included by
 * arm/arch.h.
 */

#ifndef ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_
#define ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_

#include <zephyr/arch/arm/aarch32/syscall.h>
#include <zephyr/arch/arm/aarch32/exc.h>
#include <stdbool.h>

#ifdef __cplusplus
extern "C" {
#endif

#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
/* ARMv6 will hard-fault if SVC is called with interrupts locked. Just
 * force them unlocked; the thread is in an undefined state anyway.
 *
 * On ARMv7-M we won't get a HardFault, but if interrupts were locked, the
 * thread will continue executing after the exception and prevent PendSV
 * from scheduling a new thread until they are unlocked, which is not what
 * we want. Force them unlocked as well.
 */
#define ARCH_EXCEPT(reason_p) \
	register uint32_t r0 __asm__("r0") = reason_p; \
	do { \
		__asm__ volatile ( \
			"cpsie i\n\t" \
			"svc %[id]\n\t" \
			: \
			: "r" (r0), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
			: "memory"); \
	} while (false)
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
#define ARCH_EXCEPT(reason_p) do { \
	__asm__ volatile ( \
		"eors.n r0, r0\n\t" \
		"msr BASEPRI, r0\n\t" \
		"mov r0, %[reason]\n\t" \
		"svc %[id]\n\t" \
		: \
		: [reason] "i" (reason_p), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
		: "memory"); \
	} while (false)
#elif defined(CONFIG_ARMV7_R) || defined(CONFIG_AARCH32_ARMV8_R) \
	|| defined(CONFIG_ARMV7_A)
/*
 * In order to support using SVC for an exception while running in an
 * ISR, stack $lr_svc before calling svc. While exiting the ISR,
 * z_check_stack_sentinel is called. $lr_svc contains the return address.
 * If the sentinel is wrong, it calls svc to cause an oops. This svc
 * call would overwrite $lr_svc, losing the return address from the
 * z_check_stack_sentinel call if it were not stacked before the svc.
 */
#define ARCH_EXCEPT(reason_p) \
	register uint32_t r0 __asm__("r0") = reason_p; \
	do { \
		__asm__ volatile ( \
			"push {lr}\n\t" \
			"cpsie i\n\t" \
			"svc %[id]\n\t" \
			"pop {lr}\n\t" \
			: \
			: "r" (r0), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
			: "memory"); \
	} while (false)
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */

#ifdef __cplusplus
}
#endif

#endif /* ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_ */
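
/*
 * Usage sketch (illustrative only, not part of this header): kernel code
 * that needs to raise a fatal software exception is expected to invoke
 * ARCH_EXCEPT() with a reason code and then mark the following path as
 * unreachable, for example:
 *
 *	ARCH_EXCEPT(K_ERR_KERNEL_OOPS);
 *	CODE_UNREACHABLE;
 *
 * Here K_ERR_KERNEL_OOPS and CODE_UNREACHABLE are assumed to be provided
 * by the generic kernel fatal-error and toolchain headers. The macro
 * places the reason code in r0 and issues an SVC with the
 * _SVC_CALL_RUNTIME_EXCEPT identifier, so the SVC handler can pick up the
 * reason and route it into the architecture's fatal error handling path.
 */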