  1. /*
  2. * Copyright (c) 2013-2014 Wind River Systems, Inc.
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. /**
  7. * @file
  8. * @brief ARM AArch32 public error handling
  9. *
  10. * ARM AArch32-specific kernel error handling interface. Included by
  11. * arm/arch.h.
  12. */
  13. #ifndef ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_
  14. #define ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_
  15. #include <arch/arm/aarch32/syscall.h>
  16. #include <arch/arm/aarch32/exc.h>
  17. #include <stdbool.h>
  18. #ifdef __cplusplus
  19. extern "C" {
  20. #endif
  21. #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
  22. /* ARMv6 will hard-fault if SVC is called with interrupts locked. Just
  23. * force them unlocked, the thread is in an undefined state anyway
  24. *
  25. * On ARMv7m we won't get a HardFault, but if interrupts were locked the
  26. * thread will continue executing after the exception and forbid PendSV to
  27. * schedule a new thread until they are unlocked which is not what we want.
  28. * Force them unlocked as well.
  29. */
  30. #define ARCH_EXCEPT(reason_p) \
  31. register uint32_t r0 __asm__("r0") = reason_p; \
  32. do { \
  33. __asm__ volatile ( \
  34. "cpsie i\n\t" \
  35. "svc %[id]\n\t" \
  36. : \
  37. : "r" (r0), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
  38. : "memory"); \
  39. } while (false)
  40. #elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
  41. #define ARCH_EXCEPT(reason_p) do { \
  42. __asm__ volatile ( \
  43. "eors.n r0, r0\n\t" \
  44. "msr BASEPRI, r0\n\t" \
  45. "mov r0, %[reason]\n\t" \
  46. "svc %[id]\n\t" \
  47. : \
  48. : [reason] "i" (reason_p), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
  49. : "memory"); \
  50. } while (false)
  51. #elif defined(CONFIG_ARMV7_R)
  52. /*
  53. * In order to support using svc for an exception while running in an
  54. * isr, stack $lr_svc before calling svc. While exiting the isr,
  55. * z_check_stack_sentinel is called. $lr_svc contains the return address.
  56. * If the sentinel is wrong, it calls svc to cause an oops. This svc
  57. * call will overwrite $lr_svc, losing the return address from the
  58. * z_check_stack_sentinel call if it is not stacked before the svc.
  59. */
  60. #define ARCH_EXCEPT(reason_p) \
  61. register uint32_t r0 __asm__("r0") = reason_p; \
  62. do { \
  63. __asm__ volatile ( \
  64. "push {lr}\n\t" \
  65. "cpsie i\n\t" \
  66. "svc %[id]\n\t" \
  67. "pop {lr}\n\t" \
  68. : \
  69. : "r" (r0), [id] "i" (_SVC_CALL_RUNTIME_EXCEPT) \
  70. : "memory"); \
  71. } while (false)
  72. #else
  73. #error Unknown ARM architecture
  74. #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
  75. #ifdef __cplusplus
  76. }
  77. #endif
  78. #endif /* ZEPHYR_INCLUDE_ARCH_ARM_AARCH32_ERROR_H_ */