zephyr/arch/arm/include/cortex_m/exc.h

/*
* Copyright (c) 2013-2014 Wind River Systems, Inc.
*
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @file
* @brief Exception/interrupt context helpers for Cortex-M CPUs
*
* Exception/interrupt context helpers.
*/
#ifndef _ARM_CORTEXM_ISR__H_
#define _ARM_CORTEXM_ISR__H_
#include <arch/cpu.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifdef _ASMLANGUAGE
/* nothing */
#else
#include <arch/arm/cortex_m/cmsis.h>
#include <irq_offload.h>
#ifdef CONFIG_IRQ_OFFLOAD
extern volatile irq_offload_routine_t offload_routine;
#endif
/**
*
* @brief Find out if running in an ISR context
*
* The currently executing vector is read from the IPSR register. IRQs
* (exception numbers 16 and up), as well as the PendSV and SysTick
* exceptions, are considered interrupts. Taking a fault while already
* inside an exception is also treated as interrupt context.
*
* @return 1 if in ISR, 0 if not.
*/
static ALWAYS_INLINE int _IsInIsr(void)
{
	u32_t vector = __get_IPSR();

	/* IRQs + PendSV (14) + SYSTICK (15) are interrupts. */
	return (vector > 13)
#ifdef CONFIG_IRQ_OFFLOAD
		/* Only non-NULL if currently running an offloaded function */
		|| offload_routine != NULL
#endif
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
		/* On ARMv6-M there is no nested execution bit, so we check
		 * exception 3, hard fault, to detect a nested exception.
		 */
		|| (vector == 3)
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
		/* If not in thread mode, and if RETTOBASE bit in ICSR is 0,
		 * then there are preempted active exceptions to execute.
		 */
#ifndef CONFIG_BOARD_QEMU_CORTEX_M3
		/* The polarity of RETTOBASE is incorrectly flipped in
		 * all but the very latest master tip of QEMU's NVIC driver,
		 * see commit "armv7m: Rewrite NVIC to not use any GIC code".
		 * Until QEMU 2.9 is released, and the SDK is updated to
		 * include it, skip this check in QEMU.
		 */
		|| (vector && !(SCB->ICSR & SCB_ICSR_RETTOBASE_Msk))
#endif /* CONFIG_BOARD_QEMU_CORTEX_M3 */
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
		;
}
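/*
 * Usage sketch (illustrative, not part of the original header): kernel or
 * driver code can use _IsInIsr() to reject operations that are only legal
 * in thread mode. The function my_blocking_op() and semaphore my_sem below
 * are hypothetical, and the snippet assumes <kernel.h> and
 * <misc/__assert.h> are available:
 *
 *	void my_blocking_op(void)
 *	{
 *		__ASSERT(!_IsInIsr(), "must not block in ISR context");
 *		k_sem_take(&my_sem, K_FOREVER);
 *	}
 */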
#define _EXC_SVC_PRIO 0
#define _EXC_FAULT_PRIO 0
/**
* @brief Set up system exceptions
*
* Set exception priorities to conform with the BASEPRI locking mechanism.
* Set PendSV priority to lowest possible.
*
* Enable fault exceptions.
*
* @return N/A
*/
static ALWAYS_INLINE void _ExcSetup(void)
{
	NVIC_SetPriority(PendSV_IRQn, 0xff);

#ifdef CONFIG_CPU_CORTEX_M_HAS_BASEPRI
	NVIC_SetPriority(SVCall_IRQn, _EXC_SVC_PRIO);
#endif

#ifdef CONFIG_CPU_CORTEX_M_HAS_PROGRAMMABLE_FAULT_PRIOS
	NVIC_SetPriority(MemoryManagement_IRQn, _EXC_FAULT_PRIO);
	NVIC_SetPriority(BusFault_IRQn, _EXC_FAULT_PRIO);
	NVIC_SetPriority(UsageFault_IRQn, _EXC_FAULT_PRIO);

	/* Enable Usage, Mem, & Bus Faults */
	SCB->SHCSR |= SCB_SHCSR_USGFAULTENA_Msk | SCB_SHCSR_MEMFAULTENA_Msk |
		      SCB_SHCSR_BUSFAULTENA_Msk;
#endif
}
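/*
 * Usage sketch (assumption, not taken from the original source): _ExcSetup()
 * is meant to run once during early architecture bring-up, before any of the
 * exceptions it configures can be taken. The init routine below is
 * hypothetical and only illustrates the intended call order; _ClearFaults()
 * is defined later in this header:
 *
 *	static void hypothetical_arch_exc_init(void)
 *	{
 *		_ExcSetup();	// fix priorities, enable fault exceptions
 *		_ClearFaults();	// start with clean fault status registers
 *	}
 */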
/**
* @brief Clear Fault exceptions
*
* Clear out exceptions for Mem, Bus, Usage and Hard Faults
*
* @return N/A
*/
static ALWAYS_INLINE void _ClearFaults(void)
{
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
	/* Reset all faults */
	SCB->CFSR = SCB_CFSR_USGFAULTSR_Msk |
		    SCB_CFSR_MEMFAULTSR_Msk |
		    SCB_CFSR_BUSFAULTSR_Msk;

	/* Clear all Hard Faults - HFSR is write-one-to-clear */
	SCB->HFSR = 0xffffffff;
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
}
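/*
 * Usage sketch (assumption, not taken from the original source): CFSR and
 * HFSR are write-one-to-clear, so any diagnostics must be captured before
 * _ClearFaults() resets them. The reporting helper below is hypothetical
 * and assumes printk() is available on an ARMv7-M/ARMv8-M Mainline target:
 *
 *	static void hypothetical_fault_report_and_clear(void)
 *	{
 *		u32_t cfsr = SCB->CFSR;	// configurable fault status
 *		u32_t hfsr = SCB->HFSR;	// hard fault status
 *
 *		printk("CFSR 0x%x HFSR 0x%x\n", cfsr, hfsr);
 *		_ClearFaults();		// now safe to reset the sticky bits
 *	}
 */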
#endif /* _ASMLANGUAGE */
#ifdef __cplusplus
}
#endif
#endif /* _ARM_CORTEXM_ISR__H_ */