Diffstat (limited to 'Include/cmsis_armcc.h')
 Include/cmsis_armcc.h | 243
 1 file changed, 187 insertions(+), 56 deletions(-)
diff --git a/Include/cmsis_armcc.h b/Include/cmsis_armcc.h
index 74c49c6..4d9d064 100644
--- a/Include/cmsis_armcc.h
+++ b/Include/cmsis_armcc.h
@@ -1,43 +1,104 @@
/**************************************************************************//**
* @file cmsis_armcc.h
- * @brief CMSIS Cortex-M Core Function/Instruction Header File
- * @version V4.30
- * @date 20. October 2015
+ * @brief CMSIS compiler ARMCC (Arm Compiler 5) header file
+ * @version V5.0.4
+ * @date 10. January 2018
******************************************************************************/
-/* Copyright (c) 2009 - 2015 ARM LIMITED
-
- All rights reserved.
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are met:
- - Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- - Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
- - Neither the name of ARM nor the names of its contributors may be used
- to endorse or promote products derived from this software without
- specific prior written permission.
- *
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- POSSIBILITY OF SUCH DAMAGE.
- ---------------------------------------------------------------------------*/
-
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
- #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
+ #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
+#endif
+
+/* CMSIS compiler control architecture macros */
+#if ((defined (__TARGET_ARCH_6_M ) && (__TARGET_ARCH_6_M == 1)) || \
+ (defined (__TARGET_ARCH_6S_M ) && (__TARGET_ARCH_6S_M == 1)) )
+ #define __ARM_ARCH_6M__ 1
+#endif
+
+#if (defined (__TARGET_ARCH_7_M ) && (__TARGET_ARCH_7_M == 1))
+ #define __ARM_ARCH_7M__ 1
+#endif
+
+#if (defined (__TARGET_ARCH_7E_M) && (__TARGET_ARCH_7E_M == 1))
+ #define __ARM_ARCH_7EM__ 1
+#endif
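The mappings above give the rest of the header one compiler-independent macro family to test instead of the ARMCC-specific __TARGET_ARCH_* names. A minimal sketch (illustration only, not part of the patch) of how the mapped macros gate architecture-specific code, in the same style the later hunks use:

    #if (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))
      /* Cortex-M4/M7 class core: DSP extension and SIMD intrinsics available */
    #elif (defined (__ARM_ARCH_6M__) && (__ARM_ARCH_6M__ == 1))
      /* Cortex-M0/M0+ class core: software fallbacks needed */
    #endif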
+
+ /* __ARM_ARCH_8M_BASE__ not applicable */
+ /* __ARM_ARCH_8M_MAIN__ not applicable */
+
+
+/* CMSIS compiler specific defines */
+#ifndef __ASM
+ #define __ASM __asm
+#endif
+#ifndef __INLINE
+ #define __INLINE __inline
+#endif
+#ifndef __STATIC_INLINE
+ #define __STATIC_INLINE static __inline
+#endif
+#ifndef __STATIC_FORCEINLINE
+ #define __STATIC_FORCEINLINE static __forceinline
+#endif
+#ifndef __NO_RETURN
+ #define __NO_RETURN __declspec(noreturn)
+#endif
+#ifndef __USED
+ #define __USED __attribute__((used))
+#endif
+#ifndef __WEAK
+ #define __WEAK __attribute__((weak))
+#endif
+#ifndef __PACKED
+ #define __PACKED __attribute__((packed))
+#endif
+#ifndef __PACKED_STRUCT
+ #define __PACKED_STRUCT __packed struct
+#endif
+#ifndef __PACKED_UNION
+ #define __PACKED_UNION __packed union
+#endif
+#ifndef __UNALIGNED_UINT32 /* deprecated */
+ #define __UNALIGNED_UINT32(x) (*((__packed uint32_t *)(x)))
+#endif
+#ifndef __UNALIGNED_UINT16_WRITE
+ #define __UNALIGNED_UINT16_WRITE(addr, val) ((*((__packed uint16_t *)(addr))) = (val))
+#endif
+#ifndef __UNALIGNED_UINT16_READ
+ #define __UNALIGNED_UINT16_READ(addr) (*((const __packed uint16_t *)(addr)))
+#endif
+#ifndef __UNALIGNED_UINT32_WRITE
+ #define __UNALIGNED_UINT32_WRITE(addr, val) ((*((__packed uint32_t *)(addr))) = (val))
+#endif
+#ifndef __UNALIGNED_UINT32_READ
+ #define __UNALIGNED_UINT32_READ(addr) (*((const __packed uint32_t *)(addr)))
+#endif
+#ifndef __ALIGNED
+ #define __ALIGNED(x) __attribute__((aligned(x)))
+#endif
+#ifndef __RESTRICT
+ #define __RESTRICT __restrict
#endif
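For illustration (not part of the patch), a sketch of how the new unaligned-access macros are meant to be used. `read_word` and `store_halfword` are hypothetical helpers; the macros expand to __packed pointer accesses, which Arm Compiler 5 compiles as unaligned-safe loads and stores:

    #include <stdint.h>

    /* Hypothetical helper: fetch a 32-bit field from a byte stream that
       may not be 4-byte aligned (e.g. inside a packed protocol frame). */
    static uint32_t read_word(const uint8_t *p)
    {
      return __UNALIGNED_UINT32_READ(p);
    }

    /* Hypothetical helper: store a 16-bit value at an arbitrary address. */
    static void store_halfword(uint8_t *p, uint16_t v)
    {
      __UNALIGNED_UINT16_WRITE(p, v);
    }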
/* ########################### Core Function Access ########################### */
@@ -46,7 +107,19 @@
@{
*/
+/**
+ \brief Enable IRQ Interrupts
+ \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
+ Can only be executed in Privileged modes.
+ */
/* intrinsic void __enable_irq(); */
+
+
+/**
+ \brief Disable IRQ Interrupts
+ \details Disables IRQ interrupts by setting the I-bit in the CPSR.
+ Can only be executed in Privileged modes.
+ */
/* intrinsic void __disable_irq(); */
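A sketch of the usual critical-section pattern built on these two intrinsics (illustration only, not part of the patch; both must execute in a Privileged mode, as noted above):

    __disable_irq();   /* IRQs held off from here on */
    /* ... touch data shared with interrupt handlers ... */
    __enable_irq();    /* IRQs delivered again */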
/**
@@ -181,7 +254,8 @@ __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
}
-#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )
/**
\brief Enable FIQ
@@ -256,13 +330,12 @@ __STATIC_INLINE uint32_t __get_FAULTMASK(void)
__STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
register uint32_t __regFaultMask __ASM("faultmask");
- __regFaultMask = (faultMask & (uint32_t)1);
+ __regFaultMask = (faultMask & (uint32_t)1U);
}
-#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
-
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
-#if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)
/**
\brief Get FPSCR
@@ -271,7 +344,8 @@ __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
*/
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+ (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
register uint32_t __regfpscr __ASM("fpscr");
return(__regfpscr);
#else
@@ -287,15 +361,15 @@ __STATIC_INLINE uint32_t __get_FPSCR(void)
*/
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+ (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
register uint32_t __regfpscr __ASM("fpscr");
__regfpscr = (fpscr);
+#else
+ (void)fpscr;
#endif
}
-#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */
-
-
/*@} end of CMSIS_Core_RegAccFunctions */
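Illustration (not part of the patch): clearing the cumulative floating-point exception flags in FPSCR[4:0]. On builds without an FPU, the #else paths above make the read return 0 and the write a no-op; the added (void)fpscr; cast silences the unused-parameter warning in that case.

    uint32_t fpscr = __get_FPSCR();
    __set_FPSCR(fpscr & ~0x1FU);   /* clear IOC/DZC/OFC/UFC/IXC flags */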
@@ -369,9 +443,10 @@ __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
__schedule_barrier();\
} while (0U)
+
/**
\brief Reverse byte order (32 bit)
- \details Reverses the byte order in integer value.
+ \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
\param [in] value Value to reverse
\return Reversed value
*/
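Illustration of the documented behaviour:

    uint32_t r = __REV(0x12345678U);    /* r == 0x78563412 */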
@@ -380,7 +455,7 @@ __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
/**
\brief Reverse byte order (16 bit)
- \details Reverses the byte order in two unsigned short values.
+ \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
\param [in] value Value to reverse
\return Reversed value
*/
@@ -392,14 +467,15 @@ __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(u
}
#endif
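Illustration of the documented behaviour:

    uint32_t r = __REV16(0x12345678U);  /* r == 0x34127856: bytes swapped within each halfword */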
+
/**
- \brief Reverse byte order in signed short value
- \details Reverses the byte order in a signed short value with sign extension to integer.
+ \brief Reverse byte order (16 bit)
+ \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
\param [in] value Value to reverse
\return Reversed value
*/
#ifndef __NO_EMBEDDED_ASM
-__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
+__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
revsh r0, r0
bx lr
@@ -410,8 +486,8 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(in
/**
\brief Rotate Right in unsigned value (32 bit)
\details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
- \param [in] value Value to rotate
- \param [in] value Number of Bits to rotate
+ \param [in] op1 Value to rotate
+ \param [in] op2 Number of Bits to rotate
\return Rotated value
*/
#define __ROR __ror
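Illustration of the rotate-right behaviour:

    uint32_t r = __ROR(0x12345678U, 8U);  /* r == 0x78123456: the low byte wraps around to the top */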
@@ -433,23 +509,24 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(in
\param [in] value Value to reverse
\return Reversed value
*/
-#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )
#define __RBIT __rbit
#else
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
uint32_t result;
- int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
+ uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
result = value; /* r will be reversed bits of v; first get LSB of v */
- for (value >>= 1U; value; value >>= 1U)
+ for (value >>= 1U; value != 0U; value >>= 1U)
{
result <<= 1U;
result |= value & 1U;
s--;
}
result <<= s; /* shift when v's highest bits are zero */
- return(result);
+ return result;
}
#endif
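Illustration: the __rbit intrinsic and the software fallback above both reverse the full 32-bit pattern:

    uint32_t r = __RBIT(0x00000001U);   /* r == 0x80000000 */
    uint32_t s = __RBIT(0xF0000000U);   /* s == 0x0000000F */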
@@ -463,7 +540,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
#define __CLZ __clz
-#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )
/**
\brief LDR Exclusive (8 bit)
@@ -645,7 +723,60 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
*/
#define __STRT(value, ptr) __strt(value, ptr)
-#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */
+#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
+
+/**
+ \brief Signed Saturate
+ \details Saturates a signed value.
+ \param [in] val Value to be saturated
+ \param [in] sat Bit position to saturate to (1..32)
+ \return Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
+{
+ if ((sat >= 1U) && (sat <= 32U))
+ {
+ const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
+ const int32_t min = -1 - max;
+ if (val > max)
+ {
+ return max;
+ }
+ else if (val < min)
+ {
+ return min;
+ }
+ }
+ return val;
+}
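Illustration: with sat == 8 the representable signed range is [-128, 127]:

    int32_t a = __SSAT( 300, 8U);   /* a ==  127 */
    int32_t b = __SSAT(-300, 8U);   /* b == -128 */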
+
+/**
+ \brief Unsigned Saturate
+ \details Saturates an unsigned value.
+ \param [in] val Value to be saturated
+ \param [in] sat Bit position to saturate to (0..31)
+ \return Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
+{
+ if (sat <= 31U)
+ {
+ const uint32_t max = ((1U << sat) - 1U);
+ if (val > (int32_t)max)
+ {
+ return max;
+ }
+ else if (val < 0)
+ {
+ return 0U;
+ }
+ }
+ return (uint32_t)val;
+}
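Illustration: with sat == 8 the result is clamped to [0, 255]:

    uint32_t a = __USAT(300, 8U);   /* a == 255, i.e. (1U << 8) - 1 */
    uint32_t b = __USAT( -5, 8U);   /* b == 0: negative input clamps to zero */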
+
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
+ (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
@@ -656,7 +787,7 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
@{
*/
-#if (__CORTEX_M >= 0x04U) /* only for Cortex-M4 and above */
+#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )
#define __SADD8 __sadd8
#define __QADD8 __qadd8
@@ -727,7 +858,7 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
#define __SMMLA(ARG1,ARG2,ARG3) ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
((int64_t)(ARG3) << 32U) ) >> 32U))
-#endif /* (__CORTEX_M >= 0x04) */
+#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
/*@} end of group CMSIS_SIMD_intrinsics */
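Illustration (not part of the patch): the __SMMLA emulation above keeps the top 32 bits of a 32x32-bit signed multiply and accumulates ARG3 into them:

    int32_t r = __SMMLA(0x40000000, 0x40000000, 1);
    /* (2^30 * 2^30) >> 32 == 0x10000000; plus 1 gives r == 0x10000001 */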