path: root/Include/core_cm7.h
author     rihab kouki <rihab.kouki@st.com>   2020-07-28 11:24:49 +0100
committer  rihab kouki <rihab.kouki@st.com>   2020-07-28 11:24:49 +0100
commit     96d6da4e252b06dcfdc041e7df23e86161c33007 (patch)
tree       a262f59bb1db7ec7819acae435f5049cbe5e2354 /Include/core_cm7.h
parent     9f95ff5b6ba01db09552b84a0ab79607060a2666 (diff)
Official ARM version: v5.6.0 (HEAD, master)
Diffstat (limited to 'Include/core_cm7.h')
-rw-r--r--   Include/core_cm7.h   222
1 file changed, 138 insertions(+), 84 deletions(-)
diff --git a/Include/core_cm7.h b/Include/core_cm7.h
index a14dc62..c4515d8 100644
--- a/Include/core_cm7.h
+++ b/Include/core_cm7.h
@@ -1,11 +1,11 @@
/**************************************************************************//**
* @file core_cm7.h
* @brief CMSIS Cortex-M7 Core Peripheral Access Layer Header File
- * @version V5.0.8
- * @date 04. June 2018
+ * @version V5.1.1
+ * @date 28. March 2019
******************************************************************************/
/*
- * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
@@ -86,7 +86,7 @@
#endif
#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)
- #if defined __ARM_PCS_VFP
+ #if defined __ARM_FP
#if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)
#define __FPU_USED 1U
#else
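
Note: the hunk above switches the Arm Compiler 6 FPU test from __ARM_PCS_VFP (set only for the hard-float ABI) to __ARM_FP (set whenever FP code generation is enabled), so __FPU_USED now also resolves to 1 under the softfp ABI. A minimal sketch of how startup code typically consumes __FPU_USED, assuming a hypothetical device header "device.h" that pulls in core_cm7.h:

#include "device.h"                      /* hypothetical device header including core_cm7.h */

void SystemInit (void)
{
#if defined (__FPU_USED) && (__FPU_USED == 1U)
  /* grant full access to coprocessors CP10/CP11 before the first FP instruction executes */
  SCB->CPACR |= ((3UL << 10U*2U) |       /* CP10 full access */
                 (3UL << 11U*2U));       /* CP11 full access */
#endif
}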
@@ -423,7 +423,7 @@ typedef struct
__IOM uint32_t ISER[8U]; /*!< Offset: 0x000 (R/W) Interrupt Set Enable Register */
uint32_t RESERVED0[24U];
__IOM uint32_t ICER[8U]; /*!< Offset: 0x080 (R/W) Interrupt Clear Enable Register */
- uint32_t RSERVED1[24U];
+ uint32_t RESERVED1[24U];
__IOM uint32_t ISPR[8U]; /*!< Offset: 0x100 (R/W) Interrupt Set Pending Register */
uint32_t RESERVED2[24U];
__IOM uint32_t ICPR[8U]; /*!< Offset: 0x180 (R/W) Interrupt Clear Pending Register */
@@ -930,6 +930,24 @@ typedef struct
#define SCnSCB_ICTR_INTLINESNUM_Msk (0xFUL /*<< SCnSCB_ICTR_INTLINESNUM_Pos*/) /*!< ICTR: INTLINESNUM Mask */
/* Auxiliary Control Register Definitions */
+#define SCnSCB_ACTLR_DISDYNADD_Pos 26U /*!< ACTLR: DISDYNADD Position */
+#define SCnSCB_ACTLR_DISDYNADD_Msk (1UL << SCnSCB_ACTLR_DISDYNADD_Pos) /*!< ACTLR: DISDYNADD Mask */
+
+#define SCnSCB_ACTLR_DISISSCH1_Pos 21U /*!< ACTLR: DISISSCH1 Position */
+#define SCnSCB_ACTLR_DISISSCH1_Msk (0x1FUL << SCnSCB_ACTLR_DISISSCH1_Pos) /*!< ACTLR: DISISSCH1 Mask */
+
+#define SCnSCB_ACTLR_DISDI_Pos 16U /*!< ACTLR: DISDI Position */
+#define SCnSCB_ACTLR_DISDI_Msk (0x1FUL << SCnSCB_ACTLR_DISDI_Pos) /*!< ACTLR: DISDI Mask */
+
+#define SCnSCB_ACTLR_DISCRITAXIRUR_Pos 15U /*!< ACTLR: DISCRITAXIRUR Position */
+#define SCnSCB_ACTLR_DISCRITAXIRUR_Msk (1UL << SCnSCB_ACTLR_DISCRITAXIRUR_Pos) /*!< ACTLR: DISCRITAXIRUR Mask */
+
+#define SCnSCB_ACTLR_DISBTACALLOC_Pos 14U /*!< ACTLR: DISBTACALLOC Position */
+#define SCnSCB_ACTLR_DISBTACALLOC_Msk (1UL << SCnSCB_ACTLR_DISBTACALLOC_Pos) /*!< ACTLR: DISBTACALLOC Mask */
+
+#define SCnSCB_ACTLR_DISBTACREAD_Pos 13U /*!< ACTLR: DISBTACREAD Position */
+#define SCnSCB_ACTLR_DISBTACREAD_Msk (1UL << SCnSCB_ACTLR_DISBTACREAD_Pos) /*!< ACTLR: DISBTACREAD Mask */
+
#define SCnSCB_ACTLR_DISITMATBFLUSH_Pos 12U /*!< ACTLR: DISITMATBFLUSH Position */
#define SCnSCB_ACTLR_DISITMATBFLUSH_Msk (1UL << SCnSCB_ACTLR_DISITMATBFLUSH_Pos) /*!< ACTLR: DISITMATBFLUSH Mask */
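
Note: the definitions added above only name bits that already exist in the Cortex-M7 Auxiliary Control Register; they are normally written once during bring-up, at privileged level. A hedged sketch (assumptions: the device header including core_cm7.h is already included, and the exact effect of each bit is revision dependent, see the Cortex-M7 TRM):

static void actlr_disable_dual_issue (void)
{
  SCnSCB->ACTLR |= SCnSCB_ACTLR_DISDI_Msk;   /* example: set all five DISDI bits to disable dual-issue */
  __DSB();                                   /* ensure the register write completes */
  __ISB();                                   /* refetch so the new setting takes effect */
}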
@@ -1024,10 +1042,7 @@ typedef struct
__IOM uint32_t TPR; /*!< Offset: 0xE40 (R/W) ITM Trace Privilege Register */
uint32_t RESERVED2[15U];
__IOM uint32_t TCR; /*!< Offset: 0xE80 (R/W) ITM Trace Control Register */
- uint32_t RESERVED3[29U];
- __OM uint32_t IWR; /*!< Offset: 0xEF8 ( /W) ITM Integration Write Register */
- __IM uint32_t IRR; /*!< Offset: 0xEFC (R/ ) ITM Integration Read Register */
- __IOM uint32_t IMCR; /*!< Offset: 0xF00 (R/W) ITM Integration Mode Control Register */
+ uint32_t RESERVED3[32U];
uint32_t RESERVED4[43U];
__OM uint32_t LAR; /*!< Offset: 0xFB0 ( /W) ITM Lock Access Register */
__IM uint32_t LSR; /*!< Offset: 0xFB4 (R/ ) ITM Lock Status Register */
@@ -1078,18 +1093,6 @@ typedef struct
#define ITM_TCR_ITMENA_Pos 0U /*!< ITM TCR: ITM Enable bit Position */
#define ITM_TCR_ITMENA_Msk (1UL /*<< ITM_TCR_ITMENA_Pos*/) /*!< ITM TCR: ITM Enable bit Mask */
-/* ITM Integration Write Register Definitions */
-#define ITM_IWR_ATVALIDM_Pos 0U /*!< ITM IWR: ATVALIDM Position */
-#define ITM_IWR_ATVALIDM_Msk (1UL /*<< ITM_IWR_ATVALIDM_Pos*/) /*!< ITM IWR: ATVALIDM Mask */
-
-/* ITM Integration Read Register Definitions */
-#define ITM_IRR_ATREADYM_Pos 0U /*!< ITM IRR: ATREADYM Position */
-#define ITM_IRR_ATREADYM_Msk (1UL /*<< ITM_IRR_ATREADYM_Pos*/) /*!< ITM IRR: ATREADYM Mask */
-
-/* ITM Integration Mode Control Register Definitions */
-#define ITM_IMCR_INTEGRATION_Pos 0U /*!< ITM IMCR: INTEGRATION Position */
-#define ITM_IMCR_INTEGRATION_Msk (1UL /*<< ITM_IMCR_INTEGRATION_Pos*/) /*!< ITM IMCR: INTEGRATION Mask */
-
/* ITM Lock Status Register Definitions */
#define ITM_LSR_ByteAcc_Pos 2U /*!< ITM LSR: ByteAcc Position */
#define ITM_LSR_ByteAcc_Msk (1UL << ITM_LSR_ByteAcc_Pos) /*!< ITM LSR: ByteAcc Mask */
@@ -1325,13 +1328,13 @@ typedef struct
/* TPI Integration ETM Data Register Definitions (FIFO0) */
#define TPI_FIFO0_ITM_ATVALID_Pos 29U /*!< TPI FIFO0: ITM_ATVALID Position */
-#define TPI_FIFO0_ITM_ATVALID_Msk (0x3UL << TPI_FIFO0_ITM_ATVALID_Pos) /*!< TPI FIFO0: ITM_ATVALID Mask */
+#define TPI_FIFO0_ITM_ATVALID_Msk (0x1UL << TPI_FIFO0_ITM_ATVALID_Pos) /*!< TPI FIFO0: ITM_ATVALID Mask */
#define TPI_FIFO0_ITM_bytecount_Pos 27U /*!< TPI FIFO0: ITM_bytecount Position */
#define TPI_FIFO0_ITM_bytecount_Msk (0x3UL << TPI_FIFO0_ITM_bytecount_Pos) /*!< TPI FIFO0: ITM_bytecount Mask */
#define TPI_FIFO0_ETM_ATVALID_Pos 26U /*!< TPI FIFO0: ETM_ATVALID Position */
-#define TPI_FIFO0_ETM_ATVALID_Msk (0x3UL << TPI_FIFO0_ETM_ATVALID_Pos) /*!< TPI FIFO0: ETM_ATVALID Mask */
+#define TPI_FIFO0_ETM_ATVALID_Msk (0x1UL << TPI_FIFO0_ETM_ATVALID_Pos) /*!< TPI FIFO0: ETM_ATVALID Mask */
#define TPI_FIFO0_ETM_bytecount_Pos 24U /*!< TPI FIFO0: ETM_bytecount Position */
#define TPI_FIFO0_ETM_bytecount_Msk (0x3UL << TPI_FIFO0_ETM_bytecount_Pos) /*!< TPI FIFO0: ETM_bytecount Mask */
@@ -1354,13 +1357,13 @@ typedef struct
/* TPI Integration ITM Data Register Definitions (FIFO1) */
#define TPI_FIFO1_ITM_ATVALID_Pos 29U /*!< TPI FIFO1: ITM_ATVALID Position */
-#define TPI_FIFO1_ITM_ATVALID_Msk (0x3UL << TPI_FIFO1_ITM_ATVALID_Pos) /*!< TPI FIFO1: ITM_ATVALID Mask */
+#define TPI_FIFO1_ITM_ATVALID_Msk (0x1UL << TPI_FIFO1_ITM_ATVALID_Pos) /*!< TPI FIFO1: ITM_ATVALID Mask */
#define TPI_FIFO1_ITM_bytecount_Pos 27U /*!< TPI FIFO1: ITM_bytecount Position */
#define TPI_FIFO1_ITM_bytecount_Msk (0x3UL << TPI_FIFO1_ITM_bytecount_Pos) /*!< TPI FIFO1: ITM_bytecount Mask */
#define TPI_FIFO1_ETM_ATVALID_Pos 26U /*!< TPI FIFO1: ETM_ATVALID Position */
-#define TPI_FIFO1_ETM_ATVALID_Msk (0x3UL << TPI_FIFO1_ETM_ATVALID_Pos) /*!< TPI FIFO1: ETM_ATVALID Mask */
+#define TPI_FIFO1_ETM_ATVALID_Msk (0x1UL << TPI_FIFO1_ETM_ATVALID_Pos) /*!< TPI FIFO1: ETM_ATVALID Mask */
#define TPI_FIFO1_ETM_bytecount_Pos 24U /*!< TPI FIFO1: ETM_bytecount Position */
#define TPI_FIFO1_ETM_bytecount_Msk (0x3UL << TPI_FIFO1_ETM_bytecount_Pos) /*!< TPI FIFO1: ETM_bytecount Mask */
@@ -1617,6 +1620,9 @@ typedef struct
/* Media and FP Feature Register 2 Definitions */
+#define FPU_MVFR2_VFP_Misc_Pos 4U /*!< MVFR2: VFP Misc bits Position */
+#define FPU_MVFR2_VFP_Misc_Msk (0xFUL << FPU_MVFR2_VFP_Misc_Pos) /*!< MVFR2: VFP Misc bits Mask */
+
/*@} end of group CMSIS_FPU */
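
Note: the new MVFR2 field definitions are used like the existing MVFR0/MVFR1 ones. A small, side-effect-free sketch (assumes the device header including core_cm7.h is present):

static uint32_t fpu_misc_features (void)
{
  return (FPU->MVFR2 & FPU_MVFR2_VFP_Misc_Msk) >> FPU_MVFR2_VFP_Misc_Pos;   /* miscellaneous VFP features field */
}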
@@ -1897,7 +1903,9 @@ __STATIC_INLINE void __NVIC_EnableIRQ(IRQn_Type IRQn)
{
if ((int32_t)(IRQn) >= 0)
{
+ __COMPILER_BARRIER();
NVIC->ISER[(((uint32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+ __COMPILER_BARRIER();
}
}
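
Note: the added __COMPILER_BARRIER() calls only stop the compiler from moving memory accesses across the interrupt enable; calling code is unchanged. A sketch of the usual enable sequence, where MyDevice_IRQn is a hypothetical device-specific interrupt number:

void enable_my_irq (void)
{
  NVIC_SetPriority(MyDevice_IRQn, 5U);     /* hypothetical IRQ number from the device header */
  NVIC_ClearPendingIRQ(MyDevice_IRQn);     /* discard any request pending from before setup */
  NVIC_EnableIRQ(MyDevice_IRQn);           /* compiler barriers inside keep ordering intact */
}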
@@ -2120,8 +2128,9 @@ __STATIC_INLINE void NVIC_DecodePriority (uint32_t Priority, uint32_t PriorityGr
*/
__STATIC_INLINE void __NVIC_SetVector(IRQn_Type IRQn, uint32_t vector)
{
- uint32_t *vectors = (uint32_t *)SCB->VTOR;
- vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET] = vector;
+ uint32_t vectors = (uint32_t )SCB->VTOR;
+ (* (int *) (vectors + ((int32_t)IRQn + NVIC_USER_IRQ_OFFSET) * 4)) = vector;
+ __DSB();
}
@@ -2135,8 +2144,8 @@ __STATIC_INLINE void __NVIC_SetVector(IRQn_Type IRQn, uint32_t vector)
*/
__STATIC_INLINE uint32_t __NVIC_GetVector(IRQn_Type IRQn)
{
- uint32_t *vectors = (uint32_t *)SCB->VTOR;
- return vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET];
+ uint32_t vectors = (uint32_t )SCB->VTOR;
+ return (uint32_t)(* (int *) (vectors + ((int32_t)IRQn + NVIC_USER_IRQ_OFFSET) * 4));
}
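
Note: both accessors now build the byte address from SCB->VTOR instead of indexing a uint32_t pointer, and __NVIC_SetVector adds a __DSB() so the table write is visible before the next exception. They still require VTOR to point at a writable, suitably aligned table. A hedged sketch that relocates the table to RAM first (assumptions: at most 128 vectors, GCC/Arm Compiler 6 aligned attribute, MyDevice_IRQn and MyDevice_Handler are hypothetical):

#define VECTOR_COUNT  128U                                   /* assumption: 16 core + up to 112 device vectors */

static uint32_t ram_vectors[VECTOR_COUNT] __attribute__((aligned(512)));

extern void MyDevice_Handler (void);                         /* hypothetical interrupt handler */

void relocate_and_install (void)
{
  const uint32_t *flash_vectors = (const uint32_t *)SCB->VTOR;

  for (uint32_t i = 0U; i < VECTOR_COUNT; i++) {
    ram_vectors[i] = flash_vectors[i];                       /* copy the currently active table */
  }

  SCB->VTOR = (uint32_t)ram_vectors;                         /* point VTOR at the RAM copy */
  __DSB();

  NVIC_SetVector(MyDevice_IRQn, (uint32_t)MyDevice_Handler); /* the write now lands in RAM */
}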
@@ -2161,6 +2170,7 @@ __NO_RETURN __STATIC_INLINE void __NVIC_SystemReset(void)
/*@} end of CMSIS_Core_NVICFunctions */
+
/* ########################## MPU functions #################################### */
#if defined (__MPU_PRESENT) && (__MPU_PRESENT == 1U)
@@ -2169,6 +2179,7 @@ __NO_RETURN __STATIC_INLINE void __NVIC_SystemReset(void)
#endif
+
/* ########################## FPU functions #################################### */
/**
\ingroup CMSIS_Core_FunctionInterface
@@ -2204,7 +2215,6 @@ __STATIC_INLINE uint32_t SCB_GetFPUType(void)
}
}
-
/*@} end of CMSIS_Core_FpuFunctions */
@@ -2221,14 +2231,18 @@ __STATIC_INLINE uint32_t SCB_GetFPUType(void)
#define CCSIDR_WAYS(x) (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x) (((x) & SCB_CCSIDR_NUMSETS_Msk ) >> SCB_CCSIDR_NUMSETS_Pos )
+#define __SCB_DCACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
+#define __SCB_ICACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
/**
\brief Enable I-Cache
\details Turns on I-Cache
*/
-__STATIC_INLINE void SCB_EnableICache (void)
+__STATIC_FORCEINLINE void SCB_EnableICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
+ if (SCB->CCR & SCB_CCR_IC_Msk) return; /* return if ICache is already enabled */
+
__DSB();
__ISB();
SCB->ICIALLU = 0UL; /* invalidate I-Cache */
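
Note: with the new guard, calling the cache enable functions more than once is harmless. A typical bring-up sequence, assuming __ICACHE_PRESENT and __DCACHE_PRESENT are both 1 for the device:

void cache_init (void)
{
  SCB_EnableICache();   /* returns immediately if the I-Cache is already on */
  SCB_EnableDCache();   /* likewise guarded against a double enable (the D-Cache guard appears in a later hunk) */
}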
@@ -2245,7 +2259,7 @@ __STATIC_INLINE void SCB_EnableICache (void)
\brief Disable I-Cache
\details Turns off I-Cache
*/
-__STATIC_INLINE void SCB_DisableICache (void)
+__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
__DSB();
@@ -2262,7 +2276,7 @@ __STATIC_INLINE void SCB_DisableICache (void)
\brief Invalidate I-Cache
\details Invalidates I-Cache
*/
-__STATIC_INLINE void SCB_InvalidateICache (void)
+__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
__DSB();
@@ -2275,17 +2289,49 @@ __STATIC_INLINE void SCB_InvalidateICache (void)
/**
+ \brief I-Cache Invalidate by address
+ \details Invalidates I-Cache for the given address.
+ I-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
+ I-Cache memory blocks which are part of given address + given size are invalidated.
+ \param[in] addr address
+ \param[in] isize size of memory block (in number of bytes)
+*/
+__STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (void *addr, int32_t isize)
+{
+ #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
+ if ( isize > 0 ) {
+ int32_t op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
+ uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;
+
+ __DSB();
+
+ do {
+ SCB->ICIMVAU = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
+ op_addr += __SCB_ICACHE_LINE_SIZE;
+ op_size -= __SCB_ICACHE_LINE_SIZE;
+ } while ( op_size > 0 );
+
+ __DSB();
+ __ISB();
+ }
+ #endif
+}
+
+
+/**
\brief Enable D-Cache
\details Turns on D-Cache
*/
-__STATIC_INLINE void SCB_EnableDCache (void)
+__STATIC_FORCEINLINE void SCB_EnableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
- SCB->CSSELR = 0U; /*(0U << 1U) | 0U;*/ /* Level 1 data cache */
+ if (SCB->CCR & SCB_CCR_DC_Msk) return; /* return if DCache is already enabled */
+
+ SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
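
Note: SCB_InvalidateICache_by_Addr is new in this release. The usual pattern for runtime-loaded or self-modifying code is to clean the D-Cache over the region first, so the new instructions reach memory, and then invalidate the I-Cache over the same range. A sketch under the assumption that code_buf (an illustrative name) was just written by the CPU and the device header including core_cm7.h is present:

extern uint8_t code_buf[1024];                                               /* illustrative runtime-loaded code region */

void sync_caches_for_code (void)
{
  SCB_CleanDCache_by_Addr((uint32_t *)code_buf, (int32_t)sizeof(code_buf));  /* push the written data to memory */
  SCB_InvalidateICache_by_Addr(code_buf, (int32_t)sizeof(code_buf));         /* drop any stale instruction lines */
}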
@@ -2316,14 +2362,14 @@ __STATIC_INLINE void SCB_EnableDCache (void)
\brief Disable D-Cache
\details Turns off D-Cache
*/
-__STATIC_INLINE void SCB_DisableDCache (void)
+__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
- SCB->CSSELR = 0U; /*(0U << 1U) | 0U;*/ /* Level 1 data cache */
+ SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk; /* disable D-Cache */
@@ -2354,14 +2400,14 @@ __STATIC_INLINE void SCB_DisableDCache (void)
\brief Invalidate D-Cache
\details Invalidates D-Cache
*/
-__STATIC_INLINE void SCB_InvalidateDCache (void)
+__STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
- SCB->CSSELR = 0U; /*(0U << 1U) | 0U;*/ /* Level 1 data cache */
+ SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
@@ -2389,15 +2435,15 @@ __STATIC_INLINE void SCB_InvalidateDCache (void)
\brief Clean D-Cache
\details Cleans D-Cache
*/
-__STATIC_INLINE void SCB_CleanDCache (void)
+__STATIC_FORCEINLINE void SCB_CleanDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
- SCB->CSSELR = 0U; /*(0U << 1U) | 0U;*/ /* Level 1 data cache */
- __DSB();
+ SCB->CSSELR = 0U; /* select Level 1 data cache */
+ __DSB();
ccsidr = SCB->CCSIDR;
@@ -2424,14 +2470,14 @@ __STATIC_INLINE void SCB_CleanDCache (void)
\brief Clean & Invalidate D-Cache
\details Cleans and Invalidates D-Cache
*/
-__STATIC_INLINE void SCB_CleanInvalidateDCache (void)
+__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
- SCB->CSSELR = 0U; /*(0U << 1U) | 0U;*/ /* Level 1 data cache */
+ SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
@@ -2457,27 +2503,30 @@ __STATIC_INLINE void SCB_CleanInvalidateDCache (void)
/**
\brief D-Cache Invalidate by address
- \details Invalidates D-Cache for the given address
- \param[in] addr address (aligned to 32-byte boundary)
+ \details Invalidates D-Cache for the given address.
+ D-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
+ D-Cache memory blocks which are part of given address + given size are invalidated.
+ \param[in] addr address
\param[in] dsize size of memory block (in number of bytes)
*/
-__STATIC_INLINE void SCB_InvalidateDCache_by_Addr (uint32_t *addr, int32_t dsize)
+__STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (void *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
- int32_t op_size = dsize;
- uint32_t op_addr = (uint32_t)addr;
- int32_t linesize = 32; /* in Cortex-M7 size of cache line is fixed to 8 words (32 bytes) */
+ if ( dsize > 0 ) {
+ int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
+ uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
+
+ __DSB();
- __DSB();
+ do {
+ SCB->DCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
+ op_addr += __SCB_DCACHE_LINE_SIZE;
+ op_size -= __SCB_DCACHE_LINE_SIZE;
+ } while ( op_size > 0 );
- while (op_size > 0) {
- SCB->DCIMVAC = op_addr;
- op_addr += (uint32_t)linesize;
- op_size -= linesize;
+ __DSB();
+ __ISB();
}
-
- __DSB();
- __ISB();
#endif
}
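
Note: SCB_InvalidateDCache_by_Addr now accepts an unaligned void* and internally extends the operation to every 32-byte line touched by addr..addr+dsize. Because invalidation discards whole lines, DMA buffers should still be 32-byte aligned and padded to a multiple of 32 bytes so no unrelated data shares a covered line. A hedged receive-side DMA sketch (dma_start_rx is a hypothetical driver call; the device header including core_cm7.h is assumed):

#define RX_LEN  256U

static uint8_t rx_buf[RX_LEN] __attribute__((aligned(32)));   /* align to the 32-byte D-Cache line size */

extern void dma_start_rx (void *buf, uint32_t len);           /* hypothetical driver function */

void dma_receive (void)
{
  dma_start_rx(rx_buf, RX_LEN);                               /* peripheral fills rx_buf behind the cache */
  /* ... wait for the DMA transfer-complete event ... */
  SCB_InvalidateDCache_by_Addr(rx_buf, (int32_t)RX_LEN);      /* discard stale cached copies before reading */
  /* CPU reads of rx_buf now observe the DMA data */
}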
@@ -2485,26 +2534,29 @@ __STATIC_INLINE void SCB_InvalidateDCache_by_Addr (uint32_t *addr, int32_t dsize
/**
\brief D-Cache Clean by address
\details Cleans D-Cache for the given address
- \param[in] addr address (aligned to 32-byte boundary)
+ D-Cache is cleaned starting from a 32 byte aligned address in 32 byte granularity.
+ D-Cache memory blocks which are part of given address + given size are cleaned.
+ \param[in] addr address
\param[in] dsize size of memory block (in number of bytes)
*/
-__STATIC_INLINE void SCB_CleanDCache_by_Addr (uint32_t *addr, int32_t dsize)
+__STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (uint32_t *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
- int32_t op_size = dsize;
- uint32_t op_addr = (uint32_t) addr;
- int32_t linesize = 32; /* in Cortex-M7 size of cache line is fixed to 8 words (32 bytes) */
+ if ( dsize > 0 ) {
+ int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
+ uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
+
+ __DSB();
- __DSB();
+ do {
+ SCB->DCCMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
+ op_addr += __SCB_DCACHE_LINE_SIZE;
+ op_size -= __SCB_DCACHE_LINE_SIZE;
+ } while ( op_size > 0 );
- while (op_size > 0) {
- SCB->DCCMVAC = op_addr;
- op_addr += (uint32_t)linesize;
- op_size -= linesize;
+ __DSB();
+ __ISB();
}
-
- __DSB();
- __ISB();
#endif
}
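
Note: the transmit direction is the mirror image: clean the lines covering the buffer so the DMA engine reads up-to-date data from memory. A sketch (dma_start_tx is hypothetical; this version keeps the uint32_t* parameter, so the buffer pointer is cast):

#define TX_LEN  256U

static uint8_t tx_buf[TX_LEN] __attribute__((aligned(32)));

extern void dma_start_tx (const void *buf, uint32_t len);        /* hypothetical driver function */

void dma_transmit (void)
{
  /* ... fill tx_buf ... */
  SCB_CleanDCache_by_Addr((uint32_t *)tx_buf, (int32_t)TX_LEN);  /* write dirty lines back to memory */
  dma_start_tx(tx_buf, TX_LEN);                                  /* DMA now reads the committed data */
}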
@@ -2512,30 +2564,32 @@ __STATIC_INLINE void SCB_CleanDCache_by_Addr (uint32_t *addr, int32_t dsize)
/**
\brief D-Cache Clean and Invalidate by address
\details Cleans and invalidates D_Cache for the given address
+ D-Cache is cleaned and invalidated starting from a 32 byte aligned address in 32 byte granularity.
+ D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
\param[in] addr address (aligned to 32-byte boundary)
\param[in] dsize size of memory block (in number of bytes)
*/
-__STATIC_INLINE void SCB_CleanInvalidateDCache_by_Addr (uint32_t *addr, int32_t dsize)
+__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (uint32_t *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
- int32_t op_size = dsize;
- uint32_t op_addr = (uint32_t) addr;
- int32_t linesize = 32; /* in Cortex-M7 size of cache line is fixed to 8 words (32 bytes) */
+ if ( dsize > 0 ) {
+ int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
+ uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
+
+ __DSB();
- __DSB();
+ do {
+ SCB->DCCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
+ op_addr += __SCB_DCACHE_LINE_SIZE;
+ op_size -= __SCB_DCACHE_LINE_SIZE;
+ } while ( op_size > 0 );
- while (op_size > 0) {
- SCB->DCCIMVAC = op_addr;
- op_addr += (uint32_t)linesize;
- op_size -= linesize;
+ __DSB();
+ __ISB();
}
-
- __DSB();
- __ISB();
#endif
}
-
/*@} end of CMSIS_Core_CacheFunctions */
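
Note: for a buffer that the CPU writes and a peripheral then overwrites in place (for example a command/response block), the combined clean-and-invalidate covers both directions in one pass. A short sketch with an illustrative buffer name:

static uint8_t xfer_buf[64] __attribute__((aligned(32)));

void sync_shared_buffer (void)
{
  SCB_CleanInvalidateDCache_by_Addr((uint32_t *)xfer_buf, (int32_t)sizeof(xfer_buf));
  /* cache and memory now agree; the peripheral may read and then overwrite the block */
}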