/******************************************************************************
 * @file     cachel1_armv7.h
 * @brief    CMSIS Level 1 Cache API for Armv7-M and later
 * @version  V1.0.0
 * @date     03. March 2020
 ******************************************************************************/
/*
 * Copyright (c) 2020 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#if   defined ( __ICCARM__ )
  #pragma system_include         /* treat file as system include file for MISRA check */
#elif defined (__clang__)
  #pragma clang system_header    /* treat file as system include file */
#endif

#ifndef ARM_CACHEL1_ARMV7_H
#define ARM_CACHEL1_ARMV7_H

/**
  \ingroup  CMSIS_Core_FunctionInterface
  \defgroup CMSIS_Core_CacheFunctions Cache Functions
  \brief    Functions that configure Instruction and Data cache.
  @{
 */

/* Cache Size ID Register Macros */
#define CCSIDR_WAYS(x)         (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x)         (((x) & SCB_CCSIDR_NUMSETS_Msk      ) >> SCB_CCSIDR_NUMSETS_Pos      )

#ifndef   __SCB_DCACHE_LINE_SIZE
#define   __SCB_DCACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif

#ifndef   __SCB_ICACHE_LINE_SIZE
#define   __SCB_ICACHE_LINE_SIZE  32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
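
/*
  Example (sketch, not part of the CMSIS API): reading the L1 data cache geometry
  with the macros above. The CCSIDR fields store "value - 1", so one is added to
  obtain the actual number of sets and ways.

    uint32_t ccsidr, sets, ways;

    SCB->CSSELR = 0U;                      // select Level 1 data cache
    __DSB();
    ccsidr = SCB->CCSIDR;
    sets   = CCSIDR_SETS(ccsidr) + 1U;     // e.g. 32 sets for a 4 KB, 4-way cache with 32-byte lines
    ways   = CCSIDR_WAYS(ccsidr) + 1U;     // e.g. 4 ways
*/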

/**
  \brief   Enable I-Cache
  \details Turns on I-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if (SCB->CCR & SCB_CCR_IC_Msk) return;  /* return if ICache is already enabled */

    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
    SCB->CCR |=  (uint32_t)SCB_CCR_IC_Msk;  /* enable I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Disable I-Cache
  \details Turns off I-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk;  /* disable I-Cache */
    SCB->ICIALLU = 0UL;                     /* invalidate I-Cache */
    __DSB();
    __ISB();
  #endif
}


/**
  \brief   Invalidate I-Cache
  \details Invalidates I-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    __DSB();
    __ISB();
    SCB->ICIALLU = 0UL;
    __DSB();
    __ISB();
  #endif
}

/**
  \brief   I-Cache Invalidate by address
  \details Invalidates I-Cache for the given address.
           I-Cache is invalidated starting from a 32-byte aligned address in 32-byte granularity.
           I-Cache memory blocks which are part of given address + given size are invalidated.
  \param[in]   addr    address
  \param[in]   isize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (void *addr, int32_t isize)
{
  #if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
    if ( isize > 0 ) {
      int32_t  op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->ICIMVAU = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_ICACHE_LINE_SIZE;
        op_size -= __SCB_ICACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
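
/*
  Usage sketch (illustration only, not part of this header): after copying
  executable code into RAM, the D-Cache is cleaned for that region so the new
  instructions reach memory, then the I-Cache is invalidated so stale lines are
  not executed. The code_ram/code_size symbols are assumptions for the example;
  SCB_CleanDCache_by_Addr is defined further below in this file.

    extern uint8_t  code_ram[];          // destination region in RAM (assumed)
    extern uint32_t code_size;           // number of bytes copied (assumed)

    SCB_CleanDCache_by_Addr((uint32_t *)code_ram, (int32_t)code_size);   // push data to memory
    SCB_InvalidateICache_by_Addr(code_ram, (int32_t)code_size);          // drop stale instructions
*/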

/**
  \brief   Enable D-Cache
  \details Turns on D-Cache
  */
__STATIC_FORCEINLINE void SCB_EnableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    if (SCB->CCR & SCB_CCR_DC_Msk) return;  /* return if DCache is already enabled */

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);
    __DSB();

    SCB->CCR |=  (uint32_t)SCB_CCR_DC_Msk;  /* enable D-Cache */

    __DSB();
    __ISB();
  #endif
}
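
/*
  Usage sketch (illustration only): enabling both caches once early at start-up.
  Regions accessed by DMA are commonly made non-cacheable via the MPU before the
  D-Cache is enabled, or are managed explicitly with the *_by_Addr functions
  defined below.

    SCB_EnableICache();   // invalidates, then enables the instruction cache
    SCB_EnableDCache();   // invalidates, then enables the data cache
*/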

/**
  \brief   Disable D-Cache
  \details Turns off D-Cache
  */
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk;  /* disable D-Cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean & invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}

/**
  \brief   Invalidate D-Cache
  \details Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
                      ((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}

/**
  \brief   Clean D-Cache
  \details Cleans D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
                      ((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}

/**
  \brief   Clean & Invalidate D-Cache
  \details Cleans and Invalidates D-Cache
  */
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    uint32_t ccsidr;
    uint32_t sets;
    uint32_t ways;

    SCB->CSSELR = 0U;                       /* select Level 1 data cache */
    __DSB();

    ccsidr = SCB->CCSIDR;

    /* clean & invalidate D-Cache */
    sets = (uint32_t)(CCSIDR_SETS(ccsidr));
    do {
      ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
      do {
        SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
                       ((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk)  );
        #if defined ( __CC_ARM )
          __schedule_barrier();
        #endif
      } while (ways-- != 0U);
    } while(sets-- != 0U);

    __DSB();
    __ISB();
  #endif
}

/**
  \brief   D-Cache Invalidate by address
  \details Invalidates D-Cache for the given address.
           D-Cache is invalidated starting from a 32-byte aligned address in 32-byte granularity.
           D-Cache memory blocks which are part of given address + given size are invalidated.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (void *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCIMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
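
/*
  Usage sketch (illustration only, not part of the CMSIS API): invalidating a DMA
  receive buffer before the CPU reads it. The buffer and the DMA helper functions
  are assumptions for the example; the buffer should be 32-byte aligned and a
  multiple of the cache line size so invalidation does not discard unrelated data
  sharing a cache line.

    #define RX_SIZE  64U
    static uint8_t rx_buf[RX_SIZE] __attribute__((aligned(32)));

    void receive_and_process(void)
    {
      dma_start_receive(rx_buf, RX_SIZE);                      // hypothetical DMA helper
      dma_wait_complete();                                     // hypothetical completion wait
      SCB_InvalidateDCache_by_Addr(rx_buf, (int32_t)RX_SIZE);  // drop stale cache lines
      process(rx_buf, RX_SIZE);                                // CPU now reads fresh DMA data
    }
*/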

/**
  \brief   D-Cache Clean by address
  \details Cleans D-Cache for the given address.
           D-Cache is cleaned starting from a 32-byte aligned address in 32-byte granularity.
           D-Cache memory blocks which are part of given address + given size are cleaned.
  \param[in]   addr    address
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (uint32_t *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCMVAC = op_addr;             /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
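
/*
  Usage sketch (illustration only): cleaning a DMA transmit buffer so data written
  by the CPU reaches main memory before the DMA engine reads it. The buffer and
  DMA helper are assumptions for the example; note that this variant of the
  function takes a uint32_t pointer.

    #define TX_WORDS  16U
    static uint32_t tx_buf[TX_WORDS] __attribute__((aligned(32)));

    void send_buffer(void)
    {
      fill_tx_data(tx_buf, TX_WORDS);                            // CPU writes (cached)
      SCB_CleanDCache_by_Addr(tx_buf, (int32_t)(TX_WORDS * 4U)); // write dirty lines back to memory
      dma_start_transmit(tx_buf, TX_WORDS * 4U);                 // hypothetical DMA helper
    }
*/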

/**
  \brief   D-Cache Clean and Invalidate by address
  \details Cleans and invalidates D-Cache for the given address.
           D-Cache is cleaned and invalidated starting from a 32-byte aligned address in 32-byte granularity.
           D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
  \param[in]   addr    address (aligned to 32-byte boundary)
  \param[in]   dsize   size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (uint32_t *addr, int32_t dsize)
{
  #if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    if ( dsize > 0 ) {
      int32_t  op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
      uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;

      __DSB();

      do {
        SCB->DCCIMVAC = op_addr;            /* register accepts only 32-byte aligned values, only bits 31..5 are valid */
        op_addr += __SCB_DCACHE_LINE_SIZE;
        op_size -= __SCB_DCACHE_LINE_SIZE;
      } while ( op_size > 0 );

      __DSB();
      __ISB();
    }
  #endif
}
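
/*
  Usage sketch (illustration only): a buffer that is both written by the CPU and
  later updated by a DMA peripheral (for example a descriptor the controller
  writes back) can be cleaned and invalidated in one pass before it is handed
  over. The descriptor layout and the dma_kick() helper are assumptions for the
  example.

    static uint32_t desc[8] __attribute__((aligned(32)));

    void hand_over_descriptor(void)
    {
      desc[0] = 0x80000000UL;                                             // CPU prepares descriptor
      SCB_CleanInvalidateDCache_by_Addr(desc, (int32_t)sizeof(desc));     // flush writes, drop lines
      dma_kick(desc);                                                     // hypothetical: DMA reads, then writes back
      // after completion the CPU reads desc[] directly from memory
    }
*/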

/*@} end of CMSIS_Core_CacheFunctions */

#endif /* ARM_CACHEL1_ARMV7_H */