Naze32 clone with FrSky receiver

core_caFunc.h

/**************************************************************************//**
 * @file    core_caFunc.h
 * @brief   CMSIS Cortex-A Core Function Access Header File
 * @version V3.10
 * @date    30 Oct 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED
   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/

#ifndef __CORE_CAFUNC_H__
#define __CORE_CAFUNC_H__

/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
*/

#if defined ( __CC_ARM ) /*------------------ RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F
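
/* Usage note (editorial sketch, not part of the original CMSIS API): the MODE_*
   values are the CPSR.M[4:0] mode-field encodings from the ARM Architecture
   Reference Manual, so they can be compared against the mode field read back
   with __get_CPSR() (defined below). A hypothetical helper:

     static inline uint32_t __get_mode(void) {
         return __get_CPSR() & 0x1FU;   // extract the M[4:0] mode field
     }
     // e.g. (__get_mode() == MODE_USR) tests for unprivileged execution
*/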

/** \brief Get APSR Register
    This function returns the content of the APSR Register.
    \return APSR Register value
*/
__STATIC_INLINE uint32_t __get_APSR(void)
{
  register uint32_t __regAPSR __ASM("apsr");
  return(__regAPSR);
}

/** \brief Get CPSR Register
    This function returns the content of the CPSR Register.
    \return CPSR Register value
*/
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR __ASM("cpsr");
  return(__regCPSR);
}

/** \brief Set Stack Pointer
    This function assigns the given value to the current stack pointer.
    \param [in] topOfStack Stack Pointer value to set
*/
register uint32_t __regSP __ASM("sp");
__STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
  __regSP = topOfStack;
}

/** \brief Get link register
    This function returns the value of the link register
    \return Value of link register
*/
register uint32_t __reglr __ASM("lr");
__STATIC_INLINE uint32_t __get_LR(void)
{
  return(__reglr);
}

/** \brief Set link register
    This function sets the value of the link register
    \param [in] lr LR value to set
*/
__STATIC_INLINE void __set_LR(uint32_t lr)
{
  __reglr = lr;
}

/** \brief Set Process Stack Pointer
    This function assigns the given value to the USR/SYS Stack Pointer (PSP).
    \param [in] topOfProcStack USR/SYS Stack Pointer value to set
*/
__STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
{
        ARM
        PRESERVE8

        BIC     R0, R0, #7    ;ensure stack is 8-byte aligned
        MRS     R1, CPSR
        CPS     #MODE_SYS     ;no effect in USR mode
        MOV     SP, R0
        MSR     CPSR_c, R1    ;no effect in USR mode
        ISB
        BX      LR
}

/** \brief Set User Mode
    This function changes the processor state to User Mode
*/
__STATIC_ASM void __set_CPS_USR(void)
{
        ARM

        CPS     #MODE_USR
        BX      LR
}
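
/* Usage sketch (editorial, hedged): in a bare-metal boot flow these two
   helpers are typically paired, giving the USR/SYS stack a valid pointer
   before the irreversible drop to User mode. usr_stack_top is a hypothetical
   linker-provided symbol, not something this header defines:

     extern uint32_t usr_stack_top;
     __set_PSP((uint32_t)&usr_stack_top);  // set the USR/SYS stack pointer
     __set_CPS_USR();                      // switch to User mode; privileged
                                           // state is only re-entered via an
                                           // exception
*/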

/** \brief Enable FIQ
    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
#define __enable_fault_irq  __enable_fiq

/** \brief Disable FIQ
    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
#define __disable_fault_irq __disable_fiq

/** \brief Get FPSCR
    This function returns the current value of the Floating Point Status/Control register.
    \return Floating Point Status/Control register value
*/
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
  register uint32_t __regfpscr __ASM("fpscr");
  return(__regfpscr);
#else
  return(0);
#endif
}

/** \brief Set FPSCR
    This function assigns the given value to the Floating Point Status/Control register.
    \param [in] fpscr Floating Point Status/Control value to set
*/
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
  register uint32_t __regfpscr __ASM("fpscr");
  __regfpscr = (fpscr);
#endif
}

/** \brief Get FPEXC
    This function returns the current value of the Floating Point Exception Control register.
    \return Floating Point Exception Control register value
*/
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");
  return(__regfpexc);
#else
  return(0);
#endif
}

/** \brief Set FPEXC
    This function assigns the given value to the Floating Point Exception Control register.
    \param [in] fpexc Floating Point Exception Control value to set
*/
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
}

/** \brief Get CPACR
    This function returns the current value of the Coprocessor Access Control register.
    \return Coprocessor Access Control register value
*/
__STATIC_INLINE uint32_t __get_CPACR(void)
{
  register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
  return __regCPACR;
}

/** \brief Set CPACR
    This function assigns the given value to the Coprocessor Access Control register.
    \param [in] cpacr Coprocessor Access Control value to set
*/
__STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
  register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
  __regCPACR = cpacr;
  __ISB();
}
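
/* Usage sketch (editorial, hedged): the usual reason to write CPACR on a
   Cortex-A part is to grant access to coprocessors 10 and 11 before turning
   the FPU/NEON unit on via FPEXC.EN; the exact bring-up sequence is platform
   specific:

     __set_CPACR(__get_CPACR() | (0xFU << 20)); // CP10/CP11: full access
     __set_FPEXC(__get_FPEXC() | (1U << 30));   // FPEXC.EN = 1: enable the FPU
*/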

/** \brief Get CBAR
    This function returns the value of the Configuration Base Address register.
    \return Configuration Base Address register value
*/
__STATIC_INLINE uint32_t __get_CBAR() {
  register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
  return(__regCBAR);
}

/** \brief Get TTBR0
    This function returns the value of the Translation Table Base Register 0.
    \return Translation Table Base Register 0 value
*/
__STATIC_INLINE uint32_t __get_TTBR0() {
  register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
  return(__regTTBR0);
}

/** \brief Set TTBR0
    This function assigns the given value to the Translation Table Base Register 0.
    \param [in] ttbr0 Translation Table Base Register 0 value to set
*/
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
  register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
  __regTTBR0 = ttbr0;
  __ISB();
}

/** \brief Get DACR
    This function returns the value of the Domain Access Control Register.
    \return Domain Access Control Register value
*/
__STATIC_INLINE uint32_t __get_DACR() {
  register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
  return(__regDACR);
}

/** \brief Set DACR
    This function assigns the given value to the Domain Access Control Register.
    \param [in] dacr Domain Access Control Register value to set
*/
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
  register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
  __regDACR = dacr;
  __ISB();
}
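
/* Usage sketch (editorial, hedged): before the MMU is enabled, the domain(s)
   referenced by the translation tables must be granted access. Setting a
   domain to "client" (0b01) makes the page-table access permissions take
   effect; e.g. for translation tables that only use domain 0:

     __set_DACR(0x00000001U); // domain 0 = client, all other domains = no access
*/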

/******************************** Cache and BTAC enable ****************************************************/

/** \brief Set SCTLR
    This function assigns the given value to the System Control Register.
    \param [in] sctlr System Control Register value to set
*/
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
  register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
  __regSCTLR = sctlr;
}

/** \brief Get SCTLR
    This function returns the value of the System Control Register.
    \return System Control Register value
*/
__STATIC_INLINE uint32_t __get_SCTLR() {
  register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
  return(__regSCTLR);
}

/** \brief Enable Caches
    Enable Caches
*/
__STATIC_INLINE void __enable_caches(void) {
  // Set I bit 12 to enable I Cache
  // Set C bit  2 to enable D Cache
  __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief Disable Caches
    Disable Caches
*/
__STATIC_INLINE void __disable_caches(void) {
  // Clear I bit 12 to disable I Cache
  // Clear C bit  2 to disable D Cache
  __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
  __ISB();
}

/** \brief Enable BTAC
    Enable BTAC
*/
__STATIC_INLINE void __enable_btac(void) {
  // Set Z bit 11 to enable branch prediction
  __set_SCTLR( __get_SCTLR() | (1 << 11));
  __ISB();
}

/** \brief Disable BTAC
    Disable BTAC
*/
__STATIC_INLINE void __disable_btac(void) {
  // Clear Z bit 11 to disable branch prediction
  __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}

/** \brief Enable MMU
    Enable MMU
*/
__STATIC_INLINE void __enable_mmu(void) {
  // Set M bit 0 to enable the MMU
  // Set AFE bit to enable simplified access permissions model
  // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
  __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
  __ISB();
}

/** \brief Disable MMU
    Disable MMU
*/
__STATIC_INLINE void __disable_mmu(void) {
  // Clear M bit 0 to disable the MMU
  __set_SCTLR( __get_SCTLR() & ~1);
  __ISB();
}

/******************************** TLB maintenance operations ************************************************/

/** \brief Invalidate the whole TLB
    TLBIALL. Invalidate the whole TLB
*/
__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
  register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
  __TLBIALL = 0;
  __DSB();
  __ISB();
}
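
/* Usage sketch (editorial, hedged): a minimal MMU bring-up with the helpers
   above, assuming ttb is a hypothetical, suitably aligned first-level
   translation table prepared by platform code (for example via the helpers
   in core_ca_mmu.h, included at the end of this section):

     __set_TTBR0((uint32_t)ttb); // point the table walker at the tables
     __set_DACR(0x00000001U);    // domain 0 = client
     __ca9u_inv_tlb_all();       // discard any stale translations
     __enable_mmu();             // finally set SCTLR.M
*/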

/******************************** BTB maintenance operations ************************************************/

/** \brief Invalidate entire branch predictor array
    BPIALL. Branch Predictor Invalidate All.
*/
__STATIC_INLINE void __v7_inv_btac(void) {
  register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
  __BPIALL = 0;
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new state
}

/******************************** L1 cache operations ******************************************************/

/** \brief Invalidate the whole I$
    ICIALLU. Instruction Cache Invalidate All to PoU
*/
__STATIC_INLINE void __v7_inv_icache_all(void) {
  register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
  __ICIALLU = 0;
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief Clean D$ by MVA
    DCCMVAC. Data cache clean by MVA to PoC
*/
__STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
  register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
  __DCCMVAC = (uint32_t)va;
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Invalidate D$ by MVA
    DCIMVAC. Data cache invalidate by MVA to PoC
*/
__STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
  register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
  __DCIMVAC = (uint32_t)va;
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate D$ by MVA
    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
*/
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
  register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
  __DCCIMVAC = (uint32_t)va;
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
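
/* Usage sketch (editorial, hedged): the MVA operations act on a single cache
   line, so maintaining a buffer means walking it in line-size steps. The
   sketch below cleans a buffer before it is read by a DMA engine; it assumes
   a 32-byte L1 line (true for Cortex-A9, but portable code should derive the
   line size from CCSIDR):

     static void dcache_clean_range(void *buf, uint32_t len) { // hypothetical helper
         uint32_t mva = (uint32_t)buf & ~31U;                  // align down to a line
         while (mva < ((uint32_t)buf + len)) {
             __v7_clean_dcache_mva((void *)mva);
             mva += 32U;
         }
         __DSB(); // complete the cleans before the DMA is started
     }
*/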

/** \brief Clean and Invalidate the entire data or unified cache
    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
*/
#pragma push
#pragma arm
__STATIC_ASM void __v7_all_cache(uint32_t op) {
        ARM

        PUSH    {R4-R11}
        MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
        ANDS    R3, R6, #0x07000000        // Extract coherency level
        MOV     R3, R3, LSR #23            // Total cache levels << 1
        BEQ     Finished                   // If 0, no need to clean
        MOV     R10, #0                    // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7                 // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                       // No cache or only instruction cache at this level
        MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
        ISB                                // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
        AND     R2, R1, #7                 // Extract the line length field
        ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3         // R4 is the max number of the way size (right aligned)
        CLZ     R5, R4                     // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)
Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)
Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
        ORR     R11, R11, R7, LSL R2       // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1                 // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1                 // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2               // Increment the cache number
        CMP     R3, R10
        BGT     Loop1
Finished
        DSB
        POP     {R4-R11}
        BX      lr
}
#pragma pop

/** \brief Invalidate the whole D$
    DCISW. Invalidate by Set/Way
*/
__STATIC_INLINE void __v7_inv_dcache_all(void) {
  __v7_all_cache(0);
}

/** \brief Clean the whole D$
    DCCSW. Clean by Set/Way
*/
__STATIC_INLINE void __v7_clean_dcache_all(void) {
  __v7_all_cache(1);
}

/** \brief Clean and invalidate the whole D$
    DCCISW. Clean and Invalidate by Set/Way
*/
__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
  __v7_all_cache(2);
}
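
/* Usage sketch (editorial, hedged; the exact ordering is platform specific):
   the set/way helpers pair naturally with the SCTLR helpers above, e.g. when
   taking a core out of the coherent domain for power-down:

     __disable_caches();          // SCTLR.C/I = 0: stop allocating new lines
     __v7_clean_inv_dcache_all(); // push any dirty lines out to the PoC
     __disable_mmu();
*/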

#include "core_ca_mmu.h"

#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/

#define __inline inline

inline static uint32_t __disable_irq_iar() {
  int irq_dis = __get_CPSR() & 0x80; // bit 7 of CPSR is the I (IRQ mask) bit
  __disable_irq();
  return irq_dis;
}

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F

/** \brief Set Process Stack Pointer
    This function assigns the given value to the USR/SYS Stack Pointer (PSP).
    \param [in] topOfProcStack USR/SYS Stack Pointer value to set
*/
// from rt_CMSIS.c
__arm static inline void __set_PSP(uint32_t topOfProcStack) {
  __asm(
    "    ARM\n"
//  "    PRESERVE8\n"
    "    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned \n"
    "    MRS     R1, CPSR \n"
    "    CPS     #0x1F       ;no effect in USR mode \n" // MODE_SYS
    "    MOV     SP, R0 \n"
    "    MSR     CPSR_c, R1  ;no effect in USR mode \n"
    "    ISB \n"
    "    BX      LR \n");
}

/** \brief Set User Mode
    This function changes the processor state to User Mode
*/
// from rt_CMSIS.c
__arm static inline void __set_CPS_USR(void) {
  __asm(
    "    ARM \n"
    "    CPS     #0x10 \n" // MODE_USR
    "    BX      LR\n");
}

/** \brief Set TTBR0
    This function assigns the given value to the Translation Table Base Register 0.
    \param [in] ttbr0 Translation Table Base Register 0 value to set
*/
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
  __MCR(15, 0, ttbr0, 2, 0, 0); // reg to cp15
  __ISB();
}

/** \brief Set DACR
    This function assigns the given value to the Domain Access Control Register.
    \param [in] dacr Domain Access Control Register value to set
*/
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
  __MCR(15, 0, dacr, 3, 0, 0); // reg to cp15
  __ISB();
}

/******************************** Cache and BTAC enable ****************************************************/

/** \brief Set SCTLR
    This function assigns the given value to the System Control Register.
    \param [in] sctlr System Control Register value to set
*/
// from __enable_mmu()
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr) {
  __MCR(15, 0, sctlr, 1, 0, 0); // reg to cp15
}

/** \brief Get SCTLR
    This function returns the value of the System Control Register.
    \return System Control Register value
*/
// from __enable_mmu()
__STATIC_INLINE uint32_t __get_SCTLR() {
  uint32_t __regSCTLR = __MRC(15, 0, 1, 0, 0);
  return __regSCTLR;
}

/** \brief Enable Caches
    Enable Caches
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_caches(void) {
  __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief Enable BTAC
    Enable BTAC
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_btac(void) {
  __set_SCTLR( __get_SCTLR() | (1 << 11));
  __ISB();
}

/** \brief Enable MMU
    Enable MMU
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_mmu(void) {
  // Set M bit 0 to enable the MMU
  // Set AFE bit to enable simplified access permissions model
  // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
  __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
  __ISB();
}

/******************************** TLB maintenance operations ************************************************/

/** \brief Invalidate the whole TLB
    TLBIALL. Invalidate the whole TLB
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
  uint32_t val = 0;
  __MCR(15, 0, val, 8, 7, 0); // reg to cp15
  __MCR(15, 0, val, 8, 6, 0); // reg to cp15
  __MCR(15, 0, val, 8, 5, 0); // reg to cp15
  __DSB();
  __ISB();
}

/******************************** BTB maintenance operations ************************************************/

/** \brief Invalidate entire branch predictor array
    BPIALL. Branch Predictor Invalidate All.
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_btac(void) {
  uint32_t val = 0;
  __MCR(15, 0, val, 7, 5, 6); // reg to cp15
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new state
}

/******************************** L1 cache operations ******************************************************/

/** \brief Invalidate the whole I$
    ICIALLU. Instruction Cache Invalidate All to PoU
*/
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_icache_all(void) {
  uint32_t val = 0;
  __MCR(15, 0, val, 7, 5, 0); // reg to cp15
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new I cache state
}

// from __v7_inv_dcache_all()
__arm static inline void __v7_all_cache(uint32_t op) {
  __asm(
    "        ARM \n"
    "        PUSH    {R4-R11} \n"
    "        MRC     p15, 1, R6, c0, c0, 1 \n"   // Read CLIDR
    "        ANDS    R3, R6, #0x07000000 \n"     // Extract coherency level
    "        MOV     R3, R3, LSR #23 \n"         // Total cache levels << 1
    "        BEQ     Finished \n"                // If 0, no need to clean
    "        MOV     R10, #0 \n"                 // R10 holds current cache level << 1
    "Loop1:  ADD     R2, R10, R10, LSR #1 \n"    // R2 holds cache "Set" position
    "        MOV     R1, R6, LSR R2 \n"          // Bottom 3 bits are the Cache-type for this level
    "        AND     R1, R1, #7 \n"              // Isolate those lower 3 bits
    "        CMP     R1, #2 \n"
    "        BLT     Skip \n"                    // No cache or only instruction cache at this level
    "        MCR     p15, 2, R10, c0, c0, 0 \n"  // Write the Cache Size selection register
    "        ISB \n"                             // ISB to sync the change to the CacheSizeID reg
    "        MRC     p15, 1, R1, c0, c0, 0 \n"   // Reads current Cache Size ID register
    "        AND     R2, R1, #7 \n"              // Extract the line length field
    "        ADD     R2, R2, #4 \n"              // Add 4 for the line length offset (log2 16 bytes)
    "        movw    R4, #0x3FF \n"
  599. " ANDS R4, R4, R1, LSR #3 \n" // R4 is the max number on the way size (right aligned)
  600. " CLZ R5, R4 \n" // R5 is the bit position of the way size increment
  601. " movw R7, #0x7FFF \n"
  602. " ANDS R7, R7, R1, LSR #13 \n" // R7 is the max number of the index size (right aligned)
  603. "Loop2: MOV R9, R4 \n" // R9 working copy of the max way size (right aligned)
  604. "Loop3: ORR R11, R10, R9, LSL R5 \n" // Factor in the Way number and cache number into R11
  605. " ORR R11, R11, R7, LSL R2 \n" // Factor in the Set number
  606. " CMP R0, #0 \n"
  607. " BNE Dccsw \n"
  608. " MCR p15, 0, R11, c7, c6, 2 \n" // DCISW. Invalidate by Set/Way
  609. " B cont \n"
  610. "Dccsw: CMP R0, #1 \n"
  611. " BNE Dccisw \n"
  612. " MCR p15, 0, R11, c7, c10, 2 \n" // DCCSW. Clean by Set/Way
  613. " B cont \n"
  614. "Dccisw: MCR p15, 0, R11, c7, c14, 2 \n" // DCCISW, Clean and Invalidate by Set/Way
  615. "cont: SUBS R9, R9, #1 \n" // Decrement the Way number
  616. " BGE Loop3 \n"
  617. " SUBS R7, R7, #1 \n" // Decrement the Set number
  618. " BGE Loop2 \n"
  619. "Skip: ADD R10, R10, #2 \n" // increment the cache number
  620. " CMP R3, R10 \n"
  621. " BGT Loop1 \n"
  622. "Finished: \n"
  623. " DSB \n"
  624. " POP {R4-R11} \n"
  625. " BX lr \n" );
  626. }
  627. /** \brief Invalidate the whole D$
  628. DCISW. Invalidate by Set/Way
  629. */
  630. // from system_Renesas_RZ_A1.c
  631. __STATIC_INLINE void __v7_inv_dcache_all(void) {
  632. __v7_all_cache(0);
  633. }
  634. #include "core_ca_mmu.h"
  635. #elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
  636. /* GNU gcc specific functions */
  637. #define MODE_USR 0x10
  638. #define MODE_FIQ 0x11
  639. #define MODE_IRQ 0x12
  640. #define MODE_SVC 0x13
  641. #define MODE_MON 0x16
  642. #define MODE_ABT 0x17
  643. #define MODE_HYP 0x1A
  644. #define MODE_UND 0x1B
  645. #define MODE_SYS 0x1F

/** \brief Enable IRQ Interrupts
    This function enables IRQ interrupts by clearing the I-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i");
}

/** \brief Disable IRQ Interrupts
    This function disables IRQ interrupts by setting the I-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
{
  uint32_t result;
  __ASM volatile ("mrs %0, cpsr" : "=r" (result));
  __ASM volatile ("cpsid i");
  return(result & 0x80);
}
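
/* Usage sketch (editorial): __disable_irq() returns the previous I-bit state,
   so a critical section can restore the caller's interrupt state instead of
   unconditionally re-enabling IRQs:

     uint32_t was_masked = __disable_irq();
     // ... critical section ...
     if (!was_masked) __enable_irq(); // re-enable only if IRQs were on before
*/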

/** \brief Get APSR Register
    This function returns the content of the APSR Register.
    \return APSR Register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
#if 1
  register uint32_t __regAPSR;
  __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
#else
  register uint32_t __regAPSR __ASM("apsr");
#endif
  return(__regAPSR);
}

/** \brief Get CPSR Register
    This function returns the content of the CPSR Register.
    \return CPSR Register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
{
#if 1
  register uint32_t __regCPSR;
  __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
#else
  register uint32_t __regCPSR __ASM("cpsr");
#endif
  return(__regCPSR);
}

#if 0
/** \brief Set Stack Pointer
    This function assigns the given value to the current stack pointer.
    \param [in] topOfStack Stack Pointer value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
  register uint32_t __regSP __ASM("sp");
  __regSP = topOfStack;
}
#endif

/** \brief Get link register
    This function returns the value of the link register
    \return Value of link register
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
{
  register uint32_t __reglr __ASM("lr");
  return(__reglr);
}

#if 0
/** \brief Set link register
    This function sets the value of the link register
    \param [in] lr LR value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
{
  register uint32_t __reglr __ASM("lr");
  __reglr = lr;
}
#endif

/** \brief Set Process Stack Pointer
    This function assigns the given value to the USR/SYS Stack Pointer (PSP).
    \param [in] topOfProcStack USR/SYS Stack Pointer value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  __asm__ volatile (
    ".ARM;"
    ".eabi_attribute Tag_ABI_align8_preserved,1;"
    "BIC R0, R0, #7;"  /* ensure stack is 8-byte aligned */
    "MRS R1, CPSR;"
    "CPS %0;"          /* no effect in USR mode */
    "MOV SP, R0;"
    "MSR CPSR_c, R1;"  /* no effect in USR mode */
    "ISB;"
    //"BX LR;"
    :
    : "i"(MODE_SYS)
    : "r0", "r1");
  return;
}

/** \brief Set User Mode
    This function changes the processor state to User Mode
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
{
  __asm__ volatile (
    ".ARM;"
    "CPS %0;"
    //"BX LR;"
    :
    : "i"(MODE_USR)
    : );
  return;
}

/** \brief Enable FIQ
    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
#define __enable_fault_irq()  __asm__ volatile ("cpsie f")

/** \brief Disable FIQ
    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
*/
#define __disable_fault_irq() __asm__ volatile ("cpsid f")

/** \brief Get FPSCR
    This function returns the current value of the Floating Point Status/Control register.
    \return Floating Point Status/Control register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
  uint32_t result;
  __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
  return (result);
#else
  register uint32_t __regfpscr __ASM("fpscr");
  return(__regfpscr);
#endif
#else
  return(0);
#endif
}

/** \brief Set FPSCR
    This function assigns the given value to the Floating Point Status/Control register.
    \param [in] fpscr Floating Point Status/Control value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
  __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
#else
  register uint32_t __regfpscr __ASM("fpscr");
  __regfpscr = (fpscr);
#endif
#endif
}

/** \brief Get FPEXC
    This function returns the current value of the Floating Point Exception Control register.
    \return Floating Point Exception Control register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
#if 1
  uint32_t result;
  __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
  return (result);
#else
  register uint32_t __regfpexc __ASM("fpexc");
  return(__regfpexc);
#endif
#else
  return(0);
#endif
}

/** \brief Set FPEXC
    This function assigns the given value to the Floating Point Exception Control register.
    \param [in] fpexc Floating Point Exception Control value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
#if 1
  __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
#else
  register uint32_t __regfpexc __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
#endif
}

/** \brief Get CPACR
    This function returns the current value of the Coprocessor Access Control register.
    \return Coprocessor Access Control register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
{
#if 1
  register uint32_t __regCPACR;
  __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
#else
  register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
#endif
  return __regCPACR;
}

/** \brief Set CPACR
    This function assigns the given value to the Coprocessor Access Control register.
    \param [in] cpacr Coprocessor Access Control value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
#if 1
  __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
#else
  register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
  __regCPACR = cpacr;
#endif
  __ISB();
}

/** \brief Get CBAR
    This function returns the value of the Configuration Base Address register.
    \return Configuration Base Address register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
#if 1
  register uint32_t __regCBAR;
  __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
#else
  register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
#endif
  return(__regCBAR);
}

/** \brief Get TTBR0
    This function returns the value of the Translation Table Base Register 0.
    \return Translation Table Base Register 0 value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
#if 1
  register uint32_t __regTTBR0;
  __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
#else
  register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
#endif
  return(__regTTBR0);
}

/** \brief Set TTBR0
    This function assigns the given value to the Translation Table Base Register 0.
    \param [in] ttbr0 Translation Table Base Register 0 value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
#else
  register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
  __regTTBR0 = ttbr0;
#endif
  __ISB();
}

/** \brief Get DACR
    This function returns the value of the Domain Access Control Register.
    \return Domain Access Control Register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
#if 1
  register uint32_t __regDACR;
  __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
#else
  register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
#endif
  return(__regDACR);
}

/** \brief Set DACR
    This function assigns the given value to the Domain Access Control Register.
    \param [in] dacr Domain Access Control Register value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
#else
  register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
  __regDACR = dacr;
#endif
  __ISB();
}

/******************************** Cache and BTAC enable ****************************************************/

/** \brief Set SCTLR
    This function assigns the given value to the System Control Register.
    \param [in] sctlr System Control Register value to set
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
#if 1
  __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
#else
  register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
  __regSCTLR = sctlr;
#endif
}

/** \brief Get SCTLR
    This function returns the value of the System Control Register.
    \return System Control Register value
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
#if 1
  register uint32_t __regSCTLR;
  __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
#else
  register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
#endif
  return(__regSCTLR);
}

/** \brief Enable Caches
    Enable Caches
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
  // Set I bit 12 to enable I Cache
  // Set C bit  2 to enable D Cache
  __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief Disable Caches
    Disable Caches
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
  // Clear I bit 12 to disable I Cache
  // Clear C bit  2 to disable D Cache
  __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
  __ISB();
}

/** \brief Enable BTAC
    Enable BTAC
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
  // Set Z bit 11 to enable branch prediction
  __set_SCTLR( __get_SCTLR() | (1 << 11));
  __ISB();
}

/** \brief Disable BTAC
    Disable BTAC
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
  // Clear Z bit 11 to disable branch prediction
  __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}

/** \brief Enable MMU
    Enable MMU
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
  // Set M bit 0 to enable the MMU
  // Set AFE bit to enable simplified access permissions model
  // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
  __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
  __ISB();
}

/** \brief Disable MMU
    Disable MMU
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
  // Clear M bit 0 to disable the MMU
  __set_SCTLR( __get_SCTLR() & ~1);
  __ISB();
}

/******************************** TLB maintenance operations ************************************************/

/** \brief Invalidate the whole TLB
    TLBIALL. Invalidate the whole TLB
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
#else
  register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
  __TLBIALL = 0;
#endif
  __DSB();
  __ISB();
}

/******************************** BTB maintenance operations ************************************************/

/** \brief Invalidate entire branch predictor array
    BPIALL. Branch Predictor Invalidate All.
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
#else
  register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
  __BPIALL = 0;
#endif
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new state
}

/******************************** L1 cache operations ******************************************************/

/** \brief Invalidate the whole I$
    ICIALLU. Instruction Cache Invalidate All to PoU
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
#else
  register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
  __ICIALLU = 0;
#endif
  __DSB();     //ensure completion of the invalidation
  __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief Clean D$ by MVA
    DCCMVAC. Data cache clean by MVA to PoC
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
#else
  register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
  __DCCMVAC = (uint32_t)va;
#endif
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Invalidate D$ by MVA
    DCIMVAC. Data cache invalidate by MVA to PoC
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
#else
  register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
  __DCIMVAC = (uint32_t)va;
#endif
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate D$ by MVA
    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
#if 1
  __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
#else
  register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
  __DCCIMVAC = (uint32_t)va;
#endif
  __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate the entire data or unified cache
    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
*/
extern void __v7_all_cache(uint32_t op);

/** \brief Invalidate the whole D$
    DCISW. Invalidate by Set/Way
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
  __v7_all_cache(0);
}

/** \brief Clean the whole D$
    DCCSW. Clean by Set/Way
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
  __v7_all_cache(1);
}

/** \brief Clean and invalidate the whole D$
    DCCISW. Clean and Invalidate by Set/Way
*/
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
  __v7_all_cache(2);
}

#include "core_ca_mmu.h"

#elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/

#error TASKING Compiler support not implemented for Cortex-A

#endif

/*@} end of CMSIS_Core_RegAccFunctions */

#endif /* __CORE_CAFUNC_H__ */