1 /**************************************************************************//**
3 * @brief CMSIS Cortex-A Core Function Access Header File
9 ******************************************************************************/
10 /* Copyright (c) 2009 - 2012 ARM LIMITED
13 Redistribution and use in source and binary forms, with or without
14 modification, are permitted provided that the following conditions are met:
15 - Redistributions of source code must retain the above copyright
16 notice, this list of conditions and the following disclaimer.
17 - Redistributions in binary form must reproduce the above copyright
18 notice, this list of conditions and the following disclaimer in the
19 documentation and/or other materials provided with the distribution.
20 - Neither the name of ARM nor the names of its contributors may be used
21 to endorse or promote products derived from this software without
22 specific prior written permission.
24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34 POSSIBILITY OF SUCH DAMAGE.
35 ---------------------------------------------------------------------------*/
38 #ifndef __CORE_CAFUNC_H__
39 #define __CORE_CAFUNC_H__
42 /* ########################### Core Function Access ########################### */
43 /** \ingroup CMSIS_Core_FunctionInterface
44 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
48 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
49 /* ARM armcc specific functions */
51 #if (__ARMCC_VERSION < 400677)
52 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
65 /** \brief Get APSR Register
67 This function returns the content of the APSR Register.
69 \return APSR Register value
71 __STATIC_INLINE uint32_t __get_APSR(void)
73 register uint32_t __regAPSR __ASM("apsr");
78 /** \brief Get CPSR Register
80 This function returns the content of the CPSR Register.
82 \return CPSR Register value
84 __STATIC_INLINE uint32_t __get_CPSR(void)
86 register uint32_t __regCPSR __ASM("cpsr");
90 /** \brief Set Stack Pointer
92 This function assigns the given value to the current stack pointer.
94 \param [in] topOfStack Stack Pointer value to set
96 register uint32_t __regSP __ASM("sp");
97 __STATIC_INLINE void __set_SP(uint32_t topOfStack)
103 /** \brief Get link register
105 This function returns the value of the link register
107 \return Value of link register
109 register uint32_t __reglr __ASM("lr");
110 __STATIC_INLINE uint32_t __get_LR(void)
115 /** \brief Set link register
117 This function sets the value of the link register
119 \param [in] lr LR value to set
121 __STATIC_INLINE void __set_LR(uint32_t lr)
126 /** \brief Set Process Stack Pointer
128 This function assigns the given value to the USR/SYS Stack Pointer (PSP).
130 \param [in] topOfProcStack USR/SYS Stack Pointer value to set
132 __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
137 BIC R0, R0, #7 ;ensure stack is 8-byte aligned
139 CPS #MODE_SYS ;no effect in USR mode
141 MSR CPSR_c, R1 ;no effect in USR mode
147 /** \brief Set User Mode
149 This function changes the processor state to User Mode
151 \param [in] topOfProcStack USR/SYS Stack Pointer value to set
153 __STATIC_ASM void __set_CPS_USR(void)
/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq __enable_fiq


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq __disable_fiq
180 This function returns the current value of the Floating Point Status/Control register.
182 \return Floating Point Status/Control register value
184 __STATIC_INLINE uint32_t __get_FPSCR(void)
186 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
187 register uint32_t __regfpscr __ASM("fpscr");
197 This function assigns the given value to the Floating Point Status/Control register.
199 \param [in] fpscr Floating Point Status/Control value to set
201 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
203 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
204 register uint32_t __regfpscr __ASM("fpscr");
205 __regfpscr = (fpscr);
211 This function returns the current value of the Floating Point Exception Control register.
213 \return Floating Point Exception Control register value
215 __STATIC_INLINE uint32_t __get_FPEXC(void)
217 #if (__FPU_PRESENT == 1)
218 register uint32_t __regfpexc __ASM("fpexc");
228 This function assigns the given value to the Floating Point Exception Control register.
230 \param [in] fpscr Floating Point Exception Control value to set
232 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
234 #if (__FPU_PRESENT == 1)
235 register uint32_t __regfpexc __ASM("fpexc");
236 __regfpexc = (fpexc);
242 This function returns the current value of the Coprocessor Access Control register.
244 \return Coprocessor Access Control register value
246 __STATIC_INLINE uint32_t __get_CPACR(void)
248 register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
254 This function assigns the given value to the Coprocessor Access Control register.
256 \param [in] cpacr Coporcessor Acccess Control value to set
258 __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
260 register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
267 This function returns the value of the Configuration Base Address register.
269 \return Configuration Base Address register value
271 __STATIC_INLINE uint32_t __get_CBAR() {
272 register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
278 This function returns the value of the Configuration Base Address register.
280 \return Translation Table Base Register 0 value
282 __STATIC_INLINE uint32_t __get_TTBR0() {
283 register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
289 This function assigns the given value to the Coprocessor Access Control register.
291 \param [in] ttbr0 Translation Table Base Register 0 value to set
293 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
294 register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
301 This function returns the value of the Domain Access Control Register.
303 \return Domain Access Control Register value
305 __STATIC_INLINE uint32_t __get_DACR() {
306 register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
312 This function assigns the given value to the Coprocessor Access Control register.
314 \param [in] dacr Domain Access Control Register value to set
316 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
317 register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
322 /******************************** Cache and BTAC enable ****************************************************/
326 This function assigns the given value to the System Control Register.
328 \param [in] sctlr System Control Register, value to set
330 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
332 register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
338 This function returns the value of the System Control Register.
340 \return System Control Register value
342 __STATIC_INLINE uint32_t __get_SCTLR() {
343 register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
347 /** \brief Enable Caches
351 __STATIC_INLINE void __enable_caches(void) {
352 // Set I bit 12 to enable I Cache
353 // Set C bit 2 to enable D Cache
354 __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
357 /** \brief Disable Caches
361 __STATIC_INLINE void __disable_caches(void) {
362 // Clear I bit 12 to disable I Cache
363 // Clear C bit 2 to disable D Cache
364 __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
368 /** \brief Enable BTAC
372 __STATIC_INLINE void __enable_btac(void) {
373 // Set Z bit 11 to enable branch prediction
374 __set_SCTLR( __get_SCTLR() | (1 << 11));
378 /** \brief Disable BTAC
382 __STATIC_INLINE void __disable_btac(void) {
383 // Clear Z bit 11 to disable branch prediction
384 __set_SCTLR( __get_SCTLR() & ~(1 << 11));
388 /** \brief Enable MMU
392 __STATIC_INLINE void __enable_mmu(void) {
393 // Set M bit 0 to enable the MMU
394 // Set AFE bit to enable simplified access permissions model
395 // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
396 __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
400 /** \brief Enable MMU
404 __STATIC_INLINE void __disable_mmu(void) {
405 // Clear M bit 0 to disable the MMU
406 __set_SCTLR( __get_SCTLR() & ~1);
410 /******************************** TLB maintenance operations ************************************************/
411 /** \brief Invalidate the whole tlb
413 TLBIALL. Invalidate the whole tlb
416 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
417 register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
423 /******************************** BTB maintenance operations ************************************************/
424 /** \brief Invalidate entire branch predictor array
426 BPIALL. Branch Predictor Invalidate All.
429 __STATIC_INLINE void __v7_inv_btac(void) {
430 register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
432 __DSB(); //ensure completion of the invalidation
433 __ISB(); //ensure instruction fetch path sees new state
437 /******************************** L1 cache operations ******************************************************/
439 /** \brief Invalidate the whole I$
441 ICIALLU. Instruction Cache Invalidate All to PoU
443 __STATIC_INLINE void __v7_inv_icache_all(void) {
444 register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
446 __DSB(); //ensure completion of the invalidation
447 __ISB(); //ensure instruction fetch path sees new I cache state
450 /** \brief Clean D$ by MVA
452 DCCMVAC. Data cache clean by MVA to PoC
454 __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
455 register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
456 __DCCMVAC = (uint32_t)va;
457 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
460 /** \brief Invalidate D$ by MVA
462 DCIMVAC. Data cache invalidate by MVA to PoC
464 __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
465 register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
466 __DCIMVAC = (uint32_t)va;
467 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
470 /** \brief Clean and Invalidate D$ by MVA
472 DCCIMVAC. Data cache clean and invalidate by MVA to PoC
474 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
475 register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
476 __DCCIMVAC = (uint32_t)va;
477 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
481 * Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
485 __STATIC_ASM void __v7_all_cache(uint32_t op) {
490 MRC p15, 1, R6, c0, c0, 1 // Read CLIDR
491 ANDS R3, R6, #0x07000000 // Extract coherency level
492 MOV R3, R3, LSR #23 // Total cache levels << 1
493 BEQ Finished // If 0, no need to clean
495 MOV R10, #0 // R10 holds current cache level << 1
496 Loop1 ADD R2, R10, R10, LSR #1 // R2 holds cache "Set" position
497 MOV R1, R6, LSR R2 // Bottom 3 bits are the Cache-type for this level
498 AND R1, R1, #7 // Isolate those lower 3 bits
500 BLT Skip // No cache or only instruction cache at this level
502 MCR p15, 2, R10, c0, c0, 0 // Write the Cache Size selection register
503 ISB // ISB to sync the change to the CacheSizeID reg
504 MRC p15, 1, R1, c0, c0, 0 // Reads current Cache Size ID register
505 AND R2, R1, #7 // Extract the line length field
506 ADD R2, R2, #4 // Add 4 for the line length offset (log2 16 bytes)
508 ANDS R4, R4, R1, LSR #3 // R4 is the max number on the way size (right aligned)
509 CLZ R5, R4 // R5 is the bit position of the way size increment
511 ANDS R7, R7, R1, LSR #13 // R7 is the max number of the index size (right aligned)
513 Loop2 MOV R9, R4 // R9 working copy of the max way size (right aligned)
515 Loop3 ORR R11, R10, R9, LSL R5 // Factor in the Way number and cache number into R11
516 ORR R11, R11, R7, LSL R2 // Factor in the Set number
519 MCR p15, 0, R11, c7, c6, 2 // DCISW. Invalidate by Set/Way
523 MCR p15, 0, R11, c7, c10, 2 // DCCSW. Clean by Set/Way
525 Dccisw MCR p15, 0, R11, c7, c14, 2 // DCCISW, Clean and Invalidate by Set/Way
526 cont SUBS R9, R9, #1 // Decrement the Way number
528 SUBS R7, R7, #1 // Decrement the Set number
530 Skip ADD R10, R10, #2 // increment the cache number
542 /** \brief __v7_all_cache - helper function
546 /** \brief Invalidate the whole D$
548 DCISW. Invalidate by Set/Way
551 __STATIC_INLINE void __v7_inv_dcache_all(void) {
555 /** \brief Clean the whole D$
557 DCCSW. Clean by Set/Way
560 __STATIC_INLINE void __v7_clean_dcache_all(void) {
564 /** \brief Clean and invalidate the whole D$
566 DCCISW. Clean and Invalidate by Set/Way
569 __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
573 #include "core_ca_mmu.h"
#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/

#error IAR Compiler support not implemented for Cortex-A

#elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* CPSR.M processor mode encodings, used with the CPS instruction
   (e.g. CPS #MODE_SYS) and when switching modes via MSR CPSR_c. */
#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F
594 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
596 __ASM volatile ("cpsie i");
599 /** \brief Disable IRQ Interrupts
601 This function disables IRQ interrupts by setting the I-bit in the CPSR.
602 Can only be executed in Privileged modes.
604 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
608 __ASM volatile ("mrs %0, cpsr" : "=r" (result));
609 __ASM volatile ("cpsid i");
610 return(result & 0x80);
614 /** \brief Get APSR Register
616 This function returns the content of the APSR Register.
618 \return APSR Register value
620 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
625 __ASM volatile ("mrs %0, apsr" : "=r" (result) );
628 register uint32_t __regAPSR __ASM("apsr");
634 /** \brief Get CPSR Register
636 This function returns the content of the CPSR Register.
638 \return CPSR Register value
640 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
643 register uint32_t __regCPSR;
644 __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
646 register uint32_t __regCPSR __ASM("cpsr");
652 /** \brief Set Stack Pointer
654 This function assigns the given value to the current stack pointer.
656 \param [in] topOfStack Stack Pointer value to set
658 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
660 register uint32_t __regSP __ASM("sp");
661 __regSP = topOfStack;
665 /** \brief Get link register
667 This function returns the value of the link register
669 \return Value of link register
671 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
673 register uint32_t __reglr __ASM("lr");
678 /** \brief Set link register
680 This function sets the value of the link register
682 \param [in] lr LR value to set
684 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
686 register uint32_t __reglr __ASM("lr");
/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).
    Implemented out of line (assembly file) for the GNU toolchain.

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
extern void __set_PSP(uint32_t topOfProcStack);

/** \brief  Set User Mode

    This function changes the processor state to User Mode.
    Implemented out of line (assembly file) for the GNU toolchain.
 */
extern void __set_CPS_USR(void);

/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq __enable_fiq


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq __disable_fiq
725 This function returns the current value of the Floating Point Status/Control register.
727 \return Floating Point Status/Control register value
729 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
731 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
735 __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
738 register uint32_t __regfpscr __ASM("fpscr");
749 This function assigns the given value to the Floating Point Status/Control register.
751 \param [in] fpscr Floating Point Status/Control value to set
753 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
755 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
757 __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
759 register uint32_t __regfpscr __ASM("fpscr");
760 __regfpscr = (fpscr);
767 This function returns the current value of the Floating Point Exception Control register.
769 \return Floating Point Exception Control register value
771 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
773 #if (__FPU_PRESENT == 1)
777 __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
780 register uint32_t __regfpexc __ASM("fpexc");
791 This function assigns the given value to the Floating Point Exception Control register.
793 \param [in] fpscr Floating Point Exception Control value to set
795 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
797 #if (__FPU_PRESENT == 1)
799 __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
801 register uint32_t __regfpexc __ASM("fpexc");
802 __regfpexc = (fpexc);
809 This function returns the current value of the Coprocessor Access Control register.
811 \return Coprocessor Access Control register value
813 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
816 register uint32_t __regCPACR;
817 __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
819 register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
826 This function assigns the given value to the Coprocessor Access Control register.
828 \param [in] cpacr Coporcessor Acccess Control value to set
830 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
833 __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
835 register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
843 This function returns the value of the Configuration Base Address register.
845 \return Configuration Base Address register value
847 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
849 register uint32_t __regCBAR;
850 __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
852 register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
859 This function returns the value of the Configuration Base Address register.
861 \return Translation Table Base Register 0 value
863 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
865 register uint32_t __regTTBR0;
866 __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
868 register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
875 This function assigns the given value to the Coprocessor Access Control register.
877 \param [in] ttbr0 Translation Table Base Register 0 value to set
879 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
881 __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
883 register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
891 This function returns the value of the Domain Access Control Register.
893 \return Domain Access Control Register value
895 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
897 register uint32_t __regDACR;
898 __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
900 register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
907 This function assigns the given value to the Coprocessor Access Control register.
909 \param [in] dacr Domain Access Control Register value to set
911 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
913 __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
915 register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
921 /******************************** Cache and BTAC enable ****************************************************/
925 This function assigns the given value to the System Control Register.
927 \param [in] sctlr System Control Register, value to set
929 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
932 __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
934 register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
941 This function returns the value of the System Control Register.
943 \return System Control Register value
945 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
947 register uint32_t __regSCTLR;
948 __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
950 register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
955 /** \brief Enable Caches
959 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
960 // Set I bit 12 to enable I Cache
961 // Set C bit 2 to enable D Cache
962 __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
965 /** \brief Disable Caches
969 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
970 // Clear I bit 12 to disable I Cache
971 // Clear C bit 2 to disable D Cache
972 __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
976 /** \brief Enable BTAC
980 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
981 // Set Z bit 11 to enable branch prediction
982 __set_SCTLR( __get_SCTLR() | (1 << 11));
986 /** \brief Disable BTAC
990 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
991 // Clear Z bit 11 to disable branch prediction
992 __set_SCTLR( __get_SCTLR() & ~(1 << 11));
996 /** \brief Enable MMU
1000 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
1001 // Set M bit 0 to enable the MMU
1002 // Set AFE bit to enable simplified access permissions model
1003 // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
1004 __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
1008 /** \brief Enable MMU
1012 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
1013 // Clear M bit 0 to disable the MMU
1014 __set_SCTLR( __get_SCTLR() & ~1);
1018 /******************************** TLB maintenance operations ************************************************/
1019 /** \brief Invalidate the whole tlb
1021 TLBIALL. Invalidate the whole tlb
1024 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
1026 __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
1028 register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
1035 /******************************** BTB maintenance operations ************************************************/
1036 /** \brief Invalidate entire branch predictor array
1038 BPIALL. Branch Predictor Invalidate All.
1041 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
1043 __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
1045 register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
1048 __DSB(); //ensure completion of the invalidation
1049 __ISB(); //ensure instruction fetch path sees new state
1053 /******************************** L1 cache operations ******************************************************/
1055 /** \brief Invalidate the whole I$
1057 ICIALLU. Instruction Cache Invalidate All to PoU
1059 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
1061 __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
1063 register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
1066 __DSB(); //ensure completion of the invalidation
1067 __ISB(); //ensure instruction fetch path sees new I cache state
1070 /** \brief Clean D$ by MVA
1072 DCCMVAC. Data cache clean by MVA to PoC
1074 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
1076 __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
1078 register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
1079 __DCCMVAC = (uint32_t)va;
1081 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
1084 /** \brief Invalidate D$ by MVA
1086 DCIMVAC. Data cache invalidate by MVA to PoC
1088 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
1090 __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
1092 register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
1093 __DCIMVAC = (uint32_t)va;
1095 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
1098 /** \brief Clean and Invalidate D$ by MVA
1100 DCCIMVAC. Data cache clean and invalidate by MVA to PoC
1102 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
1104 __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
1106 register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
1107 __DCCIMVAC = (uint32_t)va;
1109 __DMB(); //ensure the ordering of data cache maintenance operations and their effects
1113 * Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
1116 /** \brief __v7_all_cache - helper function
1120 extern void __v7_all_cache(uint32_t op);
1123 /** \brief Invalidate the whole D$
1125 DCISW. Invalidate by Set/Way
1128 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
1132 /** \brief Clean the whole D$
1134 DCCSW. Clean by Set/Way
1137 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
1141 /** \brief Clean and invalidate the whole D$
1143 DCCISW. Clean and Invalidate by Set/Way
1146 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
1150 #include "core_ca_mmu.h"
1152 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
1154 #error TASKING Compiler support not implemented for Cortex-A
1158 /*@} end of CMSIS_Core_RegAccFunctions */
1161 #endif /* __CORE_CAFUNC_H__ */