source: EcnlProtoTool/trunk/asp3_dcre/mbed/targets/cmsis/core_caFunc.h@ 270

Last change on this file since 270 was 270, checked in by coas-nagasima, 7 years ago

Added the mruby version of the ECNL prototyping tool

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
  • Property svn:mime-type set to text/x-chdr
File size: 31.7 KB
/**************************************************************************//**
 * @file     core_caFunc.h
 * @brief    CMSIS Cortex-A Core Function Access Header File
 * @version  V3.10
 * @date     30 Oct 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CAFUNC_H__
#define __CORE_CAFUNC_H__


/* ########################### Core Function Access ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F

/** \brief Get APSR Register

    This function returns the content of the APSR Register.

    \return APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
    register uint32_t __regAPSR __ASM("apsr");
    return(__regAPSR);
}


/** \brief Get CPSR Register

    This function returns the content of the CPSR Register.

    \return CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
    register uint32_t __regCPSR __ASM("cpsr");
    return(__regCPSR);
}

/** \brief Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in] topOfStack Stack Pointer value to set
 */
register uint32_t __regSP __ASM("sp");
__STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    __regSP = topOfStack;
}


/** \brief Get link register

    This function returns the value of the link register

    \return Value of link register
 */
register uint32_t __reglr __ASM("lr");
__STATIC_INLINE uint32_t __get_LR(void)
{
    return(__reglr);
}

/** \brief Set link register

    This function sets the value of the link register

    \param [in] lr LR value to set
 */
__STATIC_INLINE void __set_LR(uint32_t lr)
{
    __reglr = lr;
}

/** \brief Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in] topOfProcStack USR/SYS Stack Pointer value to set
 */
__STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
{
    ARM
    PRESERVE8

    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
    MRS     R1, CPSR
    CPS     #MODE_SYS   ;no effect in USR mode
    MOV     SP, R0
    MSR     CPSR_c, R1  ;no effect in USR mode
    ISB
    BX      LR

}

/** \brief Set User Mode

    This function changes the processor state to User Mode
 */
__STATIC_ASM void __set_CPS_USR(void)
{
    ARM

    CPS  #MODE_USR
    BX   LR
}

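/* Usage sketch (illustrative, not part of the CMSIS-defined API): a typical
   privileged start-up path programs the USR/SYS stack pointer and then drops
   to User Mode. user_stack_top is a hypothetical, linker-provided symbol. */
#if 0
extern uint32_t user_stack_top;           /* hypothetical: top of the USR/SYS stack region */
static void start_user_code(void)
{
    __set_PSP((uint32_t)&user_stack_top); /* give USR/SYS its own stack */
    __set_CPS_USR();                      /* switch to unprivileged User Mode */
}
#endif
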

/** \brief Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq __enable_fiq


/** \brief Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq __disable_fiq


/** \brief Get FPSCR

    This function returns the current value of the Floating Point Status/Control register.

    \return Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
    register uint32_t __regfpscr __ASM("fpscr");
    return(__regfpscr);
#else
    return(0);
#endif
}


/** \brief Set FPSCR

    This function assigns the given value to the Floating Point Status/Control register.

    \param [in] fpscr Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
    register uint32_t __regfpscr __ASM("fpscr");
    __regfpscr = (fpscr);
#endif
}

/** \brief Get FPEXC

    This function returns the current value of the Floating Point Exception Control register.

    \return Floating Point Exception Control register value
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
    register uint32_t __regfpexc __ASM("fpexc");
    return(__regfpexc);
#else
    return(0);
#endif
}


/** \brief Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.

    \param [in] fpexc Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
    register uint32_t __regfpexc __ASM("fpexc");
    __regfpexc = (fpexc);
#endif
}

/** \brief Get CPACR

    This function returns the current value of the Coprocessor Access Control register.

    \return Coprocessor Access Control register value
 */
__STATIC_INLINE uint32_t __get_CPACR(void)
{
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
    return __regCPACR;
}

/** \brief Set CPACR

    This function assigns the given value to the Coprocessor Access Control register.

    \param [in] cpacr Coprocessor Access Control value to set
 */
__STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
    __regCPACR = cpacr;
    __ISB();
}

/** \brief Get CBAR

    This function returns the value of the Configuration Base Address register.

    \return Configuration Base Address register value
 */
__STATIC_INLINE uint32_t __get_CBAR() {
    register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
    return(__regCBAR);
}

/** \brief Get TTBR0

    This function returns the value of the Translation Table Base Register 0.

    \return Translation Table Base Register 0 value
 */
__STATIC_INLINE uint32_t __get_TTBR0() {
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
    return(__regTTBR0);
}

/** \brief Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in] ttbr0 Translation Table Base Register 0 value to set
 */
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
    __regTTBR0 = ttbr0;
    __ISB();
}

/** \brief Get DACR

    This function returns the value of the Domain Access Control Register.

    \return Domain Access Control Register value
 */
__STATIC_INLINE uint32_t __get_DACR() {
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
    return(__regDACR);
}

/** \brief Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in] dacr Domain Access Control Register value to set
 */
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
    __regDACR = dacr;
    __ISB();
}

/******************************** Cache and BTAC enable ****************************************************/

/** \brief Set SCTLR

    This function assigns the given value to the System Control Register.

    \param [in] sctlr System Control Register value to set
 */
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
}

/** \brief Get SCTLR

    This function returns the value of the System Control Register.

    \return System Control Register value
 */
__STATIC_INLINE uint32_t __get_SCTLR() {
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
    return(__regSCTLR);
}

/** \brief Enable Caches

    Enable Caches
 */
__STATIC_INLINE void __enable_caches(void) {
    // Set I bit 12 to enable I Cache
    // Set C bit  2 to enable D Cache
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief Disable Caches

    Disable Caches
 */
__STATIC_INLINE void __disable_caches(void) {
    // Clear I bit 12 to disable I Cache
    // Clear C bit  2 to disable D Cache
    __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    __ISB();
}

/** \brief Enable BTAC

    Enable BTAC
 */
__STATIC_INLINE void __enable_btac(void) {
    // Set Z bit 11 to enable branch prediction
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief Disable BTAC

    Disable BTAC
 */
__STATIC_INLINE void __disable_btac(void) {
    // Clear Z bit 11 to disable branch prediction
    __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}


/** \brief Enable MMU

    Enable MMU
 */
__STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}

/** \brief Disable MMU

    Disable MMU
 */
__STATIC_INLINE void __disable_mmu(void) {
    // Clear M bit 0 to disable the MMU
    __set_SCTLR( __get_SCTLR() & ~1);
    __ISB();
}

/******************************** TLB maintenance operations ************************************************/
/** \brief Invalidate the whole tlb

    TLBIALL. Invalidate the whole tlb
 */

__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
    register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
    __DSB();
    __ISB();
}

/******************************** BTB maintenance operations ************************************************/
/** \brief Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */

__STATIC_INLINE void __v7_inv_btac(void) {
    register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
    __BPIALL = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
__STATIC_INLINE void __v7_inv_icache_all(void) {
    register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief Clean D$ by MVA

    DCCMVAC. Data cache clean by MVA to PoC
 */
__STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
    register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Invalidate D$ by MVA

    DCIMVAC. Data cache invalidate by MVA to PoC
 */
__STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
    register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
 */
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
    register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

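/* Usage sketch (illustrative): cleaning a whole buffer to the PoC before a
   DMA engine reads it. A 32-byte cache line is assumed here for brevity; a
   real driver should derive the line length from CCSIDR/CTR instead. */
#if 0
static void clean_buffer_for_dma(const void *buf, uint32_t len) {
    uint32_t mva = (uint32_t)buf & ~31UL;         /* align down to a cache line */
    for (; mva < (uint32_t)buf + len; mva += 32)  /* walk the buffer line by line */
        __v7_clean_dcache_mva((void *)mva);
    __DSB();                                      /* all cleans complete before DMA starts */
}
#endif
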
/** \brief Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
 */
#pragma push
#pragma arm
__STATIC_ASM void __v7_all_cache(uint32_t op) {
        ARM

        PUSH    {R4-R11}

        MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
        ANDS    R3, R6, #0x07000000        // Extract coherency level
        MOV     R3, R3, LSR #23            // Total cache levels << 1
        BEQ     Finished                   // If 0, no need to clean

        MOV     R10, #0                    // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7                 // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                       // No cache or only instruction cache at this level

        MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
        ISB                                // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
        AND     R2, R1, #7                 // Extract the line length field
        ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3         // R4 is the maximum way number (right aligned)
        CLZ     R5, R4                     // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13        // R7 is the maximum set number (right aligned)

Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)

Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
        ORR     R11, R11, R7, LSL R2       // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1                 // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1                 // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2               // Increment the cache number
        CMP     R3, R10
        BGT     Loop1

Finished
        DSB
        POP     {R4-R11}
        BX      lr

}
#pragma pop
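
/* For reference (a reading of the loop above, not part of the original
   comments): the word assembled in R11 and written by each MCR follows the
   ARMv7-A Set/Way format:
     way   in the top bits, shifted left by R5 = CLZ(max way number)
     set   shifted left by R2 = log2(line length in bytes)
     level in bits [3:1], carried in R10 as (cache level << 1)
   and the op argument in R0 (0/1/2) selects DCISW, DCCSW or DCCISW. */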


/** \brief Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);
}

/** \brief Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);
}

/** \brief Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);
}

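/* Bring-up sketch (illustrative): one common ordering for turning the memory
   system on once the translation tables have been written. ttb is a
   hypothetical, suitably aligned first-level table; TTBR0 attribute bits are
   omitted for brevity. */
#if 0
static void mmu_and_caches_on(uint32_t *ttb) {
    __set_TTBR0((uint32_t)ttb);   /* point the table walker at the tables */
    __set_DACR(0x55555555);       /* every domain: client, permissions checked */
    __ca9u_inv_tlb_all();         /* no stale translations */
    __v7_inv_icache_all();        /* no stale instructions */
    __v7_inv_dcache_all();        /* no stale data */
    __enable_mmu();
    __enable_caches();
    __enable_btac();
}
#endif
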
#include "core_ca_mmu.h"

#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/

#error IAR Compiler support not implemented for Cortex-A

#elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F


/** \brief Enable IRQ Interrupts

    This function enables IRQ interrupts by clearing the I-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
    __ASM volatile ("cpsie i");
}

/** \brief Disable IRQ Interrupts

    This function disables IRQ interrupts by setting the I-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
{
    uint32_t result;

    __ASM volatile ("mrs %0, cpsr" : "=r" (result));
    __ASM volatile ("cpsid i");
    return(result & 0x80);
}

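/* Usage sketch (illustrative): a minimal critical section built from the two
   primitives above. __disable_irq() returns the previous I-bit, so the
   wrapper only re-enables IRQs if they were enabled on entry. */
#if 0
static inline void with_irqs_off(void (*fn)(void))
{
    uint32_t was_masked = __disable_irq();  /* mask IRQs, remember prior state */
    fn();                                   /* run the protected work */
    if (!was_masked)
        __enable_irq();                     /* restore only if we did the masking */
}
#endif
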

/** \brief Get APSR Register

    This function returns the content of the APSR Register.

    \return APSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
#if 1
    register uint32_t __regAPSR;
    __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
#else
    register uint32_t __regAPSR __ASM("apsr");
#endif
    return(__regAPSR);
}


/** \brief Get CPSR Register

    This function returns the content of the CPSR Register.

    \return CPSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
{
#if 1
    register uint32_t __regCPSR;
    __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
#else
    register uint32_t __regCPSR __ASM("cpsr");
#endif
    return(__regCPSR);
}

#if 0
/** \brief Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in] topOfStack Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    register uint32_t __regSP __ASM("sp");
    __regSP = topOfStack;
}
#endif

/** \brief Get link register

    This function returns the value of the link register

    \return Value of link register
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
{
    register uint32_t __reglr __ASM("lr");
    return(__reglr);
}

#if 0
/** \brief Set link register

    This function sets the value of the link register

    \param [in] lr LR value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
{
    register uint32_t __reglr __ASM("lr");
    __reglr = lr;
}
#endif

/** \brief Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in] topOfProcStack USR/SYS Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
    __asm__ volatile (
    ".ARM;"
    ".eabi_attribute Tag_ABI_align8_preserved,1;"

    "BIC     R0, R0, #7;"  /* ;ensure stack is 8-byte aligned */
    "MRS     R1, CPSR;"
    "CPS     %0;"          /* ;no effect in USR mode */
    "MOV     SP, R0;"
    "MSR     CPSR_c, R1;"  /* ;no effect in USR mode */
    "ISB;"
    //"BX      LR;"
    :
    : "i"(MODE_SYS)
    : "r0", "r1");
    return;
}

/** \brief Set User Mode

    This function changes the processor state to User Mode
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
{
    __asm__ volatile (
    ".ARM;"

    "CPS  %0;"
    //"BX   LR;"
    :
    : "i"(MODE_USR)
    : );
    return;
}


/** \brief Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq() __asm__ volatile ("cpsie f")


/** \brief Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq() __asm__ volatile ("cpsid f")


/** \brief Get FPSCR

    This function returns the current value of the Floating Point Status/Control register.

    \return Floating Point Status/Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
    uint32_t result;

    __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
    return (result);
#else
    register uint32_t __regfpscr __ASM("fpscr");
    return(__regfpscr);
#endif
#else
    return(0);
#endif
}


/** \brief Set FPSCR

    This function assigns the given value to the Floating Point Status/Control register.

    \param [in] fpscr Floating Point Status/Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
    __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
#else
    register uint32_t __regfpscr __ASM("fpscr");
    __regfpscr = (fpscr);
#endif
#endif
}

/** \brief Get FPEXC

    This function returns the current value of the Floating Point Exception Control register.

    \return Floating Point Exception Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
#if 1
    uint32_t result;

    __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
    return (result);
#else
    register uint32_t __regfpexc __ASM("fpexc");
    return(__regfpexc);
#endif
#else
    return(0);
#endif
}


/** \brief Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.

    \param [in] fpexc Floating Point Exception Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
#if 1
    __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
#else
    register uint32_t __regfpexc __ASM("fpexc");
    __regfpexc = (fpexc);
#endif
#endif
}

/** \brief Get CPACR

    This function returns the current value of the Coprocessor Access Control register.

    \return Coprocessor Access Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
{
#if 1
    register uint32_t __regCPACR;
    __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
#else
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
#endif
    return __regCPACR;
}

/** \brief Set CPACR

    This function assigns the given value to the Coprocessor Access Control register.

    \param [in] cpacr Coprocessor Access Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
#if 1
    __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
#else
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
    __regCPACR = cpacr;
#endif
    __ISB();
}

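/* Usage sketch (illustrative): granting CP10/CP11 in CPACR is a prerequisite
   for setting FPEXC.EN (bit 30), which actually turns the VFP/NEON unit on.
   The bit positions are from the ARMv7-A architecture. */
#if 0
static inline void fpu_on(void)
{
    __set_CPACR(__get_CPACR() | (0xF << 20)); /* CP10/CP11: full access (ISB issued inside) */
    __set_FPEXC(__get_FPEXC() | (1U << 30));  /* FPEXC.EN: enable the FPU */
}
#endif
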
/** \brief Get CBAR

    This function returns the value of the Configuration Base Address register.

    \return Configuration Base Address register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
#if 1
    register uint32_t __regCBAR;
    __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
#else
    register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
#endif
    return(__regCBAR);
}

/** \brief Get TTBR0

    This function returns the value of the Translation Table Base Register 0.

    \return Translation Table Base Register 0 value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
#if 1
    register uint32_t __regTTBR0;
    __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
#else
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
#endif
    return(__regTTBR0);
}

/** \brief Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in] ttbr0 Translation Table Base Register 0 value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
#else
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
    __regTTBR0 = ttbr0;
#endif
    __ISB();
}

/** \brief Get DACR

    This function returns the value of the Domain Access Control Register.

    \return Domain Access Control Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
#if 1
    register uint32_t __regDACR;
    __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
#else
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
#endif
    return(__regDACR);
}

/** \brief Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in] dacr Domain Access Control Register value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
#else
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
    __regDACR = dacr;
#endif
    __ISB();
}

/******************************** Cache and BTAC enable ****************************************************/

/** \brief Set SCTLR

    This function assigns the given value to the System Control Register.

    \param [in] sctlr System Control Register value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
#if 1
    __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
#else
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
#endif
}

/** \brief Get SCTLR

    This function returns the value of the System Control Register.

    \return System Control Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
#if 1
    register uint32_t __regSCTLR;
    __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
#else
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
#endif
    return(__regSCTLR);
}

/** \brief Enable Caches

    Enable Caches
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
    // Set I bit 12 to enable I Cache
    // Set C bit  2 to enable D Cache
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief Disable Caches

    Disable Caches
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
    // Clear I bit 12 to disable I Cache
    // Clear C bit  2 to disable D Cache
    __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    __ISB();
}

/** \brief Enable BTAC

    Enable BTAC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
    // Set Z bit 11 to enable branch prediction
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief Disable BTAC

    Disable BTAC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
    // Clear Z bit 11 to disable branch prediction
    __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}


/** \brief Enable MMU

    Enable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}

/** \brief Disable MMU

    Disable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
    // Clear M bit 0 to disable the MMU
    __set_SCTLR( __get_SCTLR() & ~1);
    __ISB();
}

/******************************** TLB maintenance operations ************************************************/
/** \brief Invalidate the whole tlb

    TLBIALL. Invalidate the whole tlb
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
#else
    register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
#endif
    __DSB();
    __ISB();
}

/******************************** BTB maintenance operations ************************************************/
/** \brief Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
#else
    register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
    __BPIALL = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
#else
    register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief Clean D$ by MVA

    DCCMVAC. Data cache clean by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Invalidate D$ by MVA

    DCIMVAC. Data cache invalidate by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
 */
extern void __v7_all_cache(uint32_t op);


/** \brief Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);
}

/** \brief Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);
}

/** \brief Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);
}

#include "core_ca_mmu.h"

#elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/

#error TASKING Compiler support not implemented for Cortex-A

#endif

/*@} end of CMSIS_Core_RegAccFunctions */


#endif /* __CORE_CAFUNC_H__ */