source: asp3_tinet_ecnl_arm/trunk/asp3_dcre/mbed/cmsis/core_caFunc.h@352

Last change on this file since 352 was 352, checked in by coas-nagasima, 6 years ago

Added the ASP3 version of ECNL for ARM

  • Property svn:eol-style set to native
  • Property svn:mime-type set to text/x-chdr;charset=UTF-8
File size: 39.9 KB
/**************************************************************************//**
 * @file     core_caFunc.h
 * @brief    CMSIS Cortex-A Core Function Access Header File
 * @version  V3.10
 * @date     30 Oct 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CAFUNC_H__
#define __CORE_CAFUNC_H__


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F

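/* Usage sketch (illustrative, not part of the original header): the low five
   bits of the CPSR hold the current processor mode, so code can compare them
   against the encodings above, e.g.

       if ((__get_CPSR() & 0x1FU) == MODE_SVC) {
           // running in Supervisor mode
       }
 */
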
/** \brief  Get APSR Register

    This function returns the content of the APSR Register.

    \return               APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
    register uint32_t __regAPSR        __ASM("apsr");
    return(__regAPSR);
}


/** \brief  Get CPSR Register

    This function returns the content of the CPSR Register.

    \return               CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
    register uint32_t __regCPSR        __ASM("cpsr");
    return(__regCPSR);
}

/** \brief  Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in]    topOfStack  Stack Pointer value to set
 */
register uint32_t __regSP              __ASM("sp");
__STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    __regSP = topOfStack;
}


/** \brief  Get link register

    This function returns the value of the link register.

    \return    Value of link register
 */
register uint32_t __reglr              __ASM("lr");
__STATIC_INLINE uint32_t __get_LR(void)
{
    return(__reglr);
}

/** \brief  Set link register

    This function sets the value of the link register.

    \param [in]    lr  LR value to set
 */
__STATIC_INLINE void __set_LR(uint32_t lr)
{
    __reglr = lr;
}

/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
{
    ARM
    PRESERVE8

    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
    MRS     R1, CPSR
    CPS     #MODE_SYS   ;no effect in USR mode
    MOV     SP, R0
    MSR     CPSR_c, R1  ;no effect in USR mode
    ISB
    BX      LR

}

/** \brief  Set User Mode

    This function changes the processor state to User Mode
 */
__STATIC_ASM void __set_CPS_USR(void)
{
    ARM

    CPS  #MODE_USR
    BX   LR
}
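
/* Usage sketch (illustrative): a kernel typically points the USR/SYS stack at
   the top of a reserved region before dropping to User mode. The stack symbol
   below is a hypothetical linker-provided name:

       extern uint32_t __usr_stack_top;
       __set_PSP((uint32_t)&__usr_stack_top);  // aligned down by __set_PSP
       __set_CPS_USR();                        // one-way: USR cannot CPS back
 */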


/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq


/** \brief  Get FPSCR

    This function returns the current value of the Floating Point Status/Control register.

    \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
    register uint32_t __regfpscr       __ASM("fpscr");
    return(__regfpscr);
#else
    return(0);
#endif
}


/** \brief  Set FPSCR

    This function assigns the given value to the Floating Point Status/Control register.

    \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
    register uint32_t __regfpscr       __ASM("fpscr");
    __regfpscr = (fpscr);
#endif
}

/** \brief  Get FPEXC

    This function returns the current value of the Floating Point Exception Control register.

    \return               Floating Point Exception Control register value
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
    register uint32_t __regfpexc       __ASM("fpexc");
    return(__regfpexc);
#else
    return(0);
#endif
}


/** \brief  Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.

    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
    register uint32_t __regfpexc       __ASM("fpexc");
    __regfpexc = (fpexc);
#endif
}

/** \brief  Get CPACR

    This function returns the current value of the Coprocessor Access Control register.

    \return               Coprocessor Access Control register value
 */
__STATIC_INLINE uint32_t __get_CPACR(void)
{
    register uint32_t __regCPACR       __ASM("cp15:0:c1:c0:2");
    return __regCPACR;
}

/** \brief  Set CPACR

    This function assigns the given value to the Coprocessor Access Control register.

    \param [in]    cpacr  Coprocessor Access Control value to set
 */
__STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
    register uint32_t __regCPACR       __ASM("cp15:0:c1:c0:2");
    __regCPACR = cpacr;
    __ISB();
}
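
/* Usage sketch (illustrative): enabling the VFP/NEON unit typically means
   granting CP10/CP11 full access in CPACR and then setting the FPEXC enable
   bit; the bit positions below are the architectural ones:

       __set_CPACR(__get_CPACR() | (0xFU << 20));  // CP10/CP11 = full access
       __set_FPEXC(__get_FPEXC() | (1U << 30));    // FPEXC.EN enables the FPU
 */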

/** \brief  Get CBAR

    This function returns the value of the Configuration Base Address register.

    \return               Configuration Base Address register value
 */
__STATIC_INLINE uint32_t __get_CBAR() {
    register uint32_t __regCBAR        __ASM("cp15:4:c15:c0:0");
    return(__regCBAR);
}

/** \brief  Get TTBR0

    This function returns the value of the Translation Table Base Register 0.

    \return               Translation Table Base Register 0 value
 */
__STATIC_INLINE uint32_t __get_TTBR0() {
    register uint32_t __regTTBR0       __ASM("cp15:0:c2:c0:0");
    return(__regTTBR0);
}

/** \brief  Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in]    ttbr0  Translation Table Base Register 0 value to set
 */
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
    register uint32_t __regTTBR0       __ASM("cp15:0:c2:c0:0");
    __regTTBR0 = ttbr0;
    __ISB();
}

/** \brief  Get DACR

    This function returns the value of the Domain Access Control Register.

    \return               Domain Access Control Register value
 */
__STATIC_INLINE uint32_t __get_DACR() {
    register uint32_t __regDACR        __ASM("cp15:0:c3:c0:0");
    return(__regDACR);
}

/** \brief  Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in]    dacr   Domain Access Control Register value to set
 */
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
    register uint32_t __regDACR        __ASM("cp15:0:c3:c0:0");
    __regDACR = dacr;
    __ISB();
}
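
/* Usage sketch (illustrative): DACR holds sixteen 2-bit domain fields. Setting
   domain 0 to "client" (0b01) makes the MMU enforce the page-table access
   permissions for mappings in that domain:

       __set_DACR((__get_DACR() & ~0x3U) | 0x1U);  // domain 0 = client
 */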

/******************************** Cache and BTAC enable  ****************************************************/

/** \brief  Set SCTLR

    This function assigns the given value to the System Control Register.

    \param [in]    sctlr  System Control Register value to set
 */
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
    register uint32_t __regSCTLR       __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
}

/** \brief  Get SCTLR

    This function returns the value of the System Control Register.

    \return               System Control Register value
 */
__STATIC_INLINE uint32_t __get_SCTLR() {
    register uint32_t __regSCTLR       __ASM("cp15:0:c1:c0:0");
    return(__regSCTLR);
}

/** \brief  Enable Caches

    Enable Caches
 */
__STATIC_INLINE void __enable_caches(void) {
    // Set I bit 12 to enable I Cache
    // Set C bit  2 to enable D Cache
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief  Disable Caches

    Disable Caches
 */
__STATIC_INLINE void __disable_caches(void) {
    // Clear I bit 12 to disable I Cache
    // Clear C bit  2 to disable D Cache
    __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    __ISB();
}

/** \brief  Enable BTAC

    Enable BTAC
 */
__STATIC_INLINE void __enable_btac(void) {
    // Set Z bit 11 to enable branch prediction
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief  Disable BTAC

    Disable BTAC
 */
__STATIC_INLINE void __disable_btac(void) {
    // Clear Z bit 11 to disable branch prediction
    __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}


/** \brief  Enable MMU

    Enable MMU
 */
__STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}

/** \brief  Disable MMU

    Disable MMU
 */
__STATIC_INLINE void __disable_mmu(void) {
    // Clear M bit 0 to disable the MMU
    __set_SCTLR( __get_SCTLR() & ~1);
    __ISB();
}

/******************************** TLB maintenance operations ************************************************/
/** \brief  Invalidate the whole TLB

    TLBIALL. Invalidate the whole TLB
 */

__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
    register uint32_t __TLBIALL        __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
    __DSB();
    __ISB();
}
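
/* Usage sketch (illustrative): a typical bring-up order using the helpers
   above, with "ttb" standing in for a hypothetical 16KB-aligned first-level
   translation table:

       __set_TTBR0((uint32_t)ttb);   // point at the L1 translation table
       __set_DACR(0x1);              // domain 0 = client
       __ca9u_inv_tlb_all();         // discard stale translations
       __enable_mmu();               // SCTLR.M (plus AFE; clears TRE and A)
       __enable_caches();            // enable caches once translation is live
 */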

/******************************** BTB maintenance operations ************************************************/
/** \brief  Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */

__STATIC_INLINE void __v7_inv_btac(void) {
    register uint32_t __BPIALL         __ASM("cp15:0:c7:c5:6");
    __BPIALL = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief  Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
__STATIC_INLINE void __v7_inv_icache_all(void) {
    register uint32_t __ICIALLU        __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief  Clean D$ by MVA

    DCCMVAC. Data cache clean by MVA to PoC
 */
__STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
    register uint32_t __DCCMVAC        __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Invalidate D$ by MVA

    DCIMVAC. Data cache invalidate by MVA to PoC
 */
__STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
    register uint32_t __DCIMVAC        __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
 */
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
    register uint32_t __DCCIMVAC       __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
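
/* Usage sketch (illustrative): the MVA operations act on a single cache line,
   so maintaining a buffer means walking it line by line. A 32-byte line is
   assumed here (Cortex-A9 L1):

       static void clean_buffer_for_dma(void *buf, uint32_t len) {
           uint8_t *p   = (uint8_t *)((uint32_t)buf & ~31U);  // align down
           uint8_t *end = (uint8_t *)buf + len;
           for ( ; p < end; p += 32)
               __v7_clean_dcache_mva(p);
           __DSB();  // complete the cleans before starting the DMA transfer
       }
 */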

/** \brief  Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
 */
#pragma push
#pragma arm
__STATIC_ASM void __v7_all_cache(uint32_t op) {
        ARM

        PUSH    {R4-R11}

        MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
        ANDS    R3, R6, #0x07000000        // Extract coherency level
        MOV     R3, R3, LSR #23            // Total cache levels << 1
        BEQ     Finished                   // If 0, no need to clean

        MOV     R10, #0                    // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7                 // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                       // No cache or only instruction cache at this level

        MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
        ISB                                // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0      // Read the current Cache Size ID register
        AND     R2, R1, #7                 // Extract the line length field
        ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3         // R4 is the max number of the way size (right aligned)
        CLZ     R5, R4                     // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)

Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)

Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
        ORR     R11, R11, R7, LSL R2       // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1                 // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1                 // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2               // Increment the cache number
        CMP     R3, R10
        BGT     Loop1

Finished
        DSB
        POP     {R4-R11}
        BX      lr

}
#pragma pop

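/* Illustrative C rendering of the set/way walk above (a sketch under stated
   assumptions, not part of the CMSIS API): for every data/unified level named
   in CLIDR, select it in CSSELR, read the geometry from CCSIDR, then issue the
   requested set/way operation for each (set, way) encoding. select_level,
   read_ccsidr and dc_op_sw are hypothetical stand-ins for the MRC/MCR
   instructions in the assembly:

       for (uint32_t level = 0; level < levels; level++) {
           uint32_t ctype = (clidr >> (3 * level)) & 7;
           if (ctype < 2) continue;                    // no data cache here
           select_level(level << 1);                   // CSSELR
           uint32_t ccsidr    = read_ccsidr();
           uint32_t set_shift = (ccsidr & 7) + 4;      // log2(line length)
           uint32_t ways      = (ccsidr >> 3)  & 0x3FF;
           uint32_t sets      = (ccsidr >> 13) & 0x7FFF;
           uint32_t way_shift = __builtin_clz(ways);   // way field is left-aligned
           for (int s = sets; s >= 0; s--)
               for (int w = ways; w >= 0; w--)
                   dc_op_sw((w << way_shift) | (s << set_shift) | (level << 1));
       }
 */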

/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);
}

/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);
}

/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);
}

#include "core_ca_mmu.h"

#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/

#define __inline inline

inline static uint32_t __disable_irq_iar() {
    int irq_dis = __get_CPSR() & 0x80;  // CPSR.I, bit 7
    __disable_irq();
    return irq_dis;
}

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F

/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
// from rt_CMSIS.c
__arm static inline void __set_PSP(uint32_t topOfProcStack) {
__asm(
    "    ARM\n"
//  "    PRESERVE8\n"

    "    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned \n"
    "    MRS     R1, CPSR \n"
    "    CPS     #0x1F       ;no effect in USR mode \n"  // MODE_SYS
    "    MOV     SP, R0 \n"
    "    MSR     CPSR_c, R1  ;no effect in USR mode \n"
    "    ISB \n"
    "    BX      LR \n");
}

/** \brief  Set User Mode

    This function changes the processor state to User Mode
 */
// from rt_CMSIS.c
__arm static inline void __set_CPS_USR(void) {
__asm(
    "    ARM \n"

    "    CPS  #0x10 \n"  // MODE_USR
    "    BX   LR\n");
}

/** \brief  Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in]    ttbr0  Translation Table Base Register 0 value to set
 */
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
    __MCR(15, 0, ttbr0, 2, 0, 0);  // TTBR0 (reg to cp15)
    __ISB();
}

/** \brief  Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in]    dacr   Domain Access Control Register value to set
 */
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
    __MCR(15, 0, dacr, 3, 0, 0);   // DACR (reg to cp15)
    __ISB();
}
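
/* Note (illustrative assumption about the IAR intrinsic): the __MCR arguments
   appear to map onto the instruction as MCR p<coproc>, <opc1>, <value>,
   c<CRn>, c<CRm>, <opc2>; e.g. __MCR(15, 0, dacr, 3, 0, 0) above encodes
   MCR p15, 0, Rt, c3, c0, 0, the DACR write. */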

/******************************** Cache and BTAC enable  ****************************************************/
/** \brief  Set SCTLR

    This function assigns the given value to the System Control Register.

    \param [in]    sctlr  System Control Register value to set
 */
// from __enable_mmu()
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr) {
    __MCR(15, 0, sctlr, 1, 0, 0);  // SCTLR (reg to cp15)
}

/** \brief  Get SCTLR

    This function returns the value of the System Control Register.

    \return               System Control Register value
 */
// from __enable_mmu()
__STATIC_INLINE uint32_t __get_SCTLR() {
    uint32_t __regSCTLR = __MRC(15, 0, 1, 0, 0);
    return __regSCTLR;
}

/** \brief  Enable Caches

    Enable Caches
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_caches(void) {
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief  Enable BTAC

    Enable BTAC
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_btac(void) {
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief  Enable MMU

    Enable MMU
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}

/******************************** TLB maintenance operations ************************************************/
/** \brief  Invalidate the whole TLB

    TLBIALL. Invalidate the whole TLB
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
    uint32_t val = 0;
    __MCR(15, 0, val, 8, 7, 0);  // TLBIALL  - unified TLB
    __MCR(15, 0, val, 8, 6, 0);  // DTLBIALL - data TLB
    __MCR(15, 0, val, 8, 5, 0);  // ITLBIALL - instruction TLB
    __DSB();
    __ISB();
}

/******************************** BTB maintenance operations ************************************************/
/** \brief  Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_btac(void) {
    uint32_t val = 0;
    __MCR(15, 0, val, 7, 5, 6);  // BPIALL (reg to cp15)
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief  Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_icache_all(void) {
    uint32_t val = 0;
    __MCR(15, 0, val, 7, 5, 0);  // ICIALLU (reg to cp15)
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

// from __v7_inv_dcache_all()
__arm static inline void __v7_all_cache(uint32_t op) {
__asm(
    "        ARM \n"

    "        PUSH    {R4-R11} \n"

    "        MRC     p15, 1, R6, c0, c0, 1 \n"   // Read CLIDR
    "        ANDS    R3, R6, #0x07000000 \n"     // Extract coherency level
    "        MOV     R3, R3, LSR #23 \n"         // Total cache levels << 1
    "        BEQ     Finished \n"                // If 0, no need to clean

    "        MOV     R10, #0 \n"                 // R10 holds current cache level << 1
    "Loop1:  ADD     R2, R10, R10, LSR #1 \n"    // R2 holds cache "Set" position
    "        MOV     R1, R6, LSR R2 \n"          // Bottom 3 bits are the Cache-type for this level
    "        AND     R1, R1, #7 \n"              // Isolate those lower 3 bits
    "        CMP     R1, #2 \n"
    "        BLT     Skip \n"                    // No cache or only instruction cache at this level

    "        MCR     p15, 2, R10, c0, c0, 0 \n"  // Write the Cache Size selection register
    "        ISB \n"                             // ISB to sync the change to the CacheSizeID reg
    "        MRC     p15, 1, R1, c0, c0, 0 \n"   // Read the current Cache Size ID register
    "        AND     R2, R1, #7 \n"              // Extract the line length field
    "        ADD     R2, R2, #4 \n"              // Add 4 for the line length offset (log2 16 bytes)
    "        movw    R4, #0x3FF \n"
    "        ANDS    R4, R4, R1, LSR #3 \n"      // R4 is the max number of the way size (right aligned)
    "        CLZ     R5, R4 \n"                  // R5 is the bit position of the way size increment
    "        movw    R7, #0x7FFF \n"
    "        ANDS    R7, R7, R1, LSR #13 \n"     // R7 is the max number of the index size (right aligned)

    "Loop2:  MOV     R9, R4 \n"                  // R9 working copy of the max way size (right aligned)

    "Loop3:  ORR     R11, R10, R9, LSL R5 \n"    // Factor in the Way number and cache number into R11
    "        ORR     R11, R11, R7, LSL R2 \n"    // Factor in the Set number
    "        CMP     R0, #0 \n"
    "        BNE     Dccsw \n"
    "        MCR     p15, 0, R11, c7, c6, 2 \n"  // DCISW. Invalidate by Set/Way
    "        B       cont \n"
    "Dccsw:  CMP     R0, #1 \n"
    "        BNE     Dccisw \n"
    "        MCR     p15, 0, R11, c7, c10, 2 \n" // DCCSW. Clean by Set/Way
    "        B       cont \n"
    "Dccisw: MCR     p15, 0, R11, c7, c14, 2 \n" // DCCISW. Clean and Invalidate by Set/Way
    "cont:   SUBS    R9, R9, #1 \n"              // Decrement the Way number
    "        BGE     Loop3 \n"
    "        SUBS    R7, R7, #1 \n"              // Decrement the Set number
    "        BGE     Loop2 \n"
    "Skip:   ADD     R10, R10, #2 \n"            // Increment the cache number
    "        CMP     R3, R10 \n"
    "        BGT     Loop1 \n"

    "Finished: \n"
    "        DSB \n"
    "        POP    {R4-R11} \n"
    "        BX     lr \n" );
}

/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);
}

/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);
}

/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);
}

/** \brief  Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
 */
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
    __MCR(15, 0, (uint32_t)va, 7, 14, 1);  // DCCIMVAC
    __DMB();
}

#include "core_ca_mmu.h"

#elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F


/** \brief  Enable IRQ Interrupts

    This function enables IRQ interrupts by clearing the I-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
    __ASM volatile ("cpsie i");
}

/** \brief  Disable IRQ Interrupts

    This function disables IRQ interrupts by setting the I-bit in the CPSR.
    Can only be executed in Privileged modes.

    \return               the prior I-bit of the CPSR (non-zero if IRQs were already masked)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
{
    uint32_t result;

    __ASM volatile ("mrs %0, cpsr" : "=r" (result));
    __ASM volatile ("cpsid i");
    return(result & 0x80);
}
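
/* Usage sketch (illustrative): because __disable_irq returns the prior I-bit,
   a nesting-safe critical section can restore the previous state instead of
   unconditionally re-enabling:

       uint32_t was_masked = __disable_irq();
       // ... critical section ...
       if (!was_masked) __enable_irq();
 */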


/** \brief  Get APSR Register

    This function returns the content of the APSR Register.

    \return               APSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
#if 1
    register uint32_t __regAPSR;
    __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
#else
    register uint32_t __regAPSR __ASM("apsr");
#endif
    return(__regAPSR);
}


/** \brief  Get CPSR Register

    This function returns the content of the CPSR Register.

    \return               CPSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
{
#if 1
    register uint32_t __regCPSR;
    __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
#else
    register uint32_t __regCPSR __ASM("cpsr");
#endif
    return(__regCPSR);
}

#if 0
/** \brief  Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in]    topOfStack  Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    register uint32_t __regSP __ASM("sp");
    __regSP = topOfStack;
}
#endif

/** \brief  Get link register

    This function returns the value of the link register.

    \return    Value of link register
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
{
    register uint32_t __reglr __ASM("lr");
    return(__reglr);
}

#if 0
/** \brief  Set link register

    This function sets the value of the link register.

    \param [in]    lr  LR value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
{
    register uint32_t __reglr __ASM("lr");
    __reglr = lr;
}
#endif

/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
    __asm__ volatile (
    ".ARM;"
    ".eabi_attribute Tag_ABI_align8_preserved,1;"

    "BIC     R0, R0, #7;"    /* ensure stack is 8-byte aligned */
    "MRS     R1, CPSR;"
    "CPS     %0;"            /* no effect in USR mode */
    "MOV     SP, R0;"
    "MSR     CPSR_c, R1;"    /* no effect in USR mode */
    "ISB;"
    //"BX      LR;"
    :
    : "i"(MODE_SYS)
    : "r0", "r1");
    return;
}

/** \brief  Set User Mode

    This function changes the processor state to User Mode
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
{
    __asm__ volatile (
    ".ARM;"

    "CPS  %0;"
    //"BX   LR;"
    :
    : "i"(MODE_USR)
    : );
    return;
}
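
/* Note (illustrative): the CPS to User mode above is one-way; USR code cannot
   restore a privileged mode with CPS or MSR. Re-entering privileged code is
   typically done through a supervisor call, e.g.

       __set_CPS_USR();             // drop privileges
       __ASM volatile ("svc 0");    // re-enter via the SVC exception handler
 */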


/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq()              __asm__ volatile ("cpsie f")


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq()             __asm__ volatile ("cpsid f")


/** \brief  Get FPSCR

    This function returns the current value of the Floating Point Status/Control register.

    \return               Floating Point Status/Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
    uint32_t result;

    __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
    return (result);
#else
    register uint32_t __regfpscr __ASM("fpscr");
    return(__regfpscr);
#endif
#else
    return(0);
#endif
}


/** \brief  Set FPSCR

    This function assigns the given value to the Floating Point Status/Control register.

    \param [in]    fpscr  Floating Point Status/Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
#if 1
    __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
#else
    register uint32_t __regfpscr __ASM("fpscr");
    __regfpscr = (fpscr);
#endif
#endif
}

/** \brief  Get FPEXC

    This function returns the current value of the Floating Point Exception Control register.

    \return               Floating Point Exception Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
#if 1
    uint32_t result;

    __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
    return (result);
#else
    register uint32_t __regfpexc __ASM("fpexc");
    return(__regfpexc);
#endif
#else
    return(0);
#endif
}


/** \brief  Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.

    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
#if 1
    __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
#else
    register uint32_t __regfpexc __ASM("fpexc");
    __regfpexc = (fpexc);
#endif
#endif
}

/** \brief  Get CPACR

    This function returns the current value of the Coprocessor Access Control register.

    \return               Coprocessor Access Control register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
{
#if 1
    register uint32_t __regCPACR;
    __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
#else
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
#endif
    return __regCPACR;
}

/** \brief  Set CPACR

    This function assigns the given value to the Coprocessor Access Control register.

    \param [in]    cpacr  Coprocessor Access Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
#if 1
    __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
#else
    register uint32_t __regCPACR __ASM("cp15:0:c1:c0:2");
    __regCPACR = cpacr;
#endif
    __ISB();
}

/** \brief  Get CBAR

    This function returns the value of the Configuration Base Address register.

    \return               Configuration Base Address register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
#if 1
    register uint32_t __regCBAR;
    __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
#else
    register uint32_t __regCBAR __ASM("cp15:4:c15:c0:0");
#endif
    return(__regCBAR);
}

/** \brief  Get TTBR0

    This function returns the value of the Translation Table Base Register 0.

    \return               Translation Table Base Register 0 value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
#if 1
    register uint32_t __regTTBR0;
    __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
#else
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
#endif
    return(__regTTBR0);
}

/** \brief  Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in]    ttbr0  Translation Table Base Register 0 value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
#else
    register uint32_t __regTTBR0 __ASM("cp15:0:c2:c0:0");
    __regTTBR0 = ttbr0;
#endif
    __ISB();
}

/** \brief  Get DACR

    This function returns the value of the Domain Access Control Register.

    \return               Domain Access Control Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
#if 1
    register uint32_t __regDACR;
    __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
#else
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
#endif
    return(__regDACR);
}

/** \brief  Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in]    dacr   Domain Access Control Register value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
#else
    register uint32_t __regDACR __ASM("cp15:0:c3:c0:0");
    __regDACR = dacr;
#endif
    __ISB();
}

/******************************** Cache and BTAC enable  ****************************************************/

/** \brief  Set SCTLR

    This function assigns the given value to the System Control Register.

    \param [in]    sctlr  System Control Register value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
#if 1
    __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
#else
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
#endif
}

/** \brief  Get SCTLR

    This function returns the value of the System Control Register.

    \return               System Control Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
#if 1
    register uint32_t __regSCTLR;
    __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
#else
    register uint32_t __regSCTLR __ASM("cp15:0:c1:c0:0");
#endif
    return(__regSCTLR);
}

/** \brief  Enable Caches

    Enable Caches
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
    // Set I bit 12 to enable I Cache
    // Set C bit  2 to enable D Cache
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief  Disable Caches

    Disable Caches
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
    // Clear I bit 12 to disable I Cache
    // Clear C bit  2 to disable D Cache
    __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    __ISB();
}

/** \brief  Enable BTAC

    Enable BTAC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
    // Set Z bit 11 to enable branch prediction
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief  Disable BTAC

    Disable BTAC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
    // Clear Z bit 11 to disable branch prediction
    __set_SCTLR( __get_SCTLR() & ~(1 << 11));
}


/** \brief  Enable MMU

    Enable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}

/** \brief  Disable MMU

    Disable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
    // Clear M bit 0 to disable the MMU
    __set_SCTLR( __get_SCTLR() & ~1);
    __ISB();
}

/******************************** TLB maintenance operations ************************************************/
/** \brief  Invalidate the whole TLB

    TLBIALL. Invalidate the whole TLB
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
#else
    register uint32_t __TLBIALL __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
#endif
    __DSB();
    __ISB();
}

/******************************** BTB maintenance operations ************************************************/
/** \brief  Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
#else
    register uint32_t __BPIALL __ASM("cp15:0:c7:c5:6");
    __BPIALL = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief  Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
#else
    register uint32_t __ICIALLU __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief  Clean D$ by MVA

    DCCMVAC. Data cache clean by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCCMVAC __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Invalidate D$ by MVA

    DCIMVAC. Data cache invalidate by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCIMVAC __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
#if 1
    __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
#else
    register uint32_t __DCCIMVAC __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
 */
extern void __v7_all_cache(uint32_t op);


/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);
}

/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);
}

/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);
}

#include "core_ca_mmu.h"

#elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/

#error TASKING Compiler support not implemented for Cortex-A

#endif

/*@} end of CMSIS_Core_RegAccFunctions */


#endif /* __CORE_CAFUNC_H__ */