source: EcnlProtoTool/trunk/asp3_dcre/mbed/targets/cmsis/core_cmInstr.h@ 270

Last change on this file since 270 was 270, checked in by coas-nagasima, 7 years ago

Added the mruby version of the ECNL prototyping tool.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
  • Property svn:mime-type set to text/x-chdr
File size: 19.4 KB
1/**************************************************************************//**
2 * @file core_cmInstr.h
3 * @brief CMSIS Cortex-M Core Instruction Access Header File
4 * @version V3.20
5 * @date 05. March 2013
6 *
7 * @note
8 *
9 ******************************************************************************/
10/* Copyright (c) 2009 - 2013 ARM LIMITED
11
12 All rights reserved.
13 Redistribution and use in source and binary forms, with or without
14 modification, are permitted provided that the following conditions are met:
15 - Redistributions of source code must retain the above copyright
16 notice, this list of conditions and the following disclaimer.
17 - Redistributions in binary form must reproduce the above copyright
18 notice, this list of conditions and the following disclaimer in the
19 documentation and/or other materials provided with the distribution.
20 - Neither the name of ARM nor the names of its contributors may be used
21 to endorse or promote products derived from this software without
22 specific prior written permission.
23 *
24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34 POSSIBILITY OF SUCH DAMAGE.
35 ---------------------------------------------------------------------------*/
36
37
38#ifndef __CORE_CMINSTR_H
39#define __CORE_CMINSTR_H
40
41
42/* ########################## Core Instruction Access ######################### */
43/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
44 Access to dedicated instructions
45 @{
46*/
47
48#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
49/* ARM armcc specific functions */
50
51#if (__ARMCC_VERSION < 400677)
52 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
53#endif
54
55
56/** \brief No Operation
57
58 No Operation does nothing. This instruction can be used for code alignment purposes.
59 */
60#define __NOP __nop
61
62
63/** \brief Wait For Interrupt
64
65 Wait For Interrupt is a hint instruction that suspends execution
66 until one of a number of events occurs.
67 */
68#define __WFI __wfi
69
70
71/** \brief Wait For Event
72
73 Wait For Event is a hint instruction that permits the processor to enter
74 a low-power state until one of a number of events occurs.
75 */
76#define __WFE __wfe
77
78
79/** \brief Send Event
80
81 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
82 */
83#define __SEV __sev
84
85
86/** \brief Instruction Synchronization Barrier
87
88 Instruction Synchronization Barrier flushes the pipeline in the processor,
89 so that all instructions following the ISB are fetched from cache or
90 memory, after the instruction has been completed.
91 */
92#define __ISB() __isb(0xF)
93
94
95/** \brief Data Synchronization Barrier
96
97 This function acts as a special kind of Data Memory Barrier.
98 It completes when all explicit memory accesses before this instruction complete.
99 */
100#define __DSB() __dsb(0xF)
101
102
103/** \brief Data Memory Barrier
104
105 This function ensures the apparent order of the explicit memory operations before
106 and after the instruction, without ensuring their completion.
107 */
108#define __DMB() __dmb(0xF)
109
110
111/** \brief Reverse byte order (32 bit)
112
113 This function reverses the byte order in an integer value.
114
115 \param [in] value Value to reverse
116 \return Reversed value
117 */
118#define __REV __rev
119
120
121/** \brief Reverse byte order (16 bit)
122
123 This function reverses the byte order within each of the two halfwords of a 32-bit value.
124
125 \param [in] value Value to reverse
126 \return Reversed value
127 */
128#ifndef __NO_EMBEDDED_ASM
129__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
130{
131 rev16 r0, r0
132 bx lr
133}
134#endif
135
136/** \brief Reverse byte order in signed short value
137
138 This function reverses the byte order in a signed short value with sign extension to integer.
139
140 \param [in] value Value to reverse
141 \return Reversed value
142 */
143#ifndef __NO_EMBEDDED_ASM
144__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
145{
146 revsh r0, r0
147 bx lr
148}
149#endif
150
151
152/** \brief Rotate Right in unsigned value (32 bit)
153
154 This function rotates the given value right by a variable number of bits and returns the result.
155
156 \param [in] value Value to rotate
157 \param [in] shift Number of bits to rotate
158 \return Rotated value
159 */
160#define __ROR __ror
161
162
163/** \brief Breakpoint
164
165 This function causes the processor to enter Debug state.
166 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
167
168 \param [in] value is ignored by the processor.
169 If required, a debugger can use it to store additional information about the breakpoint.
170 */
171#define __BKPT(value) __breakpoint(value)
172
173
174#if (__CORTEX_M >= 0x03)
175
176/** \brief Reverse bit order of value
177
178 This function reverses the bit order of the given value.
179
180 \param [in] value Value to reverse
181 \return Reversed value
182 */
183#define __RBIT __rbit
184
185
186/** \brief LDR Exclusive (8 bit)
187
188 This function performs an exclusive LDR command for an 8-bit value.
189
190 \param [in] ptr Pointer to data
191 \return value of type uint8_t at (*ptr)
192 */
193#define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
194
195
196/** \brief LDR Exclusive (16 bit)
197
198 This function performs an exclusive LDR command for 16-bit values.
199
200 \param [in] ptr Pointer to data
201 \return value of type uint16_t at (*ptr)
202 */
203#define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
204
205
206/** \brief LDR Exclusive (32 bit)
207
208 This function performs an exclusive LDR command for 32-bit values.
209
210 \param [in] ptr Pointer to data
211 \return value of type uint32_t at (*ptr)
212 */
213#define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
214
215
216/** \brief STR Exclusive (8 bit)
217
218 This function performs an exclusive STR command for 8-bit values.
219
220 \param [in] value Value to store
221 \param [in] ptr Pointer to location
222 \return 0 Function succeeded
223 \return 1 Function failed
224 */
225#define __STREXB(value, ptr) __strex(value, ptr)
226
227
228/** \brief STR Exclusive (16 bit)
229
230 This function performs an exclusive STR command for 16-bit values.
231
232 \param [in] value Value to store
233 \param [in] ptr Pointer to location
234 \return 0 Function succeeded
235 \return 1 Function failed
236 */
237#define __STREXH(value, ptr) __strex(value, ptr)
238
239
240/** \brief STR Exclusive (32 bit)
241
242 This function performs an exclusive STR command for 32-bit values.
243
244 \param [in] value Value to store
245 \param [in] ptr Pointer to location
246 \return 0 Function succeeded
247 \return 1 Function failed
248 */
249#define __STREXW(value, ptr) __strex(value, ptr)
250
251
252/** \brief Remove the exclusive lock
253
254 This function removes the exclusive lock which is created by LDREX.
255
256 */
257#define __CLREX __clrex
258
259
260/** \brief Signed Saturate
261
262 This function saturates a signed value.
263
264 \param [in] value Value to be saturated
265 \param [in] sat Bit position to saturate to (1..32)
266 \return Saturated value
267 */
268#define __SSAT __ssat
269
270
271/** \brief Unsigned Saturate
272
273 This function saturates an unsigned value.
274
275 \param [in] value Value to be saturated
276 \param [in] sat Bit position to saturate to (0..31)
277 \return Saturated value
278 */
279#define __USAT __usat
280
281
282/** \brief Count leading zeros
283
284 This function counts the number of leading zeros of a data value.
285
286 \param [in] value Value to count the leading zeros
287 \return number of leading zeros in value
288 */
289#define __CLZ __clz
290
291#endif /* (__CORTEX_M >= 0x03) */
292
293
294
295#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
296/* IAR iccarm specific functions */
297
298#include <cmsis_iar.h>
299
300
301#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
302/* TI CCS specific functions */
303
304#include <cmsis_ccs.h>
305
306
307#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
308/* GNU gcc specific functions */
309
310/* Define macros for porting to both thumb1 and thumb2.
311 * For thumb1, use low registers (r0-r7), specified by constraint "l".
312 * Otherwise, use general registers, specified by constraint "r". */
313#if defined (__thumb__) && !defined (__thumb2__)
314#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
315#define __CMSIS_GCC_USE_REG(r) "l" (r)
316#else
317#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
318#define __CMSIS_GCC_USE_REG(r) "r" (r)
319#endif
320
321/** \brief No Operation
322
323 No Operation does nothing. This instruction can be used for code alignment purposes.
324 */
325__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
326{
327 __ASM volatile ("nop");
328}
329
330
331/** \brief Wait For Interrupt
332
333 Wait For Interrupt is a hint instruction that suspends execution
334 until one of a number of events occurs.
335 */
336__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
337{
338 __ASM volatile ("wfi");
339}
340
341
342/** \brief Wait For Event
343
344 Wait For Event is a hint instruction that permits the processor to enter
345 a low-power state until one of a number of events occurs.
346 */
347__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
348{
349 __ASM volatile ("wfe");
350}
351
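/* Usage sketch (illustrative, not part of the upstream CMSIS code): a typical
   idle loop sleeps with __WFI until the next interrupt instead of spinning.
   The work_pending flag is a hypothetical variable set by an interrupt handler. */
#if 0
extern volatile uint32_t work_pending;       /* set from an interrupt handler */

static void idle_until_work(void)
{
  while (work_pending == 0U) {
    __WFI();                                 /* sleep until an interrupt occurs */
  }
}
#endif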
352
353/** \brief Send Event
354
355 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
356 */
357__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
358{
359 __ASM volatile ("sev");
360}
361
362
363/** \brief Instruction Synchronization Barrier
364
365 Instruction Synchronization Barrier flushes the pipeline in the processor,
366 so that all instructions following the ISB are fetched from cache or
367 memory, after the instruction has been completed.
368 */
369__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
370{
371 __ASM volatile ("isb");
372}
373
374
375/** \brief Data Synchronization Barrier
376
377 This function acts as a special kind of Data Memory Barrier.
378 It completes when all explicit memory accesses before this instruction complete.
379 */
380__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
381{
382 __ASM volatile ("dsb");
383}
384
385
386/** \brief Data Memory Barrier
387
388 This function ensures the apparent order of the explicit memory operations before
389 and after the instruction, without ensuring their completion.
390 */
391__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
392{
393 __ASM volatile ("dmb");
394}
395
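/* Usage sketch (illustrative, not part of the upstream CMSIS code): a common
   barrier pattern is updating the vector table offset and forcing the write to
   complete (__DSB) and the pipeline to refill (__ISB) before execution
   continues. SCB is assumed to come from the device/core header; __Vectors_RAM
   is a hypothetical relocated table defined elsewhere. */
#if 0
extern uint32_t __Vectors_RAM[];

static void relocate_vector_table(void)
{
  SCB->VTOR = (uint32_t)__Vectors_RAM;       /* write the new table address */
  __DSB();                                   /* ensure the write has completed */
  __ISB();                                   /* flush the pipeline */
}
#endif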
396
397/** \brief Reverse byte order (32 bit)
398
399 This function reverses the byte order in an integer value.
400
401 \param [in] value Value to reverse
402 \return Reversed value
403 */
404__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
405{
406#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
407 return __builtin_bswap32(value);
408#else
409 uint32_t result;
410
411 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
412 return(result);
413#endif
414}
415
416
417/** \brief Reverse byte order (16 bit)
418
419 This function reverses the byte order within each of the two halfwords of a 32-bit value.
420
421 \param [in] value Value to reverse
422 \return Reversed value
423 */
424__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
425{
426 uint32_t result;
427
428 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
429 return(result);
430}
431
432
433/** \brief Reverse byte order in signed short value
434
435 This function reverses the byte order in a signed short value with sign extension to integer.
436
437 \param [in] value Value to reverse
438 \return Reversed value
439 */
440__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
441{
442#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
443 return (short)__builtin_bswap16(value);
444#else
445 uint32_t result;
446
447 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
448 return(result);
449#endif
450}
451
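/* Usage sketch (illustrative, not part of the upstream CMSIS code): the REV
   intrinsics are typically used for endianness conversion, e.g. converting a
   little-endian value to network (big-endian) byte order. */
#if 0
static uint32_t host_to_network_u32(uint32_t host)
{
  return __REV(host);                        /* 0x12345678 -> 0x78563412 */
}

static uint16_t host_to_network_u16(uint16_t host)
{
  /* __REV16 swaps the bytes within each halfword; for a single 16-bit value
     only the low halfword of the result is of interest. */
  return (uint16_t)__REV16(host);            /* 0x1234 -> 0x3412 */
}
#endif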
452
453/** \brief Rotate Right in unsigned value (32 bit)
454
455 This function rotates the given value right by a variable number of bits and returns the result.
456
457 \param [in] op1 Value to rotate
458 \param [in] op2 Number of bits to rotate
459 \return Rotated value
460 */
461__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
462{
463 return (op1 >> op2) | (op1 << (32 - op2));
464}
465
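/* Usage sketch (illustrative, not part of the upstream CMSIS code): rotating
   right by 8 moves the least significant byte to the most significant
   position. A rotate count of 0 should be avoided with the C fallback above,
   since op1 << 32 is undefined behaviour in C. */
#if 0
static uint32_t rotate_byte_down(uint32_t word)
{
  return __ROR(word, 8);                     /* 0x11223344 -> 0x44112233 */
}
#endif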
466
467/** \brief Breakpoint
468
469 This function causes the processor to enter Debug state.
470 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
471
472 \param [in] value is ignored by the processor.
473 If required, a debugger can use it to store additional information about the breakpoint.
474 */
475#define __BKPT(value) __ASM volatile ("bkpt "#value)
476
477
478#if (__CORTEX_M >= 0x03)
479
480/** \brief Reverse bit order of value
481
482 This function reverses the bit order of the given value.
483
484 \param [in] value Value to reverse
485 \return Reversed value
486 */
487__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
488{
489 uint32_t result;
490
491 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
492 return(result);
493}
494
495
496/** \brief LDR Exclusive (8 bit)
497
498 This function performs an exclusive LDR command for an 8-bit value.
499
500 \param [in] ptr Pointer to data
501 \return value of type uint8_t at (*ptr)
502 */
503__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
504{
505 uint32_t result;
506
507#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
508 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
509#else
510 /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
511 accepted by the assembler, so the following less efficient pattern has to be used.
512 */
513 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
514#endif
515 return(result);
516}
517
518
519/** \brief LDR Exclusive (16 bit)
520
521 This function performs an exclusive LDR command for 16-bit values.
522
523 \param [in] ptr Pointer to data
524 \return value of type uint16_t at (*ptr)
525 */
526__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
527{
528 uint32_t result;
529
530#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
531 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
532#else
533 /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
534 accepted by the assembler, so the following less efficient pattern has to be used.
535 */
536 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
537#endif
538 return(result);
539}
540
541
542/** \brief LDR Exclusive (32 bit)
543
544 This function performs an exclusive LDR command for 32-bit values.
545
546 \param [in] ptr Pointer to data
547 \return value of type uint32_t at (*ptr)
548 */
549__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
550{
551 uint32_t result;
552
553 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
554 return(result);
555}
556
557
558/** \brief STR Exclusive (8 bit)
559
560 This function performs an exclusive STR command for 8-bit values.
561
562 \param [in] value Value to store
563 \param [in] ptr Pointer to location
564 \return 0 Function succeeded
565 \return 1 Function failed
566 */
567__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
568{
569 uint32_t result;
570
571 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
572 return(result);
573}
574
575
576/** \brief STR Exclusive (16 bit)
577
578 This function performs an exclusive STR command for 16-bit values.
579
580 \param [in] value Value to store
581 \param [in] ptr Pointer to location
582 \return 0 Function succeeded
583 \return 1 Function failed
584 */
585__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
586{
587 uint32_t result;
588
589 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
590 return(result);
591}
592
593
594/** \brief STR Exclusive (32 bit)
595
596 This function performs an exclusive STR command for 32-bit values.
597
598 \param [in] value Value to store
599 \param [in] ptr Pointer to location
600 \return 0 Function succeeded
601 \return 1 Function failed
602 */
603__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
604{
605 uint32_t result;
606
607 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
608 return(result);
609}
610
611
612/** \brief Remove the exclusive lock
613
614 This function removes the exclusive lock which is created by LDREX.
615
616 */
617__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
618{
619 __ASM volatile ("clrex" ::: "memory");
620}
621
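/* Usage sketch (illustrative, not part of the upstream CMSIS code): a
   lock-free increment built from the exclusive-access intrinsics above. The
   loop retries until __STREXW returns 0, i.e. nothing disturbed the exclusive
   monitor between the load and the store. */
#if 0
static uint32_t atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;

  do {
    value = __LDREXW(counter) + 1U;          /* load with exclusive monitor set */
  } while (__STREXW(value, counter) != 0U);  /* store; retry if it failed */

  __DMB();                                   /* make the update visible before continuing */
  return value;
}
#endif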
622
623/** \brief Signed Saturate
624
625 This function saturates a signed value.
626
627 \param [in] value Value to be saturated
628 \param [in] sat Bit position to saturate to (1..32)
629 \return Saturated value
630 */
631#define __SSAT(ARG1,ARG2) \
632({ \
633 uint32_t __RES, __ARG1 = (ARG1); \
634 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
635 __RES; \
636 })
637
638
639/** \brief Unsigned Saturate
640
641 This function saturates an unsigned value.
642
643 \param [in] value Value to be saturated
644 \param [in] sat Bit position to saturate to (0..31)
645 \return Saturated value
646 */
647#define __USAT(ARG1,ARG2) \
648({ \
649 uint32_t __RES, __ARG1 = (ARG1); \
650 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
651 __RES; \
652 })
653
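/* Usage sketch (illustrative, not part of the upstream CMSIS code): the
   saturation intrinsics clamp a value to a signed or unsigned range in a
   single instruction. The bit-position argument must be a compile-time
   constant, because it is encoded in the instruction. */
#if 0
static int16_t clamp_to_pcm16(int32_t sample)
{
  return (int16_t)__SSAT(sample, 16);        /* limits sample to -32768..32767 */
}

static uint8_t clamp_to_u8(int32_t level)
{
  return (uint8_t)__USAT(level, 8);          /* limits level to 0..255 */
}
#endif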
654
655/** \brief Count leading zeros
656
657 This function counts the number of leading zeros of a data value.
658
659 \param [in] value Value to count the leading zeros
660 \return number of leading zeros in value
661 */
662__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
663{
664 uint32_t result;
665
666 __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
667 return(result);
668}
669
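/* Usage sketch (illustrative, not part of the upstream CMSIS code): __CLZ can
   be used to find the bit index of the most significant set bit, e.g. to turn
   a pending-flag mask into a channel number. __CLZ(0) returns 32, so the
   all-zero case is handled separately. */
#if 0
static int32_t highest_set_bit(uint32_t mask)
{
  if (mask == 0U) {
    return -1;                               /* no bit set */
  }
  return 31 - (int32_t)__CLZ(mask);          /* bit index 0..31 */
}
#endif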
670#endif /* (__CORTEX_M >= 0x03) */
671
672
673
674
675#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
676/* TASKING carm specific functions */
677
678/*
679 * The CMSIS functions have been implemented as intrinsics in the compiler.
680 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
681 * including the CMSIS ones.
682 */
683
684#endif
685
686/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
687
688#endif /* __CORE_CMINSTR_H */