core_cmInstr.h
/**************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
#if defined ( __CC_ARM ) /*------------------ RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* No Operation */
#define __NOP                             __nop

/* Wait For Interrupt */
#define __WFI                             __wfi

/* Wait For Event */
#define __WFE                             __wfe

/* Send Event */
#define __SEV                             __sev

/* Instruction Synchronization Barrier */
#define __ISB()                           __isb(0xF)

/* Data Synchronization Barrier */
#define __DSB()                           __dsb(0xF)

/* Data Memory Barrier */
#define __DMB()                           __dmb(0xF)

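/* Usage sketch (illustrative addition, not part of the original header):
   publish a flag, drain outstanding writes with a barrier, then sleep, so
   the store is visible before the core stops. The helper name and flag
   protocol are assumptions made for this example. */
__STATIC_INLINE void __example_sleep_after_publish(volatile uint32_t *ready_flag)
{
  *ready_flag = 1U;   /* make state visible to other masters/handlers */
  __DSB();            /* ensure the store completes before sleeping */
  __WFI();            /* wait for interrupt */
}
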
/* Reverse byte order (32 bit) */
#define __REV                             __rev

/* Reverse byte order within each half-word (16 bit) */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/* Reverse byte order in the bottom half-word, then sign extend */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif

/* Rotate right (32 bit) */
#define __ROR                             __ror

/* Breakpoint */
#define __BKPT(value)                     __breakpoint(value)

#if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/* Reverse bit order (32 bit) */
#define __RBIT                            __rbit

/* Exclusive loads (8/16/32 bit) */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))

/* Exclusive stores (8/16/32 bit); return 0 on success, 1 on failure */
#define __STREXB(value, ptr)              __strex(value, ptr)
#define __STREXH(value, ptr)              __strex(value, ptr)
#define __STREXW(value, ptr)              __strex(value, ptr)

/* Clear the local exclusive-access monitor */
#define __CLREX                           __clrex

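/* Usage sketch (illustrative addition): a lock-free increment built from the
   exclusive-access intrinsics above. The exclusive store fails (returns
   non-zero) if another master touched the location between the paired load
   and store, so the loop retries until the read-modify-write is atomic. */
__STATIC_INLINE uint32_t __example_atomic_inc(volatile uint32_t *counter)
{
  uint32_t val;
  do {
    val = __LDREXW(counter) + 1U;
  } while (__STREXW(val, counter) != 0U);
  return val;
}
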
/* Signed saturate to a given bit width */
#define __SSAT                            __ssat

/* Unsigned saturate to a given bit width */
#define __USAT                            __usat

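/* Usage sketch (illustrative addition): clamp a 32-bit accumulator into the
   signed 16-bit (Q15) range; the bit-position argument must be a constant. */
__STATIC_INLINE int16_t __example_clamp_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);
}
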
/* Count leading zeros */
#define __CLZ                             __clz

/* Rotate right with extend: shift right by one, carry shifted into bit 31 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif

/* Unprivileged loads (8/16/32 bit) */
#define __LDRBT(ptr)                      ((uint8_t ) __ldrt(ptr))
#define __LDRHT(ptr)                      ((uint16_t) __ldrt(ptr))
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))

/* Unprivileged stores (8/16/32 bit) */
#define __STRBT(value, ptr)               __strt(value, ptr)
#define __STRHT(value, ptr)               __strt(value, ptr)
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */

#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/* No Operation */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/* Wait For Interrupt */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/* Wait For Event */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/* Send Event */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/* Instruction Synchronization Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/* Data Synchronization Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/* Data Memory Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}

/* Reverse byte order (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}

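/* Usage sketch (illustrative addition): convert a big-endian wire word to
   host order on a little-endian Cortex-M core. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __example_be32_to_host(uint32_t wire)
{
  return __REV(wire);
}
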

/* Reverse byte order within each half-word (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/* Reverse byte order in the bottom half-word, then sign extend */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}

/* Rotate right (32 bit); op2 should be in the range 1..31, since a shift
   by 32 in the expression below would be undefined behaviour in C */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}


/* Breakpoint; value must be a literal, as it is pasted into the mnemonic */
#define __BKPT(value) __ASM volatile ("bkpt "#value)

#if (__CORTEX_M >= 0x03) || ((defined(__CORTEX_SC)) && (__CORTEX_SC >= 300))

/* Reverse bit order (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

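/* Usage sketch (illustrative addition): derive a bit-reversed index for an
   N-point FFT (N = 2^log2n) from the full 32-bit reversal above. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __example_bitrev_index(uint32_t i, uint32_t log2n)
{
  return __RBIT(i) >> (32U - log2n);
}
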

/* Exclusive load (8 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" was expanded to [rx, #0], which the assembler
       rejects, so the following less efficient pattern has to be used. */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* explicit narrowing cast */
}


/* Exclusive load (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" was expanded to [rx, #0], which the assembler
       rejects, so the following less efficient pattern has to be used. */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* explicit narrowing cast */
}


/* Exclusive load (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/* Exclusive store (8 bit); returns 0 on success, 1 on failure */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/* Exclusive store (16 bit); returns 0 on success, 1 on failure */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/* Exclusive store (32 bit); returns 0 on success, 1 on failure */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/* Remove the exclusive lock created by the exclusive loads above */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

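/* Usage sketch (illustrative addition): a minimal spinlock acquire built on
   the exclusive pair, assuming 0 = unlocked and 1 = locked. The barrier
   after a successful claim keeps the critical section from being reordered
   ahead of the lock. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __example_spin_lock(volatile uint32_t *lock)
{
  while ((__LDREXW(lock) != 0U) || (__STREXW(1U, lock) != 0U))
  {
    /* retry until the lock reads unlocked and our exclusive store succeeds */
  }
  __DMB();
}
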
/* Signed saturate to a given bit width; ARG2 must be a compile-time
   constant because it is passed through the "I" inline-asm constraint */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/* Unsigned saturate to a given bit width; ARG2 must be a compile-time
   constant because it is passed through the "I" inline-asm constraint */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

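/* Usage sketch (illustrative addition): saturate a signed intermediate to
   the unsigned 8-bit range; the bit position must be a constant as noted
   above. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __example_clamp_u8(int32_t v)
{
  return (uint8_t)__USAT(v, 8);
}
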
/* Count leading zeros */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return ((uint8_t) result);    /* explicit narrowing cast */
}

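/* Usage sketch (illustrative addition): integer log2 via count-leading-
   zeros; only valid for non-zero inputs. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __example_ilog2(uint32_t value)
{
  return 31U - (uint32_t)__CLZ(value);
}
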
/* Rotate right with extend: shift right by one, carry shifted into bit 31 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}

/* Unprivileged load (8 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" was expanded to [rx, #0], which the assembler
       rejects, so the following less efficient pattern has to be used. */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* explicit narrowing cast */
}


/* Unprivileged load (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" was expanded to [rx, #0], which the assembler
       rejects, so the following less efficient pattern has to be used. */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* explicit narrowing cast */
}


/* Unprivileged load (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/* Unprivileged store (8 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/* Unprivileged store (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/* Unprivileged store (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}

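/* Usage sketch (illustrative addition): a privileged driver can write
   through a caller-supplied pointer with an unprivileged store, so MPU
   permission checks still apply to the unprivileged caller's buffer. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __example_put_user_byte(volatile uint8_t *user_ptr, uint8_t b)
{
  __STRBT(b, user_ptr);
}
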
#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */


#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */
/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */


#elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
/* Cosmic specific functions */
#include <cmsis_csm.h>

#endif
/* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */