24 #ifndef __CORE_CMINSTR_H
25 #define __CORE_CMINSTR_H
34 #if defined ( __CC_ARM )
37 #if (__ARMCC_VERSION < 400677)
38 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
/* Instruction Synchronization Barrier: flushes the pipeline so following
   instructions are re-fetched (armcc built-in; 0xF = full-system SY option). */
#define __ISB() __isb(0xF)
/* Data Synchronization Barrier: completes before subsequent instructions
   execute; all explicit memory accesses before it have finished. */
#define __DSB() __dsb(0xF)
/* Data Memory Barrier: orders explicit memory accesses before/after it. */
#define __DMB() __dmb(0xF)
/* The following instructions exist only on ARMv7-M (Cortex-M3/M4);
   Cortex-M0/M0+ (ARMv6-M) do not implement them. */
#if (__CORTEX_M >= 0x03)

/* Reverse the bit order of a 32-bit value. */
#define __RBIT __rbit

/* Exclusive loads (8/16/32 bit) for atomic read-modify-write sequences. */
#define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
#define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
#define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))

/* Exclusive stores: return 0 on success, 1 if the exclusive monitor was lost. */
#define __STREXB(value, ptr) __strex(value, ptr)
#define __STREXH(value, ptr) __strex(value, ptr)
#define __STREXW(value, ptr) __strex(value, ptr)

/* Remove the local exclusive-access lock created by LDREX. */
#define __CLREX __clrex

/* Signed / unsigned saturation of a value to a given bit width. */
#define __SSAT __ssat
#define __USAT __usat

#endif /* (__CORTEX_M >= 0x03) */
267 #elif defined ( __ICCARM__ )
270 #include <cmsis_iar.h>
273 #elif defined ( __TMS470__ )
276 #include <cmsis_ccs.h>
279 #elif defined ( __GNUC__ )
286 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __NOP(
void)
288 __ASM
volatile (
"nop");
297 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __WFI(
void)
299 __ASM
volatile (
"wfi");
308 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __WFE(
void)
310 __ASM
volatile (
"wfe");
318 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __SEV(
void)
320 __ASM
volatile (
"sev");
330 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __ISB(
void)
332 __ASM
volatile (
"isb");
341 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __DSB(
void)
343 __ASM
volatile (
"dsb");
352 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __DMB(
void)
354 __ASM
volatile (
"dmb");
369 __ASM
volatile (
"rev %0, %1" :
"=r" (result) :
"r" (value) );
385 __ASM
volatile (
"rev16 %0, %1" :
"=r" (result) :
"r" (value) );
401 __ASM
volatile (
"revsh %0, %1" :
"=r" (result) :
"r" (value) );
417 __ASM
volatile (
"ror %0, %0, %1" :
"+r" (op1) :
"r" (op2) );
422 #if (__CORTEX_M >= 0x03)
435 __ASM
volatile (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
451 __ASM
volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) );
467 __ASM
volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) );
483 __ASM
volatile (
"ldrex %0, [%1]" :
"=r" (result) :
"r" (addr) );
501 __ASM
volatile (
"strexb %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (value) );
519 __ASM
volatile (
"strexh %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (value) );
537 __ASM
volatile (
"strex %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (value) );
547 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __CLREX(
void)
549 __ASM
volatile (
"clrex");
/** \brief  Signed Saturate
    Saturates ARG1 to the signed range of ARG2 bits (1..32).
    ARG2 must be a compile-time constant (encoded as an immediate, "I").
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
/** \brief  Unsigned Saturate
    Saturates ARG1 to the unsigned range of ARG2 bits (0..31).
    ARG2 must be a compile-time constant (encoded as an immediate, "I").
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
596 __ASM
volatile (
"clz %0, %1" :
"=r" (result) :
"r" (value) );
605 #elif defined ( __TASKING__ )