#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __ARM_COMPAT_H
#include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
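
/* Usage sketch (illustrative, not part of the original header): the helpers
   above read/write a value at a potentially unaligned address without the
   undefined behaviour of a plain pointer cast; 'frame' is hypothetical.

     uint8_t frame[8];                                  // packet buffer
     __UNALIGNED_UINT32_WRITE(&frame[1], 0x12345678U);  // store at odd address
     uint32_t v = __UNALIGNED_UINT32_READ(&frame[1]);   // read it back
*/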
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif
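
/* Usage sketch (illustrative, not from this header): a CMSIS device startup
   file typically combines these symbols as below; 'VECTOR_TABLE_Type',
   'Reset_Handler' and the table size follow the CMSIS device-template
   convention and are assumptions here.

     extern void Reset_Handler(void);
     extern uint32_t __INITIAL_SP;

     extern const VECTOR_TABLE_Type __VECTOR_TABLE[240];
            const VECTOR_TABLE_Type __VECTOR_TABLE[240] __VECTOR_TABLE_ATTRIBUTE = {
       (VECTOR_TABLE_Type)(&__INITIAL_SP),   // initial stack pointer
       Reset_Handler,                        // reset entry
       // ... further exception and interrupt handlers
     };
*/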
/* ###########################  Core Function Access  ########################### */

/* intrinsic void __enable_irq(void);   see arm_compat.h */
/* intrinsic void __disable_irq(void);  see arm_compat.h */

/* Get Control Register */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Control Register (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Control Register */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Control Register (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
/* Get IPSR Register */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}

/* Get APSR Register */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}

/* Get xPSR Register */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}

/* Get Process Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Process Stack Pointer */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
/* Get Main Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Main Stack Pointer */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}

/* Set Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/* Get Priority Mask */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Priority Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Priority Mask */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Priority Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/* Enable FIQ: can only be executed in Privileged modes */
#define __enable_fault_irq        __enable_fiq    /* see arm_compat.h */

/* Disable FIQ: can only be executed in Privileged modes */
#define __disable_fault_irq       __disable_fiq   /* see arm_compat.h */
/* Get Base Priority */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Base Priority (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Base Priority */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Base Priority (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/* Set Base Priority with condition: only raises the mask, never lowers it */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/* Get Fault Mask */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Fault Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif

/* Set Fault Mask */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Fault Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_MAIN__ */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* Get Process Stack Pointer Limit.
   Devices without the ARMv8-M Main Extension (i.e. Cortex-M23) lack the
   non-secure PSPLIM register, hence zero is returned always in non-secure mode. */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Process Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)ProcStackPtrLimit;  /* register is RAZ/WI without Main Extension */
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/* Get Main Stack Pointer Limit (zero without the Main Extension, see PSPLIM) */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Main Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)MainStackPtrLimit;  /* register is RAZ/WI without Main Extension */
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* __ARM_ARCH_8M_MAIN__ || __ARM_ARCH_8M_BASE__ */
/* Get FPSCR (Floating Point Status/Control register) */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif

/* Set FPSCR */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)   ((void)(x))
#endif
/* ##########################  Core Instruction Access  ######################### */

/* Define macros for porting to both thumb1 and thumb2.
   For thumb1, use low register (r0-r7), specified by constraint "l".
   Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r)  "=l" (r)
#define __CMSIS_GCC_RW_REG(r)   "+l" (r)
#define __CMSIS_GCC_USE_REG(r)  "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r)  "=r" (r)
#define __CMSIS_GCC_RW_REG(r)   "+r" (r)
#define __CMSIS_GCC_USE_REG(r)  "r" (r)
#endif
#define __NOP          __builtin_arm_nop        /* No Operation */
#define __WFI          __builtin_arm_wfi        /* Wait For Interrupt */
#define __WFE          __builtin_arm_wfe        /* Wait For Event */
#define __SEV          __builtin_arm_sev        /* Send Event */
#define __ISB()        __builtin_arm_isb(0xF)   /* Instruction Synchronization Barrier */
#define __DSB()        __builtin_arm_dsb(0xF)   /* Data Synchronization Barrier */
#define __DMB()        __builtin_arm_dmb(0xF)   /* Data Memory Barrier */
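
/* Usage sketch (illustrative): order a buffer write before the flag that
   publishes it to another bus master or interrupt handler; 'buffer', 'data'
   and 'ready' are hypothetical.

     buffer[0] = data;   // produce the payload
     __DMB();            // ensure the payload is observable first
     ready = 1U;         // then publish
*/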
#define __REV(value)     __builtin_bswap32(value)            /* reverse byte order (32 bit) */
#define __REV16(value)   __ROR(__REV(value), 16)             /* reverse byte order within each halfword */
#define __REVSH(value)   (int16_t)__builtin_bswap16(value)   /* reverse byte order (16 bit, signed) */
/* Rotate Right in unsigned value (32 bit) */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;   /* avoid the undefined 32-bit shift below */
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
#define __BKPT(value)   __ASM volatile ("bkpt "#value)   /* Breakpoint */
#define __RBIT          __builtin_arm_rbit               /* Reverse bit order of value */
/* Count leading zeros */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* __builtin_clz(0) is formally undefined behaviour, so handle zero explicitly */
  if (value == 0U)
  {
    return 32U;
  }
  return (uint8_t)__builtin_clz(value);
}
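
/* Usage sketch (illustrative): for non-zero x, 31 - __CLZ(x) is
   floor(log2(x)), handy for power-of-two bucket indices.

     uint32_t x = 0x00001234U;                // highest set bit is bit 12
     uint32_t log2_floor = 31U - __CLZ(x);    // == 12 here
*/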
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

#define __LDREXB        (uint8_t)__builtin_arm_ldrex    /* LDR Exclusive (8 bit) */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex   /* LDR Exclusive (16 bit) */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex   /* LDR Exclusive (32 bit) */
#define __STREXB        (uint32_t)__builtin_arm_strex   /* STR Exclusive (8 bit); 0 on success */
#define __STREXH        (uint32_t)__builtin_arm_strex   /* STR Exclusive (16 bit); 0 on success */
#define __STREXW        (uint32_t)__builtin_arm_strex   /* STR Exclusive (32 bit); 0 on success */
#define __CLREX         __builtin_arm_clrex             /* Remove exclusive lock */

#endif
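
/* Usage sketch (illustrative): lock-free increment using the exclusive
   monitor; the loop retries while another agent intervenes. 'atomic_inc'
   is a hypothetical helper, not part of CMSIS.

     static __INLINE void atomic_inc(volatile uint32_t *addr)
     {
       uint32_t val;
       do {
         val = __LDREXW(addr) + 1U;      // load-exclusive, then bump
       } while (__STREXW(val, addr));    // returns 0 on success, 1 on failure
     }
*/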
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

#define __SSAT          __builtin_arm_ssat   /* Signed Saturate */
#define __USAT          __builtin_arm_usat   /* Unsigned Saturate */
/* Load 8-bit value with unprivileged access (LDRBT) */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}

/* Load 16-bit value with unprivileged access (LDRHT) */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}

/* Load 32-bit value with unprivileged access (LDRT) */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}

/* Store 8-bit value with unprivileged access (STRBT) */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/* Store 16-bit value with unprivileged access (STRHT) */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/* Store 32-bit value with unprivileged access (STRT) */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#else  /* architectures without hardware SSAT/USAT: saturate in software */

/* Signed Saturate to sat bits (1..32) */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max) { return max; }
    else if (val < min) { return min; }
  }
  return val;
}

/* Unsigned Saturate to sat bits (0..31) */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max) { return max; }
    else if (val < 0) { return 0U; }
  }
  return (uint32_t)val;
}

#endif  /* __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_MAIN__ */
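
/* Worked example (illustrative): saturating to 8 bits,
     __SSAT( 200, 8U) ->  127   (max signed 8-bit)
     __SSAT(-300, 8U) -> -128   (min signed 8-bit)
     __USAT( 300, 8U) ->  255   (max unsigned 8-bit)
     __USAT(  -5, 8U) ->    0
*/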
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* Load-Acquire (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}

/* Load-Acquire (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}

/* Load-Acquire (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}

/* Store-Release (8 bit) */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/* Store-Release (16 bit) */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/* Store-Release (32 bit) */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
#define __LDAEXB        (uint8_t)__builtin_arm_ldaex    /* Load-Acquire Exclusive (8 bit) */
#define __LDAEXH        (uint16_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (16 bit) */
#define __LDAEX         (uint32_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (32 bit) */
#define __STLEXB        (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (8 bit); 0 on success */
#define __STLEXH        (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (16 bit); 0 on success */
#define __STLEX         (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (32 bit); 0 on success */

#endif /* __ARM_ARCH_8M_MAIN__ || __ARM_ARCH_8M_BASE__ */
/* ###################  Compiler specific Intrinsics  ########################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
#define __SADD8              __builtin_arm_sadd8
#define __QADD8              __builtin_arm_qadd8
#define __SHADD8             __builtin_arm_shadd8
#define __UADD8              __builtin_arm_uadd8
#define __UQADD8             __builtin_arm_uqadd8
#define __UHADD8             __builtin_arm_uhadd8
#define __SSUB8              __builtin_arm_ssub8
#define __QSUB8              __builtin_arm_qsub8
#define __SHSUB8             __builtin_arm_shsub8
#define __USUB8              __builtin_arm_usub8
#define __UQSUB8             __builtin_arm_uqsub8
#define __UHSUB8             __builtin_arm_uhsub8
#define __SADD16             __builtin_arm_sadd16
#define __QADD16             __builtin_arm_qadd16
#define __SHADD16            __builtin_arm_shadd16
#define __UADD16             __builtin_arm_uadd16
#define __UQADD16            __builtin_arm_uqadd16
#define __UHADD16            __builtin_arm_uhadd16
#define __SSUB16             __builtin_arm_ssub16
#define __QSUB16             __builtin_arm_qsub16
#define __SHSUB16            __builtin_arm_shsub16
#define __USUB16             __builtin_arm_usub16
#define __UQSUB16            __builtin_arm_uqsub16
#define __UHSUB16            __builtin_arm_uhsub16
#define __SASX               __builtin_arm_sasx
#define __QASX               __builtin_arm_qasx
#define __SHASX              __builtin_arm_shasx
#define __UASX               __builtin_arm_uasx
#define __UQASX              __builtin_arm_uqasx
#define __UHASX              __builtin_arm_uhasx
#define __SSAX               __builtin_arm_ssax
#define __QSAX               __builtin_arm_qsax
#define __SHSAX              __builtin_arm_shsax
#define __USAX               __builtin_arm_usax
#define __UQSAX              __builtin_arm_uqsax
#define __UHSAX              __builtin_arm_uhsax
#define __USAD8              __builtin_arm_usad8
#define __USADA8             __builtin_arm_usada8
#define __SSAT16             __builtin_arm_ssat16
#define __USAT16             __builtin_arm_usat16
#define __UXTB16             __builtin_arm_uxtb16
#define __UXTAB16            __builtin_arm_uxtab16
#define __SXTB16             __builtin_arm_sxtb16
#define __SXTAB16            __builtin_arm_sxtab16
#define __SMUAD              __builtin_arm_smuad
#define __SMUADX             __builtin_arm_smuadx
#define __SMLAD              __builtin_arm_smlad
#define __SMLADX             __builtin_arm_smladx
#define __SMLALD             __builtin_arm_smlald
#define __SMLALDX            __builtin_arm_smlaldx
#define __SMUSD              __builtin_arm_smusd
#define __SMUSDX             __builtin_arm_smusdx
#define __SMLSD              __builtin_arm_smlsd
#define __SMLSDX             __builtin_arm_smlsdx
#define __SMLSLD             __builtin_arm_smlsld
#define __SMLSLDX            __builtin_arm_smlsldx
#define __SEL                __builtin_arm_sel
#define __QADD               __builtin_arm_qadd
#define __QSUB               __builtin_arm_qsub
#define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                   ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
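
/* Worked example (illustrative): PKHBT keeps ARG1's bottom halfword and
   ARG2's (shifted) top halfword; PKHTB is the mirror image.

     __PKHBT(0x00001111U, 0x22220000U, 0U)   // -> 0x22221111
     __PKHTB(0x11110000U, 0x00002222U, 0U)   // -> 0x11112222
*/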
/* Signed Most significant word Multiply Accumulate */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}