#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#define __has_builtin(x) (0)
#define __INLINE inline
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#define __NO_RETURN __attribute__((__noreturn__))
#define __USED __attribute__((used))
#define __WEAK __attribute__((weak))
#define __PACKED __attribute__((packed, aligned(1)))
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#ifndef __UNALIGNED_UINT32
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#ifndef __UNALIGNED_UINT16_WRITE
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#ifndef __UNALIGNED_UINT16_READ
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#ifndef __UNALIGNED_UINT32_WRITE
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#ifndef __UNALIGNED_UINT32_READ
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
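/* Usage sketch (added for illustration, not part of the original header): accessing a
   16-bit field at a deliberately unaligned byte offset of a message buffer. The packed
   struct based macros above keep such accesses well defined; the function names and the
   offset are hypothetical. Access is in native byte order (little endian on most Cortex-M). */
__STATIC_INLINE uint16_t example_get_len_field(const uint8_t *frame)
{
  return __UNALIGNED_UINT16_READ(frame + 3U);          /* offset 3 is not 2-byte aligned */
}

__STATIC_INLINE void example_set_len_field(uint8_t *frame, uint16_t len)
{
  __UNALIGNED_UINT16_WRITE(frame + 3U, len);
}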
#define __ALIGNED(x) __attribute__((aligned(x)))
#define __RESTRICT __restrict
#ifndef __COMPILER_BARRIER
#define __COMPILER_BARRIER() __ASM volatile("":::"memory")
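/* Usage sketch (added for illustration, not part of the original header): the barrier only
   constrains compiler reordering, it emits no instruction. Here it keeps a payload write
   from being moved past the write of a flag polled by an interrupt handler; the names are
   hypothetical and a hardware barrier (__DMB) may still be needed between bus masters. */
__STATIC_INLINE void example_publish_flag(uint32_t *data, volatile uint32_t *ready, uint32_t value)
{
  *data = value;          /* ordinary store the compiler could otherwise sink */
  __COMPILER_BARRIER();   /* keep it ahead of the flag write without emitting a DMB */
  *ready = 1U;
}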
#ifndef __PROGRAM_START
  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

  /* Copy initialised data sections described by the .copy.table entries. */
  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  /* Zero-initialise the sections described by the .zero.table entries. */
  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

#define __PROGRAM_START __cmsis_start
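/* Note (added, not in the original header): each .copy.table entry is a
   { uint32_t const *src; uint32_t *dest; uint32_t wlen; } record and each .zero.table
   entry a { uint32_t *dest; uint32_t wlen; } record, with wlen counted in 32-bit words;
   the linker script places both tables between the __*_table_start__/__*_table_end__
   symbols referenced above. */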
#define __INITIAL_SP __StackTop
#ifndef __STACK_LIMIT
#define __STACK_LIMIT __StackLimit
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute((used, section(".vectors")))
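/* Usage sketch (added for illustration, not part of the original header): how a device
   startup file typically applies the macros above. The handler name, table length and
   VECTOR_TABLE_Type typedef are hypothetical; real tables live in the device startup code,
   so the block is kept inactive here. */
#if 0
typedef void (*VECTOR_TABLE_Type)(void);

extern uint32_t __INITIAL_SP;                 /* linker symbol, see __INITIAL_SP above */
extern void Reset_Handler(void);

const VECTOR_TABLE_Type __VECTOR_TABLE[2] __VECTOR_TABLE_ATTRIBUTE = {
  (VECTOR_TABLE_Type)(&__INITIAL_SP),         /* initial stack pointer */
  Reset_Handler                               /* reset entry point     */
};
#endif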
  __ASM volatile ("cpsie i" : : : "memory");
  __ASM volatile ("cpsid i" : : : "memory");
  __ASM volatile ("MRS %0, control" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
  __ASM volatile ("cpsie f" : : : "memory");
  __ASM volatile ("cpsid f" : : : "memory");
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)ProcStackPtrLimit;
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)ProcStackPtrLimit;
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)MainStackPtrLimit;
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)MainStackPtrLimit;
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#if __has_builtin(__builtin_arm_get_fpscr)
  return __builtin_arm_get_fpscr();
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#if __has_builtin(__builtin_arm_set_fpscr)
  __builtin_arm_set_fpscr(fpscr);
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#define __NOP() __ASM volatile ("nop")
#define __WFI() __ASM volatile ("wfi")
#define __WFE() __ASM volatile ("wfe")
#define __SEV() __ASM volatile ("sev")
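/* Usage sketch (added for illustration, not part of the original header): a polling loop
   that sleeps between checks. An interrupt that updates the flag also acts as a wake-up
   event for WFE on Cortex-M; the flag and function name are hypothetical. */
__STATIC_INLINE void example_wait_for_flag(volatile uint32_t *flag)
{
  while (*flag == 0U)
  {
    __WFE();   /* sleep until an event or interrupt occurs, then re-check */
  }
}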
  __ASM volatile ("isb 0xF":::"memory");
  __ASM volatile ("dsb 0xF":::"memory");
  __ASM volatile ("dmb 0xF":::"memory");
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
  return (op1 >> op2) | (op1 << (32U - op2));
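/* Usage sketch (added for illustration, not part of the original header): __REV turns a
   big-endian (network order) word into the little-endian native order of most Cortex-M
   devices, and __ROR rotates a mask; the constants are arbitrary illustrations. */
__STATIC_INLINE uint32_t example_ntohl(uint32_t net_word)
{
  return __REV(net_word);             /* byte swap: 0x11223344 -> 0x44332211 */
}

__STATIC_INLINE uint32_t example_rotated_mask(void)
{
  return __ROR(0x000000F0UL, 8U);     /* low byte rotates into the top byte: 0xF0000000UL */
}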
#define __BKPT(value) __ASM volatile ("bkpt "#value)
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  uint32_t s = (4U * 8U) - 1U;         /* extra shift needed at the end */

  result = value;                      /* result starts as the LSB of value */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when value's highest bits are zero */
  return __builtin_clz(value);
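/* Usage sketch (added for illustration, not part of the original header): __CLZ counts
   leading zeros, so 32 minus that count is the number of significant bits; because
   __CLZ(0) returns 32, the helper reports 0 for an input of 0. The name is hypothetical. */
__STATIC_INLINE uint32_t example_bit_width(uint32_t value)
{
  return 32U - (uint32_t)__CLZ(value);
}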
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
  return ((uint8_t) result);
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
  return ((uint16_t) result);
  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  __ASM volatile ("clrex" ::: "memory");
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT(ARG1,ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
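/* Usage sketch (added for illustration, not part of the original header): clamping a wide
   accumulator into the Q15 and unsigned 12-bit ranges before storing; the saturation
   widths are the immediates passed to ssat/usat and must be compile-time constants. */
__STATIC_INLINE int16_t example_saturate_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);    /* clamp to [-32768, 32767] */
}

__STATIC_INLINE uint16_t example_saturate_u12(int32_t acc)
{
  return (uint16_t)__USAT(acc, 12);   /* clamp to [0, 4095] */
}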
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
  return ((uint8_t) result);
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
  return ((uint16_t) result);
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
  return (uint32_t)val;
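/* Usage sketch (added for illustration, not part of the original header): a lock-free
   counter built on the exclusive-access intrinsics defined earlier, assuming a core that
   provides them (Armv7-M/Armv8-M). The function name is hypothetical. */
__STATIC_INLINE uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t newVal;
  do
  {
    newVal = __LDREXW(counter) + 1U;              /* load-exclusive the current value  */
  } while (__STREXW(newVal, counter) != 0U);      /* retry if the exclusive store fails */
  return newVal;
}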
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
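/* Usage sketch (added for illustration, not part of the original header): handing a data
   word from an interrupt handler to thread code with the load-acquire/store-release
   intrinsics above, so the payload write is observed before the flag. Names are hypothetical. */
__STATIC_INLINE void example_publish_byte_flag(volatile uint32_t *data, volatile uint8_t *ready, uint32_t value)
{
  *data = value;                                  /* payload                              */
  __STLB(1U, ready);                              /* store-release: orders payload first  */
}

__STATIC_INLINE uint32_t example_consume_byte_flag(volatile uint32_t *data, volatile uint8_t *ready)
{
  while (__LDAB(ready) == 0U) { /* spin */ }      /* load-acquire: orders flag before payload */
  return *data;
}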
  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#define __SSAT16(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
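/* Usage sketch (added for illustration, not part of the original header): packing pairs of
   int16_t samples with __PKHBT and accumulating a Q15 dot product with __SMLAD, which
   multiplies both halfword pairs and adds them to the accumulator in one step. The function
   name and packing convention (even index in the low halfword) are hypothetical. */
__STATIC_INLINE int32_t example_dot_q15(const int16_t *a, const int16_t *b, uint32_t n_pairs)
{
  int32_t acc = 0;
  for (uint32_t i = 0U; i < n_pairs; ++i)
  {
    uint32_t pa = __PKHBT((uint32_t)(uint16_t)a[2U*i], (uint32_t)(uint16_t)a[2U*i + 1U], 16);
    uint32_t pb = __PKHBT((uint32_t)(uint16_t)b[2U*i], (uint32_t)(uint16_t)b[2U*i + 1U], 16);
    acc = (int32_t)__SMLAD(pa, pb, (uint32_t)acc);  /* acc += a[2i]*b[2i] + a[2i+1]*b[2i+1] */
  }
  return acc;
}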
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
#pragma GCC diagnostic pop