#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __ARM_COMPAT_H
#include <arm_compat.h>    /* Compiler specific defines */
#endif
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
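/* Illustrative usage sketch (not part of the original header); `buf` and `p`
   are hypothetical. The packed-struct casts make the compiler emit accesses
   that are safe at any byte address:

     uint8_t buf[8];
     uint8_t *p = &buf[1];                      // deliberately misaligned
     __UNALIGNED_UINT32_WRITE(p, 0x12345678U);  // packed store, no alignment fault
     uint32_t v = __UNALIGNED_UINT32_READ(p);   // packed load, v == 0x12345678
*/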
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
/* Get CONTROL register */
  __ASM volatile ("MRS %0, control" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get CONTROL register (non-secure) */
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
#endif

/* Set CONTROL register */
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set CONTROL register (non-secure) */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
#endif

/* Get IPSR register */
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );

/* Get APSR register */
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );

/* Get xPSR register */
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );

/* Get Process Stack Pointer */
  __ASM volatile ("MRS %0, psp" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer (non-secure) */
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
#endif

/* Set Process Stack Pointer */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer (non-secure) */
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
#endif

/* Get Main Stack Pointer */
  __ASM volatile ("MRS %0, msp" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer (non-secure) */
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
#endif

/* Set Main Stack Pointer */
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer (non-secure) */
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
#endif

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Stack Pointer (non-secure) */
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );

/* Set Stack Pointer (non-secure) */
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
#endif

/* Get Priority Mask */
  __ASM volatile ("MRS %0, primask" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Priority Mask (non-secure) */
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
#endif

/* Set Priority Mask */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Priority Mask (non-secure) */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
#endif
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/* Enable/Disable FIQ (fault exceptions); __enable_fiq/__disable_fiq come from arm_compat.h */
#define __enable_fault_irq                __enable_fiq
#define __disable_fault_irq               __disable_fiq
/* Get Base Priority */
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Base Priority (non-secure) */
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
#endif

/* Set Base Priority */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Base Priority (non-secure) */
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
#endif

/* Set Base Priority with condition: the write takes effect only if it raises the masking level */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
/* Get Fault Mask */
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Fault Mask (non-secure) */
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
#endif

/* Set Fault Mask */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Fault Mask (non-secure) */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
#endif

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* Get Process Stack Pointer Limit.
   Devices without the ARMv8-M Main Extension (i.e. Cortex-M23) lack the
   non-secure PSPLIM; there it is RAZ/WI and reads as zero. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  return 0U;
#else
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
#endif

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer Limit (non-secure) */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  return 0U;
#else
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
#endif
#endif

/* Set Process Stack Pointer Limit */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)ProcStackPtrLimit;  /* register is RAZ/WI: discard the value */
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer Limit (non-secure) */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)ProcStackPtrLimit;  /* register is RAZ/WI: discard the value */
#else
  __ASM volatile ("MSR psplim_ns, %0" : : "r" (ProcStackPtrLimit));
#endif
#endif

/* Get Main Stack Pointer Limit */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  return 0U;
#else
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
#endif

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer Limit (non-secure) */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  return 0U;
#else
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
#endif
#endif

/* Set Main Stack Pointer Limit */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  (void)MainStackPtrLimit;  /* register is RAZ/WI: discard the value */
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer Limit (non-secure) */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  (void)MainStackPtrLimit;  /* register is RAZ/WI: discard the value */
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
#endif

#endif /* (__ARM_ARCH_8M_MAIN__ == 1) || (__ARM_ARCH_8M_BASE__ == 1) */
/* Get FPSCR (floating-point status and control register) */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif

/* Set FPSCR */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)   ((void)(x))
#endif
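/* Illustrative usage sketch (not part of the original header): inspect and
   clear the cumulative exception flags in FPSCR (bit 0 IOC, 1 DZC, 2 OFC,
   3 UFC, 4 IXC, 7 IDC). With no FPU present/used the macros degrade to a
   read of 0 and a discarded write:

     uint32_t fpscr = __get_FPSCR();
     if (fpscr & 0x1U) { ... }        // IOC: an invalid operation occurred
     __set_FPSCR(fpscr & ~0x9FU);     // clear cumulative flag bits
*/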
/* In Thumb-1 (no Thumb-2), inline assembly may only use the low registers
   r0-r7 ("l" constraint); otherwise any core register ("r") is allowed */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
#define __NOP           __builtin_arm_nop   /* no operation */
#define __WFI           __builtin_arm_wfi   /* wait for interrupt */
#define __WFE           __builtin_arm_wfe   /* wait for event */
#define __SEV           __builtin_arm_sev   /* send event */

#define __ISB()         __builtin_arm_isb(0xF)   /* instruction synchronization barrier */
#define __DSB()         __builtin_arm_dsb(0xF)   /* data synchronization barrier */
#define __DMB()         __builtin_arm_dmb(0xF)   /* data memory barrier */

#define __REV(value)    __builtin_bswap32(value)            /* reverse byte order (32 bit) */
#define __REV16(value)  __ROR(__REV(value), 16)             /* reverse bytes within each halfword */
#define __REVSH(value)  (int16_t)__builtin_bswap16(value)   /* reverse bytes in low halfword, sign extend */
  op2 %= 32U;   /* rotate count modulo 32; a raw shift by 32 below would be undefined behavior */
  return (op2 == 0U) ? op1 : ((op1 >> op2) | (op1 << (32U - op2)));
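/* Worked example: __REV reverses all four bytes, __REV16 the bytes within
   each halfword, __ROR rotates right:

     __REV  (0x12345678U)     == 0x78563412U
     __REV16(0x12345678U)     == 0x34127856U   // i.e. __ROR(__REV(x), 16)
     __ROR  (0x00000001U, 1U) == 0x80000000U
*/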
#define __BKPT(value)   __ASM volatile ("bkpt "#value)   /* breakpoint */
#define __RBIT          __builtin_arm_rbit               /* reverse bit order */
#define __CLZ           (uint8_t)__builtin_clz           /* count leading zeros; __builtin_clz(0) is undefined */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* exclusive loads: load and set the exclusive monitor */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex
#define __LDREXH        (uint16_t)__builtin_arm_ldrex
#define __LDREXW        (uint32_t)__builtin_arm_ldrex

/* exclusive stores: return 0 on success, 1 if the exclusive monitor was lost */
#define __STREXB        (uint32_t)__builtin_arm_strex
#define __STREXH        (uint32_t)__builtin_arm_strex
#define __STREXW        (uint32_t)__builtin_arm_strex

/* clear the exclusive monitor */
#define __CLREX         __builtin_arm_clrex

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) || (__ARM_ARCH_8M_BASE__ == 1) */
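/* Illustrative usage sketch (not part of the original header): a lock-free
   increment using the exclusive monitor. `g_counter` is hypothetical; the
   store reports 0 on success and 1 if the reservation was lost:

     static volatile uint32_t g_counter;

     void atomic_inc(void)
     {
       uint32_t v;
       do {
         v = __LDREXW(&g_counter);                    // load + set monitor
       } while (__STREXW(v + 1U, &g_counter) != 0U);  // retry on contention
     }
*/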
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

#define __SSAT          __builtin_arm_ssat   /* signed saturate */
#define __USAT          __builtin_arm_usat   /* unsigned saturate */
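/* Worked example: saturate to an 8-bit range. The second operand is the bit
   width of the result and must be a compile-time constant:

     __SSAT( 1000, 8) ==  127   // clamped to [-128, 127]
     __SSAT(-1000, 8) == -128
     __USAT( 1000, 8) ==  255   // clamped to [0, 255]
     __USAT(  -42, 8) ==    0
*/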
/* unprivileged ("user mode") loads and stores: LDR*T / STR*T */
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );

  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );

  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );

  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
#else  /* ARMv6-M / ARMv8-M Baseline: no SSAT/USAT instructions, saturate in software */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max) { return max; }
    if (val < min) { return min; }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max) { return max; }
    if (val < 0) { return 0U; }
  }
  return (uint32_t)val;
}

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* load-acquire: LDAB / LDAH / LDA */
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );

/* store-release: STLB / STLH / STL */
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );

  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );

  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
/* load-acquire exclusive */
#define __LDAEXB        (uint8_t)__builtin_arm_ldaex
#define __LDAEXH        (uint16_t)__builtin_arm_ldaex
#define __LDAEX         (uint32_t)__builtin_arm_ldaex

/* store-release exclusive: return 0 on success, 1 otherwise */
#define __STLEXB        (uint32_t)__builtin_arm_stlex
#define __STLEXH        (uint32_t)__builtin_arm_stlex
#define __STLEX         (uint32_t)__builtin_arm_stlex
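/* Illustrative usage sketch (not part of the original header): a minimal
   spinlock from the acquire/release exclusives, so no separate barriers are
   needed around the critical section. `lock` is hypothetical (0 = free):

     static volatile uint32_t lock;

     void spin_lock(void)
     {
       do {
         while (__LDAEX(&lock) != 0U) { }    // load-acquire, wait until free
       } while (__STLEX(1U, &lock) != 0U);   // store-release, retry if lost
     }

     void spin_unlock(void)
     {
       __STL(0U, &lock);                     // plain store-release
     }
*/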
/* SIMD intrinsics (DSP extension) */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#ifndef __ARM_BIG_ENDIAN   /* little endian: accumulator low word in llr.w32[0] */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* big endian: accumulator low word in llr.w32[1] */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

#ifndef __ARM_BIG_ENDIAN
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
#ifndef __ARM_BIG_ENDIAN   /* little endian: accumulator low word in llr.w32[0] */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* big endian: accumulator low word in llr.w32[1] */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

#ifndef __ARM_BIG_ENDIAN
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
#if 0   /* inline-assembly variants, kept for reference but not used */
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
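/* Worked example: pack two halfwords into one word.

     __PKHBT(0x0000AAAAU, 0x0000BBBBU, 16) == 0xBBBBAAAAU  // ARG1 low, ARG2<<16 high
     __PKHTB(0xAAAA0000U, 0xBBBB4444U, 16) == 0xAAAABBBBU  // ARG1 high, ARG2>>16 low
*/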
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );