stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h
Go to the documentation of this file.
1 /**************************************************************************/
7 /*
8  * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
9  *
10  * SPDX-License-Identifier: Apache-2.0
11  *
12  * Licensed under the Apache License, Version 2.0 (the License); you may
13  * not use this file except in compliance with the License.
14  * You may obtain a copy of the License at
15  *
16  * www.apache.org/licenses/LICENSE-2.0
17  *
18  * Unless required by applicable law or agreed to in writing, software
19  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21  * See the License for the specific language governing permissions and
22  * limitations under the License.
23  */
24 
25 /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26 
27 #ifndef __CMSIS_ARMCLANG_H
28 #define __CMSIS_ARMCLANG_H
29 
30 #pragma clang system_header /* treat file as system include file */
31 
32 #ifndef __ARM_COMPAT_H
33 #include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
34 #endif
35 
36 /* CMSIS compiler specific defines */
37 #ifndef __ASM
38  #define __ASM __asm
39 #endif
40 #ifndef __INLINE
41  #define __INLINE __inline
42 #endif
43 #ifndef __STATIC_INLINE
44  #define __STATIC_INLINE static __inline
45 #endif
46 #ifndef __STATIC_FORCEINLINE
47  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
48 #endif
49 #ifndef __NO_RETURN
50  #define __NO_RETURN __attribute__((__noreturn__))
51 #endif
52 #ifndef __USED
53  #define __USED __attribute__((used))
54 #endif
55 #ifndef __WEAK
56  #define __WEAK __attribute__((weak))
57 #endif
58 #ifndef __PACKED
59  #define __PACKED __attribute__((packed, aligned(1)))
60 #endif
61 #ifndef __PACKED_STRUCT
62  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
63 #endif
64 #ifndef __PACKED_UNION
65  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
66 #endif
67 #ifndef __UNALIGNED_UINT32 /* deprecated */
68  #pragma clang diagnostic push
69  #pragma clang diagnostic ignored "-Wpacked"
70 /*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
71  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
72  #pragma clang diagnostic pop
73  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
74 #endif
75 #ifndef __UNALIGNED_UINT16_WRITE
76  #pragma clang diagnostic push
77  #pragma clang diagnostic ignored "-Wpacked"
78 /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
80  #pragma clang diagnostic pop
81  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
82 #endif
83 #ifndef __UNALIGNED_UINT16_READ
84  #pragma clang diagnostic push
85  #pragma clang diagnostic ignored "-Wpacked"
86 /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
87  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
88  #pragma clang diagnostic pop
89  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
90 #endif
91 #ifndef __UNALIGNED_UINT32_WRITE
92  #pragma clang diagnostic push
93  #pragma clang diagnostic ignored "-Wpacked"
94 /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
96  #pragma clang diagnostic pop
97  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
98 #endif
99 #ifndef __UNALIGNED_UINT32_READ
100  #pragma clang diagnostic push
101  #pragma clang diagnostic ignored "-Wpacked"
102 /*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
104  #pragma clang diagnostic pop
105  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
106 #endif
107 #ifndef __ALIGNED
108  #define __ALIGNED(x) __attribute__((aligned(x)))
109 #endif
110 #ifndef __RESTRICT
111  #define __RESTRICT __restrict
112 #endif
113 
114 
/* ###########################  Core Function Access  ########################### */

/* intrinsic void __enable_irq();  see arm_compat.h */


/* intrinsic void __disable_irq(); see arm_compat.h */


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure state.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
189 
190 
/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register (active exception number).
  \return               IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register (condition flags).
  \return               APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register (combined APSR/IPSR/EPSR view).
  \return               xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
232 
/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
366 
367 
/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit when in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
420 
421 
/* The following registers exist only on Armv7-M and Armv8-M Mainline. */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq        __enable_fiq   /* see arm_compat.h */


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq       __disable_fiq   /* see arm_compat.h */


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}


/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
563 
564 
/* Stack-limit registers exist only on Armv8-M (Mainline and Baseline). */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

/**
  \brief   Get Process Stack Pointer Limit
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure mode.
  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.
  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure mode.
  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.
  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure mode.
  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure mode.
  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without Armv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.
  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
744 
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
           When no FPU is present/used, reads as zero.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
           When no FPU is present/used, the value is discarded.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)   ((void)(x))
#endif
768 
769 
/* ##########################  Core Instruction Access  ######################### */
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
789 
/**
  \brief   No Operation — does nothing; the NOP hint may be removed by the processor.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt — suspends execution until an event/interrupt occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event — suspends execution until an event occurs or the event register is set.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event — signals an event to all processors in the system.
 */
#define __SEV          __builtin_arm_sev


/**
  \brief   Instruction Synchronization Barrier — flushes the pipeline so that all
           preceding instructions complete before subsequent instructions execute.
  note: the trailing semicolon inside the expansion was removed so these
        barrier macros behave as single expressions; with the semicolon,
        `if (c) __ISB(); else ...` would not compile.
 */
#define __ISB()        __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier — completes all explicit memory accesses
           before continuing.
 */
#define __DSB()        __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier — orders explicit memory accesses before and after
           the barrier.
 */
#define __DMB()        __builtin_arm_dmb(0xF)


/**
  \brief   Reverse byte order (32 bit)
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order within each 16-bit halfword of a 32-bit value
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order of the bottom halfword and sign-extend (16 bit)
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
868 
876 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
877 {
878  op2 %= 32U;
879  if (op2 == 0U)
880  {
881  return op1;
882  }
883  return (op1 >> op2) | (op1 << (32U - op2));
884 }
885 
886 
894 #define __BKPT(value) __ASM volatile ("bkpt "#value)
895 
896 
903 #define __RBIT __builtin_arm_rbit
904 
911 #define __CLZ (uint8_t)__builtin_clz
912 
913 
/* Exclusive (LDREX/STREX) accesses exist on Armv7-M and Armv8-M. */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
988 
989 
/* Hardware saturation and unprivileged load/store exist on Armv7-M and
   Armv8-M Mainline; the #else branch (below) provides C fallbacks.       */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRT instruction for 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
1110 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1111  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1112  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1113 
1121 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1122 {
1123  if ((sat >= 1U) && (sat <= 32U))
1124  {
1125  const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1126  const int32_t min = -1 - max ;
1127  if (val > max)
1128  {
1129  return max;
1130  }
1131  else if (val < min)
1132  {
1133  return min;
1134  }
1135  }
1136  return val;
1137 }
1138 
1146 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1147 {
1148  if (sat <= 31U)
1149  {
1150  const uint32_t max = ((1U << sat) - 1U);
1151  if (val > (int32_t)max)
1152  {
1153  return max;
1154  }
1155  else if (val < 0)
1156  {
1157  return 0U;
1158  }
1159  }
1160  return (uint32_t)val;
1161 }
1162 
1163 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1164  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1165  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1166 
1167 
/* Load-acquire / store-release instructions exist only on Armv8-M. */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/* end of group CMSIS_Core_InstructionInterface */
1314 
1315 
1316 /* ################### Compiler specific Intrinsics ########################### */
1322 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1323 
1324 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1325 {
1326  uint32_t result;
1327 
1328  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1329  return(result);
1330 }
1331 
1332 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1333 {
1334  uint32_t result;
1335 
1336  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1337  return(result);
1338 }
1339 
1340 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1341 {
1342  uint32_t result;
1343 
1344  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1345  return(result);
1346 }
1347 
1348 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1349 {
1350  uint32_t result;
1351 
1352  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1353  return(result);
1354 }
1355 
1356 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1357 {
1358  uint32_t result;
1359 
1360  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1361  return(result);
1362 }
1363 
1364 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1365 {
1366  uint32_t result;
1367 
1368  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1369  return(result);
1370 }
1371 
1372 
1373 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1374 {
1375  uint32_t result;
1376 
1377  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1378  return(result);
1379 }
1380 
1381 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1382 {
1383  uint32_t result;
1384 
1385  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1386  return(result);
1387 }
1388 
1389 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1390 {
1391  uint32_t result;
1392 
1393  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1394  return(result);
1395 }
1396 
1397 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1398 {
1399  uint32_t result;
1400 
1401  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1402  return(result);
1403 }
1404 
1405 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1406 {
1407  uint32_t result;
1408 
1409  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1410  return(result);
1411 }
1412 
1413 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1414 {
1415  uint32_t result;
1416 
1417  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1418  return(result);
1419 }
1420 
1421 
1422 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1423 {
1424  uint32_t result;
1425 
1426  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1427  return(result);
1428 }
1429 
1430 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1431 {
1432  uint32_t result;
1433 
1434  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1435  return(result);
1436 }
1437 
1438 __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1439 {
1440  uint32_t result;
1441 
1442  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1443  return(result);
1444 }
1445 
1446 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1447 {
1448  uint32_t result;
1449 
1450  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1451  return(result);
1452 }
1453 
1454 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1455 {
1456  uint32_t result;
1457 
1458  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1459  return(result);
1460 }
1461 
1462 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1463 {
1464  uint32_t result;
1465 
1466  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1467  return(result);
1468 }
1469 
1470 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1471 {
1472  uint32_t result;
1473 
1474  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1475  return(result);
1476 }
1477 
1478 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1479 {
1480  uint32_t result;
1481 
1482  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1483  return(result);
1484 }
1485 
1486 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1487 {
1488  uint32_t result;
1489 
1490  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1491  return(result);
1492 }
1493 
1494 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1495 {
1496  uint32_t result;
1497 
1498  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1499  return(result);
1500 }
1501 
1502 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1503 {
1504  uint32_t result;
1505 
1506  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1507  return(result);
1508 }
1509 
1510 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1511 {
1512  uint32_t result;
1513 
1514  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1515  return(result);
1516 }
1517 
1518 __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1519 {
1520  uint32_t result;
1521 
1522  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1523  return(result);
1524 }
1525 
1526 __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1527 {
1528  uint32_t result;
1529 
1530  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1531  return(result);
1532 }
1533 
1534 __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1535 {
1536  uint32_t result;
1537 
1538  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1539  return(result);
1540 }
1541 
1542 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1543 {
1544  uint32_t result;
1545 
1546  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1547  return(result);
1548 }
1549 
1550 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1551 {
1552  uint32_t result;
1553 
1554  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1555  return(result);
1556 }
1557 
1558 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1559 {
1560  uint32_t result;
1561 
1562  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1563  return(result);
1564 }
1565 
1566 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1567 {
1568  uint32_t result;
1569 
1570  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1571  return(result);
1572 }
1573 
1574 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1575 {
1576  uint32_t result;
1577 
1578  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1579  return(result);
1580 }
1581 
1582 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1583 {
1584  uint32_t result;
1585 
1586  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1587  return(result);
1588 }
1589 
1590 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1591 {
1592  uint32_t result;
1593 
1594  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1595  return(result);
1596 }
1597 
1598 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1599 {
1600  uint32_t result;
1601 
1602  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1603  return(result);
1604 }
1605 
1606 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1607 {
1608  uint32_t result;
1609 
1610  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1611  return(result);
1612 }
1613 
1614 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1615 {
1616  uint32_t result;
1617 
1618  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619  return(result);
1620 }
1621 
1622 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1623 {
1624  uint32_t result;
1625 
1626  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1627  return(result);
1628 }
1629 
1630 #define __SSAT16(ARG1,ARG2) \
1631 ({ \
1632  int32_t __RES, __ARG1 = (ARG1); \
1633  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1634  __RES; \
1635  })
1636 
1637 #define __USAT16(ARG1,ARG2) \
1638 ({ \
1639  uint32_t __RES, __ARG1 = (ARG1); \
1640  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1641  __RES; \
1642  })
1643 
/**
  \brief   Zero-extend bytes [7:0] and [23:16] to two packed 16-bit values (UXTB16 instruction).
  \param [in]  op1  packed-byte operand
  \return      packed-halfword result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1651 
/**
  \brief   Zero-extend bytes of op2 and add to the halfwords of op1 (UXTAB16 instruction).
  \param [in]  op1  packed-halfword addend
  \param [in]  op2  packed-byte operand
  \return      packed-halfword result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1659 
/**
  \brief   Sign-extend bytes [7:0] and [23:16] to two packed 16-bit values (SXTB16 instruction).
  \param [in]  op1  packed-byte operand
  \return      packed-halfword result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1667 
/**
  \brief   Sign-extend bytes of op2 and add to the halfwords of op1 (SXTAB16 instruction).
  \param [in]  op1  packed-halfword addend
  \param [in]  op2  packed-byte operand
  \return      packed-halfword result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1675 
/**
  \brief   Dual 16-bit signed multiply, products added (SMUAD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1683 
/**
  \brief   Dual 16-bit signed multiply with exchanged halfwords, products added (SMUADX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1691 
/**
  \brief   Dual 16-bit signed multiply, products added then accumulated with op3 (SMLAD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  op3  accumulator value
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1699 
/**
  \brief   Dual 16-bit signed multiply with exchange, products added then accumulated with op3 (SMLADX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  op3  accumulator value
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1707 
/**
  \brief   Dual 16-bit signed multiply, products added to a 64-bit accumulator (SMLALD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  acc  64-bit accumulator input
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* The union splits the 64-bit accumulator into the two 32-bit registers
     the instruction reads and writes; which array slot holds the low word
     depends on endianness, hence the two asm variants below. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1724 
/**
  \brief   Dual 16-bit signed multiply with exchange, products added to a 64-bit accumulator (SMLALDX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  acc  64-bit accumulator input
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Union maps the 64-bit accumulator onto the register pair used by the
     instruction; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1741 
/**
  \brief   Dual 16-bit signed multiply, products subtracted (SMUSD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1749 
/**
  \brief   Dual 16-bit signed multiply with exchange, products subtracted (SMUSDX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1757 
/**
  \brief   Dual 16-bit signed multiply, products subtracted then accumulated with op3 (SMLSD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  op3  accumulator value
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1765 
/**
  \brief   Dual 16-bit signed multiply with exchange, products subtracted then accumulated with op3 (SMLSDX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  op3  accumulator value
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1773 
/**
  \brief   Dual 16-bit signed multiply, difference of products added to a 64-bit accumulator (SMLSLD instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  acc  64-bit accumulator input
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Union maps the 64-bit accumulator onto the register pair used by the
     instruction; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1790 
/**
  \brief   Dual 16-bit signed multiply with exchange, difference of products added to a 64-bit accumulator (SMLSLDX instruction).
  \param [in]  op1  first packed-halfword operand
  \param [in]  op2  second packed-halfword operand
  \param [in]  acc  64-bit accumulator input
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Union maps the 64-bit accumulator onto the register pair used by the
     instruction; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1807 
/**
  \brief   Select bytes from op1/op2 based on the APSR.GE flags (SEL instruction).
  \param [in]  op1  first packed-byte operand
  \param [in]  op2  second packed-byte operand
  \return      result of the instruction
 */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1815 
/**
  \brief   32-bit signed saturating addition (QADD instruction).
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \return      saturated sum
 */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1823 
/**
  \brief   32-bit signed saturating subtraction (QSUB instruction).
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \return      saturated difference
 */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1831 
/* NOTE(review): inline-asm variants of __PKHBT/__PKHTB, intentionally compiled
   out in favor of the plain-C definitions that follow. Kept for reference. */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif
1850 
/**
  \brief   Pack halfwords, bottom-top: low halfword taken from A,
           high halfword taken from (B << SHIFT).
 */
#define __PKHBT(A,B,SHIFT)                            \
  ( (((uint32_t)(A))             & 0x0000FFFFUL) |    \
    ((((uint32_t)(B)) << (SHIFT)) & 0xFFFF0000UL) )
1853 
/**
  \brief   Pack halfwords, top-bottom: high halfword taken from A,
           low halfword taken from (B >> SHIFT).
 */
#define __PKHTB(A,B,SHIFT)                            \
  ( (((uint32_t)(A))             & 0xFFFF0000UL) |    \
    ((((uint32_t)(B)) >> (SHIFT)) & 0x0000FFFFUL) )
1856 
/**
  \brief   32x32 signed multiply, most-significant 32 bits accumulated with op3 (SMMLA instruction).
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \param [in]  op3  accumulator value
  \return      result of the instruction
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r"  (op3) );
  return(result);
}
1864 
1865 #endif /* (__ARM_FEATURE_DSP == 1) */
1866 
1869 #endif /* __CMSIS_ARMCLANG_H */
__get_CONTROL
__STATIC_INLINE uint32_t __get_CONTROL(void)
Enable IRQ Interrupts.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:159
min
int min(int a, int b)
__get_PRIMASK
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:267
__ASM
#define __ASM
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:38
__SMLALDX
__STATIC_FORCEINLINE uint64_t __SMLALDX(uint32_t x, uint32_t y, uint64_t sum)
Definition: arm_math.h:1134
__SMUADX
__STATIC_FORCEINLINE uint32_t __SMUADX(uint32_t x, uint32_t y)
Definition: arm_math.h:1043
__SMUSD
__STATIC_FORCEINLINE uint32_t __SMUSD(uint32_t x, uint32_t y)
Definition: arm_math.h:1161
sat
uint32_t sat
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:766
__QASX
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t x, uint32_t y)
Definition: arm_math.h:968
__SHASX
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t x, uint32_t y)
Definition: arm_math.h:984
__CMSIS_GCC_OUT_REG
#define __CMSIS_GCC_OUT_REG(r)
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:786
__SMLADX
__STATIC_FORCEINLINE uint32_t __SMLADX(uint32_t x, uint32_t y, uint32_t sum)
Definition: arm_math.h:1091
__SHADD16
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t x, uint32_t y)
Definition: arm_math.h:920
__SXTB16
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t x)
Definition: arm_math.h:1173
__QADD16
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t x, uint32_t y)
Definition: arm_math.h:903
__SMLALD
__STATIC_FORCEINLINE uint64_t __SMLALD(uint32_t x, uint32_t y, uint64_t sum)
Definition: arm_math.h:1119
__set_PRIMASK
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:279
__QSUB
__STATIC_FORCEINLINE int32_t __QSUB(int32_t x, int32_t y)
Definition: arm_math.h:1066
__SMMLA
__STATIC_FORCEINLINE int32_t __SMMLA(int32_t x, int32_t y, int32_t sum)
Definition: arm_math.h:1183
__attribute__
struct __attribute__((packed)) T_UINT32
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:71
__get_MSP
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:243
__get_xPSR
__STATIC_INLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:207
__set_CONTROL
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:171
T_UINT16_READ
__PACKED_STRUCT T_UINT16_READ
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:87
__QADD8
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t x, uint32_t y)
Definition: arm_math.h:867
T_UINT32_WRITE
__PACKED_STRUCT T_UINT32_WRITE
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:95
__SMUSDX
__STATIC_FORCEINLINE uint32_t __SMUSDX(uint32_t x, uint32_t y)
Definition: arm_math.h:1032
__set_MSP
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:255
T_UINT32_READ
__PACKED_STRUCT T_UINT32_READ
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:103
__PACKED_STRUCT
#define __PACKED_STRUCT
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:62
__CMSIS_GCC_USE_REG
#define __CMSIS_GCC_USE_REG(r)
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:787
__SHSUB16
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t x, uint32_t y)
Definition: arm_math.h:952
__SMLAD
__STATIC_FORCEINLINE uint32_t __SMLAD(uint32_t x, uint32_t y, uint32_t sum)
Definition: arm_math.h:1077
__SHSAX
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t x, uint32_t y)
Definition: arm_math.h:1016
__STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:47
T_UINT16_WRITE
__PACKED_STRUCT T_UINT16_WRITE
Definition: stm32f407/stm32f407g-disc1/Drivers/CMSIS/Include/cmsis_armclang.h:79
__QSUB8
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t x, uint32_t y)
Definition: arm_math.h:885
__QSAX
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t x, uint32_t y)
Definition: arm_math.h:1000
__QADD
__STATIC_FORCEINLINE int32_t __QADD(int32_t x, int32_t y)
Definition: arm_math.h:1055
__get_PSP
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:219
__get_IPSR
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:183
__SMUAD
__STATIC_FORCEINLINE uint32_t __SMUAD(uint32_t x, uint32_t y)
Definition: arm_math.h:1149
__ROR
#define __ROR
Rotate Right in unsigned value (32 bit)
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:522
__set_PSP
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:231
__SSAT
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armclang.h:1163
__get_APSR
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armcc.h:195
__USAT
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: imxrt1050/imxrt1050-evkb/CMSIS/cmsis_armclang.h:1188
__QSUB16
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t x, uint32_t y)
Definition: arm_math.h:936
__SMLSDX
__STATIC_FORCEINLINE uint32_t __SMLSDX(uint32_t x, uint32_t y, uint32_t sum)
Definition: arm_math.h:1105


picovoice_driver
Author(s):
autogenerated on Fri Apr 1 2022 02:13:47