stm32h747/stm32h747i-disco/CM7/Drivers/CMSIS/Include/cmsis_gcc.h
/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif

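
/* Usage sketch (illustrative, not part of the CMSIS API): the __UNALIGNED_*
 * macros read/write halfwords and words at addresses that need not be
 * naturally aligned; the packed-struct indirection makes the compiler emit
 * accesses that are safe even where a plain pointer cast would not be.
 * parse_u16_le() is a hypothetical helper name. */
static inline uint16_t parse_u16_le(const uint8_t *buf)
{
  /* buf may be odd-aligned, e.g. a field in the middle of a packet */
  return __UNALIGNED_UINT16_READ(buf);
}
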
/* ###########################  Core Function Access  ########################### */

/** \brief  Enable IRQ Interrupts (clears PRIMASK; privileged use only) */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/** \brief  Disable IRQ Interrupts (sets PRIMASK; privileged use only) */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}

/** \brief  Get Control Register */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Control Register (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Control Register */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Control Register (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


/** \brief  Get IPSR Register */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/** \brief  Get APSR Register */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/** \brief  Get xPSR Register */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/** \brief  Get Process Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Process Stack Pointer */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/** \brief  Get Main Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Main Stack Pointer */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/** \brief  Set Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/** \brief  Get Priority Mask */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Priority Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif


/** \brief  Set Priority Mask */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Priority Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif

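
/* Usage sketch (illustrative, not part of the CMSIS API): a nesting-safe
 * critical section built from the PRIMASK accessors above. Saving and
 * restoring the previous mask, instead of unconditionally re-enabling,
 * keeps the pattern correct when sections nest. critical_increment() is a
 * hypothetical example name. */
static inline void critical_increment(volatile uint32_t *counter)
{
  uint32_t primask = __get_PRIMASK();  /* remember the current mask state   */
  __disable_irq();                     /* PRIMASK = 1: mask all interrupts  */
  *counter += 1U;                      /* protected read-modify-write       */
  __set_PRIMASK(primask);              /* re-enable only if enabled before  */
}
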

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/** \brief  Enable Fault IRQ (clears FAULTMASK, the F-bit in the CPSR) */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/** \brief  Disable Fault IRQ (sets FAULTMASK, the F-bit in the CPSR) */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/** \brief  Get Base Priority */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Base Priority (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Base Priority */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Base Priority (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/** \brief  Set Base Priority with condition: written only if it raises the masking level */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}

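
/* Usage sketch (illustrative, not part of the CMSIS API): BASEPRI masks only
 * interrupts whose priority value is numerically greater than or equal to the
 * written level (0 disables the masking), and __set_BASEPRI_MAX only ever
 * raises the masking level, which suits critical-section entry. The encoding
 * below assumes 4 implemented priority bits (as on STM32H7), so the level
 * sits in the upper bits of the 8-bit field. Hypothetical helper: */
static inline uint32_t mask_low_prio_irqs(void)
{
  uint32_t previous = __get_BASEPRI();
  __set_BASEPRI_MAX(5U << 4);   /* block priorities 5..15, keep 0..4 live */
  return previous;              /* caller restores it via __set_BASEPRI() */
}
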

/** \brief  Get Fault Mask */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Fault Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Fault Mask */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Fault Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/** \brief  Get Process Stack Pointer Limit */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/** \brief  Set Process Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/** \brief  Get Main Stack Pointer Limit */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Get Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/** \brief  Set Main Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/** \brief  Set Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

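
/* Usage sketch (illustrative, not part of the CMSIS API): on Armv8-M an RTOS
 * can program PSPLIM to the lowest valid address of a thread stack, so that a
 * push below the limit raises a stack-overflow fault instead of silently
 * corrupting adjacent memory. prepare_thread_stack() is a hypothetical name;
 * it assumes the stack occupies `words` words growing down from the top. */
static inline void prepare_thread_stack(uint32_t *stack_lowest, uint32_t words)
{
  __set_PSPLIM((uint32_t)stack_lowest);           /* lowest legal address */
  __set_PSP((uint32_t)(stack_lowest + words));    /* stacks grow downward */
}
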
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


/** \brief  Get FPSCR (Floating-Point Status/Control register) */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


/** \brief  Set FPSCR (Floating-Point Status/Control register) */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}

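
/* Usage sketch (illustrative, not part of the CMSIS API): the low bits of
 * FPSCR accumulate the sticky IEEE exception flags (bit 0 IOC invalid,
 * bit 1 DZC divide-by-zero, bit 2 OFC overflow, bit 3 UFC underflow,
 * bit 4 IXC inexact). A hypothetical helper that tests and clears the
 * divide-by-zero flag: */
static inline uint32_t fp_div_by_zero_seen(void)
{
  uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & ~(1UL << 1));   /* clear sticky DZC */
  return (fpscr >> 1) & 1U;
}
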

/* ##########################  Core Instruction Access  ######################### */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation */
#define __NOP()                             __ASM volatile ("nop")

/** \brief  Wait For Interrupt */
#define __WFI()                             __ASM volatile ("wfi")


/** \brief  Wait For Event */
#define __WFE()                             __ASM volatile ("wfe")


/** \brief  Send Event */
#define __SEV()                             __ASM volatile ("sev")


/** \brief  Instruction Synchronization Barrier: flushes the pipeline */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/** \brief  Data Synchronization Barrier: completes all outstanding memory accesses */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/** \brief  Data Memory Barrier: orders memory accesses before and after it */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}

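
/* Usage sketch (illustrative, not part of the CMSIS API): the canonical
 * barrier pairing after a write that changes how later instructions execute
 * (for example updating SCB->VTOR or reprogramming an MPU region): DSB waits
 * for the write to complete, ISB then flushes the pipeline so subsequent
 * instructions are fetched under the new state. */
static inline void commit_system_write(void)
{
  __DSB();   /* ensure the preceding memory write has taken effect          */
  __ISB();   /* refetch: following instructions observe the updated context */
}
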
/** \brief  Reverse byte order (32 bit): 0x12345678 -> 0x78563412 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


/** \brief  Reverse byte order within each halfword: 0x12345678 -> 0x34127856 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


/** \brief  Reverse byte order (16 bit), signed result */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


/** \brief  Rotate Right in unsigned value (32 bit) */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}

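
/* Usage sketch (illustrative, not part of the CMSIS API): Cortex-M devices
 * normally run little-endian, so converting a 32-bit value to or from network
 * (big-endian) byte order is a single __REV. Hypothetical helper: */
static inline uint32_t host_to_be32(uint32_t host_value)
{
  return __REV(host_value);   /* byte swap: 0xAABBCCDD -> 0xDDCCBBAA */
}
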

/** \brief  Breakpoint */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)


/** \brief  Reverse bit order of value */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}


/** \brief  Count leading zeros (note: __builtin_clz(0) is undefined, so avoid __CLZ(0) here) */
#define __CLZ             (uint8_t)__builtin_clz

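
/* Usage sketch (illustrative, not part of the CMSIS API): __RBIT mirrors all
 * 32 bits in one instruction, which yields the bit-reversed addressing used
 * by radix-2 FFTs; shifting right by (32 - log2(N)) confines the reversal to
 * the index width. fft_bitrev_index() is a hypothetical name. */
static inline uint32_t fft_bitrev_index(uint32_t i, uint32_t log2n)
{
  return __RBIT(i) >> (32U - log2n);   /* e.g. i=1, log2n=3 -> 4 */
}
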

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/** \brief  LDR Exclusive (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

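
/* Usage sketch (illustrative, not part of the CMSIS API): the canonical
 * exclusive-access pattern is a load/modify/store-conditional loop. __STREXW
 * returns 0 on success and 1 if the exclusive monitor was lost in between
 * (interrupt, context switch, another bus master), in which case the
 * read-modify-write is simply retried. */
static inline uint32_t atomic_increment(volatile uint32_t *addr)
{
  uint32_t val;
  do {
    val = __LDREXW(addr) + 1U;           /* load with exclusive monitor set */
  } while (__STREXW(val, addr) != 0U);   /* retry if the monitor was lost   */
  return val;
}
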
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/** \brief  Signed Saturate to ARG2 bits (ARG2 must be a compile-time constant) */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate to ARG2 bits (ARG2 must be a compile-time constant) */
#define __USAT(ARG1,ARG2) \
 __extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/** \brief  Rotate Right with Extend (32 bit) */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  LDRT Unprivileged (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/** \brief  STRT Unprivileged (8 bit) */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (16 bit) */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (32 bit) */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
          (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

/** \brief  Signed Saturate (software fallback) */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/** \brief  Unsigned Saturate (software fallback) */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

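
/* Usage sketch (illustrative, not part of the CMSIS API): saturation replaces
 * wrap-around on overflow, e.g. when narrowing a 32-bit mixing accumulator
 * back to a 16-bit audio sample. Note that in the Armv7-M macro form above
 * the bit count must be a compile-time constant. */
static inline int16_t clamp_to_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);   /* clamp to [-32768, 32767] */
}
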

#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/** \brief  Load-Acquire (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/** \brief  Load-Acquire (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/** \brief  Load-Acquire (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/** \brief  Store-Release (8 bit) */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Store-Release (16 bit) */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Store-Release (32 bit) */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Load-Acquire Exclusive (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/** \brief  Load-Acquire Exclusive (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/** \brief  Load-Acquire Exclusive (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/** \brief  Store-Release Exclusive (8 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  Store-Release Exclusive (16 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  Store-Release Exclusive (32 bit): returns 0 on success, 1 otherwise */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}

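
/* Usage sketch (illustrative, not part of the CMSIS API): with the Armv8-M
 * acquire/release exclusives a spinlock needs no explicit DMBs: __LDAEX gives
 * acquire semantics when taking the lock, __STL gives release semantics when
 * dropping it. spin_lock()/spin_unlock() are hypothetical names; 0 = free,
 * 1 = held. */
static inline void spin_lock(volatile uint32_t *lock)
{
  while ((__LDAEX(lock) != 0U) || (__STLEX(1U, lock) != 0U))
  {
    /* lock was held or the exclusive monitor was lost: retry */
  }
}

static inline void spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);   /* store-release: critical-section writes become visible first */
}
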
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/* SIMD intrinsics, available when the DSP extension is implemented */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

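
/* Usage sketch (illustrative, not part of the CMSIS API): the dual 16-bit
 * MACs consume two Q15 samples per 32-bit register, so a dot product walks
 * packed arrays one word at a time. dot_q15_packed() is a hypothetical name;
 * it assumes an even sample count stored as word-aligned pairs. */
static inline uint32_t dot_q15_packed(const uint32_t *a, const uint32_t *b, uint32_t n_pairs)
{
  uint32_t acc = 0U;
  while (n_pairs-- != 0U)
  {
    acc = __SMLAD(*a++, *b++, acc);   /* acc += lo16(a)*lo16(b) + hi16(a)*hi16(b) */
  }
  return acc;
}
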
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */