imxrt1050/imxrt1050-evkb/CMSIS/cmsis_gcc.h
/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 ******************************************************************************/
/*
 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
 #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
 #define __ASM __asm
#endif
#ifndef __INLINE
 #define __INLINE inline
#endif
#ifndef __STATIC_INLINE
 #define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
 #define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
 #define __USED __attribute__((used))
#endif
#ifndef __WEAK
 #define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
 #define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32 /* deprecated */
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wpacked"
 #pragma GCC diagnostic ignored "-Wattributes"
 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
 #pragma GCC diagnostic pop
 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wpacked"
 #pragma GCC diagnostic ignored "-Wattributes"
 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
 #pragma GCC diagnostic pop
 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wpacked"
 #pragma GCC diagnostic ignored "-Wattributes"
 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
 #pragma GCC diagnostic pop
 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wpacked"
 #pragma GCC diagnostic ignored "-Wattributes"
 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
 #pragma GCC diagnostic pop
 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wpacked"
 #pragma GCC diagnostic ignored "-Wattributes"
 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
 #pragma GCC diagnostic pop
 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
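
/* Usage sketch (illustrative only, not part of CMSIS; the helper names are
   hypothetical): the __UNALIGNED_* macros read and write integers at byte
   granularity, so the address does not have to be naturally aligned, e.g.
   when parsing a packed byte stream:

     static uint32_t read_u32(const uint8_t *buf)    // buf may be unaligned
     {
       return __UNALIGNED_UINT32_READ(buf);
     }

     static void write_u16(uint8_t *buf, uint16_t v) // buf may be unaligned
     {
       __UNALIGNED_UINT16_WRITE(buf, v);
     }
*/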
#ifndef __ALIGNED
 #define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
 #define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START

/**
  \brief   Initializes data and bss sections
  \details Default implementation: copies the regions described by the copy
           table into RAM, zero-fills the regions described by the zero table,
           and then calls _start(). The tables are expected to be provided by
           the linker script.
 */
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
  extern void _start(void) __NO_RETURN;

  typedef struct {
    uint32_t const* src;
    uint32_t* dest;
    uint32_t  wlen;
  } __copy_table_t;

  typedef struct {
    uint32_t* dest;
    uint32_t  wlen;
  } __zero_table_t;

  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

  _start();
}

#define __PROGRAM_START __cmsis_start
#endif
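
/* Usage sketch (illustrative only): a typical GCC startup file invokes
   __PROGRAM_START from its reset handler once the stack is set up. The
   function names below are assumptions and must match the actual startup
   code and device support package in use.

     void Reset_Handler(void)
     {
       SystemInit();        // device-specific low-level init (assumed name)
       __PROGRAM_START();   // copy .data, zero .bss, then call _start()
     }
*/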

#ifndef __INITIAL_SP
#define __INITIAL_SP __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
#endif

/* ###########################  Core Function Access  ########################### */

/**
  \brief   Enable IRQ Interrupts
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
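
/* Usage sketch (illustrative only): a nesting-safe critical section saves
   PRIMASK, masks IRQs, and restores the previous state afterwards, so it is
   safe even when interrupts were already disabled on entry.

     uint32_t primask = __get_PRIMASK();  // remember current mask state
     __disable_irq();                     // PRIMASK = 1, IRQs masked
     // ... short non-reentrant work ...
     __set_PRIMASK(primask);              // re-enables only if IRQs were
                                          // enabled before
*/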


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */


__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U)) )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U)) )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}


/* ##########################  Core Instruction Access  ######################### */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
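
/* For example, on a Thumb-1-only core (such as ARMv6-M) an intrinsic written as
     __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
   constrains both operands to r0-r7 ("=l"/"l"), while on Thumb-2 capable cores
   the same source line lets GCC pick any core register ("=r"/"r"). */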

#define __NOP() __ASM volatile ("nop")

#define __WFI() __ASM volatile ("wfi")


#define __WFE() __ASM volatile ("wfe")


#define __SEV() __ASM volatile ("sev")


/**
  \brief   Instruction Synchronization Barrier
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}


__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
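
/* For example, __ROR(0x12345678U, 8U) rotates the value right by one byte and
   yields 0x78123456U; a rotate amount that is a multiple of 32 returns the
   operand unchanged. */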


#define __BKPT(value) __ASM volatile ("bkpt "#value)


__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value; /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s; /* shift when v's highest bits are zero */
#endif
  return result;
}


__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
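
/* For example, __CLZ(0x00010000U) returns 15 and __CLZ(0U) returns 32, so
   (31U - __CLZ(x)) gives the index of the highest set bit for non-zero x:

     static uint32_t ilog2(uint32_t x)   // hypothetical helper, requires x != 0
     {
       return 31U - __CLZ(x);
     }
*/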


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
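
/* Usage sketch (illustrative only, helper name hypothetical): an atomic
   read-modify-write retries the exclusive store until no other agent has
   touched the location between the load and the store.

     static uint32_t atomic_add(volatile uint32_t *p, uint32_t n)
     {
       uint32_t v;
       do {
         v = __LDREXW(p) + n;           // load-exclusive and compute
       } while (__STREXW(v, p) != 0U);  // 0 = store succeeded, else retry
       return v;
     }
*/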

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

#define __SSAT(ARG1,ARG2) \
__extension__ \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


#define __USAT(ARG1,ARG2) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
          (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
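
/* For example, __SSAT(200, 8U) clamps to the signed 8-bit range and returns
   127, __SSAT(-300, 8U) returns -128, and __USAT(-5, 8U) returns 0 because
   unsigned saturation clips negative inputs to zero. */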

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
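
/* Usage sketch (illustrative only, helper names hypothetical): a simple
   spinlock built on the acquire/release exclusives; 0 means unlocked,
   1 means locked.

     static void lock(volatile uint32_t *l)
     {
       while ((__LDAEX(l) != 0U) || (__STLEX(1U, l) != 0U))
       {
         // already locked, or exclusivity lost: spin and retry
       }
     }

     static void unlock(volatile uint32_t *l)
     {
       __STL(0U, l);   // store-release clears the lock
     }
*/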

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
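
/* Usage sketch (illustrative only): the 8-bit SIMD intrinsics operate on four
   byte lanes packed into one 32-bit word, with no carries across lanes, e.g.
   a per-byte halving average of two packed pixels:

     uint32_t avg4 = __UHADD8(pix_a, pix_b);  // (a[i] + b[i]) >> 1 per lane
*/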


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
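
/* Usage sketch (illustrative only, helper name hypothetical): __SMUAD and
   __SMLAD perform a dual 16-bit multiply with accumulate, useful for Q15 dot
   products on packed halfword pairs (n >= 1 words, two samples per word):

     static uint32_t dot_q15(const uint32_t *x, const uint32_t *y, uint32_t n)
     {
       uint32_t acc = __SMUAD(x[0], y[0]);   // first two products, summed
       for (uint32_t i = 1U; i < n; i++)
       {
         acc = __SMLAD(x[i], y[i], acc);     // accumulate two more products
       }
       return acc;
     }
*/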

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
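
/* For example, __PKHBT(0x00001111U, 0x00002222U, 16U) packs the bottom
   halfword of the first operand with the shifted bottom halfword of the
   second and yields 0x22221111U, while __PKHTB(0x11110000U, 0x22222222U, 16U)
   yields 0x11112222U. */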

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */