cmsis_gcc.h
/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 ******************************************************************************/
/*
 * Copyright (c) 2009-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
  #define __ASM __asm
#endif
#ifndef __INLINE
  #define __INLINE inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32 /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
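
/* Usage sketch (illustrative, not part of the CMSIS API): the __UNALIGNED_*
   macros let GCC emit a correct unaligned access where a plain uint32_t
   dereference of a misaligned pointer would be undefined behaviour. The
   packet layout below is an assumption made up for this example. */
__STATIC_INLINE uint32_t __cmsis_gcc_example_read_len(const uint8_t *pkt)
{
  /* 32-bit length field at byte offset 1, i.e. deliberately misaligned */
  return __UNALIGNED_UINT32_READ(&pkt[1]);
}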
#ifndef __ALIGNED
  #define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
  #define __NO_INIT __attribute__ ((section (".bss.noinit")))
#endif
#ifndef __ALIAS
  #define __ALIAS(x) __attribute__ ((alias(x)))
#endif

/* ######################### Startup and Lowlevel Init ######################## */

#ifndef __PROGRAM_START

/**
  \brief   Initializes data and bss sections
  \details This default implementation initializes the copy and zero tables
           defined by the linker script.
 */
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
  extern void _start(void) __NO_RETURN;

  typedef struct __copy_table {
    uint32_t const* src;
    uint32_t* dest;
    uint32_t wlen;
  } __copy_table_t;

  typedef struct __zero_table {
    uint32_t* dest;
    uint32_t wlen;
  } __zero_table_t;

  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

  _start();
}

#define __PROGRAM_START __cmsis_start
#endif
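
/* Usage sketch (illustrative, not part of this header): a CMSIS-style
   startup file typically hands control to __PROGRAM_START, which runs the
   copy/zero tables above and then calls _start(), never returning. The
   names Reset_Handler and SystemInit below follow the usual CMSIS startup
   convention and are assumptions, not definitions from this file.

__NO_RETURN void Reset_Handler(void)
{
  SystemInit();        // device init before the C runtime is set up
  __PROGRAM_START();   // copy .data, zero .bss, then call _start()
}
*/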

#ifndef __INITIAL_SP
#define __INITIAL_SP __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
#endif

#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
#ifndef __STACK_SEAL
#define __STACK_SEAL __StackSeal
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE 8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
#endif


__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif


/* ########################## Core Instruction Access ######################### */
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

#define __NOP() __ASM volatile ("nop")

#define __WFI() __ASM volatile ("wfi":::"memory")


#define __WFE() __ASM volatile ("wfe":::"memory")


#define __SEV() __ASM volatile ("sev")


/**
  \brief   Instruction Synchronization Barrier
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}


__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
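
/* Worked examples (illustrative): for value = 0x12345678,
     __REV(value)   == 0x78563412   (full 32-bit byte reverse)
     __REV16(value) == 0x34127856   (bytes swapped within each halfword)
   and __REVSH((int16_t)0x0080) == (int16_t)0x8000, i.e. byte swap followed
   by sign extension. Typical use is converting between little-endian and
   big-endian (network) byte order. */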


__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
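
/* Worked example (illustrative): __ROR(0x12345678, 8) == 0x78123456. The
   "op2 %= 32U" and zero check above matter because a C shift by 32 bits
   is undefined behaviour, while the hardware rotate is defined for any
   rotate amount. */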


#define __BKPT(value) __ASM volatile ("bkpt "#value)


__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value; /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s; /* shift when v's highest bits are zero */
#endif
  return result;
}


__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
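
/* Worked examples (illustrative):
     __RBIT(0x00000001) == 0x80000000   (mirror all 32 bits)
     __CLZ(0x00010000)  == 15           (15 leading zero bits)
     __CLZ(0U)          == 32           (the special case handled above)
   A common idiom: for non-zero x, 31U - __CLZ(x) is the index of the
   highest set bit. */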


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
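
/* Usage sketch (illustrative, not part of the CMSIS API): an atomic
   increment built from the exclusive-access intrinsics above. STREX
   reports failure (non-zero) if the reservation was lost between the
   exclusive load and store, in which case the read-modify-write retries. */
__STATIC_INLINE uint32_t __cmsis_gcc_example_atomic_inc(volatile uint32_t *addr)
{
  uint32_t newval;
  do {
    newval = __LDREXW(addr) + 1U;          /* exclusive read of current value */
  } while (__STREXW(newval, addr) != 0U);  /* exclusive write, 0 on success */
  __DMB();                                 /* order the update against later accesses */
  return newval;
}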

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

#define __SSAT(ARG1, ARG2) \
__extension__ \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


#define __USAT(ARG1, ARG2) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
   */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
          (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
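
/* Worked examples (illustrative): saturation clamps instead of wrapping,
   e.g. for 8-bit saturation:
     __SSAT(1000, 8U) == 127     __SSAT(-1000, 8U) == -128
     __USAT(300, 8U)  == 255     __USAT(-5, 8U)    == 0
   This software fallback matches the ssat/usat instructions used by the
   #if branch above on the architectures that have them. */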

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}
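
/* Usage sketch (illustrative, not part of the CMSIS API): a minimal
   spinlock built from the load-acquire/store-release exclusives above.
   LDAEX gives acquire ordering on lock and STL gives release ordering on
   unlock, so no separate barriers are needed around the critical section. */
__STATIC_INLINE void __cmsis_gcc_example_spin_lock(volatile uint32_t *lock)
{
  /* retry while the lock is held (non-zero) or the exclusive store fails */
  while ((__LDAEX(lock) != 0U) || (__STLEX(1U, lock) != 0U))
  {
  }
}

__STATIC_INLINE void __cmsis_gcc_example_spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);  /* store-release clears the lock */
}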

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ########################### Core Function Access ########################### */
/**
  \brief   Enable IRQ Interrupts
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}


/**
  \brief   Get Control Register
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  __ISB();
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ISB();
}
#endif

/**
  \brief   Get IPSR Register
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get APSR Register
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get Process Stack Pointer
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif

/**
  \brief   Get Main Stack Pointer
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif

/**
  \brief   Get Priority Mask
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
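
/* Usage sketch (illustrative, not part of the CMSIS API): a nestable
   critical section using PRIMASK. Saving the previous mask and restoring
   it afterwards re-enables interrupts only if they were enabled on entry. */
__STATIC_INLINE uint32_t __cmsis_gcc_example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();  /* remember the current mask state */
  __disable_irq();                     /* cpsid i */
  return primask;
}

__STATIC_INLINE void __cmsis_gcc_example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);              /* restore the saved state */
}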


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
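
/* Usage note (illustrative): a non-zero BASEPRI masks every exception whose
   priority value is numerically greater than or equal to BASEPRI, while
   higher-priority (numerically lower) interrupts stay live. The value must
   sit in the implemented priority bits; assuming a device with
   __NVIC_PRIO_BITS == 3 (an assumption made for this example):
     __set_BASEPRI(4U << (8U - 3U));    mask priority levels 4..7
     __set_BASEPRI(0U);                 unmask
   __set_BASEPRI_MAX writes via basepri_max, which only ever tightens
   (raises) the masking level, never loosens it. */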


__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))

__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */

/**
  \brief   Get FPSCR
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
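
/* Usage sketch (illustrative, not part of the CMSIS API): polling and
   clearing the sticky FPSCR overflow flag. Architectural cumulative
   exception bits: IOC=0, DZC=1, OFC=2, UFC=3, IXC=4, IDC=7. */
__STATIC_INLINE uint32_t __cmsis_gcc_example_fp_overflowed(void)
{
  uint32_t fpscr = __get_FPSCR();
  if ((fpscr & (1UL << 2)) != 0U)      /* OFC: an overflow has occurred */
  {
    __set_FPSCR(fpscr & ~(1UL << 2));  /* clear the sticky flag */
    return 1U;
  }
  return 0U;
}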


/* ################### Compiler specific Intrinsics ########################### */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1, ARG2) \
__extension__ \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

#define __USAT16(ARG1, ARG2) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) );
  } else {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1) , "r" (op2) , "i" (rotate));
  } else {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}


__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
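
/* Usage sketch (illustrative, not part of the CMSIS API): a q15 dot product
   using the packed MAC intrinsics above. Each 32-bit word carries two
   int16_t samples; __SMLAD multiplies both halfword pairs and accumulates.
   The caller must guarantee n is even. */
__STATIC_INLINE int32_t __cmsis_gcc_example_dot_q15(const int16_t *a, const int16_t *b, uint32_t n)
{
  uint32_t acc = 0U;
  for (uint32_t i = 0U; i < n; i += 2U)
  {
    uint32_t wa = __UNALIGNED_UINT32_READ(&a[i]);  /* two samples per load */
    uint32_t wb = __UNALIGNED_UINT32_READ(&b[i]);
    acc = __SMLAD(wa, wb, acc);
  }
  return (int32_t)acc;
}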

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })


__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */