CC27xxDriverLibrary
cmsis_tiarmclang.h
Go to the documentation of this file.
1 /**************************************************************************/
7 /*
8  * Copyright (c) 2023 Arm Limited. All rights reserved.
9  *
10  * SPDX-License-Identifier: Apache-2.0
11  *
12  * Licensed under the Apache License, Version 2.0 (the License); you may
13  * not use this file except in compliance with the License.
14  * You may obtain a copy of the License at
15  *
16  * www.apache.org/licenses/LICENSE-2.0
17  *
18  * Unless required by applicable law or agreed to in writing, software
19  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21  * See the License for the specific language governing permissions and
22  * limitations under the License.
23  */
24 
25 /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26 
27 #ifndef __CMSIS_TIARMCLANG_H
28 #define __CMSIS_TIARMCLANG_H
29 
30 #pragma clang system_header /* treat file as system include file */
31 
32 /* CMSIS compiler specific defines */
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  /* Packed carrier type for unaligned 16-bit writes. NOTE(review): this
     declaration was missing; the macro below referenced an undeclared
     struct. Restored per the standard CMSIS definition. */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  /* Packed carrier type for unaligned 32-bit writes. NOTE(review): this
     declaration was missing; the macro below referenced an undeclared
     struct. Restored per the standard CMSIS definition. */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
#ifndef   __NO_INIT
  #define __NO_INIT                              __attribute__ ((section (".bss.noinit")))
#endif
#ifndef   __ALIAS
  #define __ALIAS(x)                             __attribute__ ((alias(x)))
#endif
118 
119 
120 /* ######################### Startup and Lowlevel Init ######################## */
121 
/* Program entry point: TI runtime-support library init routine. */
#ifndef __PROGRAM_START
#define __PROGRAM_START           _c_int00
#endif

/* Initial stack pointer: linker-defined end of the stack region. */
#ifndef __INITIAL_SP
#define __INITIAL_SP              __STACK_END
#endif

/* Stack limit symbol used for stack-limit register setup. */
#ifndef __STACK_LIMIT
#define __STACK_LIMIT             __STACK_SIZE
#endif

/* Name of the interrupt vector table symbol. */
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

/* Keep the vector table at link time and place it in .intvecs. */
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".intvecs")))
#endif
141 
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
/* Secure-state (CMSE) build only: TrustZone stack sealing support. */

/* Linker-generated base symbol of the stack-seal region. */
#ifndef __STACK_SEAL
#define __STACK_SEAL              Image$$STACKSEAL$$ZI$$Base
#endif

/* Number of bytes reserved at the stack top for the seal value. */
#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE      8U
#endif

/* 64-bit magic value written as the stack seal. */
#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
#endif

/**
  \brief   Write the 64-bit stack-seal value at the given stack top.
  \param [in] stackTop  Pointer to the top of the secure stack.
  NOTE(review): performs a 64-bit store through a uint32_t* cast —
  assumes stackTop is 8-byte aligned (linker guarantees this for the
  seal region); confirm if reused elsewhere.
*/
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
160 
161 
162 /* ########################## Core Instruction Access ######################### */
168 /* Define macros for porting to both thumb1 and thumb2.
169  * For thumb1, use low register (r0-r7), specified by constraint "l"
170  * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
/* Thumb-1: many instructions accept only low registers r0-r7 ("l"). */
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
/* Thumb-2 / ARM: any general-purpose register may be used ("r"). */
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
180 
/* No Operation. */
#define __NOP          __builtin_arm_nop

/* Wait For Interrupt: suspends execution until an interrupt occurs. */
#define __WFI          __builtin_arm_wfi


/* Wait For Event: suspends execution until an event occurs. */
#define __WFE          __builtin_arm_wfe


/* Send Event: signals an event hint to all cores. */
#define __SEV          __builtin_arm_sev


/* Instruction Synchronization Barrier: flushes the pipeline so that all
   following instructions are re-fetched. */
#define __ISB()        __builtin_arm_isb(0xF)

/* Data Synchronization Barrier: completes all explicit memory accesses
   before continuing. */
#define __DSB()        __builtin_arm_dsb(0xF)


/* Data Memory Barrier: orders memory accesses before/after the barrier. */
#define __DMB()        __builtin_arm_dmb(0xF)


/* Reverse the byte order of a 32-bit value. */
#define __REV(value)   __builtin_bswap32(value)


/* Reverse the byte order within each halfword of a 32-bit value. */
#define __REV16(value) __ROR(__REV(value), 16)


/* Reverse the byte order of a signed 16-bit value (result sign-extended). */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
259 
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \param [in] op1  Value to rotate
  \param [in] op2  Number of bits to rotate (taken modulo 32)
  \return          Rotated value
*/
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  const uint32_t n = op2 & 0x1FU;                /* rotation count modulo word size */
  /* Masking the left-shift count keeps it in [0,31] when n == 0,
     avoiding the undefined shift-by-32. */
  return (op1 >> n) | (op1 << ((32U - n) & 0x1FU));
}
276 
277 
/* Breakpoint: causes the processor to enter Debug state; the immediate
   value is recorded for the debugger. */
#define __BKPT(value)  __ASM volatile ("bkpt "#value)


/* Reverse the bit order of a 32-bit value. */
#define __RBIT         __builtin_arm_rbit
295 
/**
  \brief   Count leading zeros
  \param [in] value  Value to count the leading zeros of
  \return            Number of leading zeros (32 when value is 0)
*/
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* __builtin_clz(0) is formally undefined behaviour even though the ARM
     CLZ instruction defines it as 32, so the zero case is handled
     explicitly. On ARM targets modern compilers fold this test away,
     leaving a single CLZ instruction. */
  return (value == 0U) ? 32U : (uint8_t)__builtin_clz(value);
}
319 
320 
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

/* Exclusive monitor (LDREX/STREX) access: v7-M and v8-M profiles. */

/* LDR Exclusive (8 bit): load byte and mark the address for exclusive access. */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/* LDR Exclusive (16 bit). */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/* LDR Exclusive (32 bit). */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/* STR Exclusive (8 bit): returns 0 on success, 1 if the store failed. */
#define __STREXB        (uint32_t)__builtin_arm_strex


/* STR Exclusive (16 bit): returns 0 on success, 1 if the store failed. */
#define __STREXH        (uint32_t)__builtin_arm_strex


/* STR Exclusive (32 bit): returns 0 on success, 1 if the store failed. */
#define __STREXW        (uint32_t)__builtin_arm_strex


/* Clear Exclusive: removes the exclusive-access reservation. */
#define __CLREX         __builtin_arm_clrex

#endif /* v7-M / v8-M: exclusive access support */
398 
399 
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

/* Mainline profiles: saturation maps to the SSAT/USAT instructions. */

/* Signed Saturate a value to the given bit position (1..32). */
#define __SSAT          __builtin_arm_ssat


/* Unsigned Saturate a signed value to the given bit position (0..31). */
#define __USAT          __builtin_arm_usat
424 
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Shifts each bit right by one; the carry flag is shifted in at
           the left end.
  \param [in] value  Value to rotate
  \return            Rotated value
*/
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRBT load of an 8-bit value.
  \param [in] ptr  Pointer to data
  \return          Value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRHT load of a 16-bit value.
  \param [in] ptr  Pointer to data
  \return          Value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT load of a 32-bit value.
  \param [in] ptr  Pointer to data
  \return          Value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT store of an 8-bit value.
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT store of a 16-bit value.
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT store of a 32-bit value.
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
520 
521 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
522  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
523  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
524  (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */
525 
/**
  \brief   Signed Saturate (software fallback for profiles without SSAT)
  \param [in] val  Value to be saturated
  \param [in] sat  Bit position to saturate to (1..32); out-of-range sat
                   leaves the value unchanged
  \return          Saturated value
*/
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  int32_t result = val;
  if ((sat >= 1U) && (sat <= 32U))
  {
    /* Two's-complement range for a sat-bit signed field. */
    const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t lower = -1 - upper;
    if (result > upper)
    {
      result = upper;
    }
    if (result < lower)
    {
      result = lower;
    }
  }
  return result;
}
550 
/**
  \brief   Unsigned Saturate (software fallback for profiles without USAT)
  \param [in] val  Value to be saturated
  \param [in] sat  Bit position to saturate to (0..31); sat > 31 yields a
                   plain unsigned cast of val
  \return          Saturated value
*/
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  uint32_t result = (uint32_t)val;
  if (sat <= 31U)
  {
    const uint32_t limit = (1U << sat) - 1U;  /* largest sat-bit unsigned value */
    if (val < 0)
    {
      result = 0U;
    }
    else if ((uint32_t)val > limit)
    {
      result = limit;
    }
  }
  return result;
}
574 
575 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
576  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
577  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
578  (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) */
579 
580 
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

/* v8-M only: load-acquire / store-release instructions. */

/**
  \brief   Load-Acquire (8 bit)
  \param [in] ptr  Pointer to data
  \return          Value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \param [in] ptr  Pointer to data
  \return          Value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \param [in] ptr  Pointer to data
  \return          Value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (16 bit)
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (32 bit)
  \param [in] value  Value to store
  \param [in] ptr    Pointer to location
*/
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/* Load-Acquire Exclusive (8 bit). */
#define __LDAEXB        (uint8_t)__builtin_arm_ldaex


/* Load-Acquire Exclusive (16 bit). */
#define __LDAEXH        (uint16_t)__builtin_arm_ldaex


/* Load-Acquire Exclusive (32 bit). */
#define __LDAEX         (uint32_t)__builtin_arm_ldaex


/* Store-Release Exclusive (8 bit): returns 0 on success, 1 on failure. */
#define __STLEXB        (uint32_t)__builtin_arm_stlex


/* Store-Release Exclusive (16 bit): returns 0 on success, 1 on failure. */
#define __STLEXH        (uint32_t)__builtin_arm_stlex


/* Store-Release Exclusive (32 bit): returns 0 on success, 1 on failure. */
#define __STLEX         (uint32_t)__builtin_arm_stlex

#endif /* v8-M: load-acquire / store-release support */
728  /* end of group CMSIS_Core_InstructionInterface */
730 
731 
732 /* ########################### Core Function Access ########################### */
743 #ifndef __ARM_COMPAT_H
745 {
746  __ASM volatile ("cpsie i" : : : "memory");
747 }
748 #endif
749 
750 
756 #ifndef __ARM_COMPAT_H
758 {
759  __ASM volatile ("cpsid i" : : : "memory");
760 }
761 #endif
762 
763 
/**
  \brief   Get Control Register
  \return  CONTROL register value
*/
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Control Register (secure state only)
  \return  non-secure CONTROL register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \param [in] control  CONTROL register value to set
  The ISB ensures the new control state takes effect before continuing.
*/
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  __ISB();
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Control Register (secure state only)
  \param [in] control  non-secure CONTROL register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ISB();
}
#endif


/**
  \brief   Get IPSR Register (current exception number)
  \return  IPSR register value
*/
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get APSR Register (application status flags)
  \return  APSR register value
*/
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register (combined program status)
  \return  xPSR register value
*/
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get Process Stack Pointer
  \return  PSP register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Process Stack Pointer (secure state only)
  \return  non-secure PSP register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \param [in] topOfProcStack  PSP register value to set
*/
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Process Stack Pointer (secure state only)
  \param [in] topOfProcStack  non-secure PSP register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/**
  \brief   Get Main Stack Pointer
  \return  MSP register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Main Stack Pointer (secure state only)
  \return  non-secure MSP register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \param [in] topOfMainStack  MSP register value to set
*/
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Main Stack Pointer (secure state only)
  \param [in] topOfMainStack  non-secure MSP register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Stack Pointer (secure state only)
  \return  non-secure SP register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/**
  \brief   Set non-secure Stack Pointer (secure state only)
  \param [in] topOfStack  non-secure SP register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/**
  \brief   Get Priority Mask
  \return  PRIMASK register value
*/
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Priority Mask (secure state only)
  \return  non-secure PRIMASK register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \param [in] priMask  PRIMASK register value to set
*/
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Priority Mask (secure state only)
  \param [in] priMask  non-secure PRIMASK register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
1049 
1050 
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

/* Mainline profiles only: BASEPRI / FAULTMASK and fault-IRQ control. */

/**
  \brief   Enable FIQ (clears FAULTMASK via CPSIE f)
*/
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ (sets FAULTMASK via CPSID f)
*/
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \return  BASEPRI register value
*/
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Base Priority (secure state only)
  \return  non-secure BASEPRI register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \param [in] basePri  BASEPRI register value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Base Priority (secure state only)
  \param [in] basePri  non-secure BASEPRI register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Writes BASEPRI only if raising the priority (BASEPRI_MAX
           register alias).
  \param [in] basePri  BASEPRI register value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}


/**
  \brief   Get Fault Mask
  \return  FAULTMASK register value
*/
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Fault Mask (secure state only)
  \return  non-secure FAULTMASK register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \param [in] faultMask  FAULTMASK register value to set
*/
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Fault Mask (secure state only)
  \param [in] faultMask  non-secure FAULTMASK register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* mainline profiles: BASEPRI / FAULTMASK support */
1200 
1201 
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) || \
     (defined (__ARM_ARCH_8_1M_MAIN__) && (__ARM_ARCH_8_1M_MAIN__ == 1)) )

/* v8-M only: stack-limit registers (PSPLIM / MSPLIM). */

/**
  \brief   Get Process Stack Pointer Limit
  \details On v8-M Baseline without the Main Extension (and outside
           secure CMSE builds) the register is RAZ/WI, so 0 is returned.
  \return  PSPLIM register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Process Stack Pointer Limit (secure state only)
  \return  non-secure PSPLIM register value (0 if RAZ/WI)
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  \details Write is silently ignored (RAZ/WI) where the register is not
           implemented; the argument is consumed to avoid warnings.
  \param [in] ProcStackPtrLimit  PSPLIM register value to set
*/
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Process Stack Pointer Limit (secure state only)
  \param [in] ProcStackPtrLimit  non-secure PSPLIM register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  \details Returns 0 where the register is RAZ/WI (see __get_PSPLIM).
  \return  MSPLIM register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get non-secure Main Stack Pointer Limit (secure state only)
  \return  non-secure MSPLIM register value (0 if RAZ/WI)
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  \param [in] MainStackPtrLimit  MSPLIM register value to set
*/
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set non-secure Main Stack Pointer Limit (secure state only)
  \param [in] MainStackPtrLimit  non-secure MSPLIM register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) ) )
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* v8-M: PSPLIM / MSPLIM support */
1391 
/* Get FPSCR (Floating-Point Status/Control register); yields 0 when no
   FPU is present or used. */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U)) )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif

/* Set FPSCR; a no-op (argument consumed) when no FPU is present or used. */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U)) )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(fpscr) ((void)(fpscr))
#endif
1415 
1416 
1420 /* ################### Compiler specific Intrinsics ########################### */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

/* DSP extension: SIMD add/subtract, saturating, packing and multiply
   intrinsics mapped onto compiler builtins.
   Naming: S* signed, Q* saturating, SH* signed halving,
           U* unsigned, UQ* unsigned saturating, UH* unsigned halving. */

/* Quad 8-bit add/subtract variants. */
#define __SADD8   __builtin_arm_sadd8
#define __QADD8   __builtin_arm_qadd8
#define __SHADD8  __builtin_arm_shadd8
#define __UADD8   __builtin_arm_uadd8
#define __UQADD8  __builtin_arm_uqadd8
#define __UHADD8  __builtin_arm_uhadd8
#define __SSUB8   __builtin_arm_ssub8
#define __QSUB8   __builtin_arm_qsub8
#define __SHSUB8  __builtin_arm_shsub8
#define __USUB8   __builtin_arm_usub8
#define __UQSUB8  __builtin_arm_uqsub8
#define __UHSUB8  __builtin_arm_uhsub8
/* Dual 16-bit add/subtract variants. */
#define __SADD16  __builtin_arm_sadd16
#define __QADD16  __builtin_arm_qadd16
#define __SHADD16 __builtin_arm_shadd16
#define __UADD16  __builtin_arm_uadd16
#define __UQADD16 __builtin_arm_uqadd16
#define __UHADD16 __builtin_arm_uhadd16
#define __SSUB16  __builtin_arm_ssub16
#define __QSUB16  __builtin_arm_qsub16
#define __SHSUB16 __builtin_arm_shsub16
#define __USUB16  __builtin_arm_usub16
#define __UQSUB16 __builtin_arm_uqsub16
#define __UHSUB16 __builtin_arm_uhsub16
/* Dual 16-bit add-and-subtract with exchange (ASX / SAX). */
#define __SASX    __builtin_arm_sasx
#define __QASX    __builtin_arm_qasx
#define __SHASX   __builtin_arm_shasx
#define __UASX    __builtin_arm_uasx
#define __UQASX   __builtin_arm_uqasx
#define __UHASX   __builtin_arm_uhasx
#define __SSAX    __builtin_arm_ssax
#define __QSAX    __builtin_arm_qsax
#define __SHSAX   __builtin_arm_shsax
#define __USAX    __builtin_arm_usax
#define __UQSAX   __builtin_arm_uqsax
#define __UHSAX   __builtin_arm_uhsax
/* Sum of absolute byte differences (with optional accumulate). */
#define __USAD8   __builtin_arm_usad8
#define __USADA8  __builtin_arm_usada8
/* Dual 16-bit saturate. */
#define __SSAT16  __builtin_arm_ssat16
#define __USAT16  __builtin_arm_usat16
/* Dual byte extract / extend-and-add. */
#define __UXTB16  __builtin_arm_uxtb16
#define __UXTAB16 __builtin_arm_uxtab16
#define __SXTB16  __builtin_arm_sxtb16
#define __SXTAB16 __builtin_arm_sxtab16
/* Dual 16-bit multiply with add/subtract (optionally 64-bit accumulate). */
#define __SMUAD   __builtin_arm_smuad
#define __SMUADX  __builtin_arm_smuadx
#define __SMLAD   __builtin_arm_smlad
#define __SMLADX  __builtin_arm_smladx
#define __SMLALD  __builtin_arm_smlald
#define __SMLALDX __builtin_arm_smlaldx
#define __SMUSD   __builtin_arm_smusd
#define __SMUSDX  __builtin_arm_smusdx
#define __SMLSD   __builtin_arm_smlsd
#define __SMLSDX  __builtin_arm_smlsdx
#define __SMLSLD  __builtin_arm_smlsld
#define __SMLSLDX __builtin_arm_smlsldx
/* Select bytes based on GE flags; 32-bit saturating add/subtract. */
#define __SEL     __builtin_arm_sel
#define __QADD    __builtin_arm_qadd
#define __QSUB    __builtin_arm_qsub

/* Pack two halfwords: bottom of ARG1 with (ARG2 << ARG3) in the top. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* Pack two halfwords: top of ARG1 with (ARG2 >> ARG3) in the bottom. */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

/* SXTB16 / SXTAB16 applied to a rotated operand. */
#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))

/**
  \brief   Signed Most-significant-word Multiply Accumulate
  \details result = op3 + (top 32 bits of op1 * op2).
  \param [in] op1  First multiplicand
  \param [in] op2  Second multiplicand
  \param [in] op3  Accumulator value
  \return          Accumulated high word of the 64-bit product
*/
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
1507 
1510 #endif /* __CMSIS_TIARMCLANG_H */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
Get Control Register.
Definition: cmsis_gcc.h:977
#define __CMSIS_GCC_USE_REG(r)
Definition: cmsis_tiarmclang.h:178
__PACKED_STRUCT T_UINT32_WRITE
Definition: cmsis_tiarmclang.h:91
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition: cmsis_gcc.h:1033
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: cmsis_gcc.h:738
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition: cmsis_gcc.h:1075
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition: cmsis_gcc.h:1105
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition: cmsis_gcc.h:1061
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition: cmsis_gcc.h:1159
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition: cmsis_gcc.h:1240
__STATIC_FORCEINLINE void __disable_irq(void)
Disable IRQ Interrupts.
Definition: cmsis_gcc.h:966
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: cmsis_gcc.h:1047
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition: cmsis_gcc.h:1210
#define __ASM
Definition: cmsis_tiarmclang.h:34
__PACKED_STRUCT T_UINT32_READ
Definition: cmsis_tiarmclang.h:99
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition: cmsis_gcc.h:1129
#define __STATIC_FORCEINLINE
Definition: cmsis_tiarmclang.h:43
#define __ISB()
Instruction Synchronization Barrier.
Definition: cmsis_tiarmclang.h:215
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition: cmsis_gcc.h:352
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
Count leading zeros.
Definition: cmsis_gcc.h:409
#define __CMSIS_GCC_OUT_REG(r)
Definition: cmsis_tiarmclang.h:176
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition: cmsis_gcc.h:1007
__PACKED_STRUCT T_UINT16_WRITE
Definition: cmsis_tiarmclang.h:75
__STATIC_FORCEINLINE void __enable_irq(void)
Enable IRQ Interrupts.
Definition: cmsis_gcc.h:955
__PACKED_STRUCT T_UINT16_READ
Definition: cmsis_tiarmclang.h:83
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: cmsis_gcc.h:713
#define __PACKED_STRUCT
Definition: cmsis_tiarmclang.h:58
struct __attribute__((packed)) T_UINT32
Definition: cmsis_tiarmclang.h:67