29 #pragma GCC diagnostic push 30 #pragma GCC diagnostic ignored "-Wsign-conversion" 31 #pragma GCC diagnostic ignored "-Wconversion" 32 #pragma GCC diagnostic ignored "-Wunused-parameter" 36 #define __has_builtin(x) (0) 44 #define __INLINE inline 46 #ifndef __STATIC_INLINE 47 #define __STATIC_INLINE static inline 49 #ifndef __STATIC_FORCEINLINE 50 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline 53 #define __NO_RETURN __attribute__((__noreturn__)) 56 #define __USED __attribute__((used)) 59 #define __WEAK __attribute__((weak)) 62 #define __PACKED __attribute__((packed, aligned(1))) 64 #ifndef __PACKED_STRUCT 65 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) 67 #ifndef __PACKED_UNION 68 #define __PACKED_UNION union __attribute__((packed, aligned(1))) 70 #ifndef __UNALIGNED_UINT32 71 #pragma GCC diagnostic push 72 #pragma GCC diagnostic ignored "-Wpacked" 73 #pragma GCC diagnostic ignored "-Wattributes" 75 #pragma GCC diagnostic pop 76 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) 78 #ifndef __UNALIGNED_UINT16_WRITE 79 #pragma GCC diagnostic push 80 #pragma GCC diagnostic ignored "-Wpacked" 81 #pragma GCC diagnostic ignored "-Wattributes" 83 #pragma GCC diagnostic pop 84 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val)) 86 #ifndef __UNALIGNED_UINT16_READ 87 #pragma GCC diagnostic push 88 #pragma GCC diagnostic ignored "-Wpacked" 89 #pragma GCC diagnostic ignored "-Wattributes" 91 #pragma GCC diagnostic pop 92 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v) 94 #ifndef __UNALIGNED_UINT32_WRITE 95 #pragma GCC diagnostic push 96 #pragma GCC diagnostic ignored "-Wpacked" 97 #pragma GCC diagnostic ignored "-Wattributes" 99 #pragma GCC diagnostic pop 100 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val)) 102 #ifndef __UNALIGNED_UINT32_READ 103 #pragma GCC 
diagnostic push 104 #pragma GCC diagnostic ignored "-Wpacked" 105 #pragma GCC diagnostic ignored "-Wattributes" 107 #pragma GCC diagnostic pop 108 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v) 111 #define __ALIGNED(x) __attribute__((aligned(x))) 114 #define __RESTRICT __restrict 116 #ifndef __COMPILER_BARRIER 117 #define __COMPILER_BARRIER() __ASM volatile("":::"memory") 120 #define __NO_INIT __attribute__ ((section (".bss.noinit"))) 123 #define __ALIAS(x) __attribute__ ((alias(x))) 128 #ifndef __PROGRAM_START 141 typedef struct __copy_table {
147 typedef struct __zero_table {
152 extern const __copy_table_t __copy_table_start__;
153 extern const __copy_table_t __copy_table_end__;
154 extern const __zero_table_t __zero_table_start__;
155 extern const __zero_table_t __zero_table_end__;
157 for (__copy_table_t
const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
158 for(uint32_t i=0u; i<pTable->wlen; ++i) {
159 pTable->dest[i] = pTable->src[i];
163 for (__zero_table_t
const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
164 for(uint32_t i=0u; i<pTable->wlen; ++i) {
165 pTable->dest[i] = 0u;
172 #define __PROGRAM_START __cmsis_start 176 #define __INITIAL_SP __StackTop 179 #ifndef __STACK_LIMIT 180 #define __STACK_LIMIT __StackLimit 183 #ifndef __VECTOR_TABLE 184 #define __VECTOR_TABLE __Vectors 187 #ifndef __VECTOR_TABLE_ATTRIBUTE 188 #define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors"))) 191 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U) 193 #define __STACK_SEAL __StackSeal 196 #ifndef __TZ_STACK_SEAL_SIZE 197 #define __TZ_STACK_SEAL_SIZE 8U 200 #ifndef __TZ_STACK_SEAL_VALUE 201 #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL 206 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
220 #if defined (__thumb__) && !defined (__thumb2__) 221 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 222 #define __CMSIS_GCC_RW_REG(r) "+l" (r) 223 #define __CMSIS_GCC_USE_REG(r) "l" (r) 225 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 226 #define __CMSIS_GCC_RW_REG(r) "+r" (r) 227 #define __CMSIS_GCC_USE_REG(r) "r" (r) 234 #define __NOP() __ASM volatile ("nop") 240 #define __WFI() __ASM volatile ("wfi":::"memory") 248 #define __WFE() __ASM volatile ("wfe":::"memory") 255 #define __SEV() __ASM volatile ("sev") 266 __ASM volatile (
"isb 0xF":::
"memory");
277 __ASM volatile (
"dsb 0xF":::
"memory");
288 __ASM volatile (
"dmb 0xF":::
"memory");
300 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) 301 return __builtin_bswap32(value);
334 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 335 return (int16_t)__builtin_bswap16(value);
359 return (op1 >> op2) | (op1 << (32U - op2));
370 #define __BKPT(value) __ASM volatile ("bkpt "#value) 383 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 384 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 385 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 386 __ASM (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
388 uint32_t s = (4U * 8U) - 1U;
391 for (value >>= 1U; value != 0U; value >>= 1U)
394 result |= value & 1U;
424 return __builtin_clz(value);
428 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 429 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 430 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 431 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 442 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 443 __ASM volatile (
"ldrexb %0, %1" :
"=r" (result) :
"Q" (*addr) );
448 __ASM volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
450 return ((uint8_t) result);
464 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 465 __ASM volatile (
"ldrexh %0, %1" :
"=r" (result) :
"Q" (*addr) );
470 __ASM volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
472 return ((uint16_t) result);
486 __ASM volatile (
"ldrex %0, %1" :
"=r" (result) :
"Q" (*addr) );
503 __ASM volatile (
"strexb %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
520 __ASM volatile (
"strexh %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
537 __ASM volatile (
"strex %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" (value) );
548 __ASM volatile (
"clrex" :::
"memory");
557 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 558 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 559 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 567 #define __SSAT(ARG1, ARG2) \ 570 int32_t __RES, __ARG1 = (ARG1); \ 571 __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 583 #define __USAT(ARG1, ARG2) \ 586 uint32_t __RES, __ARG1 = (ARG1); \ 587 __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 618 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 619 __ASM volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
624 __ASM volatile (
"ldrbt %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
626 return ((uint8_t) result);
640 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 641 __ASM volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
646 __ASM volatile (
"ldrht %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
648 return ((uint16_t) result);
662 __ASM volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
675 __ASM volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
687 __ASM volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
699 __ASM volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
715 if ((sat >= 1U) && (sat <= 32U))
717 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
718 const int32_t min = -1 - max ;
742 const uint32_t max = ((1U << sat) - 1U);
743 if (val > (int32_t)max)
752 return (uint32_t)val;
760 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 761 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 772 __ASM volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
773 return ((uint8_t) result);
787 __ASM volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
788 return ((uint16_t) result);
802 __ASM volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
815 __ASM volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
827 __ASM volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
839 __ASM volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
853 __ASM volatile (
"ldaexb %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
854 return ((uint8_t) result);
868 __ASM volatile (
"ldaexh %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
869 return ((uint16_t) result);
883 __ASM volatile (
"ldaex %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
900 __ASM volatile (
"stlexb %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
917 __ASM volatile (
"stlexh %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
934 __ASM volatile (
"stlex %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
957 __ASM volatile (
"cpsie i" : : :
"memory");
968 __ASM volatile (
"cpsid i" : : :
"memory");
981 __ASM volatile (
"MRS %0, control" :
"=r" (result) );
986 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 996 __ASM volatile (
"MRS %0, control_ns" :
"=r" (result) );
1009 __ASM volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
1014 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1022 __ASM volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
1037 __ASM volatile (
"MRS %0, ipsr" :
"=r" (result) );
1051 __ASM volatile (
"MRS %0, apsr" :
"=r" (result) );
1065 __ASM volatile (
"MRS %0, xpsr" :
"=r" (result) );
1079 __ASM volatile (
"MRS %0, psp" :
"=r" (result) );
1084 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1094 __ASM volatile (
"MRS %0, psp_ns" :
"=r" (result) );
1107 __ASM volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
1111 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1119 __ASM volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
1133 __ASM volatile (
"MRS %0, msp" :
"=r" (result) );
1138 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1148 __ASM volatile (
"MRS %0, msp_ns" :
"=r" (result) );
1161 __ASM volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
1165 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1173 __ASM volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
1178 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1188 __ASM volatile (
"MRS %0, sp_ns" :
"=r" (result) );
1200 __ASM volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
1214 __ASM volatile (
"MRS %0, primask" :
"=r" (result) );
1219 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1229 __ASM volatile (
"MRS %0, primask_ns" :
"=r" (result) );
1242 __ASM volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
1246 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1254 __ASM volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
1259 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 1260 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 1261 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 1269 __ASM volatile (
"cpsie f" : : :
"memory");
1280 __ASM volatile (
"cpsid f" : : :
"memory");
1293 __ASM volatile (
"MRS %0, basepri" :
"=r" (result) );
1298 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1308 __ASM volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
1321 __ASM volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
1325 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1333 __ASM volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
1346 __ASM volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
1359 __ASM volatile (
"MRS %0, faultmask" :
"=r" (result) );
1364 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1374 __ASM volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
1387 __ASM volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
1391 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1399 __ASM volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
1408 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 1409 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 1422 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1423 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1428 __ASM volatile (
"MRS %0, psplim" :
"=r" (result) );
1433 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)) 1444 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1449 __ASM volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
1467 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1468 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1470 (void)ProcStackPtrLimit;
1472 __ASM volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
1477 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1488 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1490 (void)ProcStackPtrLimit;
1492 __ASM volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
1509 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1510 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1515 __ASM volatile (
"MRS %0, msplim" :
"=r" (result) );
1521 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1532 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1537 __ASM volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
1555 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1556 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1558 (void)MainStackPtrLimit;
1560 __ASM volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
1565 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1576 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1578 (void)MainStackPtrLimit;
1580 __ASM volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
1596 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 1597 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 1598 #if __has_builtin(__builtin_arm_get_fpscr) 1602 return __builtin_arm_get_fpscr();
1606 __ASM volatile (
"VMRS %0, fpscr" :
"=r" (result) );
1622 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 1623 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 1624 #if __has_builtin(__builtin_arm_set_fpscr) 1628 __builtin_arm_set_fpscr(fpscr);
1630 __ASM volatile (
"VMSR fpscr, %0" : :
"r" (fpscr) :
"vfpcc",
"memory");
1647 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) 1653 __ASM volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1661 __ASM (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1669 __ASM (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1677 __ASM volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1685 __ASM (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1693 __ASM (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1702 __ASM volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1710 __ASM (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1718 __ASM (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1726 __ASM volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1734 __ASM (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1742 __ASM (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1751 __ASM volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1759 __ASM (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1767 __ASM (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1775 __ASM volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1783 __ASM (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1791 __ASM (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1799 __ASM volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1807 __ASM (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1815 __ASM (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1823 __ASM volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1831 __ASM (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1839 __ASM (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1847 __ASM volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1855 __ASM (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1863 __ASM (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1871 __ASM volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1879 __ASM (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1887 __ASM (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1895 __ASM volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1903 __ASM (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1911 __ASM (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1919 __ASM volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1927 __ASM (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1935 __ASM (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1943 __ASM (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1951 __ASM (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1955 #define __SSAT16(ARG1, ARG2) \ 1958 int32_t __RES, __ARG1 = (ARG1); \ 1959 __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 1963 #define __USAT16(ARG1, ARG2) \ 1966 uint32_t __RES, __ARG1 = (ARG1); \ 1967 __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 1975 __ASM (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1983 __ASM (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1991 __ASM (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1998 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
1999 __ASM volatile (
"sxtb16 %0, %1, ROR %2" :
"=r" (result) :
"r" (op1),
"i" (rotate) );
2001 result = __SXTB16(
__ROR(op1, rotate)) ;
2010 __ASM (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2017 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2018 __ASM volatile (
"sxtab16 %0, %1, %2, ROR %3" :
"=r" (result) :
"r" (op1) ,
"r" (op2) ,
"i" (rotate));
2020 result = __SXTAB16(op1,
__ROR(op2, rotate));
2030 __ASM volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2038 __ASM volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2046 __ASM volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2054 __ASM volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2067 __ASM volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2069 __ASM volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2084 __ASM volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2086 __ASM volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2096 __ASM volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2104 __ASM volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2112 __ASM volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2120 __ASM volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2133 __ASM volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2135 __ASM volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2150 __ASM volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2152 __ASM volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2162 __ASM volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2170 __ASM volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2178 __ASM volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2183 #define __PKHBT(ARG1,ARG2,ARG3) \ 2186 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 2187 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 2191 #define __PKHTB(ARG1,ARG2,ARG3) \ 2194 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 2196 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \ 2198 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 2207 __ASM (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
2215 #pragma GCC diagnostic pop
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
Get Control Register.
Definition: cmsis_gcc.h:977
#define __CMSIS_GCC_USE_REG(r)
Definition: cmsis_gcc.h:227
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition: cmsis_gcc.h:1033
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: cmsis_gcc.h:738
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
Get FPSCR.
Definition: cmsis_gcc.h:1594
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition: cmsis_gcc.h:1075
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition: cmsis_gcc.h:1105
__STATIC_FORCEINLINE void __DMB(void)
Data Memory Barrier.
Definition: cmsis_gcc.h:286
__PACKED_STRUCT T_UINT16_WRITE
Definition: cmsis_gcc.h:82
#define __PACKED_STRUCT
Definition: cmsis_gcc.h:65
struct __attribute__((packed)) T_UINT32
Definition: cmsis_gcc.h:74
#define __SXTB16_RORn(ARG1, ARG2)
Definition: cmsis_iccarm.h:1004
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition: cmsis_gcc.h:1061
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
Reverse byte order (16 bit) with sign extension — returns the byte-swapped value as a signed 16-bit result.
Definition: cmsis_gcc.h:332
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition: cmsis_gcc.h:1159
__PACKED_STRUCT T_UINT32_WRITE
Definition: cmsis_gcc.h:98
__STATIC_FORCEINLINE void __DSB(void)
Data Synchronization Barrier.
Definition: cmsis_gcc.h:275
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition: cmsis_gcc.h:1240
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
Reverse byte order within each 16-bit halfword of a 32-bit value.
Definition: cmsis_gcc.h:317
__STATIC_FORCEINLINE void __disable_irq(void)
Disable IRQ Interrupts.
Definition: cmsis_gcc.h:966
#define __STATIC_FORCEINLINE
Definition: cmsis_gcc.h:50
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
Reverse bit order of value.
Definition: cmsis_gcc.h:379
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
Set FPSCR.
Definition: cmsis_gcc.h:1620
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: cmsis_gcc.h:1047
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition: cmsis_gcc.h:1210
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition: cmsis_gcc.h:1129
#define __SXTAB16_RORn(ARG1, ARG2, ARG3)
Definition: cmsis_iccarm.h:1006
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition: cmsis_gcc.h:352
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
Count leading zeros.
Definition: cmsis_gcc.h:409
__PACKED_STRUCT T_UINT32_READ
Definition: cmsis_gcc.h:106
#define __ASM
Definition: cmsis_gcc.h:41
#define __CMSIS_GCC_OUT_REG(r)
Definition: cmsis_gcc.h:225
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition: cmsis_gcc.h:1007
__STATIC_FORCEINLINE void __enable_irq(void)
Enable IRQ Interrupts.
Definition: cmsis_gcc.h:955
__IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
Definition: cmsis_iccarm.h:620
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: cmsis_gcc.h:713
__PACKED_STRUCT T_UINT16_READ
Definition: cmsis_gcc.h:90
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
Initializes data and bss sections.
Definition: cmsis_gcc.h:137
__STATIC_FORCEINLINE void __ISB(void)
Instruction Synchronization Barrier.
Definition: cmsis_gcc.h:264
#define __NO_RETURN
Definition: cmsis_gcc.h:53
__IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
Definition: cmsis_iccarm.h:625
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
Reverse byte order (32 bit)
Definition: cmsis_gcc.h:298