openMMC
Open Source Modular MMC for AMCs
cmsis_armclang_ltm.h
/**************************************************************************/
/*
 * Copyright (c) 2018-2019 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */

#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __ARM_COMPAT_H
#include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
  #define __ASM                                  __asm
#endif
#ifndef __INLINE
  #define __INLINE                               __inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
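
/* Illustrative usage sketch (editor's example, not part of the original
   header): reading a 32-bit field at an odd offset of a packed frame.
   The packed-struct access behind __UNALIGNED_UINT32_READ lets the
   compiler emit a safe unaligned load; 'frame' is a hypothetical buffer. */
__STATIC_FORCEINLINE uint32_t example_read_frame_word(const uint8_t *frame)
{
  return __UNALIGNED_UINT32_READ(frame + 1U);  /* frame + 1 is unaligned */
}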
#ifndef __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
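
/* Illustrative usage sketch (editor's example, not part of the original
   header): __COMPILER_BARRIER() emits no instruction; it only stops the
   compiler from reordering memory accesses across it. A hypothetical
   producer can use it to keep a data store ahead of its ready flag as
   observed by an interrupt handler on the same core. */
__STATIC_FORCEINLINE void example_publish(uint32_t *data, volatile uint32_t *flag, uint32_t value)
{
  *data = value;
  __COMPILER_BARRIER();  /* keep the store to *data before the flag store */
  *flag = 1U;
}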

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif


/* ###########################  Core Function Access  ########################### */
/* intrinsic void __enable_irq(); see arm_compat.h */


/* intrinsic void __disable_irq(); see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
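
/* Illustrative usage sketch (editor's example, not part of the original
   header): a nestable critical section built on the PRIMASK accessors
   above; the function names are hypothetical. */
__STATIC_FORCEINLINE uint32_t example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();  /* remember the current mask state */
  __set_PRIMASK(1U);                   /* mask all configurable interrupts */
  return primask;
}

__STATIC_FORCEINLINE void example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);              /* restore the saved state */
}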


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
#define __enable_fault_irq           __enable_fiq   /* see arm_compat.h */


#define __disable_fault_irq          __disable_fiq   /* see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
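
/* Illustrative usage sketch (editor's example, not part of the original
   header): raising the BASEPRI threshold around a short code region.
   __set_BASEPRI_MAX only ever tightens the mask, so this nests safely;
   the value 0x40 is a hypothetical device-specific priority level. */
__STATIC_FORCEINLINE uint32_t example_raise_basepri(void)
{
  uint32_t old = __get_BASEPRI();
  __set_BASEPRI_MAX(0x40U);  /* mask interrupts at priority 0x40 and lower */
  return old;                /* pass back to __set_BASEPRI() to restore */
}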


__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

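/* Illustrative usage sketch (editor's example, not part of the original
   header): an RTOS port for Armv8-M might program the process-stack limit
   before dispatching a task so that a stack overflow faults immediately.
   The function and parameter names are hypothetical. */
__STATIC_FORCEINLINE void example_guard_task_stack(uint32_t stack_base)
{
  __set_PSPLIM(stack_base);  /* fault as soon as PSP drops below the base */
}
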
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()    ((uint32_t)0U)
#endif

#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)   ((void)(x))
#endif
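
/* Illustrative usage sketch (editor's example, not part of the original
   header): clearing the cumulative floating-point exception flags in
   FPSCR[4:0]. With no FPU configured, the macros above make this a no-op. */
__STATIC_FORCEINLINE void example_clear_fp_exception_flags(void)
{
  __set_FPSCR(__get_FPSCR() & ~0x1FU);  /* IOC, DZC, OFC, UFC, IXC */
}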


/* ##########################  Core Instruction Access  ######################### */
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

#define __NOP          __builtin_arm_nop

#define __WFI          __builtin_arm_wfi


#define __WFE          __builtin_arm_wfe


#define __SEV          __builtin_arm_sev


#define __ISB()        __builtin_arm_isb(0xF)

#define __DSB()        __builtin_arm_dsb(0xF)


#define __DMB()        __builtin_arm_dmb(0xF)


#define __REV(value)   __builtin_bswap32(value)


#define __REV16(value) __ROR(__REV(value), 16)


#define __REVSH(value) (int16_t)__builtin_bswap16(value)

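/* Illustrative usage sketch (editor's example, not part of the original
   header): converting a big-endian (network order) word to host order on
   a little-endian Cortex-M core with the byte-reverse intrinsic above. */
__STATIC_FORCEINLINE uint32_t example_be32_to_host(uint32_t wire_word)
{
  return __REV(wire_word);  /* compiles to a single REV instruction */
}
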

__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}


#define __BKPT(value)   __ASM volatile ("bkpt "#value)


#define __RBIT          __builtin_arm_rbit

__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
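
/* Illustrative usage sketch (editor's example, not part of the original
   header): integer log2 via count-leading-zeros; or-ing in 1 sidesteps
   the zero input, for which this hypothetical helper yields 0. */
__STATIC_FORCEINLINE uint32_t example_ilog2(uint32_t value)
{
  return 31U - (uint32_t)__CLZ(value | 1U);
}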


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


#define __LDREXH        (uint16_t)__builtin_arm_ldrex


#define __LDREXW        (uint32_t)__builtin_arm_ldrex


#define __STREXB        (uint32_t)__builtin_arm_strex


#define __STREXH        (uint32_t)__builtin_arm_strex


#define __STREXW        (uint32_t)__builtin_arm_strex


#define __CLREX         __builtin_arm_clrex

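/* Illustrative usage sketch (editor's example, not part of the original
   header): a lock-free read-modify-write built from the exclusive-access
   intrinsics above; the store returns non-zero and the loop retries if
   the exclusive reservation was lost in the meantime. */
__STATIC_FORCEINLINE uint32_t example_atomic_add(volatile uint32_t *addr, uint32_t inc)
{
  uint32_t val;
  do
  {
    val = __LDREXW(addr) + inc;         /* load-exclusive, then modify */
  } while (__STREXW(val, addr) != 0U);  /* store-exclusive, retry on failure */
  return val;
}
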
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

#define __SSAT          __builtin_arm_ssat


#define __USAT          __builtin_arm_usat


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
          (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
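
/* Illustrative usage sketch (editor's example, not part of the original
   header): saturating a wide intermediate accumulator into Q15 range
   before storing, a typical use of __SSAT in fixed-point DSP code. */
__STATIC_FORCEINLINE int16_t example_saturate_to_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16U);  /* clamps to [-32768, 32767] */
}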


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


#define __LDAEXB        (uint8_t)__builtin_arm_ldaex


#define __LDAEXH        (uint16_t)__builtin_arm_ldaex


#define __LDAEX         (uint32_t)__builtin_arm_ldaex


#define __STLEXB        (uint32_t)__builtin_arm_stlex


#define __STLEXH        (uint32_t)__builtin_arm_stlex


#define __STLEX         (uint32_t)__builtin_arm_stlex

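/* Illustrative usage sketch (editor's example, not part of the original
   header): a minimal Armv8-M spinlock built on the acquire/release
   exclusives above; a sketch only, with no timeout or priority handling. */
__STATIC_FORCEINLINE void example_spin_lock(volatile uint32_t *lock)
{
  /* retry while the lock is held or the exclusive store fails */
  while ((__LDAEX(lock) != 0U) || (__STLEX(1U, lock) != 0U)) {}
}

__STATIC_FORCEINLINE void example_spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);  /* store-release publishes the unlock */
}
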
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

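/* Illustrative usage sketch (editor's example, not part of the original
   header): a dual-MAC dot product over pairs of packed Q15 samples using
   __SMLAD; 'len' is assumed even, and the unaligned-read macro defined
   earlier fetches two 16-bit samples per 32-bit word. */
__STATIC_FORCEINLINE int32_t example_dot_q15(const int16_t *a, const int16_t *b, uint32_t len)
{
  uint32_t acc = 0U;
  uint32_t i;
  for (i = 0U; i < len; i += 2U)
  {
    acc = __SMLAD(__UNALIGNED_UINT32_READ(&a[i]),
                  __UNALIGNED_UINT32_READ(&b[i]),
                  acc);  /* acc += a[i]*b[i] + a[i+1]*b[i+1] */
  }
  return (int32_t)acc;
}
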
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

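/* Illustrative usage sketch (editor's example, not part of the original
   header): per-byte unsigned maximum using the GE flags set by __USUB8
   to steer __SEL, a classic SIMD idiom. Note the GE flags must survive
   between the two intrinsics; the inline-asm wrappers above do not
   formally guarantee this, so treat it as a sketch, not a contract. */
__STATIC_FORCEINLINE uint32_t example_max_u8x4(uint32_t a, uint32_t b)
{
  (void)__USUB8(a, b);  /* sets GE[n] where byte n of a >= byte n of b */
  return __SEL(a, b);   /* picks a's byte where GE[n] is set, else b's */
}
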
__STATIC_FORCEINLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                   ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

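/* Illustrative usage sketch (editor's example, not part of the original
   header): packing two 16-bit values into one word with __PKHBT, the
   usual prelude to the packed SIMD intrinsics above. */
__STATIC_FORCEINLINE uint32_t example_pack_halfwords(uint16_t lo, uint16_t hi)
{
  return __PKHBT(lo, hi, 16);  /* result = ((uint32_t)hi << 16) | lo */
}
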
#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#endif /* __CMSIS_ARMCLANG_H */