openMMC
Open Source Modular MMC for AMCs
cmsis_armclang.h
/**************************************************************************/
/*
 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */

#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

#pragma clang system_header /* treat file as system include file */

#ifndef __ARM_COMPAT_H
#include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
  #define __ASM __asm
#endif
#ifndef __INLINE
  #define __INLINE __inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static __inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32 /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
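
/* Editor's usage sketch (not part of the original CMSIS header): the
 * __UNALIGNED_* macros read and write integers at addresses that may not be
 * naturally aligned, e.g. when parsing byte-packed protocol buffers:
 *
 *   uint8_t frame[8] = {0};
 *   __UNALIGNED_UINT16_WRITE(&frame[1], 0x1234U);      // store at odd address
 *   uint16_t id = __UNALIGNED_UINT16_READ(&frame[1]);  // id == 0x1234
 *   uint32_t w  = __UNALIGNED_UINT32_READ(&frame[3]);  // unaligned 32-bit load
 */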
#ifndef __ALIGNED
  #define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif

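/* Editor's note (illustration, not from the original header):
 * __COMPILER_BARRIER() stops the compiler from reordering memory accesses
 * across it but emits no instruction; use __DMB/__DSB when the ordering must
 * be visible to the hardware. fill_buffer/buffer_ready are hypothetical:
 *
 *   fill_buffer(buffer);     // producer writes payload
 *   __COMPILER_BARRIER();    // writes above are not moved past this point
 *   buffer_ready = 1U;       // flag published after the payload
 */
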
/* ######################### Startup and Lowlevel Init ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET")))
#endif

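/* Editor's sketch of how CMSIS device startup code typically uses these
 * symbols under the Arm Compiler 6 scatter-loading model (SystemInit is the
 * usual device-specific init hook, assumed here):
 *
 *   extern __NO_RETURN void __PROGRAM_START(void);  // __main: C library setup
 *   __NO_RETURN void Reset_Handler(void)
 *   {
 *     SystemInit();
 *     __PROGRAM_START();   // never returns; branches to main()
 *   }
 */
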
/* ########################### Core Function Access ########################### */

/* intrinsic void __enable_irq(); see arm_compat.h */


/* intrinsic void __disable_irq(); see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}

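/* Editor's usage sketch: dropping an Armv7-M thread to unprivileged mode by
 * read-modify-write of CONTROL; an ISB is required afterwards so the change
 * applies to subsequent instructions (illustrative, not from the original):
 *
 *   uint32_t ctrl = __get_CONTROL();
 *   __set_CONTROL(ctrl | 1U);   // CONTROL.nPRIV = 1 -> unprivileged thread
 *   __ISB();
 */
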

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}

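/* Editor's usage sketch: an RTOS typically runs threads on the process stack
 * (PSP) while exceptions use the main stack (MSP). A minimal switch to the
 * PSP, with thread_stack as an assumed thread stack array (illustrative):
 *
 *   extern uint32_t thread_stack[256];
 *   __set_PSP((uint32_t)&thread_stack[256]);   // PSP = top of thread stack
 *   __set_CONTROL(__get_CONTROL() | 2U);       // CONTROL.SPSEL = 1 -> use PSP
 *   __ISB();
 */
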

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}

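/* Editor's usage sketch: a nestable critical section built on PRIMASK, a
 * common pattern rather than anything prescribed by this header:
 *
 *   uint32_t primask = __get_PRIMASK();   // save current mask state
 *   __disable_irq();                      // PRIMASK = 1, block all IRQs
 *   // ... critical section ...
 *   __set_PRIMASK(primask);               // restore; re-enables only if it was 0
 */
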

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
#define __enable_fault_irq __enable_fiq /* see arm_compat.h */


#define __disable_fault_irq __disable_fiq /* see arm_compat.h */


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}

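/* Editor's usage sketch: unlike PRIMASK, BASEPRI masks only exceptions whose
 * priority value is numerically >= BASEPRI (i.e. equal or lower urgency).
 * __set_BASEPRI_MAX uses the conditional MSR form, which only ever raises the
 * masking level, avoiding a read-modify-write race (value is illustrative):
 *
 *   uint32_t old = __get_BASEPRI();
 *   __set_BASEPRI_MAX(0x40U);   // mask priority values 0x40..0xFF
 *   // ... section protected from lower-urgency IRQs ...
 *   __set_BASEPRI(old);
 */
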

__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */

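/* Editor's usage sketch (Armv8-M with Main Extension): programming a stack
 * limit register so a descending-stack overflow faults instead of silently
 * corrupting adjacent memory. __StackLimit is a hypothetical linker symbol:
 *
 *   extern uint32_t __StackLimit;
 *   __set_MSPLIM((uint32_t)&__StackLimit);   // fault if MSP drops below this
 *
 * Note the fallbacks above: without the Main Extension the non-secure *SPLIM
 * registers are RAZ/WI, so these calls quietly do nothing there.
 */
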
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED) && (__FPU_USED == 1U)) )
#define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR() ((uint32_t)0U)
#endif

#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED) && (__FPU_USED == 1U)) )
#define __set_FPSCR __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x) ((void)(x))
#endif

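/* Editor's usage sketch: FPSCR accumulates the floating-point exception flags
 * in bits 0-4 and 7 (IOC, DZC, OFC, UFC, IXC, IDC per the Armv7-M ARM); a
 * read-modify-write clears them (illustrative, needs __FPU_USED == 1):
 *
 *   uint32_t fpscr = __get_FPSCR();
 *   __set_FPSCR(fpscr & ~0x9FU);   // clear all cumulative exception flags
 */
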

/* ########################## Core Instruction Access ######################### */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

#define __NOP __builtin_arm_nop

#define __WFI __builtin_arm_wfi


#define __WFE __builtin_arm_wfe


#define __SEV __builtin_arm_sev


#define __ISB() __builtin_arm_isb(0xF)

#define __DSB() __builtin_arm_dsb(0xF)


#define __DMB() __builtin_arm_dmb(0xF)


#define __REV(value) __builtin_bswap32(value)


#define __REV16(value) __ROR(__REV(value), 16)


#define __REVSH(value) (int16_t)__builtin_bswap16(value)


__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}

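/* Editor's worked examples: __REV(0x12345678U) == 0x78563412U (byte swap),
 * __REV16(0x12345678U) == 0x34127856U (swap bytes within each halfword), and
 * __ROR(0x12345678U, 8U) == 0x78123456U. The "op2 %= 32U" above makes rotate
 * counts of 32 or more wrap, and the explicit zero check avoids C's undefined
 * behaviour for a shift by 32. */
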

#define __BKPT(value) __ASM volatile ("bkpt "#value)


#define __RBIT __builtin_arm_rbit

__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}

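/* Editor's worked examples: __CLZ(0U) == 32U, __CLZ(1U) == 31U,
 * __CLZ(0x00010000U) == 15U, __CLZ(0x80000000U) == 0U. */
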

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
#define __LDREXB (uint8_t)__builtin_arm_ldrex


#define __LDREXH (uint16_t)__builtin_arm_ldrex


#define __LDREXW (uint32_t)__builtin_arm_ldrex


#define __STREXB (uint32_t)__builtin_arm_strex


#define __STREXH (uint32_t)__builtin_arm_strex


#define __STREXW (uint32_t)__builtin_arm_strex


#define __CLREX __builtin_arm_clrex

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */

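/* Editor's usage sketch: the classic lock-free read-modify-write loop on the
 * exclusive monitor, assuming a volatile uint32_t counter shared with an IRQ
 * handler (illustrative; __STREXW returns 0 when the store succeeded):
 *
 *   uint32_t v;
 *   do {
 *     v = __LDREXW(&counter);                     // open exclusive access
 *   } while (__STREXW(v + 1U, &counter) != 0U);   // retry if monitor lost
 */
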

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )

#define __SSAT __builtin_arm_ssat


#define __USAT __builtin_arm_usat


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
          (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

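/* Editor's worked examples for the C fallbacks above: __SSAT(200, 8) == 127,
 * __SSAT(-200, 8) == -128, __USAT(300, 8) == 255U, __USAT(-5, 8) == 0U. */
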
#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


#define __LDAEXB (uint8_t)__builtin_arm_ldaex


#define __LDAEXH (uint16_t)__builtin_arm_ldaex


#define __LDAEX (uint32_t)__builtin_arm_ldaex


#define __STLEXB (uint32_t)__builtin_arm_stlex


#define __STLEXH (uint32_t)__builtin_arm_stlex


#define __STLEX (uint32_t)__builtin_arm_stlex

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/* end of group CMSIS_Core_InstructionInterface */

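/* Editor's usage sketch (Armv8-M): a minimal spinlock from the load-acquire /
 * store-release exclusives; acquire/release semantics order the critical
 * section without explicit __DMB barriers. `lock` is an assumed
 * volatile uint32_t (0 = free, 1 = held):
 *
 *   while ((__LDAEX(&lock) != 0U) || (__STLEX(1U, &lock) != 0U))
 *   {
 *     // spin: lock held, or the exclusive store lost the race
 *   }
 *   // ... critical section ...
 *   __STL(0U, &lock);   // store-release publishes the unlock
 */
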

/* ################### Compiler specific Intrinsics ########################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

#define __SADD8      __builtin_arm_sadd8
#define __QADD8      __builtin_arm_qadd8
#define __SHADD8     __builtin_arm_shadd8
#define __UADD8      __builtin_arm_uadd8
#define __UQADD8     __builtin_arm_uqadd8
#define __UHADD8     __builtin_arm_uhadd8
#define __SSUB8      __builtin_arm_ssub8
#define __QSUB8      __builtin_arm_qsub8
#define __SHSUB8     __builtin_arm_shsub8
#define __USUB8      __builtin_arm_usub8
#define __UQSUB8     __builtin_arm_uqsub8
#define __UHSUB8     __builtin_arm_uhsub8
#define __SADD16     __builtin_arm_sadd16
#define __QADD16     __builtin_arm_qadd16
#define __SHADD16    __builtin_arm_shadd16
#define __UADD16     __builtin_arm_uadd16
#define __UQADD16    __builtin_arm_uqadd16
#define __UHADD16    __builtin_arm_uhadd16
#define __SSUB16     __builtin_arm_ssub16
#define __QSUB16     __builtin_arm_qsub16
#define __SHSUB16    __builtin_arm_shsub16
#define __USUB16     __builtin_arm_usub16
#define __UQSUB16    __builtin_arm_uqsub16
#define __UHSUB16    __builtin_arm_uhsub16
#define __SASX       __builtin_arm_sasx
#define __QASX       __builtin_arm_qasx
#define __SHASX      __builtin_arm_shasx
#define __UASX       __builtin_arm_uasx
#define __UQASX      __builtin_arm_uqasx
#define __UHASX      __builtin_arm_uhasx
#define __SSAX       __builtin_arm_ssax
#define __QSAX       __builtin_arm_qsax
#define __SHSAX      __builtin_arm_shsax
#define __USAX       __builtin_arm_usax
#define __UQSAX      __builtin_arm_uqsax
#define __UHSAX      __builtin_arm_uhsax
#define __USAD8      __builtin_arm_usad8
#define __USADA8     __builtin_arm_usada8
#define __SSAT16     __builtin_arm_ssat16
#define __USAT16     __builtin_arm_usat16
#define __UXTB16     __builtin_arm_uxtb16
#define __UXTAB16    __builtin_arm_uxtab16
#define __SXTB16     __builtin_arm_sxtb16
#define __SXTAB16    __builtin_arm_sxtab16
#define __SMUAD      __builtin_arm_smuad
#define __SMUADX     __builtin_arm_smuadx
#define __SMLAD      __builtin_arm_smlad
#define __SMLADX     __builtin_arm_smladx
#define __SMLALD     __builtin_arm_smlald
#define __SMLALDX    __builtin_arm_smlaldx
#define __SMUSD      __builtin_arm_smusd
#define __SMUSDX     __builtin_arm_smusdx
#define __SMLSD      __builtin_arm_smlsd
#define __SMLSDX     __builtin_arm_smlsdx
#define __SMLSLD     __builtin_arm_smlsld
#define __SMLSLDX    __builtin_arm_smlsldx
#define __SEL        __builtin_arm_sel
#define __QADD       __builtin_arm_qadd
#define __QSUB       __builtin_arm_qsub

#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )

#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

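/* Editor's usage sketch: the SIMD intrinsics operate on packed sub-words, so
 * a single __UADD8 performs four byte-wise additions in parallel
 * (illustrative):
 *
 *   uint32_t a = 0x01020304U, b = 0x10203040U;
 *   uint32_t s = __UADD8(a, b);   // s == 0x11223344U
 */
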
#endif /* (__ARM_FEATURE_DSP == 1) */

#endif /* __CMSIS_ARMCLANG_H */