Hilscher netX microcontroller driver  V0.0.5.0
Documentation of the netX driver package
cmsis_gcc.h
1 /**************************************************************************/
7 /*
8  * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
9  *
10  * SPDX-License-Identifier: Apache-2.0
11  *
12  * Licensed under the Apache License, Version 2.0 (the License); you may
13  * not use this file except in compliance with the License.
14  * You may obtain a copy of the License at
15  *
16  * www.apache.org/licenses/LICENSE-2.0
17  *
18  * Unless required by applicable law or agreed to in writing, software
19  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21  * See the License for the specific language governing permissions and
22  * limitations under the License.
23  */
24 
25 #ifndef __CMSIS_GCC_H
26 #define __CMSIS_GCC_H
27 
28 /* ignore some GCC warnings */
29 #pragma GCC diagnostic push
30 #pragma GCC diagnostic ignored "-Wsign-conversion"
31 #pragma GCC diagnostic ignored "-Wconversion"
32 #pragma GCC diagnostic ignored "-Wunused-parameter"
33 
34 /* Fallback for __has_builtin */
35 #ifndef __has_builtin
36  #define __has_builtin(x) (0)
37 #endif
38 
39 /* CMSIS compiler specific defines */
40 #ifndef __ASM
41  #define __ASM __asm
42 #endif
43 #ifndef __INLINE
44  #define __INLINE inline
45 #endif
46 #ifndef __STATIC_INLINE
47  #define __STATIC_INLINE static inline
48 #endif
49 #ifndef __STATIC_FORCEINLINE
50  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
51 #endif
52 #ifndef __NO_RETURN
53  #define __NO_RETURN __attribute__((__noreturn__))
54 #endif
55 #ifndef __USED
56  #define __USED __attribute__((used))
57 #endif
58 #ifndef __WEAK
59  #define __WEAK __attribute__((weak))
60 #endif
61 #ifndef __PACKED
62  #define __PACKED __attribute__((packed, aligned(1)))
63 #endif
64 #ifndef __PACKED_STRUCT
65  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
66 #endif
67 #ifndef __PACKED_UNION
68  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
69 #endif
70 #ifndef __UNALIGNED_UINT32 /* deprecated */
71  #pragma GCC diagnostic push
72  #pragma GCC diagnostic ignored "-Wpacked"
73  #pragma GCC diagnostic ignored "-Wattributes"
74  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
75  #pragma GCC diagnostic pop
76  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
77 #endif
78 #ifndef __UNALIGNED_UINT16_WRITE
79  #pragma GCC diagnostic push
80  #pragma GCC diagnostic ignored "-Wpacked"
81  #pragma GCC diagnostic ignored "-Wattributes"
82  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
83  #pragma GCC diagnostic pop
84  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
85 #endif
86 #ifndef __UNALIGNED_UINT16_READ
87  #pragma GCC diagnostic push
88  #pragma GCC diagnostic ignored "-Wpacked"
89  #pragma GCC diagnostic ignored "-Wattributes"
90  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
91  #pragma GCC diagnostic pop
92  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
93 #endif
94 #ifndef __UNALIGNED_UINT32_WRITE
95  #pragma GCC diagnostic push
96  #pragma GCC diagnostic ignored "-Wpacked"
97  #pragma GCC diagnostic ignored "-Wattributes"
98  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
99  #pragma GCC diagnostic pop
100  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
101 #endif
102 #ifndef __UNALIGNED_UINT32_READ
103  #pragma GCC diagnostic push
104  #pragma GCC diagnostic ignored "-Wpacked"
105  #pragma GCC diagnostic ignored "-Wattributes"
106  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
107  #pragma GCC diagnostic pop
108  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
109 #endif
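
/* Usage sketch (not part of the original header): the __UNALIGNED_* macros
   funnel a byte-pointer access through a packed struct so GCC emits code
   that is legal at any alignment. `parse_u32` is a hypothetical helper. */
static inline uint32_t parse_u32(const uint8_t *buf)
{
  /* buf may point into the middle of a packet; the packed struct behind
     the macro stops the compiler from assuming 4-byte alignment. */
  return __UNALIGNED_UINT32_READ(buf);
}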
110 #ifndef __ALIGNED
111  #define __ALIGNED(x) __attribute__((aligned(x)))
112 #endif
113 #ifndef __RESTRICT
114  #define __RESTRICT __restrict
115 #endif
116 
117 
118 /* ########################### Core Function Access ########################### */
129 __STATIC_FORCEINLINE void __enable_irq(void)
130 {
131  __ASM volatile ("cpsie i" : : : "memory");
132 }
133 
134 
140 __STATIC_FORCEINLINE void __disable_irq(void)
141 {
142  __ASM volatile ("cpsid i" : : : "memory");
143 }
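
/* Usage sketch (not part of the original header): the simplest critical
   section, for code that knows interrupts are currently enabled. For
   nesting-safe code, save and restore PRIMASK instead (see the sketch
   after __set_PRIMASK below). `g_tick_count` is a hypothetical counter. */
static volatile uint32_t g_tick_count;

static inline void example_bump_tick(void)
{
  __disable_irq();   /* "cpsid i": set PRIMASK, masking configurable IRQs */
  g_tick_count++;    /* protected read-modify-write */
  __enable_irq();    /* "cpsie i": clear PRIMASK again */
}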
144 
145 
151 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
152 {
153  uint32_t result;
154 
155  __ASM volatile ("MRS %0, control" : "=r" (result) );
156  return(result);
157 }
158 
159 
160 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
161 
166 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
167 {
168  uint32_t result;
169 
170  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
171  return(result);
172 }
173 #endif
174 
175 
181 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
182 {
183  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
184 }
185 
186 
187 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
188 
193 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
194 {
195  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
196 }
197 #endif
198 
199 
205 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
206 {
207  uint32_t result;
208 
209  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
210  return(result);
211 }
212 
213 
219 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
220 {
221  uint32_t result;
222 
223  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
224  return(result);
225 }
226 
227 
233 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
234 {
235  uint32_t result;
236 
237  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
238  return(result);
239 }
240 
241 
247 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
248 {
249  register uint32_t result;
250 
251  __ASM volatile ("MRS %0, psp" : "=r" (result) );
252  return(result);
253 }
254 
255 
256 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
257 
262 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
263 {
264  register uint32_t result;
265 
266  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
267  return(result);
268 }
269 #endif
270 
271 
277 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
278 {
279  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
280 }
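
/* Usage sketch (not part of the original header): an RTOS-style switch of
   Thread mode onto the process stack, the pattern typically run once before
   starting a scheduler. CONTROL bit 1 (SPSEL) selects PSP; the ISB makes
   sure following instructions see the new stack selection. */
static inline void example_use_process_stack(uint32_t *stack_top)
{
  __set_PSP((uint32_t)stack_top);          /* point PSP at the new stack    */
  __set_CONTROL(__get_CONTROL() | 0x2U);   /* SPSEL = 1: Thread mode on PSP */
  __ISB();                                 /* flush pipeline after the CONTROL write */
}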
281 
282 
283 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
284 
289 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
290 {
291  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
292 }
293 #endif
294 
295 
301 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
302 {
303  register uint32_t result;
304 
305  __ASM volatile ("MRS %0, msp" : "=r" (result) );
306  return(result);
307 }
308 
309 
310 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
311 
316 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
317 {
318  register uint32_t result;
319 
320  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
321  return(result);
322 }
323 #endif
324 
325 
331 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
332 {
333  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
334 }
335 
336 
337 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
338 
343 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
344 {
345  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
346 }
347 #endif
348 
349 
350 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
351 
356 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
357 {
358  register uint32_t result;
359 
360  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
361  return(result);
362 }
363 
364 
370 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
371 {
372  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
373 }
374 #endif
375 
376 
382 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
383 {
384  uint32_t result;
385 
386  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
387  return(result);
388 }
389 
390 
391 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
392 
397 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
398 {
399  uint32_t result;
400 
401  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
402  return(result);
403 }
404 #endif
405 
406 
412 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
413 {
414  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
415 }
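
/* Usage sketch (not part of the original header): a nesting-safe critical
   section that restores the previous PRIMASK value instead of blindly
   re-enabling interrupts. */
static inline uint32_t example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();   /* remember the current mask state */
  __disable_irq();
  return primask;
}

static inline void example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);   /* re-enables IRQs only if they were enabled before */
}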
416 
417 
418 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
419 
424 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
425 {
426  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
427 }
428 #endif
429 
430 
431 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
432  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
433  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
434 
439 __STATIC_FORCEINLINE void __enable_fault_irq(void)
440 {
441  __ASM volatile ("cpsie f" : : : "memory");
442 }
443 
444 
450 __STATIC_FORCEINLINE void __disable_fault_irq(void)
451 {
452  __ASM volatile ("cpsid f" : : : "memory");
453 }
454 
455 
461 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
462 {
463  uint32_t result;
464 
465  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
466  return(result);
467 }
468 
469 
470 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
471 
476 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
477 {
478  uint32_t result;
479 
480  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
481  return(result);
482 }
483 #endif
484 
485 
491 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
492 {
493  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
494 }
495 
496 
497 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
498 
503 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
504 {
505  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
506 }
507 #endif
508 
509 
516 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
517 {
518  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
519 }
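
/* Usage sketch (not part of the original header): mask only interrupts with
   priority value 0x40 or higher (i.e. less urgent), leaving more urgent IRQs
   live. BASEPRI_MAX only ever raises the masking level, so this nests safely
   inside another masked region. */
static inline void example_basepri_region(void)
{
  uint32_t old = __get_BASEPRI();
  __set_BASEPRI_MAX(0x40U);   /* block priority values 0x40..0xFF */
  /* ... short protected region ... */
  __set_BASEPRI(old);         /* drop back to the previous masking level */
}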
520 
521 
527 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
528 {
529  uint32_t result;
530 
531  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
532  return(result);
533 }
534 
535 
536 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
537 
542 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
543 {
544  uint32_t result;
545 
546  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
547  return(result);
548 }
549 #endif
550 
551 
557 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
558 {
559  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
560 }
561 
562 
563 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
564 
569 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
570 {
571  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
572 }
573 #endif
574 
575 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
576  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
577  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
578 
579 
580 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
581  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
582 
592 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
593 {
594 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
595  (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
596  // without main extensions, the non-secure PSPLIM is RAZ/WI
597  return 0U;
598 #else
599  register uint32_t result;
600  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
601  return result;
602 #endif
603 }
604 
605 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
606 
614 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
615 {
616 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
617  // without main extensions, the non-secure PSPLIM is RAZ/WI
618  return 0U;
619 #else
620  register uint32_t result;
621  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
622  return result;
623 #endif
624 }
625 #endif
626 
627 
637 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
638 {
639 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
640  (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
641  // without main extensions, the non-secure PSPLIM is RAZ/WI
642  (void)ProcStackPtrLimit;
643 #else
644  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
645 #endif
646 }
647 
648 
649 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
650 
658 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
659 {
660 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
661  // without main extensions, the non-secure PSPLIM is RAZ/WI
662  (void)ProcStackPtrLimit;
663 #else
664  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
665 #endif
666 }
667 #endif
668 
669 
679 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
680 {
681 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
682  (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
683  // without main extensions, the non-secure MSPLIM is RAZ/WI
684  return 0U;
685 #else
686  register uint32_t result;
687  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
688  return result;
689 #endif
690 }
691 
692 
693 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
694 
702 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
703 {
704 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
705  // without main extensions, the non-secure MSPLIM is RAZ/WI
706  return 0U;
707 #else
708  register uint32_t result;
709  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
710  return result;
711 #endif
712 }
713 #endif
714 
715 
725 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
726 {
727 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
728  (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
729  // without main extensions, the non-secure MSPLIM is RAZ/WI
730  (void)MainStackPtrLimit;
731 #else
732  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
733 #endif
734 }
735 
736 
737 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
738 
746 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
747 {
748 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
749  // without main extensions, the non-secure MSPLIM is RAZ/WI
750  (void)MainStackPtrLimit;
751 #else
752  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
753 #endif
754 }
755 #endif
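
/* Usage sketch (not part of the original header, Armv8-M only): program the
   process stack limit before launching a thread so an overflow raises a
   fault instead of silently corrupting memory. `thread_stack` is a
   hypothetical stack buffer used only for illustration. */
static uint32_t thread_stack[256] __ALIGNED(8);

static inline void example_arm_stack_guard(void)
{
  __set_PSPLIM((uint32_t)&thread_stack[0]);   /* lowest address PSP may reach */
  __set_PSP((uint32_t)&thread_stack[256]);    /* initial top of the stack     */
}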
756 
757 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
758  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
759 
760 
761 #if ((defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
762  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
763 
769 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
770 {
771 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
772  (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
773 #if __has_builtin(__builtin_arm_get_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
774  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
775  return __builtin_arm_get_fpscr();
776 #else
777  uint32_t result;
778 
779  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
780  return(result);
781 #endif
782 #else
783  return(0U);
784 #endif
785 }
786 
787 
793 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
794 {
795 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
796  (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
797 #if __has_builtin(__builtin_arm_set_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
798  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
799  __builtin_arm_set_fpscr(fpscr);
800 #else
801  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
802 #endif
803 #else
804  (void)fpscr;
805 #endif
806 }
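
/* Usage sketch (not part of the original header): read and clear the
   cumulative floating-point exception flags, which live in FPSCR bits [4:0]
   (IOC, DZC, OFC, UFC, IXC). */
static inline uint32_t example_poll_fp_exceptions(void)
{
  uint32_t fpscr = __get_FPSCR();
  if ((fpscr & 0x1FU) != 0U)
  {
    __set_FPSCR(fpscr & ~0x1FU);   /* clear the sticky exception flags */
  }
  return fpscr & 0x1FU;
}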
807 
808 #endif /* ((defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
809  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
810 
811 
812 
816 /* ########################## Core Instruction Access ######################### */
822 /* Define macros for porting to both thumb1 and thumb2.
823  * For thumb1, use low register (r0-r7), specified by constraint "l"
824  * Otherwise, use general registers, specified by constraint "r" */
825 #if defined (__thumb__) && !defined (__thumb2__)
826 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
827 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
828 #define __CMSIS_GCC_USE_REG(r) "l" (r)
829 #else
830 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
831 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
832 #define __CMSIS_GCC_USE_REG(r) "r" (r)
833 #endif
834 
839 #define __NOP() __ASM volatile ("nop")
840 
845 #define __WFI() __ASM volatile ("wfi")
846 
847 
853 #define __WFE() __ASM volatile ("wfe")
854 
855 
860 #define __SEV() __ASM volatile ("sev")
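
/* Usage sketch (not part of the original header): a low-power idle loop.
   The core sleeps in WFE and re-checks the flag whenever an interrupt or
   event wakes it. `g_ready` is a hypothetical flag set from an ISR. */
static volatile uint32_t g_ready;

static inline void example_wait_for_ready(void)
{
  while (g_ready == 0U)
  {
    __WFE();   /* sleep until an event, an interrupt, or SEV from another core */
  }
}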
861 
862 
869 __STATIC_FORCEINLINE void __ISB(void)
870 {
871  __ASM volatile ("isb 0xF":::"memory");
872 }
873 
874 
880 __STATIC_FORCEINLINE void __DSB(void)
881 {
882  __ASM volatile ("dsb 0xF":::"memory");
883 }
884 
885 
891 __STATIC_FORCEINLINE void __DMB(void)
892 {
893  __ASM volatile ("dmb 0xF":::"memory");
894 }
895 
896 
903 __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
904 {
905 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
906  return __builtin_bswap32(value);
907 #else
908  uint32_t result;
909 
910  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
911  return result;
912 #endif
913 }
914 
915 
922 __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
923 {
924  uint32_t result;
925 
926  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
927  return result;
928 }
929 
930 
937 __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
938 {
939 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
940  return (int16_t)__builtin_bswap16(value);
941 #else
942  int16_t result;
943 
944  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
945  return result;
946 #endif
947 }
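
/* Usage sketch (not part of the original header): converting a 32-bit
   big-endian wire value to host order on a little-endian Cortex-M core;
   __REV compiles to a single REV instruction. */
static inline uint32_t example_ntohl(uint32_t net)
{
  return __REV(net);   /* 0xAABBCCDD -> 0xDDCCBBAA */
}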
948 
949 
957 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
958 {
959  op2 %= 32U;
960  if (op2 == 0U)
961  {
962  return op1;
963  }
964  return (op1 >> op2) | (op1 << (32U - op2));
965 }
966 
967 
975 #define __BKPT(value) __ASM volatile ("bkpt "#value)
976 
977 
984 __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
985 {
986  uint32_t result;
987 
988 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
989  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
990  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
991  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
992 #else
993  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
994 
995  result = value; /* r will be reversed bits of v; first get LSB of v */
996  for (value >>= 1U; value != 0U; value >>= 1U)
997  {
998  result <<= 1U;
999  result |= value & 1U;
1000  s--;
1001  }
1002  result <<= s; /* shift when v's highest bits are zero */
1003 #endif
1004  return result;
1005 }
1006 
1007 
1014 #define __CLZ (uint8_t)__builtin_clz
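
/* Usage sketch (not part of the original header): count trailing zeros by
   combining __RBIT and __CLZ, e.g. to find the lowest pending bit of an
   interrupt mask. __builtin_clz(0) is undefined, so zero is special-cased. */
static inline uint32_t example_ctz(uint32_t mask)
{
  return (mask != 0U) ? (uint32_t)__CLZ(__RBIT(mask)) : 32U;
}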
1015 
1016 
1017 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1018  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1019  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1020  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1021 
1027 __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
1028 {
1029  uint32_t result;
1030 
1031 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1032  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
1033 #else
1034  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1035  accepted by the assembler, so the following less efficient pattern is used.
1036  */
1037  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1038 #endif
1039  return ((uint8_t) result); /* Add explicit type cast here */
1040 }
1041 
1042 
1049 __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
1050 {
1051  uint32_t result;
1052 
1053 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1054  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
1055 #else
1056  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1057  accepted by the assembler, so the following less efficient pattern is used.
1058  */
1059  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1060 #endif
1061  return ((uint16_t) result); /* Add explicit type cast here */
1062 }
1063 
1064 
1071 __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
1072 {
1073  uint32_t result;
1074 
1075  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
1076  return(result);
1077 }
1078 
1079 
1088 __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
1089 {
1090  uint32_t result;
1091 
1092  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1093  return(result);
1094 }
1095 
1096 
1105 __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
1106 {
1107  uint32_t result;
1108 
1109  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1110  return(result);
1111 }
1112 
1113 
1122 __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
1123 {
1124  uint32_t result;
1125 
1126  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
1127  return(result);
1128 }
1129 
1130 
1135 __STATIC_FORCEINLINE void __CLREX(void)
1136 {
1137  __ASM volatile ("clrex" ::: "memory");
1138 }
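
/* Usage sketch (not part of the original header): a lock-free atomic add
   built on the exclusive monitor. __STREXW returns 0 on success and 1 if
   the reservation was lost, in which case the read-modify-write retries.
   Add a __DMB() after the loop if other observers rely on the ordering. */
static inline uint32_t example_atomic_add(volatile uint32_t *addr, uint32_t n)
{
  uint32_t val;
  do
  {
    val = __LDREXW(addr) + n;            /* load-exclusive and compute    */
  } while (__STREXW(val, addr) != 0U);   /* retry if the monitor was lost */
  return val;
}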
1139 
1140 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1141  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1142  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1143  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1144 
1145 
1146 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1147  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1148  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1149 
1156 #define __SSAT(ARG1,ARG2) \
1157 __extension__ \
1158 ({ \
1159  int32_t __RES, __ARG1 = (ARG1); \
1160  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1161  __RES; \
1162  })
1163 
1164 
1172 #define __USAT(ARG1,ARG2) \
1173  __extension__ \
1174 ({ \
1175  uint32_t __RES, __ARG1 = (ARG1); \
1176  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1177  __RES; \
1178  })
1179 
1180 
1188 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1189 {
1190  uint32_t result;
1191 
1192  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1193  return(result);
1194 }
1195 
1196 
1203 __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1204 {
1205  uint32_t result;
1206 
1207 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1208  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1209 #else
1210  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1211  accepted by the assembler, so the following less efficient pattern is used.
1212  */
1213  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1214 #endif
1215  return ((uint8_t) result); /* Add explicit type cast here */
1216 }
1217 
1218 
1225 __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1226 {
1227  uint32_t result;
1228 
1229 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1230  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1231 #else
1232  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
1233  accepted by the assembler, so the following less efficient pattern is used.
1234  */
1235  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1236 #endif
1237  return ((uint16_t) result); /* Add explicit type cast here */
1238 }
1239 
1240 
1247 __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
1248 {
1249  uint32_t result;
1250 
1251  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
1252  return(result);
1253 }
1254 
1255 
1262 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
1263 {
1264  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1265 }
1266 
1267 
1274 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
1275 {
1276  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1277 }
1278 
1279 
1286 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
1287 {
1288  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
1289 }
1290 
1291 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1292  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1293  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1294 
1302 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1303 {
1304  if ((sat >= 1U) && (sat <= 32U))
1305  {
1306  const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1307  const int32_t min = -1 - max ;
1308  if (val > max)
1309  {
1310  return max;
1311  }
1312  else if (val < min)
1313  {
1314  return min;
1315  }
1316  }
1317  return val;
1318 }
1319 
1327 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1328 {
1329  if (sat <= 31U)
1330  {
1331  const uint32_t max = ((1U << sat) - 1U);
1332  if (val > (int32_t)max)
1333  {
1334  return max;
1335  }
1336  else if (val < 0)
1337  {
1338  return 0U;
1339  }
1340  }
1341  return (uint32_t)val;
1342 }
1343 
1344 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1345  (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1346  (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
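
/* Usage sketch (not part of the original header): clamping a filter
   accumulator to the signed 16-bit range before storing it as a sample;
   on cores with the hardware instruction this is a single SSAT. */
static inline int16_t example_saturate_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);   /* clamp to [-32768, 32767] */
}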
1347 
1348 
1349 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1350  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1351 
1357 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
1358 {
1359  uint32_t result;
1360 
1361  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
1362  return ((uint8_t) result);
1363 }
1364 
1365 
1372 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
1373 {
1374  uint32_t result;
1375 
1376  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
1377  return ((uint16_t) result);
1378 }
1379 
1380 
1387 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
1388 {
1389  uint32_t result;
1390 
1391  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
1392  return(result);
1393 }
1394 
1395 
1402 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
1403 {
1404  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1405 }
1406 
1407 
1414 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
1415 {
1416  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1417 }
1418 
1419 
1426 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
1427 {
1428  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1429 }
1430 
1431 
1438 __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
1439 {
1440  uint32_t result;
1441 
1442  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
1443  return ((uint8_t) result);
1444 }
1445 
1446 
1453 __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
1454 {
1455  uint32_t result;
1456 
1457  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
1458  return ((uint16_t) result);
1459 }
1460 
1461 
1468 __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
1469 {
1470  uint32_t result;
1471 
1472  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
1473  return(result);
1474 }
1475 
1476 
1485 __STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
1486 {
1487  uint32_t result;
1488 
1489  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1490  return(result);
1491 }
1492 
1493 
1502 __STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
1503 {
1504  uint32_t result;
1505 
1506  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1507  return(result);
1508 }
1509 
1510 
1519 __STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
1520 {
1521  uint32_t result;
1522 
1523  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1524  return(result);
1525 }
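
/* Usage sketch (not part of the original header): a minimal spin lock using
   the acquire/release exclusives, which provide the ordering that a plain
   LDREX/STREX pair would need explicit barriers for. */
static inline void example_spin_lock(volatile uint32_t *lock)
{
  /* loop until the lock is observed free (0) and claimed atomically */
  do
  {
    while (__LDAEX(lock) != 0U) { }    /* load-acquire: wait for 0      */
  } while (__STLEX(1U, lock) != 0U);   /* store-release-exclusive claim */
}

static inline void example_spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);   /* store-release: prior accesses complete first */
}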
1526 
1527 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1528  (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1529  /* end of group CMSIS_Core_InstructionInterface */
1531 
1532 
1533 /* ################### Compiler specific Intrinsics ########################### */
1539 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1540 
1541 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1542 {
1543  uint32_t result;
1544 
1545  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1546  return(result);
1547 }
1548 
1549 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1550 {
1551  uint32_t result;
1552 
1553  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1554  return(result);
1555 }
1556 
1557 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1558 {
1559  uint32_t result;
1560 
1561  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1562  return(result);
1563 }
1564 
1565 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1566 {
1567  uint32_t result;
1568 
1569  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1570  return(result);
1571 }
1572 
1573 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1574 {
1575  uint32_t result;
1576 
1577  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1578  return(result);
1579 }
1580 
1581 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1582 {
1583  uint32_t result;
1584 
1585  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1586  return(result);
1587 }
1588 
1589 
1590 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1591 {
1592  uint32_t result;
1593 
1594  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1595  return(result);
1596 }
1597 
1598 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1599 {
1600  uint32_t result;
1601 
1602  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1603  return(result);
1604 }
1605 
1606 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1607 {
1608  uint32_t result;
1609 
1610  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1611  return(result);
1612 }
1613 
1614 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1615 {
1616  uint32_t result;
1617 
1618  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619  return(result);
1620 }
1621 
1622 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1623 {
1624  uint32_t result;
1625 
1626  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1627  return(result);
1628 }
1629 
1630 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1631 {
1632  uint32_t result;
1633 
1634  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1635  return(result);
1636 }
1637 
1638 
1639 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1640 {
1641  uint32_t result;
1642 
1643  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1644  return(result);
1645 }
1646 
1647 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1648 {
1649  uint32_t result;
1650 
1651  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1652  return(result);
1653 }
1654 
1655 __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1656 {
1657  uint32_t result;
1658 
1659  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1660  return(result);
1661 }
1662 
1663 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1664 {
1665  uint32_t result;
1666 
1667  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1668  return(result);
1669 }
1670 
1671 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1672 {
1673  uint32_t result;
1674 
1675  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1676  return(result);
1677 }
1678 
1679 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1680 {
1681  uint32_t result;
1682 
1683  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1684  return(result);
1685 }
1686 
1687 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1688 {
1689  uint32_t result;
1690 
1691  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1692  return(result);
1693 }
1694 
1695 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1696 {
1697  uint32_t result;
1698 
1699  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1700  return(result);
1701 }
1702 
1703 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1704 {
1705  uint32_t result;
1706 
1707  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1708  return(result);
1709 }
1710 
1711 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1712 {
1713  uint32_t result;
1714 
1715  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1716  return(result);
1717 }
1718 
1719 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1720 {
1721  uint32_t result;
1722 
1723  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1724  return(result);
1725 }
1726 
1727 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1728 {
1729  uint32_t result;
1730 
1731  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1732  return(result);
1733 }
1734 
1735 __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1736 {
1737  uint32_t result;
1738 
1739  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1740  return(result);
1741 }
1742 
1743 __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1744 {
1745  uint32_t result;
1746 
1747  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1748  return(result);
1749 }
1750 
1751 __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1752 {
1753  uint32_t result;
1754 
1755  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1756  return(result);
1757 }
1758 
1759 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1760 {
1761  uint32_t result;
1762 
1763  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1764  return(result);
1765 }
1766 
1767 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1768 {
1769  uint32_t result;
1770 
1771  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1772  return(result);
1773 }
1774 
1775 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1776 {
1777  uint32_t result;
1778 
1779  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1780  return(result);
1781 }
1782 
1783 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1784 {
1785  uint32_t result;
1786 
1787  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1788  return(result);
1789 }
1790 
1791 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1792 {
1793  uint32_t result;
1794 
1795  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1796  return(result);
1797 }
1798 
1799 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1800 {
1801  uint32_t result;
1802 
1803  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1804  return(result);
1805 }
1806 
1807 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1808 {
1809  uint32_t result;
1810 
1811  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1812  return(result);
1813 }
1814 
1815 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1816 {
1817  uint32_t result;
1818 
1819  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1820  return(result);
1821 }
1822 
1823 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1824 {
1825  uint32_t result;
1826 
1827  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1828  return(result);
1829 }
1830 
1831 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1832 {
1833  uint32_t result;
1834 
1835  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1836  return(result);
1837 }
1838 
1839 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1840 {
1841  uint32_t result;
1842 
1843  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1844  return(result);
1845 }
1846 
1847 #define __SSAT16(ARG1,ARG2) \
1848 ({ \
1849  int32_t __RES, __ARG1 = (ARG1); \
1850  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1851  __RES; \
1852  })
1853 
1854 #define __USAT16(ARG1,ARG2) \
1855 ({ \
1856  uint32_t __RES, __ARG1 = (ARG1); \
1857  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1858  __RES; \
1859  })
1860 
1861 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1862 {
1863  uint32_t result;
1864 
1865  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
1866  return(result);
1867 }
1868 
1869 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1870 {
1871  uint32_t result;
1872 
1873  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1874  return(result);
1875 }
1876 
1877 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1878 {
1879  uint32_t result;
1880 
1881  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
1882  return(result);
1883 }
1884 
1885 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1886 {
1887  uint32_t result;
1888 
1889  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1890  return(result);
1891 }
1892 
1893 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1894 {
1895  uint32_t result;
1896 
1897  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1898  return(result);
1899 }
1900 
1901 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1902 {
1903  uint32_t result;
1904 
1905  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1906  return(result);
1907 }
1908 
1909 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1910 {
1911  uint32_t result;
1912 
1913  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1914  return(result);
1915 }
1916 
1917 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
1918 {
1919  uint32_t result;
1920 
1921  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1922  return(result);
1923 }
1924 
1925 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
1926 {
1927  union llreg_u{
1928  uint32_t w32[2];
1929  uint64_t w64;
1930  } llr;
1931  llr.w64 = acc;
1932 
1933 #ifndef __ARMEB__ /* Little endian */
1934  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
1935 #else /* Big endian */
1936  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
1937 #endif
1938 
1939  return(llr.w64);
1940 }
1941 
1942 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
1943 {
1944  union llreg_u{
1945  uint32_t w32[2];
1946  uint64_t w64;
1947  } llr;
1948  llr.w64 = acc;
1949 
1950 #ifndef __ARMEB__ /* Little endian */
1951  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
1952 #else /* Big endian */
1953  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
1954 #endif
1955 
1956  return(llr.w64);
1957 }
1958 
1959 __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
1960 {
1961  uint32_t result;
1962 
1963  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1964  return(result);
1965 }
1966 
1967 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1968 {
1969  uint32_t result;
1970 
1971  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1972  return(result);
1973 }
1974 
1975 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1976 {
1977  uint32_t result;
1978 
1979  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1980  return(result);
1981 }
1982 
1983 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1984 {
1985  uint32_t result;
1986 
1987  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1988  return(result);
1989 }
1990 
1991 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
1992 {
1993  union llreg_u{
1994  uint32_t w32[2];
1995  uint64_t w64;
1996  } llr;
1997  llr.w64 = acc;
1998 
1999 #ifndef __ARMEB__ /* Little endian */
2000  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2001 #else /* Big endian */
2002  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2003 #endif
2004 
2005  return(llr.w64);
2006 }
2007 
2008 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
2009 {
2010  union llreg_u{
2011  uint32_t w32[2];
2012  uint64_t w64;
2013  } llr;
2014  llr.w64 = acc;
2015 
2016 #ifndef __ARMEB__ /* Little endian */
2017  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2018 #else /* Big endian */
2019  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2020 #endif
2021 
2022  return(llr.w64);
2023 }
2024 
2025 __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
2026 {
2027  uint32_t result;
2028 
2029  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2030  return(result);
2031 }
2032 
2033 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
2034 {
2035  int32_t result;
2036 
2037  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2038  return(result);
2039 }
2040 
2041 __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
2042 {
2043  int32_t result;
2044 
2045  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2046  return(result);
2047 }
2048 
2049 #if 0
2050 #define __PKHBT(ARG1,ARG2,ARG3) \
2051 ({ \
2052  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2053  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2054  __RES; \
2055  })
2056 
2057 #define __PKHTB(ARG1,ARG2,ARG3) \
2058 ({ \
2059  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2060  if (ARG3 == 0) \
2061  __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
2062  else \
2063  __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2064  __RES; \
2065  })
2066 #endif
2067 
2068 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
2069  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
2070 
2071 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
2072  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
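
/* Usage sketch (not part of the original header): packing two Q15 samples
   into one word for the SIMD intrinsics in this section; __PKHBT keeps `lo`
   in the bottom halfword and shifts `hi` into the top halfword. */
static inline uint32_t example_pack_q15(int16_t lo, int16_t hi)
{
  return __PKHBT((uint32_t)(uint16_t)lo, (uint32_t)(uint16_t)hi, 16);
}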
2073 
2074 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
2075 {
2076  int32_t result;
2077 
2078  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
2079  return(result);
2080 }
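
/* Usage sketch (not part of the original header): a Q15 dot-product kernel;
   each __SMLAD consumes one packed pair of 16-bit samples from each operand
   and accumulates both products in a single instruction. */
static inline uint32_t example_dot_q15(const uint32_t *a, const uint32_t *b,
                                       uint32_t pairs, uint32_t acc)
{
  while (pairs-- != 0U)
  {
    acc = __SMLAD(*a++, *b++, acc);   /* acc += a.lo*b.lo + a.hi*b.hi */
  }
  return acc;
}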
2081 
2082 #endif /* (__ARM_FEATURE_DSP == 1) */
2083 
2086 #pragma GCC diagnostic pop
2087 
2088 #endif /* __CMSIS_GCC_H */