/**************************************************************************//**
 * @file     cmsis_armcc.h
 * @brief    CMSIS compiler ARMCC (Arm Compiler 5) header file
 * @version  V5.4.0
 * @date     20. January 2023
 ******************************************************************************/
/*
 * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H


#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
#endif

/* CMSIS compiler control architecture macros */
#if ((defined (__TARGET_ARCH_6_M ) && (__TARGET_ARCH_6_M  == 1)) || \
     (defined (__TARGET_ARCH_6S_M) && (__TARGET_ARCH_6S_M == 1))   )
  #define __ARM_ARCH_6M__           1
#endif

#if (defined (__TARGET_ARCH_7_M ) && (__TARGET_ARCH_7_M  == 1))
  #define __ARM_ARCH_7M__           1
#endif

#if (defined (__TARGET_ARCH_7E_M) && (__TARGET_ARCH_7E_M == 1))
  #define __ARM_ARCH_7EM__          1
#endif

/* __ARM_ARCH_8M_BASE__   not applicable */
/* __ARM_ARCH_8M_MAIN__   not applicable */
/* __ARM_ARCH_8_1M_MAIN__ not applicable */

/* CMSIS compiler control DSP macros */
#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )
  #define __ARM_FEATURE_DSP         1
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                     __asm
#endif
#ifndef   __INLINE
  #define __INLINE                  __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE           static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE      static __forceinline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN               __declspec(noreturn)
#endif
#ifndef   __USED
  #define __USED                    __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                    __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                  __attribute__((packed))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT           __packed struct
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION            __packed union
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #define __UNALIGNED_UINT32(x)     (*((__packed uint32_t *)(x)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
#endif
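
/*
  Usage sketch (illustrative, not part of the CMSIS API): the unaligned access
  macros map to __packed pointer accesses, so Arm Compiler 5 emits code that is
  safe even when the address is not naturally aligned.  The buffer and offsets
  below are hypothetical.

    uint8_t  buf[8];
    uint16_t len = __UNALIGNED_UINT16_READ(&buf[1]);   // read a possibly unaligned halfword
    __UNALIGNED_UINT32_WRITE(&buf[3], 0x12345678U);    // write a possibly unaligned word
*/
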
#ifndef   __ALIGNED
  #define __ALIGNED(x)              __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()      __memory_changed()
#endif
#ifndef   __NO_INIT
  #define __NO_INIT                 __attribute__ ((section (".bss.noinit"), zero_init))
#endif
#ifndef   __ALIAS
  #define __ALIAS(x)                __attribute__ ((alias(x)))
#endif

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START
#define __PROGRAM_START           __main
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              Image$$ARM_LIB_STACK$$ZI$$Limit
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             Image$$ARM_LIB_STACK$$ZI$$Base
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif

/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI                             __wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)
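
/*
  Usage sketch (illustrative): a typical place where the barriers matter is
  after relocating the vector table.  SCB and SCB->VTOR come from the device's
  CMSIS core header (e.g. core_cm3.h), not from this file, and are assumptions
  here.

    SCB->VTOR = (uint32_t)&__VECTOR_TABLE;   // move the vector table base
    __DSB();                                 // ensure the register write has completed
    __ISB();                                 // flush the pipeline so the new setting takes effect
*/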


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
#define __ROR                             __ror


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )
  #define __RBIT                          __rbit
#else
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
  return result;
}
#endif


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                             __clz

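/*
  Usage sketch (illustrative): __RBIT and __CLZ combine into a common counting
  idiom.  Reversing the bits first turns "count trailing zeros" into "count
  leading zeros", which is handy when scanning a pending-flags word for the
  lowest set bit.

    uint32_t flags  = 0x00000028U;             // bits 3 and 5 set
    uint32_t lowest = __CLZ(__RBIT(flags));    // -> 3, index of the lowest set bit
*/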

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
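
/*
  Usage sketch (illustrative): the exclusive pair is normally used in a retry
  loop, because the store reports failure (returns 1) whenever the exclusive
  monitor was lost between the load and the store.  'counter' is a
  hypothetical shared variable.

    static volatile uint32_t counter;

    void increment_atomic(void)
    {
      uint32_t val;
      do
      {
        val = __LDREXW(&counter);              // load-exclusive the current value
      } while (__STREXW(val + 1U, &counter));  // retry until store-exclusive succeeds
    }
*/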


/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRT instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#else  /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__attribute__((always_inline)) __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */
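
/*
  Usage sketch (illustrative): saturation clamps instead of wrapping, which is
  the usual way to keep a computed result inside a fixed range.  'out' is a
  hypothetical int32_t control value.

    int32_t  q15 = __SSAT(out, 16);      // clamp to -32768..32767 (Q15 range)
    uint32_t dac = __USAT(out, 12);      // clamp to 0..4095 for a 12-bit DAC
*/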

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __enable_irq();     */


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __disable_irq();    */

/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_INLINE uint32_t __get_CONTROL(void)
{
  register uint32_t __regControl         __ASM("control");
  return(__regControl);
}


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  register uint32_t __regControl         __ASM("control");
  __regControl = control;
  __ISB();
}


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_INLINE uint32_t __get_IPSR(void)
{
  register uint32_t __regIPSR            __ASM("ipsr");
  return(__regIPSR);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
  register uint32_t __regAPSR            __ASM("apsr");
  return(__regAPSR);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_INLINE uint32_t __get_xPSR(void)
{
  register uint32_t __regXPSR            __ASM("xpsr");
  return(__regXPSR);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  return(__regProcessStackPointer);
}


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  __regProcessStackPointer = topOfProcStack;
}


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  return(__regMainStackPointer);
}


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  __regMainStackPointer = topOfMainStack;
}


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  register uint32_t __regPriMask         __ASM("primask");
  return(__regPriMask);
}


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  register uint32_t __regPriMask         __ASM("primask");
  __regPriMask = (priMask);
}
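
/*
  Usage sketch (illustrative): a nesting-safe critical section saves PRIMASK,
  masks interrupts, and restores the previous state afterwards, so it works
  whether or not interrupts were already disabled on entry.

    uint32_t primask = __get_PRIMASK();    // remember the current masking state
    __disable_irq();                       // set PRIMASK (compiler intrinsic)
    ... access shared data ...
    __set_PRIMASK(primask);                // restore; re-enables only if enabled before
*/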


#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   )

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  register uint32_t __regBasePri         __ASM("basepri");
  return(__regBasePri);
}


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI(uint32_t basePri)
{
  register uint32_t __regBasePri         __ASM("basepri");
  __regBasePri = (basePri & 0xFFU);
}


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  register uint32_t __regBasePriMax      __ASM("basepri_max");
  __regBasePriMax = (basePri & 0xFFU);
}
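
/*
  Usage sketch (illustrative): BASEPRI masks only interrupts whose priority
  value is numerically greater than or equal to BASEPRI, so higher-priority
  handlers keep running.  The value must be shifted into the implemented
  priority bits; __NVIC_PRIO_BITS comes from the device header and is an
  assumption here.

    uint32_t basepri = __get_BASEPRI();                  // save the current level
    __set_BASEPRI_MAX(5U << (8U - __NVIC_PRIO_BITS));    // raise the mask, never lower it
    ... code protected against priority value 5 and below-priority interrupts ...
    __set_BASEPRI(basepri);                              // restore the previous level
*/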


/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  return(__regFaultMask);
}


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  __regFaultMask = (faultMask & (uint32_t)1U);
}

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))   ) */


/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}


/*@} end of CMSIS_Core_RegAccFunctions */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) )

#define __SADD8                           __sadd8
#define __QADD8                           __qadd8
#define __SHADD8                          __shadd8
#define __UADD8                           __uadd8
#define __UQADD8                          __uqadd8
#define __UHADD8                          __uhadd8
#define __SSUB8                           __ssub8
#define __QSUB8                           __qsub8
#define __SHSUB8                          __shsub8
#define __USUB8                           __usub8
#define __UQSUB8                          __uqsub8
#define __UHSUB8                          __uhsub8
#define __SADD16                          __sadd16
#define __QADD16                          __qadd16
#define __SHADD16                         __shadd16
#define __UADD16                          __uadd16
#define __UQADD16                         __uqadd16
#define __UHADD16                         __uhadd16
#define __SSUB16                          __ssub16
#define __QSUB16                          __qsub16
#define __SHSUB16                         __shsub16
#define __USUB16                          __usub16
#define __UQSUB16                         __uqsub16
#define __UHSUB16                         __uhsub16
#define __SASX                            __sasx
#define __QASX                            __qasx
#define __SHASX                           __shasx
#define __UASX                            __uasx
#define __UQASX                           __uqasx
#define __UHASX                           __uhasx
#define __SSAX                            __ssax
#define __QSAX                            __qsax
#define __SHSAX                           __shsax
#define __USAX                            __usax
#define __UQSAX                           __uqsax
#define __UHSAX                           __uhsax
#define __USAD8                           __usad8
#define __USADA8                          __usada8
#define __SSAT16                          __ssat16
#define __USAT16                          __usat16
#define __UXTB16                          __uxtb16
#define __UXTAB16                         __uxtab16
#define __SXTB16                          __sxtb16
#define __SXTAB16                         __sxtab16
#define __SMUAD                           __smuad
#define __SMUADX                          __smuadx
#define __SMLAD                           __smlad
#define __SMLADX                          __smladx
#define __SMLALD                          __smlald
#define __SMLALDX                         __smlaldx
#define __SMUSD                           __smusd
#define __SMUSDX                          __smusdx
#define __SMLSD                           __smlsd
#define __SMLSDX                          __smlsdx
#define __SMLSLD                          __smlsld
#define __SMLSLDX                         __smlsldx
#define __SEL                             __sel
#define __QADD                            __qadd
#define __QSUB                            __qsub

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                      ((int64_t)(ARG3) << 32U)     ) >> 32U))

#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))

#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
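
/*
  Usage sketch (illustrative): the halfword SIMD intrinsics operate on two
  int16 lanes packed into one 32-bit word, which is the layout produced by
  __PKHBT below.

    uint32_t a = __PKHBT(100, 200, 16);     // pack 100 (low half) and 200 (high half)
    uint32_t b = __PKHBT(  1,   2, 16);
    uint32_t s = __QADD16(a, b);            // lane-wise saturating add: {101, 202}
*/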

#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCC_H */
