1 /**************************************************************************//**
2 * @file cmsis_gcc.h
3 * @brief CMSIS compiler GCC header file
4 * @version V5.0.3
5 * @date 16. January 2018
6 ******************************************************************************/
7 /*
8 * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
9 *
10 * SPDX-License-Identifier: Apache-2.0
11 *
12 * Licensed under the Apache License, Version 2.0 (the License); you may
13 * not use this file except in compliance with the License.
14 * You may obtain a copy of the License at
15 *
16 * www.apache.org/licenses/LICENSE-2.0
17 *
18 * Unless required by applicable law or agreed to in writing, software
19 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 * See the License for the specific language governing permissions and
22 * limitations under the License.
23 */
24
25 #ifndef __CMSIS_GCC_H
26 #define __CMSIS_GCC_H
27
28 /* ignore some GCC warnings */
29 #pragma GCC diagnostic push
30 #pragma GCC diagnostic ignored "-Wsign-conversion"
31 #pragma GCC diagnostic ignored "-Wconversion"
32 #pragma GCC diagnostic ignored "-Wunused-parameter"
33
34 /* Fallback for __has_builtin */
35 #ifndef __has_builtin
36 #define __has_builtin(x) (0)
37 #endif
38
39 /* CMSIS compiler specific defines */
40 #ifndef __ASM
41 #define __ASM __asm
42 #endif
43 #ifndef __INLINE
44 #define __INLINE inline
45 #endif
46 #ifndef __STATIC_INLINE
47 #define __STATIC_INLINE static inline
48 #endif
49 #ifndef __STATIC_FORCEINLINE
50 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
51 #endif
52 #ifndef __NO_RETURN
53 #define __NO_RETURN __attribute__((__noreturn__))
54 #endif
55 #ifndef __USED
56 #define __USED __attribute__((used))
57 #endif
58 #ifndef __WEAK
59 #define __WEAK __attribute__((weak))
60 #endif
61 #ifndef __PACKED
62 #define __PACKED __attribute__((packed, aligned(1)))
63 #endif
64 #ifndef __PACKED_STRUCT
65 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
66 #endif
67 #ifndef __PACKED_UNION
68 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
69 #endif
70 #ifndef __UNALIGNED_UINT32 /* deprecated */
71 #pragma GCC diagnostic push
72 #pragma GCC diagnostic ignored "-Wpacked"
73 #pragma GCC diagnostic ignored "-Wattributes"
74 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
75 #pragma GCC diagnostic pop
76 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
77 #endif
78 #ifndef __UNALIGNED_UINT16_WRITE
79 #pragma GCC diagnostic push
80 #pragma GCC diagnostic ignored "-Wpacked"
81 #pragma GCC diagnostic ignored "-Wattributes"
82 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
83 #pragma GCC diagnostic pop
84 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
85 #endif
86 #ifndef __UNALIGNED_UINT16_READ
87 #pragma GCC diagnostic push
88 #pragma GCC diagnostic ignored "-Wpacked"
89 #pragma GCC diagnostic ignored "-Wattributes"
90 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
91 #pragma GCC diagnostic pop
92 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
93 #endif
94 #ifndef __UNALIGNED_UINT32_WRITE
95 #pragma GCC diagnostic push
96 #pragma GCC diagnostic ignored "-Wpacked"
97 #pragma GCC diagnostic ignored "-Wattributes"
98 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
99 #pragma GCC diagnostic pop
100 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
101 #endif
102 #ifndef __UNALIGNED_UINT32_READ
103 #pragma GCC diagnostic push
104 #pragma GCC diagnostic ignored "-Wpacked"
105 #pragma GCC diagnostic ignored "-Wattributes"
106 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
107 #pragma GCC diagnostic pop
108 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
109 #endif
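/* Usage sketch (illustrative only, not part of the CMSIS API): reading a 32-bit
   field that starts at an arbitrary byte offset inside a packed buffer, without
   relying on natural alignment. */
__STATIC_INLINE uint32_t example_read_unaligned_word(const uint8_t *buf, uint32_t byte_offset)
{
  return __UNALIGNED_UINT32_READ(&buf[byte_offset]);
}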
110 #ifndef __ALIGNED
111 #define __ALIGNED(x) __attribute__((aligned(x)))
112 #endif
113 #ifndef __RESTRICT
114 #define __RESTRICT __restrict
115 #endif
116
117
118 /* ########################### Core Function Access ########################### */
119 /** \ingroup CMSIS_Core_FunctionInterface
120 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
121 @{
122 */
123
124 /**
125 \brief Enable IRQ Interrupts
126 \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
127 Can only be executed in Privileged modes.
128 */
__STATIC_FORCEINLINE void __enable_irq(void)
130 {
131 __ASM volatile ("cpsie i" : : : "memory");
132 }
133
134
135 /**
136 \brief Disable IRQ Interrupts
137 \details Disables IRQ interrupts by setting the I-bit in the CPSR.
138 Can only be executed in Privileged modes.
139 */
__STATIC_FORCEINLINE void __disable_irq(void)
141 {
142 __ASM volatile ("cpsid i" : : : "memory");
143 }
144
145
146 /**
147 \brief Get Control Register
148 \details Returns the content of the Control Register.
149 \return Control Register value
150 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
152 {
153 uint32_t result;
154
155 __ASM volatile ("MRS %0, control" : "=r" (result) );
156 return(result);
157 }
158
159
160 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
161 /**
162 \brief Get Control Register (non-secure)
163 \details Returns the content of the non-secure Control Register when in secure mode.
164 \return non-secure Control Register value
165 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
167 {
168 uint32_t result;
169
170 __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
171 return(result);
172 }
173 #endif
174
175
176 /**
177 \brief Set Control Register
178 \details Writes the given value to the Control Register.
179 \param [in] control Control Register value to set
180 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
182 {
183 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
184 }
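/* Usage sketch (illustrative only, assumes the process stack was already set up
   with __set_PSP): switch Thread mode to unprivileged execution on the process
   stack. CONTROL bit 0 is nPRIV and bit 1 is SPSEL; an ISB is required after the
   write so that the following instructions use the new state. */
__STATIC_INLINE void example_enter_unprivileged_thread_mode(void)
{
  __set_CONTROL(__get_CONTROL() | 0x3U);
  __ASM volatile ("isb" ::: "memory");
}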
185
186
187 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
188 /**
189 \brief Set Control Register (non-secure)
190 \details Writes the given value to the non-secure Control Register when in secure state.
191 \param [in] control Control Register value to set
192 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
194 {
195 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
196 }
197 #endif
198
199
200 /**
201 \brief Get IPSR Register
202 \details Returns the content of the IPSR Register.
203 \return IPSR Register value
204 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
206 {
207 uint32_t result;
208
209 __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
210 return(result);
211 }
212
213
214 /**
215 \brief Get APSR Register
216 \details Returns the content of the APSR Register.
217 \return APSR Register value
218 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
220 {
221 uint32_t result;
222
223 __ASM volatile ("MRS %0, apsr" : "=r" (result) );
224 return(result);
225 }
226
227
228 /**
229 \brief Get xPSR Register
230 \details Returns the content of the xPSR Register.
231 \return xPSR Register value
232 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
234 {
235 uint32_t result;
236
237 __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
238 return(result);
239 }
240
241
242 /**
243 \brief Get Process Stack Pointer
244 \details Returns the current value of the Process Stack Pointer (PSP).
245 \return PSP Register value
246 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
248 {
249 register uint32_t result;
250
251 __ASM volatile ("MRS %0, psp" : "=r" (result) );
252 return(result);
253 }
254
255
256 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
257 /**
258 \brief Get Process Stack Pointer (non-secure)
259 \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
260 \return PSP Register value
261 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
263 {
264 register uint32_t result;
265
266 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
267 return(result);
268 }
269 #endif
270
271
272 /**
273 \brief Set Process Stack Pointer
274 \details Assigns the given value to the Process Stack Pointer (PSP).
275 \param [in] topOfProcStack Process Stack Pointer value to set
276 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
278 {
279 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
280 }
281
282
283 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
284 /**
285 \brief Set Process Stack Pointer (non-secure)
286 \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
287 \param [in] topOfProcStack Process Stack Pointer value to set
288 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
290 {
291 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
292 }
293 #endif
294
295
296 /**
297 \brief Get Main Stack Pointer
298 \details Returns the current value of the Main Stack Pointer (MSP).
299 \return MSP Register value
300 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
302 {
303 register uint32_t result;
304
305 __ASM volatile ("MRS %0, msp" : "=r" (result) );
306 return(result);
307 }
308
309
310 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
311 /**
312 \brief Get Main Stack Pointer (non-secure)
313 \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
314 \return MSP Register value
315 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
317 {
318 register uint32_t result;
319
320 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
321 return(result);
322 }
323 #endif
324
325
326 /**
327 \brief Set Main Stack Pointer
328 \details Assigns the given value to the Main Stack Pointer (MSP).
329 \param [in] topOfMainStack Main Stack Pointer value to set
330 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
332 {
333 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
334 }
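/* Usage sketch (illustrative only): a minimal bootloader hand-off. app_base is a
   made-up parameter holding the address of the application's vector table; word 0
   of that table is the application's initial main stack pointer and word 1 its
   reset handler. A real hand-off would typically also disable interrupts and
   relocate SCB->VTOR first. */
__STATIC_INLINE void example_jump_to_application(uint32_t app_base)
{
  const uint32_t *vectors = (const uint32_t *)app_base;

  __set_MSP(vectors[0]);                 /* adopt the application's main stack */
  ((void (*)(void))vectors[1])();        /* enter the application's reset handler */
}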
335
336
337 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
338 /**
339 \brief Set Main Stack Pointer (non-secure)
340 \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
341 \param [in] topOfMainStack Main Stack Pointer value to set
342 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
344 {
345 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
346 }
347 #endif
348
349
350 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
351 /**
352 \brief Get Stack Pointer (non-secure)
353 \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
354 \return SP Register value
355 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
357 {
358 register uint32_t result;
359
360 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
361 return(result);
362 }
363
364
365 /**
366 \brief Set Stack Pointer (non-secure)
367 \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
368 \param [in] topOfStack Stack Pointer value to set
369 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
371 {
372 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
373 }
374 #endif
375
376
377 /**
378 \brief Get Priority Mask
379 \details Returns the current state of the priority mask bit from the Priority Mask Register.
380 \return Priority Mask value
381 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
383 {
384 uint32_t result;
385
386 __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
387 return(result);
388 }
389
390
391 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
392 /**
393 \brief Get Priority Mask (non-secure)
394 \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
395 \return Priority Mask value
396 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
398 {
399 uint32_t result;
400
401 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
402 return(result);
403 }
404 #endif
405
406
407 /**
408 \brief Set Priority Mask
409 \details Assigns the given value to the Priority Mask Register.
410 \param [in] priMask Priority Mask
411 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
413 {
414 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
415 }
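/* Usage sketch (illustrative only, not part of the CMSIS API): a PRIMASK based
   critical section that saves and restores the previous mask, so that nested
   sections behave correctly. */
__STATIC_INLINE uint32_t example_enter_critical(void)
{
  uint32_t primask_state = __get_PRIMASK();   /* remember the current mask */
  __disable_irq();                            /* mask all configurable interrupts */
  return primask_state;
}

__STATIC_INLINE void example_exit_critical(uint32_t primask_state)
{
  __set_PRIMASK(primask_state);               /* restore the previous mask */
}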
416
417
418 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
419 /**
420 \brief Set Priority Mask (non-secure)
421 \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
422 \param [in] priMask Priority Mask
423 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
425 {
426 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
427 }
428 #endif
429
430
431 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
432 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
433 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
434 /**
435 \brief Enable FIQ
436 \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
437 Can only be executed in Privileged modes.
438 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
440 {
441 __ASM volatile ("cpsie f" : : : "memory");
442 }
443
444
445 /**
446 \brief Disable FIQ
447 \details Disables FIQ interrupts by setting the F-bit in the CPSR.
448 Can only be executed in Privileged modes.
449 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
451 {
452 __ASM volatile ("cpsid f" : : : "memory");
453 }
454
455
456 /**
457 \brief Get Base Priority
458 \details Returns the current value of the Base Priority register.
459 \return Base Priority register value
460 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
462 {
463 uint32_t result;
464
465 __ASM volatile ("MRS %0, basepri" : "=r" (result) );
466 return(result);
467 }
468
469
470 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
471 /**
472 \brief Get Base Priority (non-secure)
473 \details Returns the current value of the non-secure Base Priority register when in secure state.
474 \return Base Priority register value
475 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
477 {
478 uint32_t result;
479
480 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
481 return(result);
482 }
483 #endif
484
485
486 /**
487 \brief Set Base Priority
488 \details Assigns the given value to the Base Priority register.
489 \param [in] basePri Base Priority value to set
490 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
492 {
493 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
494 }
495
496
497 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
498 /**
499 \brief Set Base Priority (non-secure)
500 \details Assigns the given value to the non-secure Base Priority register when in secure state.
501 \param [in] basePri Base Priority value to set
502 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
504 {
505 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
506 }
507 #endif
508
509
510 /**
511 \brief Set Base Priority with condition
512 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
513 or the new value increases the BASEPRI priority level.
514 \param [in] basePri Base Priority value to set
515 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
517 {
518 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
519 }
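/* Usage sketch (illustrative only): a BASEPRI based critical section that masks
   interrupts at or below a given priority while still admitting more urgent ones.
   The shift assumes a device with 4 implemented priority bits (__NVIC_PRIO_BITS == 4),
   so priorities occupy the upper nibble of the 8-bit field. */
__STATIC_INLINE uint32_t example_raise_basepri(uint32_t priority)
{
  uint32_t previous = __get_BASEPRI();
  __set_BASEPRI_MAX(priority << 4);   /* only ever raises the masking level */
  return previous;
}

__STATIC_INLINE void example_restore_basepri(uint32_t previous)
{
  __set_BASEPRI(previous);
}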
520
521
522 /**
523 \brief Get Fault Mask
524 \details Returns the current value of the Fault Mask register.
525 \return Fault Mask register value
526 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
528 {
529 uint32_t result;
530
531 __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
532 return(result);
533 }
534
535
536 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
537 /**
538 \brief Get Fault Mask (non-secure)
539 \details Returns the current value of the non-secure Fault Mask register when in secure state.
540 \return Fault Mask register value
541 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
543 {
544 uint32_t result;
545
546 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
547 return(result);
548 }
549 #endif
550
551
552 /**
553 \brief Set Fault Mask
554 \details Assigns the given value to the Fault Mask register.
555 \param [in] faultMask Fault Mask value to set
556 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
558 {
559 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
560 }
561
562
563 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
564 /**
565 \brief Set Fault Mask (non-secure)
566 \details Assigns the given value to the non-secure Fault Mask register when in secure state.
567 \param [in] faultMask Fault Mask value to set
568 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
570 {
571 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
572 }
573 #endif
574
575 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
576 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
577 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
578
579
580 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
581 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
582
583 /**
584 \brief Get Process Stack Pointer Limit
   Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
   Stack Pointer Limit register, hence zero is always returned in non-secure
   mode.
588
589 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
590 \return PSPLIM Register value
591 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
593 {
594 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
595 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
596 // without main extensions, the non-secure PSPLIM is RAZ/WI
597 return 0U;
598 #else
599 register uint32_t result;
600 __ASM volatile ("MRS %0, psplim" : "=r" (result) );
601 return result;
602 #endif
603 }
604
605 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
606 /**
607 \brief Get Process Stack Pointer Limit (non-secure)
   Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
   Stack Pointer Limit register, hence zero is always returned.
610
611 \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
612 \return PSPLIM Register value
613 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
615 {
616 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
617 // without main extensions, the non-secure PSPLIM is RAZ/WI
618 return 0U;
619 #else
620 register uint32_t result;
621 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
622 return result;
623 #endif
624 }
625 #endif
626
627
628 /**
629 \brief Set Process Stack Pointer Limit
630 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
631 Stack Pointer Limit register hence the write is silently ignored in non-secure
632 mode.
633
634 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
635 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
636 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
638 {
639 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
640 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
641 // without main extensions, the non-secure PSPLIM is RAZ/WI
642 (void)ProcStackPtrLimit;
643 #else
644 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
645 #endif
646 }
647
648
649 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
650 /**
  \brief   Set Process Stack Pointer Limit (non-secure)
652 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
653 Stack Pointer Limit register hence the write is silently ignored.
654
655 \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
656 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
657 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
659 {
660 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
661 // without main extensions, the non-secure PSPLIM is RAZ/WI
662 (void)ProcStackPtrLimit;
663 #else
664 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
665 #endif
666 }
667 #endif
668
669
670 /**
671 \brief Get Main Stack Pointer Limit
   Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
   Stack Pointer Limit register, hence zero is always returned in non-secure
   mode.
675
676 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
677 \return MSPLIM Register value
678 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
680 {
681 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
682 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
683 // without main extensions, the non-secure MSPLIM is RAZ/WI
684 return 0U;
685 #else
686 register uint32_t result;
687 __ASM volatile ("MRS %0, msplim" : "=r" (result) );
688 return result;
689 #endif
690 }
691
692
693 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
694 /**
695 \brief Get Main Stack Pointer Limit (non-secure)
   Devices without ARMv8-M Main Extensions (e.g. Cortex-M23) lack the non-secure
   Stack Pointer Limit register, hence zero is always returned.
698
699 \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
700 \return MSPLIM Register value
701 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
703 {
704 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
705 // without main extensions, the non-secure MSPLIM is RAZ/WI
706 return 0U;
707 #else
708 register uint32_t result;
709 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
710 return result;
711 #endif
712 }
713 #endif
714
715
716 /**
717 \brief Set Main Stack Pointer Limit
718 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
719 Stack Pointer Limit register hence the write is silently ignored in non-secure
720 mode.
721
722 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
723 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
724 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
726 {
727 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
728 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
729 // without main extensions, the non-secure MSPLIM is RAZ/WI
730 (void)MainStackPtrLimit;
731 #else
732 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
733 #endif
734 }
735
736
737 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
738 /**
739 \brief Set Main Stack Pointer Limit (non-secure)
740 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
741 Stack Pointer Limit register hence the write is silently ignored.
742
743 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
745 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
747 {
748 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
749 // without main extensions, the non-secure MSPLIM is RAZ/WI
750 (void)MainStackPtrLimit;
751 #else
752 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
753 #endif
754 }
755 #endif
756
757 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
758 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
759
760
761 #if ((defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
762 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
763
764 /**
765 \brief Get FPSCR
766 \details Returns the current value of the Floating Point Status/Control register.
767 \return Floating Point Status/Control register value
768 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
770 {
771 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
772 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
773 #if __has_builtin(__builtin_arm_get_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
774 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
775 return __builtin_arm_get_fpscr();
776 #else
777 uint32_t result;
778
779 __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
780 return(result);
781 #endif
782 #else
783 return(0U);
784 #endif
785 }
786
787
788 /**
789 \brief Set FPSCR
790 \details Assigns the given value to the Floating Point Status/Control register.
791 \param [in] fpscr Floating Point Status/Control value to set
792 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
794 {
795 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
796 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
797 #if __has_builtin(__builtin_arm_set_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
798 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
799 __builtin_arm_set_fpscr(fpscr);
800 #else
801 __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
802 #endif
803 #else
804 (void)fpscr;
805 #endif
806 }
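/* Usage sketch (illustrative only): clear the cumulative floating-point exception
   flags (IOC, DZC, OFC, UFC, IXC and IDC) after they have been inspected. */
__STATIC_INLINE void example_clear_fpu_exception_flags(void)
{
  __set_FPSCR(__get_FPSCR() & ~0x0000009FUL);
}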
807
808 #endif /* ((defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
809 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
810
811
812
813 /*@} end of CMSIS_Core_RegAccFunctions */
814
815
816 /* ########################## Core Instruction Access ######################### */
817 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
818 Access to dedicated instructions
819 @{
820 */
821
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
825 #if defined (__thumb__) && !defined (__thumb2__)
826 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
827 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
828 #define __CMSIS_GCC_USE_REG(r) "l" (r)
829 #else
830 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
831 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
832 #define __CMSIS_GCC_USE_REG(r) "r" (r)
833 #endif
834
835 /**
836 \brief No Operation
837 \details No Operation does nothing. This instruction can be used for code alignment purposes.
838 */
839 #define __NOP() __ASM volatile ("nop")
840
841 /**
842 \brief Wait For Interrupt
843 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
844 */
845 #define __WFI() __ASM volatile ("wfi")
846
847
848 /**
849 \brief Wait For Event
850 \details Wait For Event is a hint instruction that permits the processor to enter
851 a low-power state until one of a number of events occurs.
852 */
853 #define __WFE() __ASM volatile ("wfe")
854
855
856 /**
857 \brief Send Event
858 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
859 */
860 #define __SEV() __ASM volatile ("sev")
861
862
863 /**
864 \brief Instruction Synchronization Barrier
865 \details Instruction Synchronization Barrier flushes the pipeline in the processor,
866 so that all instructions following the ISB are fetched from cache or memory,
867 after the instruction has been completed.
868 */
__STATIC_FORCEINLINE void __ISB(void)
870 {
871 __ASM volatile ("isb 0xF":::"memory");
872 }
873
874
875 /**
876 \brief Data Synchronization Barrier
877 \details Acts as a special kind of Data Memory Barrier.
878 It completes when all explicit memory accesses before this instruction complete.
879 */
__STATIC_FORCEINLINE void __DSB(void)
881 {
882 __ASM volatile ("dsb 0xF":::"memory");
883 }
884
885
886 /**
887 \brief Data Memory Barrier
888 \details Ensures the apparent order of the explicit memory operations before
889 and after the instruction, without ensuring their completion.
890 */
__STATIC_FORCEINLINE void __DMB(void)
892 {
893 __ASM volatile ("dmb 0xF":::"memory");
894 }
895
896
897 /**
898 \brief Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
900 \param [in] value Value to reverse
901 \return Reversed value
902 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
904 {
905 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
906 return __builtin_bswap32(value);
907 #else
908 uint32_t result;
909
910 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
911 return result;
912 #endif
913 }
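/* Usage sketch (illustrative only): on a little-endian Cortex-M core, __REV
   converts a word between host byte order and network (big-endian) byte order. */
__STATIC_INLINE uint32_t example_host_to_network_u32(uint32_t host_value)
{
  return __REV(host_value);
}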
914
915
916 /**
917 \brief Reverse byte order (16 bit)
918 \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
919 \param [in] value Value to reverse
920 \return Reversed value
921 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
923 {
924 uint32_t result;
925
926 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
927 return result;
928 }
929
930
931 /**
932 \brief Reverse byte order (16 bit)
933 \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
934 \param [in] value Value to reverse
935 \return Reversed value
936 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
938 {
939 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
940 return (int16_t)__builtin_bswap16(value);
941 #else
942 int16_t result;
943
944 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
945 return result;
946 #endif
947 }
948
949
950 /**
951 \brief Rotate Right in unsigned value (32 bit)
952 \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
953 \param [in] op1 Value to rotate
954 \param [in] op2 Number of Bits to rotate
955 \return Rotated value
956 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
958 {
959 op2 %= 32U;
960 if (op2 == 0U)
961 {
962 return op1;
963 }
964 return (op1 >> op2) | (op1 << (32U - op2));
965 }
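/* Illustrative check of the rotation: __ROR(0x12345678U, 8U) evaluates to 0x78123456U. */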
966
967
968 /**
969 \brief Breakpoint
970 \details Causes the processor to enter Debug state.
971 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
972 \param [in] value is ignored by the processor.
973 If required, a debugger can use it to store additional information about the breakpoint.
974 */
975 #define __BKPT(value) __ASM volatile ("bkpt "#value)
976
977
978 /**
979 \brief Reverse bit order of value
980 \details Reverses the bit order of the given value.
981 \param [in] value Value to reverse
982 \return Reversed value
983 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
985 {
986 uint32_t result;
987
988 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
989 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
990 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
991 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
992 #else
993 uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
994
995 result = value; /* r will be reversed bits of v; first get LSB of v */
996 for (value >>= 1U; value != 0U; value >>= 1U)
997 {
998 result <<= 1U;
999 result |= value & 1U;
1000 s--;
1001 }
1002 result <<= s; /* shift when v's highest bits are zero */
1003 #endif
1004 return result;
1005 }
1006
1007
1008 /**
1009 \brief Count leading zeros
1010 \details Counts the number of leading zeros of a data value.
1011 \param [in] value Value to count the leading zeros
1012 \return number of leading zeros in value
1013 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* __builtin_clz(0) is formally undefined behaviour, whereas the CLZ
     instruction is defined to return 32, so handle zero explicitly. */
  return (value == 0U) ? 32U : (uint8_t)__builtin_clz(value);
}
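/* Usage sketch (illustrative only): floor(log2(value)), i.e. the index of the most
   significant set bit, for a non-zero argument. */
__STATIC_INLINE uint32_t example_floor_log2(uint32_t value)
{
  return 31U - (uint32_t)__CLZ(value);   /* value must be non-zero */
}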
1015
1016
1017 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1018 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1019 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1020 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1021 /**
1022 \brief LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for an 8 bit value.
  \param [in]    addr  Pointer to data
  \return        value of type uint8_t at (*addr)
1026 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
1028 {
1029 uint32_t result;
1030
1031 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1032 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
1033 #else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
1037 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1038 #endif
1039 return ((uint8_t) result); /* Add explicit type cast here */
1040 }
1041
1042
1043 /**
1044 \brief LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    addr  Pointer to data
  \return        value of type uint16_t at (*addr)
1048 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
1050 {
1051 uint32_t result;
1052
1053 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1054 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
1055 #else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
1059 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1060 #endif
1061 return ((uint16_t) result); /* Add explicit type cast here */
1062 }
1063
1064
1065 /**
1066 \brief LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    addr  Pointer to data
  \return        value of type uint32_t at (*addr)
1070 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
1072 {
1073 uint32_t result;
1074
1075 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
1076 return(result);
1077 }
1078
1079
1080 /**
1081 \brief STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]    value  Value to store
  \param [in]    addr   Pointer to location
1085 \return 0 Function succeeded
1086 \return 1 Function failed
1087 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
1089 {
1090 uint32_t result;
1091
1092 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1093 return(result);
1094 }
1095
1096
1097 /**
1098 \brief STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]    value  Value to store
  \param [in]    addr   Pointer to location
1102 \return 0 Function succeeded
1103 \return 1 Function failed
1104 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
1106 {
1107 uint32_t result;
1108
1109 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1110 return(result);
1111 }
1112
1113
1114 /**
1115 \brief STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]    value  Value to store
  \param [in]    addr   Pointer to location
1119 \return 0 Function succeeded
1120 \return 1 Function failed
1121 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
1123 {
1124 uint32_t result;
1125
1126 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
1127 return(result);
1128 }
1129
1130
1131 /**
1132 \brief Remove the exclusive lock
1133 \details Removes the exclusive lock which is created by LDREX.
1134 */
__STATIC_FORCEINLINE void __CLREX(void)
1136 {
1137 __ASM volatile ("clrex" ::: "memory");
1138 }
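/* Usage sketch (illustrative only, not part of the CMSIS API): a lock-free counter
   increment built from the exclusive access pair; the store is retried until no
   other access has broken the reservation in the meantime. */
__STATIC_INLINE void example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;
  do
  {
    value = __LDREXW(counter) + 1U;           /* load with exclusive reservation */
  } while (__STREXW(value, counter) != 0U);   /* retry if the reservation was lost */
}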
1139
1140 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1141 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1142 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1143 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1144
1145
1146 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1147 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1148 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1149 /**
1150 \brief Signed Saturate
1151 \details Saturates a signed value.
1152 \param [in] ARG1 Value to be saturated
1153 \param [in] ARG2 Bit position to saturate to (1..32)
1154 \return Saturated value
1155 */
1156 #define __SSAT(ARG1,ARG2) \
1157 __extension__ \
1158 ({ \
1159 int32_t __RES, __ARG1 = (ARG1); \
1160 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1161 __RES; \
1162 })
1163
1164
1165 /**
1166 \brief Unsigned Saturate
1167 \details Saturates an unsigned value.
1168 \param [in] ARG1 Value to be saturated
1169 \param [in] ARG2 Bit position to saturate to (0..31)
1170 \return Saturated value
1171 */
1172 #define __USAT(ARG1,ARG2) \
1173 __extension__ \
1174 ({ \
1175 uint32_t __RES, __ARG1 = (ARG1); \
1176 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1177 __RES; \
1178 })
1179
1180
1181 /**
1182 \brief Rotate Right with Extend (32 bit)
1183 \details Moves each bit of a bitstring right by one bit.
1184 The carry input is shifted in at the left end of the bitstring.
1185 \param [in] value Value to rotate
1186 \return Rotated value
1187 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1189 {
1190 uint32_t result;
1191
1192 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1193 return(result);
1194 }
1195
1196
1197 /**
1198 \brief LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRT instruction for an 8 bit value.
1200 \param [in] ptr Pointer to data
1201 \return value of type uint8_t at (*ptr)
1202 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1204 {
1205 uint32_t result;
1206
1207 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1208 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1209 #else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
1213 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1214 #endif
1215 return ((uint8_t) result); /* Add explicit type cast here */
1216 }
1217
1218
1219 /**
1220 \brief LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRT instruction for 16 bit values.
1222 \param [in] ptr Pointer to data
1223 \return value of type uint16_t at (*ptr)
1224 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1226 {
1227 uint32_t result;
1228
1229 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1230 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1231 #else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
1235 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1236 #endif
1237 return ((uint16_t) result); /* Add explicit type cast here */
1238 }
1239
1240
1241 /**
1242 \brief LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit values.
1244 \param [in] ptr Pointer to data
1245 \return value of type uint32_t at (*ptr)
1246 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
1248 {
1249 uint32_t result;
1250
1251 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
1252 return(result);
1253 }
1254
1255
1256 /**
1257 \brief STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit values.
1259 \param [in] value Value to store
1260 \param [in] ptr Pointer to location
1261 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
1263 {
1264 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1265 }
1266
1267
1268 /**
1269 \brief STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit values.
1271 \param [in] value Value to store
1272 \param [in] ptr Pointer to location
1273 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
1275 {
1276 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1277 }
1278
1279
1280 /**
1281 \brief STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
1283 \param [in] value Value to store
1284 \param [in] ptr Pointer to location
1285 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
1287 {
1288 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
1289 }
1290
1291 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1292 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1293 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1294
1295 /**
1296 \brief Signed Saturate
1297 \details Saturates a signed value.
1298 \param [in] value Value to be saturated
1299 \param [in] sat Bit position to saturate to (1..32)
1300 \return Saturated value
1301 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1303 {
1304 if ((sat >= 1U) && (sat <= 32U))
1305 {
1306 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1307 const int32_t min = -1 - max ;
1308 if (val > max)
1309 {
1310 return max;
1311 }
1312 else if (val < min)
1313 {
1314 return min;
1315 }
1316 }
1317 return val;
1318 }
1319
1320 /**
1321 \brief Unsigned Saturate
1322 \details Saturates an unsigned value.
1323 \param [in] value Value to be saturated
1324 \param [in] sat Bit position to saturate to (0..31)
1325 \return Saturated value
1326 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1328 {
1329 if (sat <= 31U)
1330 {
1331 const uint32_t max = ((1U << sat) - 1U);
1332 if (val > (int32_t)max)
1333 {
1334 return max;
1335 }
1336 else if (val < 0)
1337 {
1338 return 0U;
1339 }
1340 }
1341 return (uint32_t)val;
1342 }
1343
1344 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1345 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1346 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
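/* Usage sketch (illustrative only): clamping a 32-bit accumulator into the signed
   16-bit (Q15) range, a typical use of the saturating operations in DSP code. */
__STATIC_INLINE int16_t example_clip_to_q15(int32_t accumulator)
{
  return (int16_t)__SSAT(accumulator, 16);   /* clamp into [-32768, 32767] */
}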
1347
1348
1349 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1350 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1351 /**
1352 \brief Load-Acquire (8 bit)
1353 \details Executes a LDAB instruction for 8 bit value.
1354 \param [in] ptr Pointer to data
1355 \return value of type uint8_t at (*ptr)
1356 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
1358 {
1359 uint32_t result;
1360
1361 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
1362 return ((uint8_t) result);
1363 }
1364
1365
1366 /**
1367 \brief Load-Acquire (16 bit)
1368 \details Executes a LDAH instruction for 16 bit values.
1369 \param [in] ptr Pointer to data
1370 \return value of type uint16_t at (*ptr)
1371 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
1373 {
1374 uint32_t result;
1375
1376 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
1377 return ((uint16_t) result);
1378 }
1379
1380
1381 /**
1382 \brief Load-Acquire (32 bit)
1383 \details Executes a LDA instruction for 32 bit values.
1384 \param [in] ptr Pointer to data
1385 \return value of type uint32_t at (*ptr)
1386 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
1388 {
1389 uint32_t result;
1390
1391 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
1392 return(result);
1393 }
1394
1395
1396 /**
1397 \brief Store-Release (8 bit)
1398 \details Executes a STLB instruction for 8 bit values.
1399 \param [in] value Value to store
1400 \param [in] ptr Pointer to location
1401 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
1403 {
1404 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1405 }
1406
1407
1408 /**
1409 \brief Store-Release (16 bit)
1410 \details Executes a STLH instruction for 16 bit values.
1411 \param [in] value Value to store
1412 \param [in] ptr Pointer to location
1413 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
1415 {
1416 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1417 }
1418
1419
1420 /**
1421 \brief Store-Release (32 bit)
1422 \details Executes a STL instruction for 32 bit values.
1423 \param [in] value Value to store
1424 \param [in] ptr Pointer to location
1425 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
1427 {
1428 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1429 }
1430
1431
1432 /**
1433 \brief Load-Acquire Exclusive (8 bit)
1434 \details Executes a LDAB exclusive instruction for 8 bit value.
1435 \param [in] ptr Pointer to data
1436 \return value of type uint8_t at (*ptr)
1437 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
1439 {
1440 uint32_t result;
1441
1442 __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
1443 return ((uint8_t) result);
1444 }
1445
1446
1447 /**
1448 \brief Load-Acquire Exclusive (16 bit)
1449 \details Executes a LDAH exclusive instruction for 16 bit values.
1450 \param [in] ptr Pointer to data
1451 \return value of type uint16_t at (*ptr)
1452 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
1454 {
1455 uint32_t result;
1456
1457 __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
1458 return ((uint16_t) result);
1459 }
1460
1461
1462 /**
1463 \brief Load-Acquire Exclusive (32 bit)
1464 \details Executes a LDA exclusive instruction for 32 bit values.
1465 \param [in] ptr Pointer to data
1466 \return value of type uint32_t at (*ptr)
1467 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
1469 {
1470 uint32_t result;
1471
1472 __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
1473 return(result);
1474 }
1475
1476
1477 /**
1478 \brief Store-Release Exclusive (8 bit)
1479 \details Executes a STLB exclusive instruction for 8 bit values.
1480 \param [in] value Value to store
1481 \param [in] ptr Pointer to location
1482 \return 0 Function succeeded
1483 \return 1 Function failed
1484 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
1486 {
1487 uint32_t result;
1488
1489 __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1490 return(result);
1491 }
1492
1493
1494 /**
1495 \brief Store-Release Exclusive (16 bit)
1496 \details Executes a STLH exclusive instruction for 16 bit values.
1497 \param [in] value Value to store
1498 \param [in] ptr Pointer to location
1499 \return 0 Function succeeded
1500 \return 1 Function failed
1501 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
1503 {
1504 uint32_t result;
1505
1506 __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1507 return(result);
1508 }
1509
1510
1511 /**
1512 \brief Store-Release Exclusive (32 bit)
1513 \details Executes a STL exclusive instruction for 32 bit values.
1514 \param [in] value Value to store
1515 \param [in] ptr Pointer to location
1516 \return 0 Function succeeded
1517 \return 1 Function failed
1518 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
1520 {
1521 uint32_t result;
1522
1523 __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1524 return(result);
1525 }
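/* Usage sketch (illustrative only, not part of the CMSIS API): a simple spin lock
   built from the load-acquire/store-release exclusives; 0 means free, 1 means taken. */
__STATIC_INLINE void example_spin_lock(volatile uint32_t *lock)
{
  while ((__LDAEX(lock) != 0U) || (__STLEX(1U, lock) != 0U))
  {
    /* retry until the lock is observed free and claimed exclusively */
  }
}

__STATIC_INLINE void example_spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);   /* store-release pairs with the load-acquire above */
}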
1526
1527 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1528 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1529
1530 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1531
1532
1533 /* ################### Compiler specific Intrinsics ########################### */
1534 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1535 Access to dedicated SIMD instructions
1536 @{
1537 */
1538
1539 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1540
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1542 {
1543 uint32_t result;
1544
1545 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1546 return(result);
1547 }
1548
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1550 {
1551 uint32_t result;
1552
1553 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1554 return(result);
1555 }
1556
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1558 {
1559 uint32_t result;
1560
1561 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1562 return(result);
1563 }
1564
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1566 {
1567 uint32_t result;
1568
1569 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1570 return(result);
1571 }
1572
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1574 {
1575 uint32_t result;
1576
1577 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1578 return(result);
1579 }
1580
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1582 {
1583 uint32_t result;
1584
1585 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1586 return(result);
1587 }
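/* Usage sketch (illustrative only): averaging the four packed 8-bit channels of two
   RGBA pixels in a single instruction with the unsigned halving add. */
__STATIC_INLINE uint32_t example_average_rgba(uint32_t pixel_a, uint32_t pixel_b)
{
  return __UHADD8(pixel_a, pixel_b);   /* per byte: (a + b) >> 1 */
}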
1588
1589
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1591 {
1592 uint32_t result;
1593
1594 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1595 return(result);
1596 }
1597
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1599 {
1600 uint32_t result;
1601
1602 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1603 return(result);
1604 }
1605
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1607 {
1608 uint32_t result;
1609
1610 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1611 return(result);
1612 }
1613
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1615 {
1616 uint32_t result;
1617
1618 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619 return(result);
1620 }
1621
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1623 {
1624 uint32_t result;
1625
1626 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1627 return(result);
1628 }
1629
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1631 {
1632 uint32_t result;
1633
1634 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1635 return(result);
1636 }
1637
1638
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1640 {
1641 uint32_t result;
1642
1643 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1644 return(result);
1645 }
1646
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1648 {
1649 uint32_t result;
1650
1651 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1652 return(result);
1653 }
1654
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1656 {
1657 uint32_t result;
1658
1659 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1660 return(result);
1661 }
1662
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1664 {
1665 uint32_t result;
1666
1667 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1668 return(result);
1669 }
1670
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1672 {
1673 uint32_t result;
1674
1675 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1676 return(result);
1677 }
1678
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1680 {
1681 uint32_t result;
1682
1683 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1684 return(result);
1685 }
1686
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1688 {
1689 uint32_t result;
1690
1691 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1692 return(result);
1693 }
1694
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1696 {
1697 uint32_t result;
1698
1699 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1700 return(result);
1701 }
1702
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1704 {
1705 uint32_t result;
1706
1707 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1708 return(result);
1709 }
1710
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1712 {
1713 uint32_t result;
1714
1715 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1716 return(result);
1717 }
1718
__UQSUB16(uint32_t op1,uint32_t op2)1719 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1720 {
1721 uint32_t result;
1722
1723 __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1724 return(result);
1725 }
1726
__UHSUB16(uint32_t op1,uint32_t op2)1727 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1728 {
1729 uint32_t result;
1730
1731 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1732 return(result);
1733 }
1734
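/* Usage sketch (illustrative only, not part of CMSIS): mix two interleaved
   left/right 16-bit audio samples, each frame packed into one 32-bit word,
   with per-channel saturation instead of wrap-around. The helper name is
   hypothetical. */
#if 0
__STATIC_FORCEINLINE uint32_t mix_stereo_q15(uint32_t frameA, uint32_t frameB)
{
  /* each halfword clamps to [0x8000, 0x7FFF], e.g. 0x7FFF + 0x0001 stays 0x7FFF */
  return __QADD16(frameA, frameB);
}
#endif
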
/* Signed add/subtract with exchange: high = op1.hi + op2.lo, low = op1.lo - op2.hi;
   updates the APSR.GE flags. */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Signed saturating variant of __SASX. */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Signed halving variant of __SASX. */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned add/subtract with exchange; updates the APSR.GE flags. */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned saturating variant of __UASX. */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned halving variant of __UASX. */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Signed subtract/add with exchange: high = op1.hi - op2.lo, low = op1.lo + op2.hi;
   updates the APSR.GE flags. */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Signed saturating variant of __SSAX. */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Signed halving variant of __SSAX. */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned subtract/add with exchange; updates the APSR.GE flags. */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned saturating variant of __USAX. */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned halving variant of __USAX. */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

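/* Worked example (illustrative only): writing the packed halfwords as {hi, lo},
   __SASX(0x00040002, 0x00030001) computes {4 + 1, 2 - 3} = {5, -1} = 0x0005FFFF,
   while __SSAX(0x00040002, 0x00030001) computes {4 - 1, 2 + 3} = {3, 5} = 0x00030005.
   The Q/SH/U variants apply the same exchange pattern with saturating, halving
   or unsigned arithmetic. */
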
/* Sum of the absolute differences of the four unsigned byte pairs. */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Sum of the absolute differences of the four unsigned byte pairs, plus accumulator op3. */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

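/* Usage sketch (illustrative only, not part of CMSIS): accumulate the sum of
   absolute differences between two byte buffers, four bytes per step, as used
   in motion estimation. The buffers are assumed word-aligned with a length
   that is a multiple of four bytes; the helper name is hypothetical. */
#if 0
__STATIC_FORCEINLINE uint32_t sad_u8(const uint32_t *a, const uint32_t *b, uint32_t words)
{
  uint32_t acc = 0U;

  while (words-- != 0U)
  {
    acc = __USADA8(*a++, *b++, acc);   /* acc += |a0-b0| + |a1-b1| + |a2-b2| + |a3-b3| */
  }
  return acc;
}
#endif
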
/* Saturate each signed halfword of ARG1 to the signed ARG2-bit range. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Saturate each halfword of ARG1 to the unsigned ARG2-bit range. */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

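/* Worked example (illustrative only): the saturation width must be a
   compile-time constant because of the "I" operand constraint.
   __SSAT16(0x0123FF80, 8) clamps each signed halfword to [-128, 127],
   giving 0x007FFF80; __USAT16(0x0123FF80, 8) clamps to [0, 255],
   giving 0x00FF0000. */
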
/* Zero-extend bytes 0 and 2 of op1 into the two halfwords of the result. */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* Add the halfwords of op1 to the zero-extended bytes 0 and 2 of op2. */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Sign-extend bytes 0 and 2 of op1 into the two halfwords of the result. */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* Add the halfwords of op1 to the sign-extended bytes 0 and 2 of op2. */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

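/* Usage sketch (illustrative only, not part of CMSIS): unpack the four bytes
   of a word into two words of zero-extended halfwords, e.g. to widen 8-bit
   pixels before halfword SIMD arithmetic. __UXTB16 extracts bytes 0 and 2;
   rotating the input right by 8 bits first exposes bytes 1 and 3. __ROR is
   the rotate-right intrinsic defined earlier in this header; the helper name
   is hypothetical. */
#if 0
__STATIC_FORCEINLINE void unpack4_u8_to_u16(uint32_t packed, uint32_t *even, uint32_t *odd)
{
  *even = __UXTB16(packed);            /* bytes 0 and 2 -> halfwords */
  *odd  = __UXTB16(__ROR(packed, 8));  /* bytes 1 and 3 -> halfwords */
}
#endif
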
/* Dual signed 16 x 16 multiply, sum of both products: op1.lo*op2.lo + op1.hi*op2.hi. */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As __SMUAD with the halfwords of op2 exchanged: op1.lo*op2.hi + op1.hi*op2.lo. */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Dual signed multiply, sum of products, plus 32-bit accumulator op3. */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As __SMLAD with the halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* Dual signed multiply, sum of products, plus 64-bit accumulator acc. */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As __SMLALD with the halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

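/* Usage sketch (illustrative only, not part of CMSIS): a packed Q15 dot
   product, two sample/coefficient pairs per iteration, accumulated into a
   64-bit value with __SMLALD. The buffers are assumed word-aligned with an
   even number of samples; names are hypothetical. */
#if 0
__STATIC_FORCEINLINE int64_t dot_q15(const uint32_t *x, const uint32_t *c, uint32_t pairs)
{
  uint64_t acc = 0U;

  while (pairs-- != 0U)
  {
    acc = __SMLALD(*x++, *c++, acc);   /* acc += x.lo*c.lo + x.hi*c.hi */
  }
  return (int64_t)acc;
}
#endif
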
/* Dual signed 16 x 16 multiply, difference of products: op1.lo*op2.lo - op1.hi*op2.hi. */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As __SMUSD with the halfwords of op2 exchanged: op1.lo*op2.hi - op1.hi*op2.lo. */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Dual signed multiply, difference of products, plus 32-bit accumulator op3. */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As __SMLSD with the halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* Dual signed multiply, difference of products, plus 64-bit accumulator acc. */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As __SMLSLD with the halfwords of op2 exchanged. */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

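/* Usage sketch (illustrative only, not part of CMSIS): multiply two Q15
   complex numbers packed as {imag, real} (imaginary part in the upper
   halfword). The real part of the product is a.re*b.re - a.im*b.im, which is
   exactly __SMUSD(a, b); the imaginary part is a.re*b.im + a.im*b.re, which
   is __SMUADX(a, b). Both results are full-precision 32-bit products and
   still need scaling back to Q15; names are hypothetical. */
#if 0
__STATIC_FORCEINLINE void cmplx_mult_q15(uint32_t a, uint32_t b, int32_t *re, int32_t *im)
{
  *re = (int32_t)__SMUSD (a, b);   /* a.lo*b.lo - a.hi*b.hi */
  *im = (int32_t)__SMUADX(a, b);   /* a.lo*b.hi + a.hi*b.lo */
}
#endif
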
/* Byte-wise select: for each byte, take op1 if the corresponding APSR.GE flag is set, else op2. */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

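/* Usage sketch (illustrative only, not part of CMSIS): byte-wise maximum of
   two packed words. __USUB8 sets one APSR.GE flag per byte (set where the
   op1 byte is >= the op2 byte), and __SEL then picks the op1 byte where the
   flag is set, the op2 byte otherwise. The __SEL is intended to follow the
   GE-setting instruction directly; the helper name is hypothetical. */
#if 0
__STATIC_FORCEINLINE uint32_t max4_u8(uint32_t a, uint32_t b)
{
  (void)__USUB8(a, b);   /* only the GE flags are needed here */
  return __SEL(a, b);    /* per byte: (a >= b) ? a : b */
}
#endif
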
/* Saturating signed 32-bit addition. */
__STATIC_FORCEINLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating signed 32-bit subtraction. */
__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

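/* Usage sketch (illustrative only, not part of CMSIS): 32-bit saturating
   accumulation, e.g. summing Q31 samples without wrap-around.
   __QADD(0x7FFFFFFF, 1) returns 0x7FFFFFFF (INT32_MAX) instead of wrapping
   to INT32_MIN; the helper name is hypothetical. */
#if 0
__STATIC_FORCEINLINE int32_t accumulate_q31(int32_t acc, const int32_t *x, uint32_t n)
{
  while (n-- != 0U)
  {
    acc = __QADD(acc, *x++);   /* clamps at INT32_MIN / INT32_MAX */
  }
  return acc;
}
#endif
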
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#endif

/* Pack halfwords: low halfword of ARG1 with the high halfword of ARG2 shifted left by ARG3. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* Pack halfwords: high halfword of ARG1 with the low halfword of ARG2 shifted right by ARG3. */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

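/* Worked example (illustrative only): __PKHBT keeps the low halfword of its
   first argument and takes the high halfword from the (left-shifted) second
   argument; __PKHTB works the other way round. For instance
   __PKHBT(0x00001111, 0x00002222, 16) == 0x22221111 and
   __PKHTB(0x33330000, 0x44440000, 16) == 0x33334444. */
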
/* Signed 32 x 32 multiply: add op3 to the most significant 32 bits of the 64-bit product. */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

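/* Usage sketch (illustrative only, not part of CMSIS): __SMMLA returns
   op3 + ((int64_t)op1 * op2 >> 32), truncated rather than rounded. For Q31
   operands the high word of the product is in Q30 format, i.e. the Q31
   result scaled down by one bit; the helper name is hypothetical. */
#if 0
__STATIC_FORCEINLINE int32_t mac_q31(int32_t acc, int32_t a, int32_t b)
{
  return __SMMLA(a, b, acc);   /* acc + (int32_t)(((int64_t)a * b) >> 32) */
}
#endif
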
#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */