1 /**************************************************************************//**
2 * @file cmsis_armcc.h
3 * @brief CMSIS compiler specific macros, functions, instructions
4 * @version V1.0.3
5 * @date 15. May 2019
6 ******************************************************************************/
7 /*
8 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
9 *
10 * SPDX-License-Identifier: Apache-2.0
11 *
12 * Licensed under the Apache License, Version 2.0 (the License); you may
13 * not use this file except in compliance with the License.
14 * You may obtain a copy of the License at
15 *
16 * www.apache.org/licenses/LICENSE-2.0
17 *
18 * Unless required by applicable law or agreed to in writing, software
19 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 * See the License for the specific language governing permissions and
22 * limitations under the License.
23 */
24
25 #ifndef __CMSIS_ARMCC_CA_H
26 #define __CMSIS_ARMCC_CA_H
27
28 #if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
29 #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
30 #endif
31
32 /* CMSIS compiler control architecture macros */
33 #if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A == 1))
34 #define __ARM_ARCH_7A__ 1
35 #endif
36
37 /* CMSIS compiler specific defines */
38 #ifndef __ASM
39 #define __ASM __asm
40 #endif
41 #ifndef __INLINE
42 #define __INLINE __inline
43 #endif
44 #ifndef __FORCEINLINE
45 #define __FORCEINLINE __forceinline
46 #endif
47 #ifndef __STATIC_INLINE
48 #define __STATIC_INLINE static __inline
49 #endif
50 #ifndef __STATIC_FORCEINLINE
51 #define __STATIC_FORCEINLINE static __forceinline
52 #endif
53 #ifndef __NO_RETURN
54 #define __NO_RETURN __declspec(noreturn)
55 #endif
56 #ifndef CMSIS_DEPRECATED
57 #define CMSIS_DEPRECATED __attribute__((deprecated))
58 #endif
59 #ifndef __USED
60 #define __USED __attribute__((used))
61 #endif
62 #ifndef __WEAK
63 #define __WEAK __attribute__((weak))
64 #endif
65 #ifndef __PACKED
66 #define __PACKED __attribute__((packed))
67 #endif
68 #ifndef __PACKED_STRUCT
69 #define __PACKED_STRUCT __packed struct
70 #endif
71 #ifndef __UNALIGNED_UINT16_WRITE
72 #define __UNALIGNED_UINT16_WRITE(addr, val) ((*((__packed uint16_t *)(addr))) = (val))
73 #endif
74 #ifndef __UNALIGNED_UINT16_READ
75 #define __UNALIGNED_UINT16_READ(addr) (*((const __packed uint16_t *)(addr)))
76 #endif
77 #ifndef __UNALIGNED_UINT32_WRITE
78 #define __UNALIGNED_UINT32_WRITE(addr, val) ((*((__packed uint32_t *)(addr))) = (val))
79 #endif
80 #ifndef __UNALIGNED_UINT32_READ
81 #define __UNALIGNED_UINT32_READ(addr) (*((const __packed uint32_t *)(addr)))
82 #endif
83 #ifndef __ALIGNED
84 #define __ALIGNED(x) __attribute__((aligned(x)))
85 #endif
86 #ifndef __PACKED
87 #define __PACKED __attribute__((packed))
88 #endif
89 #ifndef __COMPILER_BARRIER
90 #define __COMPILER_BARRIER() __memory_changed()
91 #endif
92
/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
  \details No Operation does nothing; useful for code alignment and timing padding.
 */
#define __NOP                             __nop

/**
  \brief   Wait For Interrupt
  \details Hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI                             __wfi

/**
  \brief   Wait For Event
  \details Hint instruction that permits the processor to enter a low-power state until an event occurs.
 */
#define __WFE                             __wfe

/**
  \brief   Send Event
  \details Hint instruction that causes an event to be signaled to the CPU(s).
 */
#define __SEV                             __sev

/**
  \brief   Instruction Synchronization Barrier
  \details Flushes the processor pipeline so that all following instructions are
           re-fetched after the barrier completes. The surrounding
           __schedule_barrier() calls stop the compiler from reordering
           instructions across the barrier.
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
  \details Completes when all explicit memory accesses before this instruction
           have completed. Compiler reordering is blocked by __schedule_barrier().
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev
148
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
/* armcc embedded assembler: per AAPCS the argument arrives in r0 and the
   result is returned in r0, so a single in-place REV16 suffices. */
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif
162
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
/* armcc embedded assembler: REVSH byte-swaps the low halfword and
   sign-extends it to 32 bits; argument/result in r0 per AAPCS. */
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif
176
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotates an unsigned 32-bit value right by the given number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
#define __ROR                             __ror

/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state (BKPT instruction).
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)

/**
  \brief   Reverse bit order of value
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT                            __rbit

/**
  \brief   Count leading zeros
  \param [in]    value  Value to count the leading zeros
  \return               number of leading zeros in value
 */
#define __CLZ                             __clz
205
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  /* Arm Compiler 5.06u2+ deprecates the __ldrex intrinsic; diagnostic 3731
     ("intrinsic is deprecated") is suppressed locally around its use. */
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  /* See __LDREXB: suppress deprecation diagnostic 3731 on newer compilers. */
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  /* See __LDREXB: suppress deprecation diagnostic 3731 on newer compilers. */
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif
241
/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  /* Arm Compiler 5.06u2+ deprecates the __strex intrinsic; diagnostic 3731
     ("intrinsic is deprecated") is suppressed locally around its use. */
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  /* See __STREXB: suppress deprecation diagnostic 3731 on newer compilers. */
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  /* See __STREXB: suppress deprecation diagnostic 3731 on newer compilers. */
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
289
290
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat
308
/* ###########################  Core Function Access  ########################### */

/**
  \brief   Get FPSCR (Floating Point Status/Control)
  \return  Floating Point Status/Control register value; 0 when no FPU is
           present/used.
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  /* armcc named-register variable: binds the C object to the FPSCR register. */
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}
325
/**
  \brief   Set FPSCR (Floating Point Status/Control)
  \details No-op when no FPU is present/used (the argument is consumed to
           avoid an unused-parameter warning).
  \param [in]  fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  /* armcc named-register variable: writing it writes the FPSCR register. */
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}
340
/** \brief  Get CPSR (Current Program Status Register)
    \return CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  /* armcc named-register variable: reading it reads the CPSR register. */
  register uint32_t __regCPSR          __ASM("cpsr");
  return(__regCPSR);
}
349
350
/** \brief  Set CPSR (Current Program Status Register)
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
{
  /* armcc named-register variable: writing it writes the CPSR register. */
  register uint32_t __regCPSR          __ASM("cpsr");
  __regCPSR = cpsr;
}
359
360 /** \brief Get Mode
361 \return Processor Mode
362 */
__get_mode(void)363 __STATIC_INLINE uint32_t __get_mode(void)
364 {
365 return (__get_CPSR() & 0x1FU);
366 }
367
/** \brief  Set Mode
    \param [in]  mode  Mode value to set
 */
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
{
  MOV  r1, lr       ; save return address - lr may be banked after the mode switch
  MSR  CPSR_C, r0   ; write only the CPSR control field (mode bits, masks)
  BX   r1
}
377
/** \brief  Get Stack Pointer
    \return Stack Pointer of the current mode
 */
__STATIC_INLINE __ASM uint32_t __get_SP(void)
{
  MOV  r0, sp       ; result returned in r0 per AAPCS
  BX   lr
}
386
/** \brief  Set Stack Pointer
    \param [in]  stack  Stack Pointer value to set (for the current mode)
 */
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
{
  MOV  sp, r0       ; argument arrives in r0 per AAPCS
  BX   lr
}
395
396
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer
 */
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR      ; save current CPSR
  CPS     #0x1F         ; switch to SYS mode to see the USR/SYS banked SP (no effect in USR mode)
  MOV     R0, SP        ; read the USR/SYS stack pointer
  MSR     CPSR_c, R1    ; restore original mode (no effect in USR mode)
  ISB
  BX      LR
}
412
/** \brief  Set USR/SYS Stack Pointer
    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR      ; save current CPSR
  CPS     #0x1F         ; switch to SYS mode to reach the USR/SYS banked SP (no effect in USR mode)
  MOV     SP, R0        ; write the USR/SYS stack pointer
  MSR     CPSR_c, R1    ; restore original mode (no effect in USR mode)
  ISB
  BX      LR
}
428
429 /** \brief Get FPEXC (Floating Point Exception Control Register)
430 \return Floating Point Exception Control Register value
431 */
__get_FPEXC(void)432 __STATIC_INLINE uint32_t __get_FPEXC(void)
433 {
434 #if (__FPU_PRESENT == 1)
435 register uint32_t __regfpexc __ASM("fpexc");
436 return(__regfpexc);
437 #else
438 return(0);
439 #endif
440 }
441
442 /** \brief Set FPEXC (Floating Point Exception Control Register)
443 \param [in] fpexc Floating Point Exception Control value to set
444 */
__set_FPEXC(uint32_t fpexc)445 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
446 {
447 #if (__FPU_PRESENT == 1)
448 register uint32_t __regfpexc __ASM("fpexc");
449 __regfpexc = (fpexc);
450 #endif
451 }
452
/*
 * Include common core functions to access Coprocessor 15 registers
 */

/* 32-bit coprocessor register read/write. Uses armcc's named-register-variable
   syntax "cp<cp>:<op1>:c<CRn>:c<CRm>:<op2>" to bind a C object to the
   coprocessor register; (Rt) is the C lvalue transferred to/from it. */
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
/* 64-bit coprocessor register read via MRRC. NOTE(review): the asm string
   names the C locals ltmp/htmp directly — armcc embedded asm resolves C
   variable names inside instructions; verify this builds on the toolchains
   in use. */
#define __get_CP64(cp, op1, Rt, CRm) \
  do { \
    uint32_t ltmp, htmp; \
    __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
    (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \
  } while(0)

/* 64-bit coprocessor register write via MCRR; splits (Rt) into low/high words. */
#define __set_CP64(cp, op1, Rt, CRm) \
  do { \
    const uint64_t tmp = (Rt); \
    const uint32_t ltmp = (uint32_t)(tmp); \
    const uint32_t htmp = (uint32_t)(tmp >> 32U); \
    __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
  } while(0)

#include "ca/cmsis_cp15_ca.h"
475
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled.
  Grants CP10/CP11 access in CPACR, sets FPEXC.EN, then zero-initialises
  the D registers and establishes a known FPSCR state.
 */
__STATIC_INLINE __ASM void __FPU_Enable(void)
{
        ARM

        //Permit access to VFP/NEON, registers by modifying CPACR
        MRC     p15,0,R1,c1,c0,2    //CPACR: CP10/CP11 access control
        ORR     R1,R1,#0x00F00000   //full access for CP10 and CP11
        MCR     p15,0,R1,c1,c0,2

        //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
        ISB

        //Enable VFP/NEON
        VMRS    R1,FPEXC
        ORR     R1,R1,#0x40000000   //FPEXC.EN
        VMSR    FPEXC,R1

        //Initialise VFP/NEON registers to 0
        MOV     R2,#0

        //Initialise D16 registers to 0
        VMOV    D0, R2,R2
        VMOV    D1, R2,R2
        VMOV    D2, R2,R2
        VMOV    D3, R2,R2
        VMOV    D4, R2,R2
        VMOV    D5, R2,R2
        VMOV    D6, R2,R2
        VMOV    D7, R2,R2
        VMOV    D8, R2,R2
        VMOV    D9, R2,R2
        VMOV    D10,R2,R2
        VMOV    D11,R2,R2
        VMOV    D12,R2,R2
        VMOV    D13,R2,R2
        VMOV    D14,R2,R2
        VMOV    D15,R2,R2

        //Assembly-time conditional: only emitted when the target has 32 D registers
  IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
        //Initialise D32 registers to 0
        VMOV    D16,R2,R2
        VMOV    D17,R2,R2
        VMOV    D18,R2,R2
        VMOV    D19,R2,R2
        VMOV    D20,R2,R2
        VMOV    D21,R2,R2
        VMOV    D22,R2,R2
        VMOV    D23,R2,R2
        VMOV    D24,R2,R2
        VMOV    D25,R2,R2
        VMOV    D26,R2,R2
        VMOV    D27,R2,R2
        VMOV    D28,R2,R2
        VMOV    D29,R2,R2
        VMOV    D30,R2,R2
        VMOV    D31,R2,R2
  ENDIF

        //Initialise FPSCR to a known state
        VMRS    R1,FPSCR
        LDR     R2,=0x00086060      //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
        AND     R1,R1,R2
        VMSR    FPSCR,R1

        BX      LR
}
546
547 #endif /* __CMSIS_ARMCC_CA_H */
548