/*===---- arm_acle.h - ARM Non-Neon intrinsics -----------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __ARM_ACLE_H
#define __ARM_ACLE_H

#ifndef __ARM_ACLE
#error "ACLE intrinsics support not enabled."
#endif

#include <stdint.h>

#if defined(__cplusplus)
extern "C" {
#endif

/* 8 SYNCHRONIZATION, BARRIER AND HINT INTRINSICS */
/* 8.3 Memory barriers */
#if !defined(_MSC_VER)
#define __dmb(i) __builtin_arm_dmb(i)
#define __dsb(i) __builtin_arm_dsb(i)
#define __isb(i) __builtin_arm_isb(i)
#endif
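/*
 * Usage sketch (illustrative): the argument selects the barrier domain and
 * access types; 0xF (SY) is the full-system barrier covering reads and writes.
 *   __dmb(0xF);  // complete all prior memory accesses before any later ones
 */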

/* 8.4 Hints */

#if !defined(_MSC_VER)
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfi(void) {
  __builtin_arm_wfi();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __wfe(void) {
  __builtin_arm_wfe();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sev(void) {
  __builtin_arm_sev();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __sevl(void) {
  __builtin_arm_sevl();
}

static __inline__ void __attribute__((__always_inline__, __nodebug__)) __yield(void) {
  __builtin_arm_yield();
}
#endif
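/*
 * Illustrative spin-wait (not part of ACLE): __yield hints that this thread
 * is busy-waiting, so a multithreaded core may deprioritize it. Here "flag"
 * is a hypothetical shared variable:
 *   while (!flag)
 *     __yield();
 */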

#if __ARM_32BIT_STATE
#define __dbg(t) __builtin_arm_dbg(t)
#endif

/* 8.5 Swap */
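/* Implemented as a load-exclusive/store-exclusive retry loop: the exclusive
   store reports failure (returns nonzero) if another observer accessed *__p
   in between, in which case the exchange is retried. */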
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__swp(uint32_t __x, volatile uint32_t *__p) {
  uint32_t v;
  do
    v = __builtin_arm_ldrex(__p);
  while (__builtin_arm_strex(__x, __p));
  return v;
}

/* 8.6 Memory prefetch intrinsics */
/* 8.6.1 Data prefetch */
#define __pld(addr) __pldx(0, 0, 0, addr)

#if __ARM_32BIT_STATE
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, 1)
#else
#define __pldx(access_kind, cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, access_kind, cache_level, retention_policy, 1)
#endif

/* 8.6.2 Instruction prefetch */
#define __pli(addr) __plix(0, 0, addr)

#if __ARM_32BIT_STATE
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, 0)
#else
#define __plix(cache_level, retention_policy, addr) \
  __builtin_arm_prefetch(addr, 0, cache_level, retention_policy, 0)
#endif
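/*
 * Usage sketch (illustrative; "p" is a hypothetical pointer): prefetch for
 * writing into the nearest cache with temporal locality. Per ACLE,
 * access_kind 1 = write, cache_level 0 = L1, retention_policy 0 = temporal:
 *   __pldx(1, 0, 0, p);
 */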

/* 8.7 NOP */
static __inline__ void __attribute__((__always_inline__, __nodebug__)) __nop(void) {
  __builtin_arm_nop();
}

/* 9 DATA-PROCESSING INTRINSICS */
/* 9.2 Miscellaneous data-processing intrinsics */
/* ROR */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__ror(uint32_t __x, uint32_t __y) {
  __y %= 32;
  if (__y == 0)
    return __x;
  return (__x >> __y) | (__x << (32 - __y));
}
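/* Example: __ror(0x80000001U, 1) == 0xC0000000U; low-order bits rotate into
   the high end. The early return for __y == 0 avoids the undefined shift by
   32 in the general expression. */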

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rorll(uint64_t __x, uint32_t __y) {
  __y %= 64;
  if (__y == 0)
    return __x;
  return (__x >> __y) | (__x << (64 - __y));
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rorl(unsigned long __x, uint32_t __y) {
#if __SIZEOF_LONG__ == 4
  return __ror(__x, __y);
#else
  return __rorll(__x, __y);
#endif
}

/* CLZ */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__clz(uint32_t __t) {
  return __builtin_clz(__t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__clzl(unsigned long __t) {
  return __builtin_clzl(__t);
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__clzll(uint64_t __t) {
  return __builtin_clzll(__t);
}
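/* Caveat: __builtin_clz* are undefined in C for a zero argument; on ARM
   targets they compile to the CLZ instruction, which yields the operand
   width (32 or 64) for an input of zero. */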

/* REV */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rev(uint32_t __t) {
  return __builtin_bswap32(__t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__revl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __builtin_bswap32(__t);
#else
  return __builtin_bswap64(__t);
#endif
}

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__revll(uint64_t __t) {
  return __builtin_bswap64(__t);
}
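/* Example: __rev(0x12345678U) == 0x78563412U (full byte reversal). */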

/* REV16 */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rev16(uint32_t __t) {
  return __ror(__rev(__t), 16);
}
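/* Byte-swapping the whole word and then rotating it by 16 swaps the bytes
   within each halfword: __rev16(0x12345678U) == 0x34127856U. */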

static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rev16ll(uint64_t __t) {
  return (((uint64_t)__rev16(__t >> 32)) << 32) | __rev16(__t);
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rev16l(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __rev16(__t);
#else
  return __rev16ll(__t);
#endif
}

/* REVSH */
static __inline__ int16_t __attribute__((__always_inline__, __nodebug__))
__revsh(int16_t __t) {
  return __builtin_bswap16(__t);
}
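/* Example: __revsh((int16_t)0x0080) == (int16_t)0x8000 == -32768; the
   byte-swapped halfword is interpreted as a signed value. */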

/* RBIT */
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__rbit(uint32_t __t) {
  return __builtin_arm_rbit(__t);
}

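/* On AArch32 there is no 64-bit RBIT, so each 32-bit half is bit-reversed
   separately and the halves are exchanged, which is equivalent to a full
   64-bit bit reversal. */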
static __inline__ uint64_t __attribute__((__always_inline__, __nodebug__))
__rbitll(uint64_t __t) {
#if __ARM_32BIT_STATE
  return (((uint64_t)__builtin_arm_rbit(__t)) << 32) |
         __builtin_arm_rbit(__t >> 32);
#else
  return __builtin_arm_rbit64(__t);
#endif
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__rbitl(unsigned long __t) {
#if __SIZEOF_LONG__ == 4
  return __rbit(__t);
#else
  return __rbitll(__t);
#endif
}

/*
 * 9.4 Saturating intrinsics
 *
 * FIXME: Change guard to their corresponding __ARM_FEATURE flag when Q flag
 * intrinsics are implemented and the flag is enabled.
 */
/* 9.4.1 Width-specified saturation intrinsics */
#if __ARM_32BIT_STATE
#define __ssat(x, y) __builtin_arm_ssat(x, y)
#define __usat(x, y) __builtin_arm_usat(x, y)
#endif
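/*
 * Example (illustrative): these saturate a 32-bit value to an n-bit range;
 * __ssat(v, 8) clamps v to [-128, 127], __usat(v, 8) clamps v to [0, 255].
 */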

/* 9.4.2 Saturating addition and subtraction intrinsics */
#if __ARM_32BIT_STATE
static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qadd(int32_t __t, int32_t __v) {
  return __builtin_arm_qadd(__t, __v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qsub(int32_t __t, int32_t __v) {
  return __builtin_arm_qsub(__t, __v);
}

static __inline__ int32_t __attribute__((__always_inline__, __nodebug__))
__qdbl(int32_t __t) {
  return __builtin_arm_qadd(__t, __t);
}
#endif
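/* These clamp to [INT32_MIN, INT32_MAX] instead of wrapping; e.g.
   __qadd(0x7FFFFFFF, 1) == 0x7FFFFFFF. __qdbl(x) is saturating 2*x,
   expressed as __qadd(x, x). */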

/* 9.7 CRC32 intrinsics */
#if __ARM_FEATURE_CRC32
static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32b(uint32_t __a, uint8_t __b) {
  return __builtin_arm_crc32b(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32h(uint32_t __a, uint16_t __b) {
  return __builtin_arm_crc32h(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32w(uint32_t __a, uint32_t __b) {
  return __builtin_arm_crc32w(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32d(uint32_t __a, uint64_t __b) {
  return __builtin_arm_crc32d(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32cb(uint32_t __a, uint8_t __b) {
  return __builtin_arm_crc32cb(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32ch(uint32_t __a, uint16_t __b) {
  return __builtin_arm_crc32ch(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32cw(uint32_t __a, uint32_t __b) {
  return __builtin_arm_crc32cw(__a, __b);
}

static __inline__ uint32_t __attribute__((__always_inline__, __nodebug__))
__crc32cd(uint32_t __a, uint64_t __b) {
  return __builtin_arm_crc32cd(__a, __b);
}
#endif
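/*
 * Illustrative byte-at-a-time CRC-32 over a buffer ("buf" and "len" are
 * hypothetical); the usual convention is to start from ~0 and invert the
 * final accumulator:
 *   uint32_t crc = 0xFFFFFFFFU;
 *   for (size_t i = 0; i < len; ++i)
 *     crc = __crc32b(crc, buf[i]);
 *   crc = ~crc;
 */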

/* 10.1 Special register intrinsics */
#define __arm_rsr(sysreg) __builtin_arm_rsr(sysreg)
#define __arm_rsr64(sysreg) __builtin_arm_rsr64(sysreg)
#define __arm_rsrp(sysreg) __builtin_arm_rsrp(sysreg)
#define __arm_wsr(sysreg, v) __builtin_arm_wsr(sysreg, v)
#define __arm_wsr64(sysreg, v) __builtin_arm_wsr64(sysreg, v)
#define __arm_wsrp(sysreg, v) __builtin_arm_wsrp(sysreg, v)
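/*
 * Example (illustrative): sysreg is a string-literal register name; on
 * AArch64, reading the virtual counter might look like
 *   uint64_t t = __arm_rsr64("cntvct_el0");
 */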

#if defined(__cplusplus)
}
#endif

#endif /* __ARM_ACLE_H */