1 /*
2 * Vector math abstractions.
3 *
4 * Copyright (c) 2019-2022, Arm Limited.
5 * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
6 */
7
8 #ifndef _V_MATH_H
9 #define _V_MATH_H
10
11 #ifndef WANT_VMATH
12 /* Enable the build of vector math code. */
13 # define WANT_VMATH 1
14 #endif
15 #if WANT_VMATH
16
17 /* The goal of this header is to allow vector and scalar
18 build of the same algorithm, the provided intrinsic
19 wrappers are also vector length agnostic so they can
20 be implemented for SVE too (or other simd architectures)
21 and then the code should work on those targets too. */
22
23 #if SCALAR
24 #define V_NAME(x) __s_##x
25 #elif VPCS && __aarch64__
26 #define V_NAME(x) __vn_##x
27 #define VPCS_ATTR __attribute__ ((aarch64_vector_pcs))
28 #else
29 #define V_NAME(x) __v_##x
30 #endif
31
32 #ifndef VPCS_ATTR
33 #define VPCS_ATTR
34 #endif
35 #ifndef VPCS_ALIAS
36 #define VPCS_ALIAS
37 #endif
38
39 #include <stdint.h>
40 #include "math_config.h"
41
/* Scalar types used throughout the vector math code.  */
typedef float f32_t;
typedef uint32_t u32_t;
typedef int32_t s32_t;
typedef double f64_t;
typedef uint64_t u64_t;
typedef int64_t s64_t;

/* Bit-level reinterpret casts between same-width scalar types.
   Unions are used so the conversions are well defined and do not
   violate strict aliasing.  Naming scheme: as_<dst>_<src>.  */
static inline u32_t
as_u32_f32 (f32_t x)
{
  union { f32_t f; u32_t u; } cvt;
  cvt.f = x;
  return cvt.u;
}
static inline f32_t
as_f32_u32 (u32_t x)
{
  union { u32_t u; f32_t f; } cvt;
  cvt.u = x;
  return cvt.f;
}
static inline s32_t
as_s32_u32 (u32_t x)
{
  union { u32_t u; s32_t i; } cvt;
  cvt.u = x;
  return cvt.i;
}
static inline u32_t
as_u32_s32 (s32_t x)
{
  union { s32_t i; u32_t u; } cvt;
  cvt.i = x;
  return cvt.u;
}
static inline u64_t
as_u64_f64 (f64_t x)
{
  union { f64_t f; u64_t u; } cvt;
  cvt.f = x;
  return cvt.u;
}
static inline f64_t
as_f64_u64 (u64_t x)
{
  union { u64_t u; f64_t f; } cvt;
  cvt.u = x;
  return cvt.f;
}
static inline s64_t
as_s64_u64 (u64_t x)
{
  union { u64_t u; s64_t i; } cvt;
  cvt.u = x;
  return cvt.i;
}
static inline u64_t
as_u64_s64 (s64_t x)
{
  union { s64_t i; u64_t u; } cvt;
  cvt.i = x;
  return cvt.u;
}
98
99 #if SCALAR
100 #define V_SUPPORTED 1
101 typedef f32_t v_f32_t;
102 typedef u32_t v_u32_t;
103 typedef s32_t v_s32_t;
104 typedef f64_t v_f64_t;
105 typedef u64_t v_u64_t;
106 typedef s64_t v_s64_t;
107
108 static inline int
v_lanes32(void)109 v_lanes32 (void)
110 {
111 return 1;
112 }
113
114 static inline v_f32_t
v_f32(f32_t x)115 v_f32 (f32_t x)
116 {
117 return x;
118 }
119 static inline v_u32_t
v_u32(u32_t x)120 v_u32 (u32_t x)
121 {
122 return x;
123 }
124 static inline v_s32_t
v_s32(s32_t x)125 v_s32 (s32_t x)
126 {
127 return x;
128 }
129
130 static inline f32_t
v_get_f32(v_f32_t x,int i)131 v_get_f32 (v_f32_t x, int i)
132 {
133 return x;
134 }
135 static inline u32_t
v_get_u32(v_u32_t x,int i)136 v_get_u32 (v_u32_t x, int i)
137 {
138 return x;
139 }
140 static inline s32_t
v_get_s32(v_s32_t x,int i)141 v_get_s32 (v_s32_t x, int i)
142 {
143 return x;
144 }
145
146 static inline void
v_set_f32(v_f32_t * x,int i,f32_t v)147 v_set_f32 (v_f32_t *x, int i, f32_t v)
148 {
149 *x = v;
150 }
151 static inline void
v_set_u32(v_u32_t * x,int i,u32_t v)152 v_set_u32 (v_u32_t *x, int i, u32_t v)
153 {
154 *x = v;
155 }
156 static inline void
v_set_s32(v_s32_t * x,int i,s32_t v)157 v_set_s32 (v_s32_t *x, int i, s32_t v)
158 {
159 *x = v;
160 }
161
162 /* true if any elements of a v_cond result is non-zero. */
163 static inline int
v_any_u32(v_u32_t x)164 v_any_u32 (v_u32_t x)
165 {
166 return x != 0;
167 }
168 /* to wrap the result of relational operators. */
169 static inline v_u32_t
v_cond_u32(v_u32_t x)170 v_cond_u32 (v_u32_t x)
171 {
172 return x ? -1 : 0;
173 }
174 static inline v_f32_t
v_abs_f32(v_f32_t x)175 v_abs_f32 (v_f32_t x)
176 {
177 return __builtin_fabsf (x);
178 }
179 static inline v_f32_t
v_fma_f32(v_f32_t x,v_f32_t y,v_f32_t z)180 v_fma_f32 (v_f32_t x, v_f32_t y, v_f32_t z)
181 {
182 return __builtin_fmaf (x, y, z);
183 }
184 static inline v_f32_t
v_round_f32(v_f32_t x)185 v_round_f32 (v_f32_t x)
186 {
187 return __builtin_roundf (x);
188 }
189 static inline v_s32_t
v_round_s32(v_f32_t x)190 v_round_s32 (v_f32_t x)
191 {
192 return __builtin_lroundf (x); /* relies on -fno-math-errno. */
193 }
194 static inline v_f32_t
v_sel_f32(v_u32_t p,v_f32_t x,v_f32_t y)195 v_sel_f32 (v_u32_t p, v_f32_t x, v_f32_t y)
196 {
197 return p ? x : y;
198 }
199 /* convert to type1 from type2. */
200 static inline v_f32_t
v_to_f32_s32(v_s32_t x)201 v_to_f32_s32 (v_s32_t x)
202 {
203 return x;
204 }
205 static inline v_f32_t
v_to_f32_u32(v_u32_t x)206 v_to_f32_u32 (v_u32_t x)
207 {
208 return x;
209 }
210 /* reinterpret as type1 from type2. */
211 static inline v_u32_t
v_as_u32_f32(v_f32_t x)212 v_as_u32_f32 (v_f32_t x)
213 {
214 union { v_f32_t f; v_u32_t u; } r = {x};
215 return r.u;
216 }
217 static inline v_f32_t
v_as_f32_u32(v_u32_t x)218 v_as_f32_u32 (v_u32_t x)
219 {
220 union { v_u32_t u; v_f32_t f; } r = {x};
221 return r.f;
222 }
223 static inline v_s32_t
v_as_s32_u32(v_u32_t x)224 v_as_s32_u32 (v_u32_t x)
225 {
226 union { v_u32_t u; v_s32_t i; } r = {x};
227 return r.i;
228 }
229 static inline v_u32_t
v_as_u32_s32(v_s32_t x)230 v_as_u32_s32 (v_s32_t x)
231 {
232 union { v_s32_t i; v_u32_t u; } r = {x};
233 return r.u;
234 }
235 static inline v_f32_t
v_lookup_f32(const f32_t * tab,v_u32_t idx)236 v_lookup_f32 (const f32_t *tab, v_u32_t idx)
237 {
238 return tab[idx];
239 }
240 static inline v_u32_t
v_lookup_u32(const u32_t * tab,v_u32_t idx)241 v_lookup_u32 (const u32_t *tab, v_u32_t idx)
242 {
243 return tab[idx];
244 }
245 static inline v_f32_t
v_call_f32(f32_t (* f)(f32_t),v_f32_t x,v_f32_t y,v_u32_t p)246 v_call_f32 (f32_t (*f) (f32_t), v_f32_t x, v_f32_t y, v_u32_t p)
247 {
248 return f (x);
249 }
250 static inline v_f32_t
v_call2_f32(f32_t (* f)(f32_t,f32_t),v_f32_t x1,v_f32_t x2,v_f32_t y,v_u32_t p)251 v_call2_f32 (f32_t (*f) (f32_t, f32_t), v_f32_t x1, v_f32_t x2, v_f32_t y,
252 v_u32_t p)
253 {
254 return f (x1, x2);
255 }
256
257 static inline int
v_lanes64(void)258 v_lanes64 (void)
259 {
260 return 1;
261 }
262 static inline v_f64_t
v_f64(f64_t x)263 v_f64 (f64_t x)
264 {
265 return x;
266 }
267 static inline v_u64_t
v_u64(u64_t x)268 v_u64 (u64_t x)
269 {
270 return x;
271 }
272 static inline v_s64_t
v_s64(s64_t x)273 v_s64 (s64_t x)
274 {
275 return x;
276 }
277 static inline f64_t
v_get_f64(v_f64_t x,int i)278 v_get_f64 (v_f64_t x, int i)
279 {
280 return x;
281 }
282 static inline void
v_set_f64(v_f64_t * x,int i,f64_t v)283 v_set_f64 (v_f64_t *x, int i, f64_t v)
284 {
285 *x = v;
286 }
287 /* true if any elements of a v_cond result is non-zero. */
288 static inline int
v_any_u64(v_u64_t x)289 v_any_u64 (v_u64_t x)
290 {
291 return x != 0;
292 }
293 /* to wrap the result of relational operators. */
294 static inline v_u64_t
v_cond_u64(v_u64_t x)295 v_cond_u64 (v_u64_t x)
296 {
297 return x ? -1 : 0;
298 }
299 static inline v_f64_t
v_abs_f64(v_f64_t x)300 v_abs_f64 (v_f64_t x)
301 {
302 return __builtin_fabs (x);
303 }
304 static inline v_f64_t
v_fma_f64(v_f64_t x,v_f64_t y,v_f64_t z)305 v_fma_f64 (v_f64_t x, v_f64_t y, v_f64_t z)
306 {
307 return __builtin_fma (x, y, z);
308 }
309 static inline v_f64_t
v_round_f64(v_f64_t x)310 v_round_f64 (v_f64_t x)
311 {
312 return __builtin_round (x);
313 }
314 static inline v_s64_t
v_round_s64(v_f64_t x)315 v_round_s64 (v_f64_t x)
316 {
317 return __builtin_lround (x); /* relies on -fno-math-errno. */
318 }
319 static inline v_f64_t
v_sel_f64(v_u64_t p,v_f64_t x,v_f64_t y)320 v_sel_f64 (v_u64_t p, v_f64_t x, v_f64_t y)
321 {
322 return p ? x : y;
323 }
324 /* convert to type1 from type2. */
325 static inline v_f64_t
v_to_f64_s64(v_s64_t x)326 v_to_f64_s64 (v_s64_t x)
327 {
328 return x;
329 }
330 static inline v_f64_t
v_to_f64_u64(v_u64_t x)331 v_to_f64_u64 (v_u64_t x)
332 {
333 return x;
334 }
335 /* reinterpret as type1 from type2. */
336 static inline v_u64_t
v_as_u64_f64(v_f64_t x)337 v_as_u64_f64 (v_f64_t x)
338 {
339 union { v_f64_t f; v_u64_t u; } r = {x};
340 return r.u;
341 }
342 static inline v_f64_t
v_as_f64_u64(v_u64_t x)343 v_as_f64_u64 (v_u64_t x)
344 {
345 union { v_u64_t u; v_f64_t f; } r = {x};
346 return r.f;
347 }
348 static inline v_s64_t
v_as_s64_u64(v_u64_t x)349 v_as_s64_u64 (v_u64_t x)
350 {
351 union { v_u64_t u; v_s64_t i; } r = {x};
352 return r.i;
353 }
354 static inline v_u64_t
v_as_u64_s64(v_s64_t x)355 v_as_u64_s64 (v_s64_t x)
356 {
357 union { v_s64_t i; v_u64_t u; } r = {x};
358 return r.u;
359 }
360 static inline v_f64_t
v_lookup_f64(const f64_t * tab,v_u64_t idx)361 v_lookup_f64 (const f64_t *tab, v_u64_t idx)
362 {
363 return tab[idx];
364 }
365 static inline v_u64_t
v_lookup_u64(const u64_t * tab,v_u64_t idx)366 v_lookup_u64 (const u64_t *tab, v_u64_t idx)
367 {
368 return tab[idx];
369 }
370 static inline v_f64_t
v_call_f64(f64_t (* f)(f64_t),v_f64_t x,v_f64_t y,v_u64_t p)371 v_call_f64 (f64_t (*f) (f64_t), v_f64_t x, v_f64_t y, v_u64_t p)
372 {
373 return f (x);
374 }
375
376 #elif __aarch64__
377 #define V_SUPPORTED 1
378 #include <arm_neon.h>
379 typedef float32x4_t v_f32_t;
380 typedef uint32x4_t v_u32_t;
381 typedef int32x4_t v_s32_t;
382 typedef float64x2_t v_f64_t;
383 typedef uint64x2_t v_u64_t;
384 typedef int64x2_t v_s64_t;
385
386 static inline int
v_lanes32(void)387 v_lanes32 (void)
388 {
389 return 4;
390 }
391
392 static inline v_f32_t
v_f32(f32_t x)393 v_f32 (f32_t x)
394 {
395 return (v_f32_t){x, x, x, x};
396 }
397 static inline v_u32_t
v_u32(u32_t x)398 v_u32 (u32_t x)
399 {
400 return (v_u32_t){x, x, x, x};
401 }
402 static inline v_s32_t
v_s32(s32_t x)403 v_s32 (s32_t x)
404 {
405 return (v_s32_t){x, x, x, x};
406 }
407
408 static inline f32_t
v_get_f32(v_f32_t x,int i)409 v_get_f32 (v_f32_t x, int i)
410 {
411 return x[i];
412 }
413 static inline u32_t
v_get_u32(v_u32_t x,int i)414 v_get_u32 (v_u32_t x, int i)
415 {
416 return x[i];
417 }
418 static inline s32_t
v_get_s32(v_s32_t x,int i)419 v_get_s32 (v_s32_t x, int i)
420 {
421 return x[i];
422 }
423
424 static inline void
v_set_f32(v_f32_t * x,int i,f32_t v)425 v_set_f32 (v_f32_t *x, int i, f32_t v)
426 {
427 (*x)[i] = v;
428 }
429 static inline void
v_set_u32(v_u32_t * x,int i,u32_t v)430 v_set_u32 (v_u32_t *x, int i, u32_t v)
431 {
432 (*x)[i] = v;
433 }
434 static inline void
v_set_s32(v_s32_t * x,int i,s32_t v)435 v_set_s32 (v_s32_t *x, int i, s32_t v)
436 {
437 (*x)[i] = v;
438 }
439
440 /* true if any elements of a v_cond result is non-zero. */
441 static inline int
v_any_u32(v_u32_t x)442 v_any_u32 (v_u32_t x)
443 {
444 /* assume elements in x are either 0 or -1u. */
445 return vpaddd_u64 (vreinterpretq_u64_u32 (x)) != 0;
446 }
447 /* to wrap the result of relational operators. */
448 static inline v_u32_t
v_cond_u32(v_u32_t x)449 v_cond_u32 (v_u32_t x)
450 {
451 return x;
452 }
453 static inline v_f32_t
v_abs_f32(v_f32_t x)454 v_abs_f32 (v_f32_t x)
455 {
456 return vabsq_f32 (x);
457 }
458 static inline v_f32_t
v_fma_f32(v_f32_t x,v_f32_t y,v_f32_t z)459 v_fma_f32 (v_f32_t x, v_f32_t y, v_f32_t z)
460 {
461 return vfmaq_f32 (z, x, y);
462 }
463 static inline v_f32_t
v_round_f32(v_f32_t x)464 v_round_f32 (v_f32_t x)
465 {
466 return vrndaq_f32 (x);
467 }
468 static inline v_s32_t
v_round_s32(v_f32_t x)469 v_round_s32 (v_f32_t x)
470 {
471 return vcvtaq_s32_f32 (x);
472 }
473 static inline v_f32_t
v_sel_f32(v_u32_t p,v_f32_t x,v_f32_t y)474 v_sel_f32 (v_u32_t p, v_f32_t x, v_f32_t y)
475 {
476 return vbslq_f32 (p, x, y);
477 }
478 /* convert to type1 from type2. */
479 static inline v_f32_t
v_to_f32_s32(v_s32_t x)480 v_to_f32_s32 (v_s32_t x)
481 {
482 return (v_f32_t){x[0], x[1], x[2], x[3]};
483 }
484 static inline v_f32_t
v_to_f32_u32(v_u32_t x)485 v_to_f32_u32 (v_u32_t x)
486 {
487 return (v_f32_t){x[0], x[1], x[2], x[3]};
488 }
489 /* reinterpret as type1 from type2. */
490 static inline v_u32_t
v_as_u32_f32(v_f32_t x)491 v_as_u32_f32 (v_f32_t x)
492 {
493 union { v_f32_t f; v_u32_t u; } r = {x};
494 return r.u;
495 }
496 static inline v_f32_t
v_as_f32_u32(v_u32_t x)497 v_as_f32_u32 (v_u32_t x)
498 {
499 union { v_u32_t u; v_f32_t f; } r = {x};
500 return r.f;
501 }
502 static inline v_s32_t
v_as_s32_u32(v_u32_t x)503 v_as_s32_u32 (v_u32_t x)
504 {
505 union { v_u32_t u; v_s32_t i; } r = {x};
506 return r.i;
507 }
508 static inline v_u32_t
v_as_u32_s32(v_s32_t x)509 v_as_u32_s32 (v_s32_t x)
510 {
511 union { v_s32_t i; v_u32_t u; } r = {x};
512 return r.u;
513 }
514 static inline v_f32_t
v_lookup_f32(const f32_t * tab,v_u32_t idx)515 v_lookup_f32 (const f32_t *tab, v_u32_t idx)
516 {
517 return (v_f32_t){tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]]};
518 }
519 static inline v_u32_t
v_lookup_u32(const u32_t * tab,v_u32_t idx)520 v_lookup_u32 (const u32_t *tab, v_u32_t idx)
521 {
522 return (v_u32_t){tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]]};
523 }
524 static inline v_f32_t
v_call_f32(f32_t (* f)(f32_t),v_f32_t x,v_f32_t y,v_u32_t p)525 v_call_f32 (f32_t (*f) (f32_t), v_f32_t x, v_f32_t y, v_u32_t p)
526 {
527 return (v_f32_t){p[0] ? f (x[0]) : y[0], p[1] ? f (x[1]) : y[1],
528 p[2] ? f (x[2]) : y[2], p[3] ? f (x[3]) : y[3]};
529 }
530 static inline v_f32_t
v_call2_f32(f32_t (* f)(f32_t,f32_t),v_f32_t x1,v_f32_t x2,v_f32_t y,v_u32_t p)531 v_call2_f32 (f32_t (*f) (f32_t, f32_t), v_f32_t x1, v_f32_t x2, v_f32_t y,
532 v_u32_t p)
533 {
534 return (
535 v_f32_t){p[0] ? f (x1[0], x2[0]) : y[0], p[1] ? f (x1[1], x2[1]) : y[1],
536 p[2] ? f (x1[2], x2[2]) : y[2], p[3] ? f (x1[3], x2[3]) : y[3]};
537 }
538
539 static inline int
v_lanes64(void)540 v_lanes64 (void)
541 {
542 return 2;
543 }
544 static inline v_f64_t
v_f64(f64_t x)545 v_f64 (f64_t x)
546 {
547 return (v_f64_t){x, x};
548 }
549 static inline v_u64_t
v_u64(u64_t x)550 v_u64 (u64_t x)
551 {
552 return (v_u64_t){x, x};
553 }
554 static inline v_s64_t
v_s64(s64_t x)555 v_s64 (s64_t x)
556 {
557 return (v_s64_t){x, x};
558 }
559 static inline f64_t
v_get_f64(v_f64_t x,int i)560 v_get_f64 (v_f64_t x, int i)
561 {
562 return x[i];
563 }
564 static inline void
v_set_f64(v_f64_t * x,int i,f64_t v)565 v_set_f64 (v_f64_t *x, int i, f64_t v)
566 {
567 (*x)[i] = v;
568 }
569 /* true if any elements of a v_cond result is non-zero. */
570 static inline int
v_any_u64(v_u64_t x)571 v_any_u64 (v_u64_t x)
572 {
573 /* assume elements in x are either 0 or -1u. */
574 return vpaddd_u64 (x) != 0;
575 }
576 /* to wrap the result of relational operators. */
577 static inline v_u64_t
v_cond_u64(v_u64_t x)578 v_cond_u64 (v_u64_t x)
579 {
580 return x;
581 }
582 static inline v_f64_t
v_abs_f64(v_f64_t x)583 v_abs_f64 (v_f64_t x)
584 {
585 return vabsq_f64 (x);
586 }
587 static inline v_f64_t
v_fma_f64(v_f64_t x,v_f64_t y,v_f64_t z)588 v_fma_f64 (v_f64_t x, v_f64_t y, v_f64_t z)
589 {
590 return vfmaq_f64 (z, x, y);
591 }
592 static inline v_f64_t
v_round_f64(v_f64_t x)593 v_round_f64 (v_f64_t x)
594 {
595 return vrndaq_f64 (x);
596 }
597 static inline v_s64_t
v_round_s64(v_f64_t x)598 v_round_s64 (v_f64_t x)
599 {
600 return vcvtaq_s64_f64 (x);
601 }
602 static inline v_f64_t
v_sel_f64(v_u64_t p,v_f64_t x,v_f64_t y)603 v_sel_f64 (v_u64_t p, v_f64_t x, v_f64_t y)
604 {
605 return vbslq_f64 (p, x, y);
606 }
607 /* convert to type1 from type2. */
608 static inline v_f64_t
v_to_f64_s64(v_s64_t x)609 v_to_f64_s64 (v_s64_t x)
610 {
611 return (v_f64_t){x[0], x[1]};
612 }
613 static inline v_f64_t
v_to_f64_u64(v_u64_t x)614 v_to_f64_u64 (v_u64_t x)
615 {
616 return (v_f64_t){x[0], x[1]};
617 }
618 /* reinterpret as type1 from type2. */
619 static inline v_u64_t
v_as_u64_f64(v_f64_t x)620 v_as_u64_f64 (v_f64_t x)
621 {
622 union { v_f64_t f; v_u64_t u; } r = {x};
623 return r.u;
624 }
625 static inline v_f64_t
v_as_f64_u64(v_u64_t x)626 v_as_f64_u64 (v_u64_t x)
627 {
628 union { v_u64_t u; v_f64_t f; } r = {x};
629 return r.f;
630 }
631 static inline v_s64_t
v_as_s64_u64(v_u64_t x)632 v_as_s64_u64 (v_u64_t x)
633 {
634 union { v_u64_t u; v_s64_t i; } r = {x};
635 return r.i;
636 }
637 static inline v_u64_t
v_as_u64_s64(v_s64_t x)638 v_as_u64_s64 (v_s64_t x)
639 {
640 union { v_s64_t i; v_u64_t u; } r = {x};
641 return r.u;
642 }
643 static inline v_f64_t
v_lookup_f64(const f64_t * tab,v_u64_t idx)644 v_lookup_f64 (const f64_t *tab, v_u64_t idx)
645 {
646 return (v_f64_t){tab[idx[0]], tab[idx[1]]};
647 }
648 static inline v_u64_t
v_lookup_u64(const u64_t * tab,v_u64_t idx)649 v_lookup_u64 (const u64_t *tab, v_u64_t idx)
650 {
651 return (v_u64_t){tab[idx[0]], tab[idx[1]]};
652 }
653 static inline v_f64_t
v_call_f64(f64_t (* f)(f64_t),v_f64_t x,v_f64_t y,v_u64_t p)654 v_call_f64 (f64_t (*f) (f64_t), v_f64_t x, v_f64_t y, v_u64_t p)
655 {
656 return (v_f64_t){p[0] ? f (x[0]) : y[0], p[1] ? f (x[1]) : y[1]};
657 }
658 #endif
659
660 #endif
661 #endif
662