/*
 * Double-precision SVE erf(x) function.
 *
 * Copyright (c) 2020-2023, Arm Limited.
 * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
 */

#include "sv_math.h"
#include "pl_sig.h"
#include "pl_test.h"

#if SV_SUPPORTED

#define Scale (8.0)
#define AbsMask (0x7fffffffffffffff)

static NOINLINE sv_f64_t
__sv_erf_specialcase (sv_f64_t x, sv_f64_t y, svbool_t cmp)
{
  return sv_call_f64 (erf, x, y, cmp);
}

/* Optimized double precision SVE error function erf.
   Maximum observed error is 2.62 ULP:
   __sv_erf(0x1.79cab7e3078fap+2) got 0x1.0000000000001p+0
				 want 0x1.fffffffffffffp-1.  */
sv_f64_t
__sv_erf_x (sv_f64_t x, const svbool_t pg)
{
  /* Use top 16 bits to test for special cases and small values.  */
  sv_u64_t ix = sv_as_u64_f64 (x);
  sv_u64_t atop = svand_n_u64_x (pg, svlsr_n_u64_x (pg, ix, 48), 0x7fff);

  /* Handle both inf/nan as well as small values (|x|<2^-28).  */
  svbool_t cmp
    = svcmpge_n_u64 (pg, svsub_n_u64_x (pg, atop, 0x3e30), 0x7ff0 - 0x3e30);
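  /* The unsigned subtract wraps values below 0x3e30 (the top 16 bits of
     0x1p-28) around to large numbers, so this single compare flags both
     tiny inputs and inf/nan (top 16 bits >= 0x7ff0) in one go.  */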

  /* Get sign and absolute value.  */
  sv_f64_t a = sv_as_f64_u64 (svand_n_u64_x (pg, ix, AbsMask));
  sv_u64_t sign = svand_n_u64_x (pg, ix, ~AbsMask);

  /* i = trunc(Scale*|x|).  */
  sv_f64_t a_scale = svmul_n_f64_x (pg, a, Scale);
  /* Saturate index of intervals.  */
  svbool_t a_lt_6 = svcmplt_n_u64 (pg, atop, 0x4018);
  sv_u64_t i = svcvt_u64_f64_m (sv_u64 (V_ERF_NINTS - 1), a_lt_6, a_scale);
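  /* 0x4018 is the top half of 6.0.  The convert is merging, so lanes with
     |x| >= 6 keep the saturated index V_ERF_NINTS - 1 and use the last
     interval of the table.  */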

  /* Load polynomial coefficients.  */
  sv_f64_t P_0 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[0], i);
  sv_f64_t P_1 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[1], i);
  sv_f64_t P_2 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[2], i);
  sv_f64_t P_3 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[3], i);
  sv_f64_t P_4 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[4], i);
  sv_f64_t P_5 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[5], i);
  sv_f64_t P_6 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[6], i);
  sv_f64_t P_7 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[7], i);
  sv_f64_t P_8 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[8], i);
  sv_f64_t P_9 = sv_lookup_f64_x (pg, __v_erf_data.coeffs[9], i);
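  /* Each lane gathers the coefficients of its own interval i, so lanes
     falling in different intervals evaluate different polynomials.  */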

  /* Get shift.  */
  sv_f64_t shift = sv_lookup_f64_x (pg, __v_erf_data.shifts, i);

  /* Transform polynomial variable.
     Set z = 0 in the boring domain to avoid overflow.  */
  sv_f64_t z = svmla_f64_m (a_lt_6, shift, sv_f64 (Scale), a);
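  /* svmla_f64_m is merging: lanes with |x| >= 6 keep z = shift rather than
     shift + Scale * a, so z stays bounded for large inputs.  */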

  /* Evaluate polynomial P(z) using level-2 Estrin.  */
  sv_f64_t r1 = sv_fma_f64_x (pg, z, P_1, P_0);
  sv_f64_t r2 = sv_fma_f64_x (pg, z, P_3, P_2);
  sv_f64_t r3 = sv_fma_f64_x (pg, z, P_5, P_4);
  sv_f64_t r4 = sv_fma_f64_x (pg, z, P_7, P_6);
  sv_f64_t r5 = sv_fma_f64_x (pg, z, P_9, P_8);

  sv_f64_t z2 = svmul_f64_x (pg, z, z);
  sv_f64_t z4 = svmul_f64_x (pg, z2, z2);

  sv_f64_t q2 = sv_fma_f64_x (pg, r4, z2, r3);
  sv_f64_t q1 = sv_fma_f64_x (pg, r2, z2, r1);

  sv_f64_t y = sv_fma_f64_x (pg, z4, r5, q2);
  y = sv_fma_f64_x (pg, z4, y, q1);
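  /* Recombination: y = (r1 + z^2 r2) + z^4 ((r3 + z^2 r4) + z^4 r5), i.e.
     the degree-9 polynomial P_0 + P_1 z + ... + P_9 z^9, split so the five
     rN FMAs above form independent dependency chains.  */
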
  /* y = erf(x) if x > 0, -erf(-x) otherwise.  */
  y = sv_as_f64_u64 (sveor_u64_x (pg, sv_as_u64_f64 (y), sign));
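  /* Since a = |x|, y holds erf(|x|) at this point; XORing in the saved
     sign bit restores negative inputs via erf's odd symmetry.  */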

  if (unlikely (svptest_any (pg, cmp)))
    return __sv_erf_specialcase (x, y, cmp);
  return y;
}
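
/* Illustrative use (a hypothetical call site, not part of this file):
     sv_f64_t y = __sv_erf_x (sv_f64 (0.5), svptrue_b64 ());
   computes erf(0.5) in every lane under a full predicate.  */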

PL_ALIAS (__sv_erf_x, _ZGVsMxv_erf)
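/* _ZGVsMxv_erf is the AArch64 vector function ABI name for erf ('s' for
   SVE, 'Mx' for a masked variant, 'v' for one vector argument), so
   vector-ABI callers resolve to this routine.  */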

PL_SIG (SV, D, 1, erf, -4.0, 4.0)
PL_TEST_ULP (__sv_erf, 2.13)
PL_TEST_INTERVAL (__sv_erf, 0, 0x1p-28, 20000)
PL_TEST_INTERVAL (__sv_erf, 0x1p-28, 1, 60000)
PL_TEST_INTERVAL (__sv_erf, 1, 0x1p28, 60000)
PL_TEST_INTERVAL (__sv_erf, 0x1p28, inf, 20000)
PL_TEST_INTERVAL (__sv_erf, -0, -0x1p-28, 20000)
PL_TEST_INTERVAL (__sv_erf, -0x1p-28, -1, 60000)
PL_TEST_INTERVAL (__sv_erf, -1, -0x1p28, 60000)
PL_TEST_INTERVAL (__sv_erf, -0x1p28, -inf, 20000)
#endif